hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b889e86e0a94bf54d90a0d8e3809a08fa855b2f6
| 200
|
py
|
Python
|
zvt/recorders/joinquant/finance_qtr/__init__.py
|
markqiu/zvt
|
1bcfb71279f2652c3600f0f8e45d941f98ceaa10
|
[
"MIT"
] | 6
|
2020-09-03T10:02:00.000Z
|
2021-02-04T02:51:47.000Z
|
zvt/recorders/joinquant/finance_qtr/__init__.py
|
wlwd13303/zvt
|
23105a5bfdc3a5080c6c22d11e9e53d216688dea
|
[
"MIT"
] | null | null | null |
zvt/recorders/joinquant/finance_qtr/__init__.py
|
wlwd13303/zvt
|
23105a5bfdc3a5080c6c22d11e9e53d216688dea
|
[
"MIT"
] | 2
|
2020-07-08T04:15:40.000Z
|
2021-06-08T08:51:31.000Z
|
# -*- coding: utf-8 -*-
from zvt.recorders.joinquant.finance_qtr.china_stock_income_statement_qtr_recorder import *
from zvt.recorders.joinquant.finance_qtr.china_stock_cash_flow_qtr_recorder import *
| 66.666667
| 91
| 0.845
| 29
| 200
| 5.413793
| 0.586207
| 0.089172
| 0.203822
| 0.318471
| 0.573248
| 0.573248
| 0.573248
| 0.573248
| 0
| 0
| 0
| 0.005319
| 0.06
| 200
| 3
| 92
| 66.666667
| 0.829787
| 0.105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b2149e5515a9b2e2fa6b855bf590d85d4ea54f3f
| 110,591
|
py
|
Python
|
qualys_cs_api/api/registry_api.py
|
jlk/qualys-cs-python-client
|
e2e39fd64d41fd6671d45343843ef36fa3ab59a4
|
[
"Apache-2.0"
] | null | null | null |
qualys_cs_api/api/registry_api.py
|
jlk/qualys-cs-python-client
|
e2e39fd64d41fd6671d45343843ef36fa3ab59a4
|
[
"Apache-2.0"
] | null | null | null |
qualys_cs_api/api/registry_api.py
|
jlk/qualys-cs-python-client
|
e2e39fd64d41fd6671d45343843ef36fa3ab59a4
|
[
"Apache-2.0"
] | 1
|
2020-05-15T04:20:48.000Z
|
2020-05-15T04:20:48.000Z
|
# coding: utf-8
"""
Container Security APIs
All features of the Container Security are available through REST APIs.<br/>Access support information at www.qualys.com/support/<br/><br/><b>Permissions:</b><br/>User must have the Container module enabled<br/>User must have API ACCESS permission # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from qualys_cs_api.api_client import ApiClient
from qualys_cs_api.exceptions import (
ApiTypeError,
ApiValueError
)
class RegistryApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Create the API facade.

    :param api_client: an ApiClient to issue requests through; when omitted,
        a default-configured ApiClient is constructed.
    """
    # Dependency injection with a lazy default: only build a client if the
    # caller did not supply one.
    self.api_client = ApiClient() if api_client is None else api_client
def cancel_schedule_using_post(self, registry_id, schedule_id, **kwargs):  # noqa: E501
    """Cancel registry schedule in your account  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned:

    >>> thread = api.cancel_schedule_using_post(registry_id, schedule_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str registry_id: Provide the ID/UUID of the registry you want to cancel the schedule for. (required)
    :param str schedule_id: Provide the ID/UUID of the schedule you want to cancel. You can only cancel schedules which are in the state: Created, Queued, Paused, Running, BaselineQueued or BaselineRunning (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :return: str
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers get just the payload, not the
    # (data, status, headers) tuple the *_with_http_info variant returns.
    kwargs.update(_return_http_data_only=True)
    return self.cancel_schedule_using_post_with_http_info(registry_id, schedule_id, **kwargs)  # noqa: E501
def cancel_schedule_using_post_with_http_info(self, registry_id, schedule_id, **kwargs):  # noqa: E501
    """Cancel registry schedule in your account  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.cancel_schedule_using_post_with_http_info(registry_id, schedule_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str registry_id: Provide the ID/UUID of the registry you want to cancel the schedule for. (required)
    :param str schedule_id: Provide the ID/UUID of the schedule you want to cancel. You can only cancel schedules which are in the state: Created, Queued, Paused, Running, BaselineQueued or BaselineRunning (required)
    :param _return_http_data_only: response data without HTTP status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :return: tuple(str, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    # Snapshot of the call's arguments. This MUST stay the first statement:
    # locals() is captured before any other local is assigned, so the dict
    # contains exactly the parameters plus 'kwargs'.
    local_var_params = locals()

    all_params = ['registry_id', 'schedule_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the accepted ones into
    # the flat parameter dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cancel_schedule_using_post" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'registry_id' is set
    if self.api_client.client_side_validation and ('registry_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['registry_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `registry_id` when calling `cancel_schedule_using_post`")  # noqa: E501
    # verify the required parameter 'schedule_id' is set
    if self.api_client.client_side_validation and ('schedule_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['schedule_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `schedule_id` when calling `cancel_schedule_using_post`")  # noqa: E501

    collection_formats = {}

    # Both IDs are substituted into the URL path template below.
    path_params = {}
    if 'registry_id' in local_var_params:
        path_params['registryId'] = local_var_params['registry_id']  # noqa: E501
    if 'schedule_id' in local_var_params:
        path_params['scheduleId'] = local_var_params['schedule_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/v1.1/registry/{registryId}/schedule/{scheduleId}/cancel', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_aws_connector_using_post(self, aws_connector_request, **kwargs):  # noqa: E501
    """Create new AWS connector  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned:

    >>> thread = api.create_aws_connector_using_post(aws_connector_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param AWSConnectorRequest aws_connector_request: Provide parameter values in the format shown under Example Value. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers get just the payload, not the
    # (data, status, headers) tuple the *_with_http_info variant returns.
    kwargs.update(_return_http_data_only=True)
    return self.create_aws_connector_using_post_with_http_info(aws_connector_request, **kwargs)  # noqa: E501
def create_aws_connector_using_post_with_http_info(self, aws_connector_request, **kwargs):  # noqa: E501
    """Create new AWS connector  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_aws_connector_using_post_with_http_info(aws_connector_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param AWSConnectorRequest aws_connector_request: Provide parameter values in the format shown under Example Value. (required)
    :param _return_http_data_only: response data without HTTP status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Snapshot of the call's arguments. This MUST stay the first statement:
    # locals() is captured before any other local is assigned.
    local_var_params = locals()

    all_params = ['aws_connector_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the accepted ones into
    # the flat parameter dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_aws_connector_using_post" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'aws_connector_request' is set
    if self.api_client.client_side_validation and ('aws_connector_request' not in local_var_params or  # noqa: E501
                                                   local_var_params['aws_connector_request'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `aws_connector_request` when calling `create_aws_connector_using_post`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The connector request object is sent as the JSON request body.
    body_params = None
    if 'aws_connector_request' in local_var_params:
        body_params = local_var_params['aws_connector_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/v1.1/registry/aws/connector', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_registry_using_post(self, registry_request, **kwargs):  # noqa: E501
    """Create a new registry  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned:

    >>> thread = api.create_registry_using_post(registry_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param RegistryRequest registry_request: Provide parameter values in the format shown under Example Value. Parameters accountId, arn, and region are required when the registryType is AWS ECR and you want to create a new AWS connector. Specify the ARN if you want to use an existing AWS connector, or if you want to create a new connector. All parameters are required other than dockerHubOrgName which is optional. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :return: str
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers get just the payload, not the
    # (data, status, headers) tuple the *_with_http_info variant returns.
    kwargs.update(_return_http_data_only=True)
    return self.create_registry_using_post_with_http_info(registry_request, **kwargs)  # noqa: E501
def create_registry_using_post_with_http_info(self, registry_request, **kwargs):  # noqa: E501
    """Create a new registry  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_registry_using_post_with_http_info(registry_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param RegistryRequest registry_request: Provide parameter values in the format shown under Example Value. Parameters accountId, arn, and region are required when the registryType is AWS ECR and you want to create a new AWS connector. Specify the ARN if you want to use an existing AWS connector, or if you want to create a new connector. All parameters are required other than dockerHubOrgName which is optional. (required)
    :param _return_http_data_only: response data without HTTP status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :return: tuple(str, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    # Snapshot of the call's arguments. This MUST stay the first statement:
    # locals() is captured before any other local is assigned.
    local_var_params = locals()

    all_params = ['registry_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the accepted ones into
    # the flat parameter dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_registry_using_post" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'registry_request' is set
    if self.api_client.client_side_validation and ('registry_request' not in local_var_params or  # noqa: E501
                                                   local_var_params['registry_request'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `registry_request` when calling `create_registry_using_post`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The registry request object is sent as the JSON request body.
    body_params = None
    if 'registry_request' in local_var_params:
        body_params = local_var_params['registry_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/v1.1/registry', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_schedule_using_post(self, registry_id, schedule_request, **kwargs):  # noqa: E501
    """Create a new registry scan schedule  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned:

    >>> thread = api.create_schedule_using_post(registry_id, schedule_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str registry_id: Provide the ID of the registry you want to scan. (required)
    :param ScheduleRequest schedule_request: Provide parameter values in the format shown under Example Value. Specify \"onDemand\": true if you want to scan immediately. Otherwise, Automatic scan will be triggered everyday at a set time. For days, specify 1 to 7 days / 14 (for last two weeks). For schedule, specify time in UTC, e.g., 19:30. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :return: str
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers get just the payload, not the
    # (data, status, headers) tuple the *_with_http_info variant returns.
    kwargs.update(_return_http_data_only=True)
    return self.create_schedule_using_post_with_http_info(registry_id, schedule_request, **kwargs)  # noqa: E501
def create_schedule_using_post_with_http_info(self, registry_id, schedule_request, **kwargs):  # noqa: E501
    """Create a new registry scan schedule  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_schedule_using_post_with_http_info(registry_id, schedule_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str registry_id: Provide the ID of the registry you want to scan. (required)
    :param ScheduleRequest schedule_request: Provide parameter values in the format shown under Example Value. Specify \"onDemand\": true if you want to scan immediately. Otherwise, Automatic scan will be triggered everyday at a set time. For days, specify 1 to 7 days / 14 (for last two weeks). For schedule, specify time in UTC, e.g., 19:30. (required)
    :param _return_http_data_only: response data without HTTP status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :return: tuple(str, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    # Snapshot of the call's arguments. This MUST stay the first statement:
    # locals() is captured before any other local is assigned.
    local_var_params = locals()

    all_params = ['registry_id', 'schedule_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the accepted ones into
    # the flat parameter dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_schedule_using_post" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'registry_id' is set
    if self.api_client.client_side_validation and ('registry_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['registry_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `registry_id` when calling `create_schedule_using_post`")  # noqa: E501
    # verify the required parameter 'schedule_request' is set
    if self.api_client.client_side_validation and ('schedule_request' not in local_var_params or  # noqa: E501
                                                   local_var_params['schedule_request'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `schedule_request` when calling `create_schedule_using_post`")  # noqa: E501

    collection_formats = {}

    # registry_id goes into the URL path; schedule_request is the body.
    path_params = {}
    if 'registry_id' in local_var_params:
        path_params['registryId'] = local_var_params['registry_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'schedule_request' in local_var_params:
        body_params = local_var_params['schedule_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/v1.1/registry/{registryId}/schedule', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_registries_using_delete(self, registry_ids, **kwargs):  # noqa: E501
    """Delete multiple registries in your account  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned:

    >>> thread = api.delete_registries_using_delete(registry_ids, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param list[str] registry_ids: Provide ID/UUIDs of the registries you want to delete. Should be in the form of an array, [\"regID1\",\"regID2\",\"regID3\"]. Note: You cannot delete registries whose schedules are in “Running” state. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :return: str
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers get just the payload, not the
    # (data, status, headers) tuple the *_with_http_info variant returns.
    kwargs.update(_return_http_data_only=True)
    return self.delete_registries_using_delete_with_http_info(registry_ids, **kwargs)  # noqa: E501
def delete_registries_using_delete_with_http_info(self, registry_ids, **kwargs):  # noqa: E501
    """Delete multiple registries in your account  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_registries_using_delete_with_http_info(registry_ids, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param list[str] registry_ids: Provide ID/UUIDs of the registries you want to delete. Should be in the form of an array, [\"regID1\",\"regID2\",\"regID3\"]. Note: You cannot delete registries whose schedules are in “Running” state. (required)
    :param _return_http_data_only: response data without HTTP status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :return: tuple(str, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    # Snapshot of the call's arguments. This MUST stay the first statement:
    # locals() is captured before any other local is assigned.
    local_var_params = locals()

    all_params = ['registry_ids']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the accepted ones into
    # the flat parameter dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_registries_using_delete" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'registry_ids' is set
    if self.api_client.client_side_validation and ('registry_ids' not in local_var_params or  # noqa: E501
                                                   local_var_params['registry_ids'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `registry_ids` when calling `delete_registries_using_delete`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # NOTE: the ID list is sent as a JSON request body on a DELETE request.
    body_params = None
    if 'registry_ids' in local_var_params:
        body_params = local_var_params['registry_ids']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/v1.1/registry', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_registry_using_delete(self, registry_id, **kwargs):  # noqa: E501
    """Delete registry in your account  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned:

    >>> thread = api.delete_registry_using_delete(registry_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str registry_id: Provide the ID/UUID of the registry you want to delete. Note: You cannot delete a registry whose schedules are in “Running” state. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :return: str
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers get just the payload, not the
    # (data, status, headers) tuple the *_with_http_info variant returns.
    kwargs.update(_return_http_data_only=True)
    return self.delete_registry_using_delete_with_http_info(registry_id, **kwargs)  # noqa: E501
def delete_registry_using_delete_with_http_info(self, registry_id, **kwargs): # noqa: E501
"""Delete registry in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_registry_using_delete_with_http_info(registry_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID/UUID of the registry you want to delete. Note: You cannot delete a registry whose schedules are in “Running” state. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(str, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['registry_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_registry_using_delete" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'registry_id' is set
if self.api_client.client_side_validation and ('registry_id' not in local_var_params or # noqa: E501
local_var_params['registry_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registry_id` when calling `delete_registry_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'registry_id' in local_var_params:
path_params['registryId'] = local_var_params['registry_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry/{registryId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_schedule_using_delete(self, registry_id, schedule_id, **kwargs): # noqa: E501
"""Delete registry schedule in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_schedule_using_delete(registry_id, schedule_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID/UUID of the registry you want to delete. (required)
:param str schedule_id: Provide the ID/UUID of the schedule you want to delete. Note: You cannot delete a schedule which is in “Running” state. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_schedule_using_delete_with_http_info(registry_id, schedule_id, **kwargs) # noqa: E501
def delete_schedule_using_delete_with_http_info(self, registry_id, schedule_id, **kwargs): # noqa: E501
"""Delete registry schedule in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_schedule_using_delete_with_http_info(registry_id, schedule_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID/UUID of the registry you want to delete. (required)
:param str schedule_id: Provide the ID/UUID of the schedule you want to delete. Note: You cannot delete a schedule which is in “Running” state. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(str, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['registry_id', 'schedule_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_schedule_using_delete" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'registry_id' is set
if self.api_client.client_side_validation and ('registry_id' not in local_var_params or # noqa: E501
local_var_params['registry_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registry_id` when calling `delete_schedule_using_delete`") # noqa: E501
# verify the required parameter 'schedule_id' is set
if self.api_client.client_side_validation and ('schedule_id' not in local_var_params or # noqa: E501
local_var_params['schedule_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schedule_id` when calling `delete_schedule_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'registry_id' in local_var_params:
path_params['registryId'] = local_var_params['registry_id'] # noqa: E501
if 'schedule_id' in local_var_params:
path_params['scheduleId'] = local_var_params['schedule_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry/{registryId}/schedule/{scheduleId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_schedules_using_delete(self, registry_id, schedule_ids, **kwargs): # noqa: E501
"""Delete multiple registry schedules in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_schedules_using_delete(registry_id, schedule_ids, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID/UUID of the registry you want to delete. (required)
:param list[str] schedule_ids: Provide the ID/UUIDs of the schedules you want to delete. Should be in the form of an array, [\"schID1\",\"schID2\",\"schID3\"]. Note: You cannot delete schedules that are in “Running” state (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_schedules_using_delete_with_http_info(registry_id, schedule_ids, **kwargs) # noqa: E501
def delete_schedules_using_delete_with_http_info(self, registry_id, schedule_ids, **kwargs): # noqa: E501
"""Delete multiple registry schedules in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_schedules_using_delete_with_http_info(registry_id, schedule_ids, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID/UUID of the registry you want to delete. (required)
:param list[str] schedule_ids: Provide the ID/UUIDs of the schedules you want to delete. Should be in the form of an array, [\"schID1\",\"schID2\",\"schID3\"]. Note: You cannot delete schedules that are in “Running” state (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(str, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['registry_id', 'schedule_ids'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_schedules_using_delete" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'registry_id' is set
if self.api_client.client_side_validation and ('registry_id' not in local_var_params or # noqa: E501
local_var_params['registry_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registry_id` when calling `delete_schedules_using_delete`") # noqa: E501
# verify the required parameter 'schedule_ids' is set
if self.api_client.client_side_validation and ('schedule_ids' not in local_var_params or # noqa: E501
local_var_params['schedule_ids'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schedule_ids` when calling `delete_schedules_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'registry_id' in local_var_params:
path_params['registryId'] = local_var_params['registry_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'schedule_ids' in local_var_params:
body_params = local_var_params['schedule_ids']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry/{registryId}/schedule/', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_aws_base_using_get(self, **kwargs): # noqa: E501
"""Fetch AWS account ID and External ID for your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aws_base_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: AWSBase
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_aws_base_using_get_with_http_info(**kwargs) # noqa: E501
def get_aws_base_using_get_with_http_info(self, **kwargs): # noqa: E501
"""Fetch AWS account ID and External ID for your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aws_base_using_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(AWSBase, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_aws_base_using_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry/aws-base', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AWSBase', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_aws_connectors_list_using_get(self, **kwargs): # noqa: E501
"""Show a list of AWS connectors in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aws_connectors_list_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[AWSConnector]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_aws_connectors_list_using_get_with_http_info(**kwargs) # noqa: E501
def get_aws_connectors_list_using_get_with_http_info(self, **kwargs): # noqa: E501
"""Show a list of AWS connectors in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aws_connectors_list_using_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[AWSConnector], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_aws_connectors_list_using_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry/aws/connectors', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[AWSConnector]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_aws_connectors_via_customer_account_id_using_get(self, account_id, **kwargs): # noqa: E501
"""Show a list of AWS connectors for an AWS account ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aws_connectors_via_customer_account_id_using_get(account_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str account_id: Provide the AWS account Id to get a list of connectors. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[AWSConnector]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_aws_connectors_via_customer_account_id_using_get_with_http_info(account_id, **kwargs) # noqa: E501
def get_aws_connectors_via_customer_account_id_using_get_with_http_info(self, account_id, **kwargs): # noqa: E501
"""Show a list of AWS connectors for an AWS account ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aws_connectors_via_customer_account_id_using_get_with_http_info(account_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str account_id: Provide the AWS account Id to get a list of connectors. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[AWSConnector], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['account_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_aws_connectors_via_customer_account_id_using_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'account_id' is set
if self.api_client.client_side_validation and ('account_id' not in local_var_params or # noqa: E501
local_var_params['account_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `account_id` when calling `get_aws_connectors_via_customer_account_id_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'account_id' in local_var_params:
path_params['accountId'] = local_var_params['account_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry/aws/connectors/{accountId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[AWSConnector]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_registry_details_using_get(self, registry_id, **kwargs): # noqa: E501
"""Show details of a registry # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_registry_details_using_get(registry_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID/UUID of the registry you want to fetch the details for. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: RegistryDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_registry_details_using_get_with_http_info(registry_id, **kwargs) # noqa: E501
def get_registry_details_using_get_with_http_info(self, registry_id, **kwargs): # noqa: E501
"""Show details of a registry # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_registry_details_using_get_with_http_info(registry_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID/UUID of the registry you want to fetch the details for. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(RegistryDetails, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['registry_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_registry_details_using_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'registry_id' is set
if self.api_client.client_side_validation and ('registry_id' not in local_var_params or # noqa: E501
local_var_params['registry_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registry_id` when calling `get_registry_details_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'registry_id' in local_var_params:
path_params['registryId'] = local_var_params['registry_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry/{registryId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RegistryDetails', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_registry_pivot_data_with_list_using_get(self, page_no, page_size, **kwargs): # noqa: E501
"""Show a list of registries in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_registry_pivot_data_with_list_using_get(page_no, page_size, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int page_no: The page to be returned. (required)
:param int page_size: The number of records per page to be included in the response. (required)
:param str filter: Filter the registries list by providing a query using Qualys syntax. <a href='/cs/help/search/language.htm' target='_blank'>Click here</a> for help with creating your query.
:param str sort: Sort the results using a Qualys token. For example <a href='/cs/help/search_tips/sortable_tokens.htm'>Click here</a> for a listing of tokens.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PivotListResponseRegistryResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_registry_pivot_data_with_list_using_get_with_http_info(page_no, page_size, **kwargs) # noqa: E501
def get_registry_pivot_data_with_list_using_get_with_http_info(self, page_no, page_size, **kwargs): # noqa: E501
"""Show a list of registries in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_registry_pivot_data_with_list_using_get_with_http_info(page_no, page_size, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int page_no: The page to be returned. (required)
:param int page_size: The number of records per page to be included in the response. (required)
:param str filter: Filter the registries list by providing a query using Qualys syntax. <a href='/cs/help/search/language.htm' target='_blank'>Click here</a> for help with creating your query.
:param str sort: Sort the results using a Qualys token. For example <a href='/cs/help/search_tips/sortable_tokens.htm'>Click here</a> for a listing of tokens.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PivotListResponseRegistryResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['page_no', 'page_size', 'filter', 'sort'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_registry_pivot_data_with_list_using_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'page_no' is set
if self.api_client.client_side_validation and ('page_no' not in local_var_params or # noqa: E501
local_var_params['page_no'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `page_no` when calling `get_registry_pivot_data_with_list_using_get`") # noqa: E501
# verify the required parameter 'page_size' is set
if self.api_client.client_side_validation and ('page_size' not in local_var_params or # noqa: E501
local_var_params['page_size'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `page_size` when calling `get_registry_pivot_data_with_list_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501
query_params.append(('filter', local_var_params['filter'])) # noqa: E501
if 'page_no' in local_var_params and local_var_params['page_no'] is not None: # noqa: E501
query_params.append(('pageNo', local_var_params['page_no'])) # noqa: E501
if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501
query_params.append(('pageSize', local_var_params['page_size'])) # noqa: E501
if 'sort' in local_var_params and local_var_params['sort'] is not None: # noqa: E501
query_params.append(('sort', local_var_params['sort'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PivotListResponseRegistryResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_registry_repo_pivot_list_using_get(self, registry_id, page_no, page_size, **kwargs): # noqa: E501
"""Show a list of repositories in a registry # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_registry_repo_pivot_list_using_get(registry_id, page_no, page_size, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID of the registry for which you want to list the repositories. (required)
:param int page_no: The page to be returned. (required)
:param int page_size: The number of records per page to be included in the response. (required)
:param str filter: Filter the repository list by providing a query using Qualys syntax. <a href='/cs/help/search/language.htm' target='_blank'>Click here</a> for help with creating your query.
:param str sort: Sort the results using a Qualys token. For example <a href='/cs/help/search_tips/sortable_tokens.htm'>Click here</a> for a listing of tokens.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PivotListResponseRegistryRepoResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_registry_repo_pivot_list_using_get_with_http_info(registry_id, page_no, page_size, **kwargs) # noqa: E501
def get_registry_repo_pivot_list_using_get_with_http_info(self, registry_id, page_no, page_size, **kwargs): # noqa: E501
"""Show a list of repositories in a registry # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_registry_repo_pivot_list_using_get_with_http_info(registry_id, page_no, page_size, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID of the registry for which you want to list the repositories. (required)
:param int page_no: The page to be returned. (required)
:param int page_size: The number of records per page to be included in the response. (required)
:param str filter: Filter the repository list by providing a query using Qualys syntax. <a href='/cs/help/search/language.htm' target='_blank'>Click here</a> for help with creating your query.
:param str sort: Sort the results using a Qualys token. For example <a href='/cs/help/search_tips/sortable_tokens.htm'>Click here</a> for a listing of tokens.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PivotListResponseRegistryRepoResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['registry_id', 'page_no', 'page_size', 'filter', 'sort'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_registry_repo_pivot_list_using_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'registry_id' is set
if self.api_client.client_side_validation and ('registry_id' not in local_var_params or # noqa: E501
local_var_params['registry_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registry_id` when calling `get_registry_repo_pivot_list_using_get`") # noqa: E501
# verify the required parameter 'page_no' is set
if self.api_client.client_side_validation and ('page_no' not in local_var_params or # noqa: E501
local_var_params['page_no'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `page_no` when calling `get_registry_repo_pivot_list_using_get`") # noqa: E501
# verify the required parameter 'page_size' is set
if self.api_client.client_side_validation and ('page_size' not in local_var_params or # noqa: E501
local_var_params['page_size'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `page_size` when calling `get_registry_repo_pivot_list_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'registry_id' in local_var_params:
path_params['registryId'] = local_var_params['registry_id'] # noqa: E501
query_params = []
if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501
query_params.append(('filter', local_var_params['filter'])) # noqa: E501
if 'page_no' in local_var_params and local_var_params['page_no'] is not None: # noqa: E501
query_params.append(('pageNo', local_var_params['page_no'])) # noqa: E501
if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501
query_params.append(('pageSize', local_var_params['page_size'])) # noqa: E501
if 'sort' in local_var_params and local_var_params['sort'] is not None: # noqa: E501
query_params.append(('sort', local_var_params['sort'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry/{registryId}/repository', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PivotListResponseRegistryRepoResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_schedule_pivot_list_using_get(self, registry_id, page_no, page_size, **kwargs): # noqa: E501
"""Show a list of schedules created for a registry # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_schedule_pivot_list_using_get(registry_id, page_no, page_size, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID of the registry for which you want to list the schedules. (required)
:param int page_no: The page to be returned. (required)
:param int page_size: The number of records per page to be included in the response. (required)
:param str filter: Filter the repository list by providing a query using Qualys syntax. <a href='/cs/help/search/language.htm' target='_blank'>Click here</a> for help with creating your query.
:param str sort: Sort the results using a Qualys token. For example <a href='/cs/help/search_tips/sortable_tokens.htm'>Click here</a> for a listing of tokens.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: PivotListResponseScheduleResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_schedule_pivot_list_using_get_with_http_info(registry_id, page_no, page_size, **kwargs) # noqa: E501
def get_schedule_pivot_list_using_get_with_http_info(self, registry_id, page_no, page_size, **kwargs): # noqa: E501
"""Show a list of schedules created for a registry # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_schedule_pivot_list_using_get_with_http_info(registry_id, page_no, page_size, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID of the registry for which you want to list the schedules. (required)
:param int page_no: The page to be returned. (required)
:param int page_size: The number of records per page to be included in the response. (required)
:param str filter: Filter the repository list by providing a query using Qualys syntax. <a href='/cs/help/search/language.htm' target='_blank'>Click here</a> for help with creating your query.
:param str sort: Sort the results using a Qualys token. For example <a href='/cs/help/search_tips/sortable_tokens.htm'>Click here</a> for a listing of tokens.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(PivotListResponseScheduleResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['registry_id', 'page_no', 'page_size', 'filter', 'sort'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_schedule_pivot_list_using_get" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'registry_id' is set
if self.api_client.client_side_validation and ('registry_id' not in local_var_params or # noqa: E501
local_var_params['registry_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registry_id` when calling `get_schedule_pivot_list_using_get`") # noqa: E501
# verify the required parameter 'page_no' is set
if self.api_client.client_side_validation and ('page_no' not in local_var_params or # noqa: E501
local_var_params['page_no'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `page_no` when calling `get_schedule_pivot_list_using_get`") # noqa: E501
# verify the required parameter 'page_size' is set
if self.api_client.client_side_validation and ('page_size' not in local_var_params or # noqa: E501
local_var_params['page_size'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `page_size` when calling `get_schedule_pivot_list_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'registry_id' in local_var_params:
path_params['registryId'] = local_var_params['registry_id'] # noqa: E501
query_params = []
if 'filter' in local_var_params and local_var_params['filter'] is not None: # noqa: E501
query_params.append(('filter', local_var_params['filter'])) # noqa: E501
if 'page_no' in local_var_params and local_var_params['page_no'] is not None: # noqa: E501
query_params.append(('pageNo', local_var_params['page_no'])) # noqa: E501
if 'page_size' in local_var_params and local_var_params['page_size'] is not None: # noqa: E501
query_params.append(('pageSize', local_var_params['page_size'])) # noqa: E501
if 'sort' in local_var_params and local_var_params['sort'] is not None: # noqa: E501
query_params.append(('sort', local_var_params['sort'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry/{registryId}/schedule', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PivotListResponseScheduleResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_registry_using_put(self, registry_id, registry_request, **kwargs): # noqa: E501
"""Update existing registry in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_registry_using_put(registry_id, registry_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID/UUID of the registry you want to update. (required)
:param RegistryRequest registry_request: Provide parameter values in the format shown under Example Value. registryType and registryUri are required even though they are not updatable. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_registry_using_put_with_http_info(registry_id, registry_request, **kwargs) # noqa: E501
def update_registry_using_put_with_http_info(self, registry_id, registry_request, **kwargs): # noqa: E501
"""Update existing registry in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_registry_using_put_with_http_info(registry_id, registry_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID/UUID of the registry you want to update. (required)
:param RegistryRequest registry_request: Provide parameter values in the format shown under Example Value. registryType and registryUri are required even though they are not updatable. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(str, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['registry_id', 'registry_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_registry_using_put" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'registry_id' is set
if self.api_client.client_side_validation and ('registry_id' not in local_var_params or # noqa: E501
local_var_params['registry_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registry_id` when calling `update_registry_using_put`") # noqa: E501
# verify the required parameter 'registry_request' is set
if self.api_client.client_side_validation and ('registry_request' not in local_var_params or # noqa: E501
local_var_params['registry_request'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registry_request` when calling `update_registry_using_put`") # noqa: E501
collection_formats = {}
path_params = {}
if 'registry_id' in local_var_params:
path_params['registryId'] = local_var_params['registry_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'registry_request' in local_var_params:
body_params = local_var_params['registry_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry/{registryId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_schedule_using_put(self, registry_id, schedule_id, schedule_request, **kwargs): # noqa: E501
"""Update existing registry schedule in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_schedule_using_put(registry_id, schedule_id, schedule_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID/UUID of the registry you want to update. (required)
:param str schedule_id: Provide the ID/UUID of the schedule you want to update. (required)
:param ScheduleRequest schedule_request: Provide parameter values in the format shown under Example Value. Specify \"onDemand\": true if you want to scan immediately. Otherwise, Automatic scan will be triggered everyday at a set time. For days, specify 1 to 7 days / 14 (for last two weeks). For schedule, specify time in UTC, e.g., 19:30. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_schedule_using_put_with_http_info(registry_id, schedule_id, schedule_request, **kwargs) # noqa: E501
def update_schedule_using_put_with_http_info(self, registry_id, schedule_id, schedule_request, **kwargs): # noqa: E501
"""Update existing registry schedule in your account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_schedule_using_put_with_http_info(registry_id, schedule_id, schedule_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str registry_id: Provide the ID/UUID of the registry you want to update. (required)
:param str schedule_id: Provide the ID/UUID of the schedule you want to update. (required)
:param ScheduleRequest schedule_request: Provide parameter values in the format shown under Example Value. Specify \"onDemand\": true if you want to scan immediately. Otherwise, Automatic scan will be triggered everyday at a set time. For days, specify 1 to 7 days / 14 (for last two weeks). For schedule, specify time in UTC, e.g., 19:30. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(str, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['registry_id', 'schedule_id', 'schedule_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_schedule_using_put" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'registry_id' is set
if self.api_client.client_side_validation and ('registry_id' not in local_var_params or # noqa: E501
local_var_params['registry_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registry_id` when calling `update_schedule_using_put`") # noqa: E501
# verify the required parameter 'schedule_id' is set
if self.api_client.client_side_validation and ('schedule_id' not in local_var_params or # noqa: E501
local_var_params['schedule_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schedule_id` when calling `update_schedule_using_put`") # noqa: E501
# verify the required parameter 'schedule_request' is set
if self.api_client.client_side_validation and ('schedule_request' not in local_var_params or # noqa: E501
local_var_params['schedule_request'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `schedule_request` when calling `update_schedule_using_put`") # noqa: E501
collection_formats = {}
path_params = {}
if 'registry_id' in local_var_params:
path_params['registryId'] = local_var_params['registry_id'] # noqa: E501
if 'schedule_id' in local_var_params:
path_params['scheduleId'] = local_var_params['schedule_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'schedule_request' in local_var_params:
body_params = local_var_params['schedule_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry/{registryId}/schedule/{scheduleId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def validate_registry_using_post(self, registry_request, **kwargs): # noqa: E501
"""Validate information for new registry # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.validate_registry_using_post(registry_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param RegistryRequest registry_request: Validate parameters for a registry you intend to create. Provide parameter values in the format shown under Example Value. Parameters accountId, arn, and region are required when the registryType is AWS ECR and you want to create a new AWS connector. Specify the ARN if you want to use an existing AWS connector, or if you want to create a new connector. All parameters are required other than dockerHubOrgName which is optional. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: bool
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.validate_registry_using_post_with_http_info(registry_request, **kwargs) # noqa: E501
def validate_registry_using_post_with_http_info(self, registry_request, **kwargs): # noqa: E501
"""Validate information for new registry # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.validate_registry_using_post_with_http_info(registry_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param RegistryRequest registry_request: Validate parameters for a registry you intend to create. Provide parameter values in the format shown under Example Value. Parameters accountId, arn, and region are required when the registryType is AWS ECR and you want to create a new AWS connector. Specify the ARN if you want to use an existing AWS connector, or if you want to create a new connector. All parameters are required other than dockerHubOrgName which is optional. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(bool, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['registry_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method validate_registry_using_post" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'registry_request' is set
if self.api_client.client_side_validation and ('registry_request' not in local_var_params or # noqa: E501
local_var_params['registry_request'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `registry_request` when calling `validate_registry_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'registry_request' in local_var_params:
body_params = local_var_params['registry_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v1.1/registry/validate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='bool', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
| 53.117675
| 489
| 0.62657
| 13,138
| 110,591
| 5.01606
| 0.024966
| 0.038725
| 0.059483
| 0.024582
| 0.979894
| 0.976692
| 0.974067
| 0.969758
| 0.965949
| 0.957421
| 0
| 0.01399
| 0.300675
| 110,591
| 2,081
| 490
| 53.1432
| 0.838115
| 0.475283
| 0
| 0.811518
| 0
| 0
| 0.194906
| 0.065861
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038743
| false
| 0
| 0.005236
| 0
| 0.082723
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b29a6f3baaddf80c3b1e86a7874a671920097023
| 21,543
|
py
|
Python
|
cinder/tests/unit/api/v3/test_group_snapshots.py
|
2020human/cinder
|
04528318848620e4ce2639ea2dd5323783dc7a1f
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/api/v3/test_group_snapshots.py
|
2020human/cinder
|
04528318848620e4ce2639ea2dd5323783dc7a1f
|
[
"Apache-2.0"
] | null | null | null |
cinder/tests/unit/api/v3/test_group_snapshots.py
|
2020human/cinder
|
04528318848620e4ce2639ea2dd5323783dc7a1f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2016 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for group_snapshot code.
"""
import ddt
import mock
import webob
from cinder.api.v3 import group_snapshots as v3_group_snapshots
from cinder import context
from cinder import db
from cinder import exception
from cinder.group import api as group_api
from cinder import objects
from cinder.objects import fields
from cinder import test
from cinder.tests.unit.api import fakes
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import utils
import cinder.volume
GROUP_MICRO_VERSION = '3.14'
@ddt.ddt
class GroupSnapshotsAPITestCase(test.TestCase):
"""Test Case for group_snapshots API."""
    def setUp(self):
        """Create the controller under test and the request contexts."""
        super(GroupSnapshotsAPITestCase, self).setUp()
        # System under test: the v3 group-snapshots API controller.
        self.controller = v3_group_snapshots.GroupSnapshotsController()
        self.volume_api = cinder.volume.API()
        # Admin context pinned to the fake project/user so DB objects the
        # tests create are owned by a predictable tenant.
        self.context = context.get_admin_context()
        self.context.project_id = fake.PROJECT_ID
        self.context.user_id = fake.USER_ID
        # Separate user-scoped request context (with an auth token), distinct
        # from the admin context above.
        self.user_ctxt = context.RequestContext(
            fake.USER_ID, fake.PROJECT_ID, auth_token=True)
def test_show_group_snapshot(self):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID],)
volume_id = utils.create_volume(
self.context,
group_id=group.id,
volume_type_id=fake.VOLUME_TYPE_ID)['id']
group_snapshot = utils.create_group_snapshot(
self.context, group_id=group.id)
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots/%s' %
(fake.PROJECT_ID, group_snapshot.id),
version=GROUP_MICRO_VERSION)
res_dict = self.controller.show(req, group_snapshot.id)
self.assertEqual(1, len(res_dict))
self.assertEqual('this is a test group snapshot',
res_dict['group_snapshot']['description'])
self.assertEqual('test_group_snapshot',
res_dict['group_snapshot']['name'])
self.assertEqual('creating', res_dict['group_snapshot']['status'])
group_snapshot.destroy()
db.volume_destroy(context.get_admin_context(),
volume_id)
group.destroy()
def test_show_group_snapshot_with_group_snapshot_NotFound(self):
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots/%s' %
(fake.PROJECT_ID,
fake.WILL_NOT_BE_FOUND_ID),
version=GROUP_MICRO_VERSION)
self.assertRaises(exception.GroupSnapshotNotFound,
self.controller.show,
req, fake.WILL_NOT_BE_FOUND_ID)
def test_list_group_snapshots_json(self):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID],)
volume_id = utils.create_volume(
self.context,
group_id=group.id,
volume_type_id=fake.VOLUME_TYPE_ID)['id']
group_snapshot1 = utils.create_group_snapshot(
self.context, group_id=group.id,
group_type_id=group.group_type_id)
group_snapshot2 = utils.create_group_snapshot(
self.context, group_id=group.id,
group_type_id=group.group_type_id)
group_snapshot3 = utils.create_group_snapshot(
self.context, group_id=group.id,
group_type_id=group.group_type_id)
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots' %
fake.PROJECT_ID,
version=GROUP_MICRO_VERSION)
res_dict = self.controller.index(req)
self.assertEqual(1, len(res_dict))
self.assertEqual(group_snapshot1.id,
res_dict['group_snapshots'][0]['id'])
self.assertEqual('test_group_snapshot',
res_dict['group_snapshots'][0]['name'])
self.assertEqual(group_snapshot2.id,
res_dict['group_snapshots'][1]['id'])
self.assertEqual('test_group_snapshot',
res_dict['group_snapshots'][1]['name'])
self.assertEqual(group_snapshot3.id,
res_dict['group_snapshots'][2]['id'])
self.assertEqual('test_group_snapshot',
res_dict['group_snapshots'][2]['name'])
group_snapshot3.destroy()
group_snapshot2.destroy()
group_snapshot1.destroy()
db.volume_destroy(context.get_admin_context(),
volume_id)
group.destroy()
def test_list_group_snapshots_detail_json(self):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID],)
volume_id = utils.create_volume(
self.context,
group_id=group.id,
volume_type_id=fake.VOLUME_TYPE_ID)['id']
group_snapshot1 = utils.create_group_snapshot(
self.context, group_id=group.id)
group_snapshot2 = utils.create_group_snapshot(
self.context, group_id=group.id)
group_snapshot3 = utils.create_group_snapshot(
self.context, group_id=group.id)
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots/detail' %
fake.PROJECT_ID,
version=GROUP_MICRO_VERSION)
res_dict = self.controller.detail(req)
self.assertEqual(1, len(res_dict))
self.assertEqual(3, len(res_dict['group_snapshots']))
self.assertEqual('this is a test group snapshot',
res_dict['group_snapshots'][0]['description'])
self.assertEqual('test_group_snapshot',
res_dict['group_snapshots'][0]['name'])
self.assertEqual(group_snapshot1.id,
res_dict['group_snapshots'][0]['id'])
self.assertEqual('creating',
res_dict['group_snapshots'][0]['status'])
self.assertEqual('this is a test group snapshot',
res_dict['group_snapshots'][1]['description'])
self.assertEqual('test_group_snapshot',
res_dict['group_snapshots'][1]['name'])
self.assertEqual(group_snapshot2.id,
res_dict['group_snapshots'][1]['id'])
self.assertEqual('creating',
res_dict['group_snapshots'][1]['status'])
self.assertEqual('this is a test group snapshot',
res_dict['group_snapshots'][2]['description'])
self.assertEqual('test_group_snapshot',
res_dict['group_snapshots'][2]['name'])
self.assertEqual(group_snapshot3.id,
res_dict['group_snapshots'][2]['id'])
self.assertEqual('creating',
res_dict['group_snapshots'][2]['status'])
group_snapshot3.destroy()
group_snapshot2.destroy()
group_snapshot1.destroy()
db.volume_destroy(context.get_admin_context(),
volume_id)
group.destroy()
@mock.patch(
'cinder.api.openstack.wsgi.Controller.validate_name_and_description')
@mock.patch('cinder.db.volume_type_get')
@mock.patch('cinder.quota.VolumeTypeQuotaEngine.reserve')
def test_create_group_snapshot_json(self, mock_quota, mock_vol_type,
mock_validate):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID],)
volume_id = utils.create_volume(
self.context,
group_id=group.id,
volume_type_id=fake.VOLUME_TYPE_ID)['id']
body = {"group_snapshot": {"name": "group_snapshot1",
"description":
"Group Snapshot 1",
"group_id": group.id}}
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots' %
fake.PROJECT_ID,
version=GROUP_MICRO_VERSION)
res_dict = self.controller.create(req, body)
self.assertEqual(1, len(res_dict))
self.assertIn('id', res_dict['group_snapshot'])
self.assertTrue(mock_validate.called)
group.destroy()
group_snapshot = objects.GroupSnapshot.get_by_id(
context.get_admin_context(), res_dict['group_snapshot']['id'])
db.volume_destroy(context.get_admin_context(),
volume_id)
group_snapshot.destroy()
@mock.patch(
'cinder.api.openstack.wsgi.Controller.validate_name_and_description')
@mock.patch('cinder.db.volume_type_get')
def test_create_group_snapshot_when_volume_in_error_status(
self, mock_vol_type, mock_validate):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID],)
volume_id = utils.create_volume(
self.context,
status='error',
group_id=group.id,
volume_type_id=fake.VOLUME_TYPE_ID)['id']
body = {"group_snapshot": {"name": "group_snapshot1",
"description":
"Group Snapshot 1",
"group_id": group.id}}
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots' %
fake.PROJECT_ID,
version=GROUP_MICRO_VERSION)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, body)
self.assertTrue(mock_validate.called)
group.destroy()
db.volume_destroy(context.get_admin_context(),
volume_id)
def test_create_group_snapshot_with_no_body(self):
# omit body from the request
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots' %
fake.PROJECT_ID,
version=GROUP_MICRO_VERSION)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, None)
@mock.patch.object(group_api.API, 'create_group_snapshot',
side_effect=exception.InvalidGroupSnapshot(
reason='Invalid group snapshot'))
def test_create_with_invalid_group_snapshot(self, mock_create_group_snap):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID],)
volume_id = utils.create_volume(
self.context,
status='error',
group_id=group.id,
volume_type_id=fake.VOLUME_TYPE_ID)['id']
body = {"group_snapshot": {"name": "group_snapshot1",
"description":
"Group Snapshot 1",
"group_id": group.id}}
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots' %
fake.PROJECT_ID,
version=GROUP_MICRO_VERSION)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, body)
group.destroy()
db.volume_destroy(context.get_admin_context(),
volume_id)
@mock.patch.object(group_api.API, 'create_group_snapshot',
side_effect=exception.GroupSnapshotNotFound(
group_snapshot_id='invalid_id'))
def test_create_with_group_snapshot_not_found(self, mock_create_grp_snap):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID],)
volume_id = utils.create_volume(
self.context,
status='error',
group_id=group.id,
volume_type_id=fake.VOLUME_TYPE_ID)['id']
body = {"group_snapshot": {"name": "group_snapshot1",
"description":
"Group Snapshot 1",
"group_id": group.id}}
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots' %
fake.PROJECT_ID,
version=GROUP_MICRO_VERSION)
self.assertRaises(exception.GroupSnapshotNotFound,
self.controller.create,
req, body)
group.destroy()
db.volume_destroy(context.get_admin_context(),
volume_id)
def test_create_group_snapshot_from_empty_group(self):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID],)
body = {"group_snapshot": {"name": "group_snapshot1",
"description":
"Group Snapshot 1",
"group_id": group.id}}
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots' %
fake.PROJECT_ID,
version=GROUP_MICRO_VERSION)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, body)
group.destroy()
def test_delete_group_snapshot_available(self):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID],)
volume_id = utils.create_volume(
self.context,
group_id=group.id,
volume_type_id=fake.VOLUME_TYPE_ID)['id']
group_snapshot = utils.create_group_snapshot(
self.context,
group_id=group.id,
status='available')
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots/%s' %
(fake.PROJECT_ID, group_snapshot.id),
version=GROUP_MICRO_VERSION)
res_dict = self.controller.delete(req, group_snapshot.id)
group_snapshot = objects.GroupSnapshot.get_by_id(self.context,
group_snapshot.id)
self.assertEqual(202, res_dict.status_int)
self.assertEqual('deleting', group_snapshot.status)
group_snapshot.destroy()
db.volume_destroy(context.get_admin_context(),
volume_id)
group.destroy()
def test_delete_group_snapshot_available_used_as_source(self):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID],)
volume_id = utils.create_volume(
self.context,
group_id=group.id,
volume_type_id=fake.VOLUME_TYPE_ID)['id']
group_snapshot = utils.create_group_snapshot(
self.context,
group_id=group.id,
status='available')
group2 = utils.create_group(
self.context, status='creating',
group_snapshot_id=group_snapshot.id,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID],)
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots/%s' %
(fake.PROJECT_ID, group_snapshot.id),
version=GROUP_MICRO_VERSION)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
req, group_snapshot.id)
group_snapshot.destroy()
db.volume_destroy(context.get_admin_context(),
volume_id)
group.destroy()
group2.destroy()
def test_delete_group_snapshot_with_group_snapshot_NotFound(self):
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots/%s' %
(fake.PROJECT_ID,
fake.WILL_NOT_BE_FOUND_ID),
version=GROUP_MICRO_VERSION)
self.assertRaises(exception.GroupSnapshotNotFound,
self.controller.delete,
req, fake.WILL_NOT_BE_FOUND_ID)
def test_delete_group_snapshot_with_invalid_group_snapshot(self):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID],)
volume_id = utils.create_volume(
self.context,
group_id=group.id,
volume_type_id=fake.VOLUME_TYPE_ID)['id']
group_snapshot = utils.create_group_snapshot(
self.context,
group_id=group.id,
status='invalid')
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots/%s' %
(fake.PROJECT_ID, group_snapshot.id),
version=GROUP_MICRO_VERSION)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
req, group_snapshot.id)
group_snapshot.destroy()
db.volume_destroy(context.get_admin_context(),
volume_id)
group.destroy()
@ddt.data(('3.11', 'fake_snapshot_001',
fields.GroupSnapshotStatus.AVAILABLE,
exception.VersionNotFoundForAPIMethod),
('3.18', 'fake_snapshot_001',
fields.GroupSnapshotStatus.AVAILABLE,
exception.VersionNotFoundForAPIMethod),
('3.19', 'fake_snapshot_001',
fields.GroupSnapshotStatus.AVAILABLE,
exception.GroupSnapshotNotFound))
@ddt.unpack
def test_reset_group_snapshot_status_illegal(self, version,
group_snapshot_id,
status, exceptions):
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots/%s/action' %
(fake.PROJECT_ID, group_snapshot_id),
version=version)
body = {"reset_status": {
"status": status
}}
self.assertRaises(exceptions,
self.controller.reset_status,
req, group_snapshot_id, body)
def test_reset_group_snapshot_status_invalid_status(self):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID])
group_snapshot = utils.create_group_snapshot(
self.context,
group_id=group.id,
status=fields.GroupSnapshotStatus.CREATING)
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots/%s/action' %
(fake.PROJECT_ID, group_snapshot.id),
version='3.19')
body = {"reset_status": {
"status": "invalid_test_status"
}}
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.reset_status,
req, group_snapshot.id, body)
def test_reset_group_snapshot_status(self):
group = utils.create_group(
self.context,
group_type_id=fake.GROUP_TYPE_ID,
volume_type_ids=[fake.VOLUME_TYPE_ID])
group_snapshot = utils.create_group_snapshot(
self.context,
group_id=group.id,
status=fields.GroupSnapshotStatus.CREATING)
req = fakes.HTTPRequest.blank('/v3/%s/group_snapshots/%s/action' %
(fake.PROJECT_ID, group_snapshot.id),
version='3.19')
body = {"reset_status": {
"status": fields.GroupSnapshotStatus.AVAILABLE
}}
response = self.controller.reset_status(req, group_snapshot.id,
body)
g_snapshot = objects.GroupSnapshot.get_by_id(self.context,
group_snapshot.id)
self.assertEqual(202, response.status_int)
self.assertEqual(fields.GroupSnapshotStatus.AVAILABLE,
g_snapshot.status)
| 43.697769
| 78
| 0.570348
| 2,249
| 21,543
| 5.160071
| 0.088039
| 0.105299
| 0.046876
| 0.032572
| 0.821542
| 0.799052
| 0.78897
| 0.766738
| 0.751314
| 0.713744
| 0
| 0.007981
| 0.336954
| 21,543
| 492
| 79
| 43.786585
| 0.804467
| 0.032493
| 0
| 0.75644
| 0
| 0
| 0.097954
| 0.033388
| 0
| 0
| 0
| 0
| 0.103045
| 1
| 0.042155
| false
| 0
| 0.035129
| 0
| 0.079625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2ac571a355e52266fbb237ce4228917534e4002
| 88,425
|
py
|
Python
|
examples/prevention.py
|
git-afsantos/hpl-rv-gen
|
3ffb2298bc8706669b00980b01e9e23fffdd839a
|
[
"MIT"
] | 1
|
2021-07-16T05:19:27.000Z
|
2021-07-16T05:19:27.000Z
|
examples/prevention.py
|
git-afsantos/hpl-rv-gen
|
3ffb2298bc8706669b00980b01e9e23fffdd839a
|
[
"MIT"
] | null | null | null |
examples/prevention.py
|
git-afsantos/hpl-rv-gen
|
3ffb2298bc8706669b00980b01e9e23fffdd839a
|
[
"MIT"
] | null | null | null |
class PropertyMonitor(object):
    """Generated runtime monitor for the HPL property in ``HPL_PROPERTY``.

    State encoding of ``_state``: 0 = monitor off, 3 = scope active with
    no pending trigger, 2 = an '/a' trigger is buffered in ``_pool`` (so
    '/b' is forbidden), -2 = violation verdict, -1 = success verdict.
    """
    __slots__ = (
        '_lock',  # guards every state mutation below
        '_state',  # current automaton state (see class docstring)
        '_pool',  # bounded MsgRecord deque of pending trigger events
        'witness',  # MsgRecord list backing the final verdict
        'on_enter_scope',  # called when the property scope opens
        'on_exit_scope',  # called when the property scope closes
        'on_violation',  # called when the verdict becomes False
        'on_success',  # called when the verdict becomes True
        'time_launch',  # timestamp passed to on_launch()
        'time_shutdown',  # timestamp passed to on_shutdown()
        'time_state',  # timestamp of the latest state change
        'cb_map',  # topic name -> message handler
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''globally: /a { True } forbids /b { True }'''
    def __init__(self):
        self._lock = Lock()
        self._reset()
        # All callbacks start as no-ops; clients may replace them.
        noop = self._noop
        self.on_enter_scope = noop
        self.on_exit_scope = noop
        self.on_violation = noop
        self.on_success = noop
        self._state = 0
        self.cb_map = {'/b': self.on_msg__b, '/a': self.on_msg__a}
    @property
    def verdict(self):
        """True/False once decided; None while monitoring continues."""
        with self._lock:
            state = self._state
        if state == -1:
            return True
        return False if state == -2 else None
    def on_launch(self, stamp):
        """Activate the monitor at ``stamp``; error if already running."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3
            self.time_state = stamp
            self.on_enter_scope(stamp)
            return True
    def on_shutdown(self, stamp):
        """Deactivate the monitor at ``stamp``; error if already stopped."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Periodic tick; this untimed property needs no expiration."""
        return True
    def on_msg__b(self, msg, stamp):
        """A '/b' message while a trigger is live violates the property."""
        with self._lock:
            if self._state != 2:
                return False
            assert len(self._pool) >= 1, 'missing trigger event'
            trigger = self._pool[0]
            self.witness.append(trigger)
            self.witness.append(MsgRecord('/b', stamp, msg))
            self._pool.clear()
            self._state = -2
            self.time_state = stamp
            self.on_violation(stamp, self.witness)
            return True
    def on_msg__a(self, msg, stamp):
        """An '/a' message opens the 'forbids' window (state 3 -> 2)."""
        with self._lock:
            if self._state != 3:
                return False
            self._pool.append(MsgRecord('/a', stamp, msg))
            self._state = 2
            self.time_state = stamp
            return True
    def _reset(self):
        """Clear witness and trigger pool; invalidate all timestamps."""
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = self.time_shutdown = self.time_state = -1
    def _noop(self, *args):
        """Default callback: do nothing."""
        pass
class PropertyMonitor(object):
    """Generated runtime monitor: '/a' forbids '/b' within 0.1s.

    ``_state`` values: 0 = off, 3 = scope active with no pending
    trigger, 2 = an '/a' trigger sits in ``_pool`` and its 0.1s window
    may still be open, -2 = violation, -1 = success.
    """
    __slots__ = (
        '_lock',  # concurrency control
        '_state',  # currently active state
        '_pool',  # MsgRecord deque to hold temporary records
        'witness',  # MsgRecord list of observed events
        'on_enter_scope',  # callback upon entering the scope
        'on_exit_scope',  # callback upon exiting the scope
        'on_violation',  # callback upon verdict of False
        'on_success',  # callback upon verdict of True
        'time_launch',  # when was the monitor launched
        'time_shutdown',  # when was the monitor shutdown
        'time_state',  # when did the last state transition occur
        'cb_map',  # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''globally: /a { True } forbids /b { True } within 0.1s'''
    def __init__(self):
        self._lock = Lock()
        self._reset()
        # Scope/verdict callbacks default to no-ops until clients set them.
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        self.cb_map = {
            '/b': self.on_msg__b,
            '/a': self.on_msg__a,
        }
    @property
    def verdict(self):
        """Return the verdict: True/False once final, None otherwise."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring at ``stamp``; RuntimeError if already on."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3
            self.time_state = stamp
            self.on_enter_scope(stamp)
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring at ``stamp``; RuntimeError if already off."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Expire triggers older than 0.1s; back to state 3 if none remain."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            return True
    def on_msg__b(self, msg, stamp):
        """Handle '/b': a violation if a live (< 0.1s old) trigger exists."""
        with self._lock:
            # Lazily expire stale triggers first (same logic as on_timer).
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                self.witness.append(rec)
                self.witness.append(MsgRecord('/b', stamp, msg))
                self._pool.clear()
                self._state = -2
                self.time_state = stamp
                self.on_violation(stamp, self.witness)
                return True
            return False
    def on_msg__a(self, msg, stamp):
        """Handle '/a': record the trigger and (re)open the 0.1s window."""
        with self._lock:
            # Lazily expire stale triggers first (same logic as on_timer).
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                # Pool has maxlen 1, so this replaces the older trigger.
                self._pool.append(MsgRecord('/a', stamp, msg))
                return True
            if self._state == 3:
                self._pool.append(MsgRecord('/a', stamp, msg))
                self._state = 2
                self.time_state = stamp
                return True
            return False
    def _reset(self):
        """Reset witness, trigger pool (max one record) and timestamps."""
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _noop(self, *args):
        # Default callback placeholder.
        pass
class PropertyMonitor(object):
    """Generated runtime monitor for the HPL property in ``HPL_PROPERTY``.

    Same automaton as the untimed monitors in this file (0 = off,
    3 = scope active, 2 = trigger pending, -2 = violation, -1 =
    success), but both topics carry a ``data`` predicate.
    """
    __slots__ = (
        '_lock',  # guards every state mutation below
        '_state',  # current automaton state (see class docstring)
        '_pool',  # bounded MsgRecord deque of pending trigger events
        'witness',  # MsgRecord list backing the final verdict
        'on_enter_scope',  # called when the property scope opens
        'on_exit_scope',  # called when the property scope closes
        'on_violation',  # called when the verdict becomes False
        'on_success',  # called when the verdict becomes True
        'time_launch',  # timestamp passed to on_launch()
        'time_shutdown',  # timestamp passed to on_shutdown()
        'time_state',  # timestamp of the latest state change
        'cb_map',  # topic name -> message handler
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''globally: /a { (data > 0) } forbids /b { (data < 0) }'''
    def __init__(self):
        self._lock = Lock()
        self._reset()
        # All callbacks start as no-ops; clients may replace them.
        noop = self._noop
        self.on_enter_scope = noop
        self.on_exit_scope = noop
        self.on_violation = noop
        self.on_success = noop
        self._state = 0
        self.cb_map = {'/b': self.on_msg__b, '/a': self.on_msg__a}
    @property
    def verdict(self):
        """True/False once decided; None while monitoring continues."""
        with self._lock:
            state = self._state
        if state == -1:
            return True
        return False if state == -2 else None
    def on_launch(self, stamp):
        """Activate the monitor at ``stamp``; error if already running."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3
            self.time_state = stamp
            self.on_enter_scope(stamp)
            return True
    def on_shutdown(self, stamp):
        """Deactivate the monitor at ``stamp``; error if already stopped."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Periodic tick; this untimed property needs no expiration."""
        return True
    def on_msg__b(self, msg, stamp):
        """A matching '/b' (data < 0) while a trigger is live violates."""
        with self._lock:
            if self._state != 2:
                return False
            assert len(self._pool) >= 1, 'missing trigger event'
            trigger = self._pool[0]
            # Keep the original predicate form; '>= 0' would mishandle NaN.
            if not (msg.data < 0):
                return False
            self.witness.append(trigger)
            self.witness.append(MsgRecord('/b', stamp, msg))
            self._pool.clear()
            self._state = -2
            self.time_state = stamp
            self.on_violation(stamp, self.witness)
            return True
    def on_msg__a(self, msg, stamp):
        """A matching '/a' (data > 0) opens the window (state 3 -> 2)."""
        with self._lock:
            # Short-circuit keeps msg.data unevaluated outside state 3.
            if self._state == 3 and (msg.data > 0):
                self._pool.append(MsgRecord('/a', stamp, msg))
                self._state = 2
                self.time_state = stamp
                return True
            return False
    def _reset(self):
        """Clear witness and trigger pool; invalidate all timestamps."""
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = self.time_shutdown = self.time_state = -1
    def _noop(self, *args):
        """Default callback: do nothing."""
        pass
class PropertyMonitor(object):
    """Generated runtime monitor: '/a' (data > 0) forbids '/b' (data < 0)
    within 0.1s.

    ``_state`` values: 0 = off, 3 = scope active with no pending
    trigger, 2 = a matching '/a' sits in ``_pool`` and its 0.1s window
    may still be open, -2 = violation, -1 = success.
    """
    __slots__ = (
        '_lock',  # concurrency control
        '_state',  # currently active state
        '_pool',  # MsgRecord deque to hold temporary records
        'witness',  # MsgRecord list of observed events
        'on_enter_scope',  # callback upon entering the scope
        'on_exit_scope',  # callback upon exiting the scope
        'on_violation',  # callback upon verdict of False
        'on_success',  # callback upon verdict of True
        'time_launch',  # when was the monitor launched
        'time_shutdown',  # when was the monitor shutdown
        'time_state',  # when did the last state transition occur
        'cb_map',  # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''globally: /a { (data > 0) } forbids /b { (data < 0) } within 0.1s'''
    def __init__(self):
        self._lock = Lock()
        self._reset()
        # Scope/verdict callbacks default to no-ops until clients set them.
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        self.cb_map = {
            '/b': self.on_msg__b,
            '/a': self.on_msg__a,
        }
    @property
    def verdict(self):
        """Return the verdict: True/False once final, None otherwise."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring at ``stamp``; RuntimeError if already on."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3
            self.time_state = stamp
            self.on_enter_scope(stamp)
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring at ``stamp``; RuntimeError if already off."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Expire triggers older than 0.1s; back to state 3 if none remain."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            return True
    def on_msg__b(self, msg, stamp):
        """Handle '/b': violation if it matches and a live trigger exists."""
        with self._lock:
            # Lazily expire stale triggers first (same logic as on_timer).
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                if (msg.data < 0):
                    self.witness.append(rec)
                    self.witness.append(MsgRecord('/b', stamp, msg))
                    self._pool.clear()
                    self._state = -2
                    self.time_state = stamp
                    self.on_violation(stamp, self.witness)
                    return True
            return False
    def on_msg__a(self, msg, stamp):
        """Handle '/a': a matching message (re)opens the 0.1s window."""
        with self._lock:
            # Lazily expire stale triggers first (same logic as on_timer).
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                # Pool has maxlen 1, so this replaces the older trigger.
                if (msg.data > 0):
                    self._pool.append(MsgRecord('/a', stamp, msg))
                return True
            if self._state == 3:
                if (msg.data > 0):
                    self._pool.append(MsgRecord('/a', stamp, msg))
                    self._state = 2
                    self.time_state = stamp
                    return True
            return False
    def _reset(self):
        """Reset witness, trigger pool (max one record) and timestamps."""
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _noop(self, *args):
        # Default callback placeholder.
        pass
class PropertyMonitor(object):
    """Generated runtime monitor: ('/a1' (data > 0) or '/a2' (data < 0))
    forbids '/b' within 0.1s.

    ``_state`` values: 0 = off, 3 = scope active with no pending
    trigger, 2 = a matching trigger sits in ``_pool`` and its 0.1s
    window may still be open, -2 = violation, -1 = success.
    """
    __slots__ = (
        '_lock',  # concurrency control
        '_state',  # currently active state
        '_pool',  # MsgRecord deque to hold temporary records
        'witness',  # MsgRecord list of observed events
        'on_enter_scope',  # callback upon entering the scope
        'on_exit_scope',  # callback upon exiting the scope
        'on_violation',  # callback upon verdict of False
        'on_success',  # callback upon verdict of True
        'time_launch',  # when was the monitor launched
        'time_shutdown',  # when was the monitor shutdown
        'time_state',  # when did the last state transition occur
        'cb_map',  # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''globally: (/a1 { (data > 0) } or /a2 { (data < 0) }) forbids /b { True } within 0.1s'''
    def __init__(self):
        self._lock = Lock()
        self._reset()
        # Scope/verdict callbacks default to no-ops until clients set them.
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        self.cb_map = {
            '/b': self.on_msg__b,
            '/a1': self.on_msg__a1,
            '/a2': self.on_msg__a2,
        }
    @property
    def verdict(self):
        """Return the verdict: True/False once final, None otherwise."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring at ``stamp``; RuntimeError if already on."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3
            self.time_state = stamp
            self.on_enter_scope(stamp)
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring at ``stamp``; RuntimeError if already off."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Expire triggers older than 0.1s; back to state 3 if none remain."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            return True
    def on_msg__b(self, msg, stamp):
        """Handle '/b': a violation if a live (< 0.1s old) trigger exists."""
        with self._lock:
            # Lazily expire stale triggers first (same logic as on_timer).
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                self.witness.append(rec)
                self.witness.append(MsgRecord('/b', stamp, msg))
                self._pool.clear()
                self._state = -2
                self.time_state = stamp
                self.on_violation(stamp, self.witness)
                return True
            return False
    def on_msg__a1(self, msg, stamp):
        """Handle '/a1': a matching message (re)opens the 0.1s window."""
        with self._lock:
            # Lazily expire stale triggers first (same logic as on_timer).
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                # Pool has maxlen 1, so this replaces the older trigger.
                if (msg.data > 0):
                    self._pool.append(MsgRecord('/a1', stamp, msg))
                return True
            if self._state == 3:
                if (msg.data > 0):
                    self._pool.append(MsgRecord('/a1', stamp, msg))
                    self._state = 2
                    self.time_state = stamp
                    return True
            return False
    def on_msg__a2(self, msg, stamp):
        """Handle '/a2': a matching message (re)opens the 0.1s window."""
        with self._lock:
            # Lazily expire stale triggers first (same logic as on_timer).
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                # Pool has maxlen 1, so this replaces the older trigger.
                if (msg.data < 0):
                    self._pool.append(MsgRecord('/a2', stamp, msg))
                return True
            if self._state == 3:
                if (msg.data < 0):
                    self._pool.append(MsgRecord('/a2', stamp, msg))
                    self._state = 2
                    self.time_state = stamp
                    return True
            return False
    def _reset(self):
        """Reset witness, trigger pool (max one record) and timestamps."""
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _noop(self, *args):
        # Default callback placeholder.
        pass
class PropertyMonitor(object):
    """Generated runtime monitor: '/a' forbids ('/b1' (data > 0) or
    '/b2' (data < 0)) within 0.1s.

    ``_state`` values: 0 = off, 3 = scope active with no pending
    trigger, 2 = an '/a' trigger sits in ``_pool`` and its 0.1s window
    may still be open, -2 = violation, -1 = success.
    """
    __slots__ = (
        '_lock',  # concurrency control
        '_state',  # currently active state
        '_pool',  # MsgRecord deque to hold temporary records
        'witness',  # MsgRecord list of observed events
        'on_enter_scope',  # callback upon entering the scope
        'on_exit_scope',  # callback upon exiting the scope
        'on_violation',  # callback upon verdict of False
        'on_success',  # callback upon verdict of True
        'time_launch',  # when was the monitor launched
        'time_shutdown',  # when was the monitor shutdown
        'time_state',  # when did the last state transition occur
        'cb_map',  # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''globally: /a { True } forbids (/b1 { (data > 0) } or /b2 { (data < 0) }) within 0.1s'''
    def __init__(self):
        self._lock = Lock()
        self._reset()
        # Scope/verdict callbacks default to no-ops until clients set them.
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        self.cb_map = {
            '/b2': self.on_msg__b2,
            '/a': self.on_msg__a,
            '/b1': self.on_msg__b1,
        }
    @property
    def verdict(self):
        """Return the verdict: True/False once final, None otherwise."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring at ``stamp``; RuntimeError if already on."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3
            self.time_state = stamp
            self.on_enter_scope(stamp)
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring at ``stamp``; RuntimeError if already off."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Expire triggers older than 0.1s; back to state 3 if none remain."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            return True
    def on_msg__b2(self, msg, stamp):
        """Handle '/b2': violation if it matches and a live trigger exists."""
        with self._lock:
            # Lazily expire stale triggers first (same logic as on_timer).
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                if (msg.data < 0):
                    self.witness.append(rec)
                    self.witness.append(MsgRecord('/b2', stamp, msg))
                    self._pool.clear()
                    self._state = -2
                    self.time_state = stamp
                    self.on_violation(stamp, self.witness)
                    return True
            return False
    def on_msg__a(self, msg, stamp):
        """Handle '/a': record the trigger and (re)open the 0.1s window."""
        with self._lock:
            # Lazily expire stale triggers first (same logic as on_timer).
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                # Pool has maxlen 1, so this replaces the older trigger.
                self._pool.append(MsgRecord('/a', stamp, msg))
                return True
            if self._state == 3:
                self._pool.append(MsgRecord('/a', stamp, msg))
                self._state = 2
                self.time_state = stamp
                return True
            return False
    def on_msg__b1(self, msg, stamp):
        """Handle '/b1': violation if it matches and a live trigger exists."""
        with self._lock:
            # Lazily expire stale triggers first (same logic as on_timer).
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                if (msg.data > 0):
                    self.witness.append(rec)
                    self.witness.append(MsgRecord('/b1', stamp, msg))
                    self._pool.clear()
                    self._state = -2
                    self.time_state = stamp
                    self.on_violation(stamp, self.witness)
                    return True
            return False
    def _reset(self):
        """Reset witness, trigger pool (max one record) and timestamps."""
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _noop(self, *args):
        # Default callback placeholder.
        pass
class PropertyMonitor(object):
    """Runtime monitor for the HPL property in ``HPL_PROPERTY``.

    after /p: /a forbids /b within 0.1s -- once /p opens the scope, any /b
    observed within 0.1s of a pooled /a trigger yields verdict False.

    ``_state`` encoding: 0 = off; 1 = launched, waiting for /p;
    3 = inside the scope with no live trigger; 2 = inside the scope with a
    trigger record in ``_pool``; -1 = verdict True; -2 = verdict False.
    """
    __slots__ = (
        '_lock', # concurrency control
        '_state', # currently active state
        '_pool', # MsgRecord deque to hold temporary records
        'witness', # MsgRecord list of observed events
        'on_enter_scope', # callback upon entering the scope
        'on_exit_scope', # callback upon exiting the scope
        'on_violation', # callback upon verdict of False
        'on_success', # callback upon verdict of True
        'time_launch', # when was the monitor launched
        'time_shutdown', # when was the monitor shutdown
        'time_state', # when did the last state transition occur
        'cb_map', # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''after /p { True }: /a { True } forbids /b { True } within 0.1s'''
    def __init__(self):
        # User callbacks default to no-ops; install real ones before launch.
        self._lock = Lock()
        self._reset()
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        # Dispatch table: topic name -> message handler.
        self.cb_map = {
            '/b': self.on_msg__b,
            '/a': self.on_msg__a,
            '/p': self.on_msg__p,
        }
    @property
    def verdict(self):
        """True (success), False (violation), or None while undecided."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring; waits for /p to enter the scope (state 1)."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 1
            self.time_state = stamp
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring without issuing a verdict."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Periodic tick: expire trigger records past the 0.1s bound."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
        return True
    def on_msg__b(self, msg, stamp):
        """Handle /b (forbidden behaviour): violation if a trigger is live."""
        with self._lock:
            # Expire stale triggers first.
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                # Unconditional predicate ({ True }): any /b here violates.
                self.witness.append(rec)
                self.witness.append(MsgRecord('/b', stamp, msg))
                self._pool.clear()
                self._state = -2
                self.time_state = stamp
                self.on_violation(stamp, self.witness)
                return True
            return False
    def on_msg__a(self, msg, stamp):
        """Handle /a (trigger): pool the record and arm the time window."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                self._pool.append(MsgRecord('/a', stamp, msg))
                return True
            if self._state == 3:
                self._pool.append(MsgRecord('/a', stamp, msg))
                self._state = 2
                self.time_state = stamp
                return True
            return False
    def on_msg__p(self, msg, stamp):
        """Handle /p (scope activator): enter the scope from state 1."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 1:
                self.witness.append(MsgRecord('/p', stamp, msg))
                self._state = 3
                self.time_state = stamp
                self.on_enter_scope(stamp)
                return True
            return False
    def _reset(self):
        # Restore per-run mutable state; pool bounded to a single record.
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _noop(self, *args):
        # Default placeholder for user callbacks.
        pass
class PropertyMonitor(object):
    """Runtime monitor for the HPL property in ``HPL_PROPERTY``.

    after /p { phi }: /a { data > 0 } forbids /b { data < 0 } within 0.1s.
    Same state machine as the unconditional variant, but scope entry,
    trigger, and violation are all guarded by message predicates.

    ``_state`` encoding: 0 = off; 1 = launched, waiting for /p;
    3 = inside the scope with no live trigger; 2 = inside the scope with a
    trigger record in ``_pool``; -1 = verdict True; -2 = verdict False.
    """
    __slots__ = (
        '_lock', # concurrency control
        '_state', # currently active state
        '_pool', # MsgRecord deque to hold temporary records
        'witness', # MsgRecord list of observed events
        'on_enter_scope', # callback upon entering the scope
        'on_exit_scope', # callback upon exiting the scope
        'on_violation', # callback upon verdict of False
        'on_success', # callback upon verdict of True
        'time_launch', # when was the monitor launched
        'time_shutdown', # when was the monitor shutdown
        'time_state', # when did the last state transition occur
        'cb_map', # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''after /p { phi }: /a { (data > 0) } forbids /b { (data < 0) } within 0.1s'''
    def __init__(self):
        # User callbacks default to no-ops; install real ones before launch.
        self._lock = Lock()
        self._reset()
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        # Dispatch table: topic name -> message handler.
        self.cb_map = {
            '/b': self.on_msg__b,
            '/a': self.on_msg__a,
            '/p': self.on_msg__p,
        }
    @property
    def verdict(self):
        """True (success), False (violation), or None while undecided."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring; waits for a matching /p (state 1)."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 1
            self.time_state = stamp
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring without issuing a verdict."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Periodic tick: expire trigger records past the 0.1s bound."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
        return True
    def on_msg__b(self, msg, stamp):
        """Handle /b: violation only if ``data < 0`` and a trigger is live."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                if (msg.data < 0):
                    self.witness.append(rec)
                    self.witness.append(MsgRecord('/b', stamp, msg))
                    self._pool.clear()
                    self._state = -2
                    self.time_state = stamp
                    self.on_violation(stamp, self.witness)
                return True
            return False
    def on_msg__a(self, msg, stamp):
        """Handle /a: pool as a trigger only when ``data > 0``."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                if (msg.data > 0):
                    self._pool.append(MsgRecord('/a', stamp, msg))
                return True
            if self._state == 3:
                if (msg.data > 0):
                    self._pool.append(MsgRecord('/a', stamp, msg))
                    self._state = 2
                    self.time_state = stamp
                return True
            return False
    def on_msg__p(self, msg, stamp):
        """Handle /p: enter the scope from state 1 when ``phi`` holds."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 1:
                if msg.phi:
                    self.witness.append(MsgRecord('/p', stamp, msg))
                    self._state = 3
                    self.time_state = stamp
                    self.on_enter_scope(stamp)
                return True
            return False
    def _reset(self):
        # Restore per-run mutable state; pool bounded to a single record.
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _noop(self, *args):
        # Default placeholder for user callbacks.
        pass
class PropertyMonitor(object):
    """Runtime monitor for the HPL property in ``HPL_PROPERTY``.

    until /q: /a { phi } forbids /b { psi } within 0.1s -- the scope is
    open from launch; a /q closes it with verdict True; a matching /b
    within 0.1s of a pooled /a yields verdict False.

    ``_state`` encoding: 0 = off; 3 = inside the scope with no live
    trigger; 2 = inside the scope with a trigger record in ``_pool``;
    -1 = verdict True; -2 = verdict False.
    """
    __slots__ = (
        '_lock', # concurrency control
        '_state', # currently active state
        '_pool', # MsgRecord deque to hold temporary records
        'witness', # MsgRecord list of observed events
        'on_enter_scope', # callback upon entering the scope
        'on_exit_scope', # callback upon exiting the scope
        'on_violation', # callback upon verdict of False
        'on_success', # callback upon verdict of True
        'time_launch', # when was the monitor launched
        'time_shutdown', # when was the monitor shutdown
        'time_state', # when did the last state transition occur
        'cb_map', # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''until /q { True }: /a { phi } forbids /b { psi } within 0.1s'''
    def __init__(self):
        # User callbacks default to no-ops; install real ones before launch.
        self._lock = Lock()
        self._reset()
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        # Dispatch table: topic name -> message handler.
        self.cb_map = {
            '/b': self.on_msg__b,
            '/q': self.on_msg__q,
            '/a': self.on_msg__a,
        }
    @property
    def verdict(self):
        """True (success), False (violation), or None while undecided."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring; the scope opens immediately (state 3)."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3
            self.time_state = stamp
            self.on_enter_scope(stamp)
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring without issuing a verdict."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Periodic tick: expire trigger records past the 0.1s bound."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
        return True
    def on_msg__b(self, msg, stamp):
        """Handle /b: violation if ``psi`` holds while a trigger is live."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                if msg.psi:
                    self.witness.append(rec)
                    self.witness.append(MsgRecord('/b', stamp, msg))
                    self._pool.clear()
                    self._state = -2
                    self.time_state = stamp
                    self.on_violation(stamp, self.witness)
                return True
            return False
    def on_msg__q(self, msg, stamp):
        """Handle /q: closes the scope from state 2 or 3 with verdict True."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                self._pool.clear()
                self.witness.append(MsgRecord('/q', stamp, msg))
                self._state = -1
                self.time_state = stamp
                self.on_exit_scope(stamp)
                self.on_success(stamp, self.witness)
                return True
            if self._state == 3:
                self._pool.clear()
                self.witness.append(MsgRecord('/q', stamp, msg))
                self._state = -1
                self.time_state = stamp
                self.on_exit_scope(stamp)
                self.on_success(stamp, self.witness)
                return True
            return False
    def on_msg__a(self, msg, stamp):
        """Handle /a: pool as a trigger when ``phi`` holds."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                if msg.phi:
                    self._pool.append(MsgRecord('/a', stamp, msg))
                return True
            if self._state == 3:
                if msg.phi:
                    self._pool.append(MsgRecord('/a', stamp, msg))
                    self._state = 2
                    self.time_state = stamp
                return True
            return False
    def _reset(self):
        # Restore per-run mutable state; pool bounded to a single record.
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _noop(self, *args):
        # Default placeholder for user callbacks.
        pass
class PropertyMonitor(object):
    """Runtime monitor for the HPL property in ``HPL_PROPERTY``.

    until /b: /a forbids /b within 0.1s -- the scope opens at launch and
    the first /b closes it.  Because the scope-closing /b is matched
    unconditionally ({ True }) and is checked before the violation branch,
    a /b always ends the scope with verdict True; the violation path is
    unreachable for this property.  (The generator originally emitted the
    dead violation code after the ``return`` in ``on_msg__b``; it has been
    removed here without changing behaviour.)

    ``_state`` encoding: 0 = off; 3 = inside the scope with no live
    trigger; 2 = inside the scope with a trigger record in ``_pool``;
    -1 = verdict True; -2 = verdict False.
    """
    __slots__ = (
        '_lock', # concurrency control
        '_state', # currently active state
        '_pool', # MsgRecord deque to hold temporary records
        'witness', # MsgRecord list of observed events
        'on_enter_scope', # callback upon entering the scope
        'on_exit_scope', # callback upon exiting the scope
        'on_violation', # callback upon verdict of False
        'on_success', # callback upon verdict of True
        'time_launch', # when was the monitor launched
        'time_shutdown', # when was the monitor shutdown
        'time_state', # when did the last state transition occur
        'cb_map', # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''until /b { True }: /a { True } forbids /b { True } within 0.1s'''
    def __init__(self):
        # User callbacks default to no-ops; install real ones before launch.
        self._lock = Lock()
        self._reset()
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        # Dispatch table: topic name -> message handler.
        self.cb_map = {
            '/b': self.on_msg__b,
            '/a': self.on_msg__a,
        }
    @property
    def verdict(self):
        """True (success), False (violation), or None while undecided."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring; the scope opens immediately (state 3)."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3
            self.time_state = stamp
            self.on_enter_scope(stamp)
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring without issuing a verdict."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Periodic tick: expire trigger records past the 0.1s bound."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
        return True
    def on_msg__b(self, msg, stamp):
        """Handle /b: always closes the scope with verdict True.

        The scope-closing match takes precedence over the violation match,
        and both predicates are ``True``, so no /b can ever violate.
        """
        with self._lock:
            # Expire stale triggers first.
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                self._pool.clear()
                self.witness.append(MsgRecord('/b', stamp, msg))
                self._state = -1
                self.time_state = stamp
                self.on_exit_scope(stamp)
                self.on_success(stamp, self.witness)
                return True
            if self._state == 3:
                self._pool.clear()
                self.witness.append(MsgRecord('/b', stamp, msg))
                self._state = -1
                self.time_state = stamp
                self.on_exit_scope(stamp)
                self.on_success(stamp, self.witness)
                return True
            return False
    def on_msg__a(self, msg, stamp):
        """Handle /a: pool as a trigger and arm the time window."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                self._pool.append(MsgRecord('/a', stamp, msg))
                return True
            if self._state == 3:
                self._pool.append(MsgRecord('/a', stamp, msg))
                self._state = 2
                self.time_state = stamp
                return True
            return False
    def _reset(self):
        # Restore per-run mutable state; pool bounded to a single record.
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _noop(self, *args):
        # Default placeholder for user callbacks.
        pass
class PropertyMonitor(object):
    """Runtime monitor for the HPL property in ``HPL_PROPERTY``.

    until /a: /a forbids /b within 0.1s -- the scope opens at launch and
    the first /a closes it.  Because the scope-closing /a is matched
    unconditionally ({ True }) and is checked before the trigger branch,
    any /a immediately ends the scope with verdict True; no trigger is
    ever pooled, so the /b violation path cannot fire.  (The generator
    originally emitted dead trigger-pooling code after the ``return`` in
    ``on_msg__a``; it has been removed here without changing behaviour.)

    ``_state`` encoding: 0 = off; 3 = inside the scope with no live
    trigger; 2 = inside the scope with a trigger record in ``_pool``;
    -1 = verdict True; -2 = verdict False.
    """
    __slots__ = (
        '_lock', # concurrency control
        '_state', # currently active state
        '_pool', # MsgRecord deque to hold temporary records
        'witness', # MsgRecord list of observed events
        'on_enter_scope', # callback upon entering the scope
        'on_exit_scope', # callback upon exiting the scope
        'on_violation', # callback upon verdict of False
        'on_success', # callback upon verdict of True
        'time_launch', # when was the monitor launched
        'time_shutdown', # when was the monitor shutdown
        'time_state', # when did the last state transition occur
        'cb_map', # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''until /a { True }: /a { True } forbids /b { True } within 0.1s'''
    def __init__(self):
        # User callbacks default to no-ops; install real ones before launch.
        self._lock = Lock()
        self._reset()
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        # Dispatch table: topic name -> message handler.
        self.cb_map = {
            '/b': self.on_msg__b,
            '/a': self.on_msg__a,
        }
    @property
    def verdict(self):
        """True (success), False (violation), or None while undecided."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring; the scope opens immediately (state 3)."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3
            self.time_state = stamp
            self.on_enter_scope(stamp)
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring without issuing a verdict."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Periodic tick: expire trigger records past the 0.1s bound."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
        return True
    def on_msg__b(self, msg, stamp):
        """Handle /b: violation only while a trigger is live (state 2)."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                self.witness.append(rec)
                self.witness.append(MsgRecord('/b', stamp, msg))
                self._pool.clear()
                self._state = -2
                self.time_state = stamp
                self.on_violation(stamp, self.witness)
                return True
            return False
    def on_msg__a(self, msg, stamp):
        """Handle /a: always closes the scope with verdict True.

        The scope-closing match takes precedence over the trigger match,
        so the trigger is never pooled for this property.
        """
        with self._lock:
            # Expire stale triggers first.
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                self._pool.clear()
                self.witness.append(MsgRecord('/a', stamp, msg))
                self._state = -1
                self.time_state = stamp
                self.on_exit_scope(stamp)
                self.on_success(stamp, self.witness)
                return True
            if self._state == 3:
                self._pool.clear()
                self.witness.append(MsgRecord('/a', stamp, msg))
                self._state = -1
                self.time_state = stamp
                self.on_exit_scope(stamp)
                self.on_success(stamp, self.witness)
                return True
            return False
    def _reset(self):
        # Restore per-run mutable state; pool bounded to a single record.
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _noop(self, *args):
        # Default placeholder for user callbacks.
        pass
class PropertyMonitor(object):
    """Runtime monitor for the HPL property in ``HPL_PROPERTY``.

    after /p { phi } until /q { psi }: /a { alpha } forbids /b { beta }.
    No time bound: once a trigger is pooled the scope stays armed until
    /q closes it (returning to state 1) or a matching /b violates.

    ``_state`` encoding: 0 = off; 1 = outside the scope, waiting for /p;
    3 = inside the scope with no trigger; 2 = inside the scope with a
    trigger record in ``_pool``; -1 = verdict True; -2 = verdict False.
    """
    __slots__ = (
        '_lock', # concurrency control
        '_state', # currently active state
        '_pool', # MsgRecord deque to hold temporary records
        'witness', # MsgRecord list of observed events
        'on_enter_scope', # callback upon entering the scope
        'on_exit_scope', # callback upon exiting the scope
        'on_violation', # callback upon verdict of False
        'on_success', # callback upon verdict of True
        'time_launch', # when was the monitor launched
        'time_shutdown', # when was the monitor shutdown
        'time_state', # when did the last state transition occur
        'cb_map', # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''after /p { phi } until /q { psi }: /a { alpha } forbids /b { beta }'''
    def __init__(self):
        # User callbacks default to no-ops; install real ones before launch.
        self._lock = Lock()
        self._reset()
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        # Dispatch table: topic name -> message handler.
        self.cb_map = {
            '/a': self.on_msg__a,
            '/b': self.on_msg__b,
            '/q': self.on_msg__q,
            '/p': self.on_msg__p,
        }
    @property
    def verdict(self):
        """True (success), False (violation), or None while undecided."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring; waits for a matching /p (state 1)."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 1
            self.time_state = stamp
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring without issuing a verdict."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        # No time bound on this property, so there is nothing to expire.
        return True
    def on_msg__a(self, msg, stamp):
        """Handle /a: pool as a trigger when ``alpha`` holds."""
        with self._lock:
            if self._state == 2:
                return False # nothing to do
            if self._state == 3:
                if msg.alpha:
                    self._pool.append(MsgRecord('/a', stamp, msg))
                    self._state = 2
                    self.time_state = stamp
                return True
            return False
    def on_msg__b(self, msg, stamp):
        """Handle /b: violation if ``beta`` holds while a trigger is live."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                if msg.beta:
                    self.witness.append(rec)
                    self.witness.append(MsgRecord('/b', stamp, msg))
                    self._pool.clear()
                    self._state = -2
                    self.time_state = stamp
                    self.on_violation(stamp, self.witness)
                return True
            return False
    def on_msg__q(self, msg, stamp):
        """Handle /q: exits the scope (back to state 1) when ``psi`` holds."""
        with self._lock:
            if self._state == 2:
                if msg.psi:
                    self._pool.clear()
                    self.witness = []
                    self._state = 1
                    self.time_state = stamp
                    self.on_exit_scope(stamp)
                return True
            if self._state == 3:
                if msg.psi:
                    self._pool.clear()
                    self.witness = []
                    self._state = 1
                    self.time_state = stamp
                    self.on_exit_scope(stamp)
                return True
            return False
    def on_msg__p(self, msg, stamp):
        """Handle /p: enters the scope (state 3) when ``phi`` holds."""
        with self._lock:
            if self._state == 1:
                if msg.phi:
                    self.witness.append(MsgRecord('/p', stamp, msg))
                    self._state = 3
                    self.time_state = stamp
                    self.on_enter_scope(stamp)
                return True
            return False
    def _reset(self):
        # Restore per-run mutable state; pool bounded to a single record.
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _noop(self, *args):
        # Default placeholder for user callbacks.
        pass
class PropertyMonitor(object):
    """Runtime monitor for the HPL property in ``HPL_PROPERTY``.

    after /p { phi } until /q { psi }: /a { alpha } forbids /b { beta }
    within 0.1s.  Like the untimed after/until variant, but each pooled
    trigger only stays live for 0.1s.

    ``_state`` encoding: 0 = off; 1 = outside the scope, waiting for /p;
    3 = inside the scope with no live trigger; 2 = inside the scope with a
    trigger record in ``_pool``; -1 = verdict True; -2 = verdict False.
    """
    __slots__ = (
        '_lock', # concurrency control
        '_state', # currently active state
        '_pool', # MsgRecord deque to hold temporary records
        'witness', # MsgRecord list of observed events
        'on_enter_scope', # callback upon entering the scope
        'on_exit_scope', # callback upon exiting the scope
        'on_violation', # callback upon verdict of False
        'on_success', # callback upon verdict of True
        'time_launch', # when was the monitor launched
        'time_shutdown', # when was the monitor shutdown
        'time_state', # when did the last state transition occur
        'cb_map', # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''after /p { phi } until /q { psi }: /a { alpha } forbids /b { beta } within 0.1s'''
    def __init__(self):
        # User callbacks default to no-ops; install real ones before launch.
        self._lock = Lock()
        self._reset()
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        # Dispatch table: topic name -> message handler.
        self.cb_map = {
            '/a': self.on_msg__a,
            '/b': self.on_msg__b,
            '/q': self.on_msg__q,
            '/p': self.on_msg__p,
        }
    @property
    def verdict(self):
        """True (success), False (violation), or None while undecided."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring; waits for a matching /p (state 1)."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 1
            self.time_state = stamp
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring without issuing a verdict."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Periodic tick: expire trigger records past the 0.1s bound."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
        return True
    def on_msg__a(self, msg, stamp):
        """Handle /a: pool as a trigger when ``alpha`` holds."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                if msg.alpha:
                    self._pool.append(MsgRecord('/a', stamp, msg))
                return True
            if self._state == 3:
                if msg.alpha:
                    self._pool.append(MsgRecord('/a', stamp, msg))
                    self._state = 2
                    self.time_state = stamp
                return True
            return False
    def on_msg__b(self, msg, stamp):
        """Handle /b: violation if ``beta`` holds while a trigger is live."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                if msg.beta:
                    self.witness.append(rec)
                    self.witness.append(MsgRecord('/b', stamp, msg))
                    self._pool.clear()
                    self._state = -2
                    self.time_state = stamp
                    self.on_violation(stamp, self.witness)
                return True
            return False
    def on_msg__q(self, msg, stamp):
        """Handle /q: exits the scope (back to state 1) when ``psi`` holds."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                if msg.psi:
                    self._pool.clear()
                    self.witness = []
                    self._state = 1
                    self.time_state = stamp
                    self.on_exit_scope(stamp)
                return True
            if self._state == 3:
                if msg.psi:
                    self._pool.clear()
                    self.witness = []
                    self._state = 1
                    self.time_state = stamp
                    self.on_exit_scope(stamp)
                return True
            return False
    def on_msg__p(self, msg, stamp):
        """Handle /p: enters the scope (state 3) when ``phi`` holds."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 1:
                if msg.phi:
                    self.witness.append(MsgRecord('/p', stamp, msg))
                    self._state = 3
                    self.time_state = stamp
                    self.on_enter_scope(stamp)
                return True
            return False
    def _reset(self):
        # Restore per-run mutable state; pool bounded to a single record.
        self.witness = []
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _noop(self, *args):
        # Default placeholder for user callbacks.
        pass
class PropertyMonitor(object):
    """Runtime monitor for the HPL property in ``HPL_PROPERTY``.

    globally: /a as A forbids /b { x < @A.x } -- every /a is recorded
    (alias ``A``) and kept forever (no time bound, unbounded pool); a /b
    whose ``x`` is smaller than any recorded trigger's ``x`` violates.

    ``_state`` encoding: 0 = off; 3 = active with no triggers; 2 = active
    with trigger records in ``_pool``; -1 = verdict True; -2 = verdict
    False.
    """
    __slots__ = (
        '_lock', # concurrency control
        '_state', # currently active state
        '_pool', # MsgRecord deque to hold temporary records
        'witness', # MsgRecord list of observed events
        'on_enter_scope', # callback upon entering the scope
        'on_exit_scope', # callback upon exiting the scope
        'on_violation', # callback upon verdict of False
        'on_success', # callback upon verdict of True
        'time_launch', # when was the monitor launched
        'time_shutdown', # when was the monitor shutdown
        'time_state', # when did the last state transition occur
        'cb_map', # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''globally: /a as A { True } forbids /b { (x < @A.x) }'''
    def __init__(self):
        # User callbacks default to no-ops; install real ones before launch.
        self._lock = Lock()
        self._reset()
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        # Dispatch table: topic name -> message handler.
        self.cb_map = {
            '/b': self.on_msg__b,
            '/a': self.on_msg__a,
        }
    @property
    def verdict(self):
        """True (success), False (violation), or None while undecided."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring; the global scope opens immediately (state 3)."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3
            self.time_state = stamp
            self.on_enter_scope(stamp)
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring without issuing a verdict."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        # No time bound on this property, so there is nothing to expire.
        return True
    def on_msg__b(self, msg, stamp):
        """Handle /b: violation if ``x`` undercuts any recorded /a's ``x``."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                # Check the condition against every recorded trigger (@A).
                for rec in self._pool:
                    v_A = rec.msg
                    if (msg.x < v_A.x):
                        self.witness.append(rec)
                        self.witness.append(MsgRecord('/b', stamp, msg))
                        self._pool.clear()
                        self._state = -2
                        self.time_state = stamp
                        self.on_violation(stamp, self.witness)
                        return True
            return False
    def on_msg__a(self, msg, stamp):
        """Handle /a: record the trigger in timestamp order."""
        with self._lock:
            if self._state == 2:
                rec = MsgRecord('/a', stamp, msg)
                self._pool_insert(rec)
                return True
            if self._state == 3:
                rec = MsgRecord('/a', stamp, msg)
                self._pool_insert(rec)
                self._state = 2
                self.time_state = stamp
                return True
            return False
    def _reset(self):
        # Restore per-run mutable state; unbounded pool, since triggers
        # are never discarded for this property.
        self.witness = []
        self._pool = deque()
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _pool_insert(self, rec):
        # Insert ``rec`` keeping the pool sorted by timestamp.
        # this method is only needed to ensure Python 2.7 compatibility
        if not self._pool:
            return self._pool.append(rec)
        stamp = rec.timestamp
        if len(self._pool) == 1:
            if stamp >= self._pool[0].timestamp:
                return self._pool.append(rec)
            return self._pool.appendleft(rec)
        for i in range(len(self._pool), 0, -1):
            if stamp >= self._pool[i-1].timestamp:
                try:
                    self._pool.insert(i, rec) # Python >= 3.5
                except AttributeError as e:
                    # deque.insert is unavailable: rotate out the tail,
                    # append, and restore (Python 2.7 fallback).
                    tmp = [self._pool.pop() for j in range(i, len(self._pool))]
                    self._pool.append(rec)
                    self._pool.extend(reversed(tmp))
                break
        else:
            # Older than everything in the pool: insert at the front.
            self._pool.appendleft(rec)
    def _noop(self, *args):
        # Default placeholder for user callbacks.
        pass
class PropertyMonitor(object):
    """Runtime monitor for the HPL property in ``HPL_PROPERTY``.

    globally: /a as A { x > 0 } forbids /b { x < @A.x } within 0.1s --
    each /a with positive ``x`` is recorded (alias ``A``) for 0.1s; a /b
    whose ``x`` is smaller than any live trigger's ``x`` violates.

    ``_state`` encoding: 0 = off; 3 = active with no live triggers;
    2 = active with trigger records in ``_pool``; -1 = verdict True;
    -2 = verdict False.
    """
    __slots__ = (
        '_lock', # concurrency control
        '_state', # currently active state
        '_pool', # MsgRecord deque to hold temporary records
        'witness', # MsgRecord list of observed events
        'on_enter_scope', # callback upon entering the scope
        'on_exit_scope', # callback upon exiting the scope
        'on_violation', # callback upon verdict of False
        'on_success', # callback upon verdict of True
        'time_launch', # when was the monitor launched
        'time_shutdown', # when was the monitor shutdown
        'time_state', # when did the last state transition occur
        'cb_map', # mapping of topic names to callback functions
    )
    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''globally: /a as A { (x > 0) } forbids /b { (x < @A.x) } within 0.1s'''
    def __init__(self):
        # User callbacks default to no-ops; install real ones before launch.
        self._lock = Lock()
        self._reset()
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0
        # Dispatch table: topic name -> message handler.
        self.cb_map = {
            '/b': self.on_msg__b,
            '/a': self.on_msg__a,
        }
    @property
    def verdict(self):
        """True (success), False (violation), or None while undecided."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None
    def on_launch(self, stamp):
        """Start monitoring; the global scope opens immediately (state 3)."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3
            self.time_state = stamp
            self.on_enter_scope(stamp)
            return True
    def on_shutdown(self, stamp):
        """Stop monitoring without issuing a verdict."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True
    def on_timer(self, stamp):
        """Periodic tick: expire trigger records past the 0.1s bound."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
        return True
    def on_msg__b(self, msg, stamp):
        """Handle /b: violation if ``x`` undercuts any live trigger's ``x``."""
        with self._lock:
            # Expire stale triggers first (pool is timestamp-ordered).
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                # Check the condition against every live trigger (@A).
                for rec in self._pool:
                    v_A = rec.msg
                    if (msg.x < v_A.x):
                        self.witness.append(rec)
                        self.witness.append(MsgRecord('/b', stamp, msg))
                        self._pool.clear()
                        self._state = -2
                        self.time_state = stamp
                        self.on_violation(stamp, self.witness)
                        return True
            return False
    def on_msg__a(self, msg, stamp):
        """Handle /a: record the trigger (in timestamp order) when x > 0."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                if (msg.x > 0):
                    rec = MsgRecord('/a', stamp, msg)
                    self._pool_insert(rec)
                return True
            if self._state == 3:
                if (msg.x > 0):
                    rec = MsgRecord('/a', stamp, msg)
                    self._pool_insert(rec)
                    self._state = 2
                    self.time_state = stamp
                return True
            return False
    def _reset(self):
        # Restore per-run mutable state; unbounded pool, since any number
        # of triggers may be live within the 0.1s window.
        self.witness = []
        self._pool = deque()
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1
    def _pool_insert(self, rec):
        # Insert ``rec`` keeping the pool sorted by timestamp.
        # this method is only needed to ensure Python 2.7 compatibility
        if not self._pool:
            return self._pool.append(rec)
        stamp = rec.timestamp
        if len(self._pool) == 1:
            if stamp >= self._pool[0].timestamp:
                return self._pool.append(rec)
            return self._pool.appendleft(rec)
        for i in range(len(self._pool), 0, -1):
            if stamp >= self._pool[i-1].timestamp:
                try:
                    self._pool.insert(i, rec) # Python >= 3.5
                except AttributeError as e:
                    # deque.insert is unavailable: rotate out the tail,
                    # append, and restore (Python 2.7 fallback).
                    tmp = [self._pool.pop() for j in range(i, len(self._pool))]
                    self._pool.append(rec)
                    self._pool.extend(reversed(tmp))
                break
        else:
            # Older than everything in the pool: insert at the front.
            self._pool.appendleft(rec)
    def _noop(self, *args):
        # Default placeholder for user callbacks.
        pass
class PropertyMonitor(object):
    """Runtime verification monitor for the HPL property in ``HPL_PROPERTY``.

    After a trigger on '/a' with ``x > 0`` (bound as A), a '/b1' with
    ``x < @A.x`` or a '/b2' with ``y < @A.y`` observed within 0.1s of that
    trigger is reported via ``on_violation``.

    State encoding (as used by the transitions below):
        0:  monitor is turned off
        2:  active, with at least one pending trigger record in ``_pool``
        3:  active, with no pending trigger records
        -2: final verdict False (violation observed)
        -1: final verdict True (no transition in this class assigns it)
    """
    __slots__ = (
        '_lock',  # concurrency control
        '_state',  # currently active state
        '_pool',  # MsgRecord deque to hold temporary records
        'witness',  # MsgRecord list of observed events
        'on_enter_scope',  # callback upon entering the scope
        'on_exit_scope',  # callback upon exiting the scope
        'on_violation',  # callback upon verdict of False
        'on_success',  # callback upon verdict of True
        'time_launch',  # when was the monitor launched
        'time_shutdown',  # when was the monitor shutdown
        'time_state',  # when did the last state transition occur
        'cb_map',  # mapping of topic names to callback functions
    )

    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''globally: /a as A { (x > 0) } forbids (/b1 { (x < @A.x) } or /b2 { (y < @A.y) }) within 0.1s'''

    def __init__(self):
        # All callbacks default to a no-op; clients may overwrite them.
        self._lock = Lock()
        self._reset()
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0  # the monitor starts turned off
        self.cb_map = {
            '/b2': self.on_msg__b2,
            '/a': self.on_msg__a,
            '/b1': self.on_msg__b1,
        }

    @property
    def verdict(self):
        """Final verdict: True/False once decided, None while undecided."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None

    def on_launch(self, stamp):
        """Turn the monitor on at time `stamp`; raises if already on."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            # 'globally' scope: enter the scope immediately on launch
            self._state = 3
            self.time_state = stamp
            self.on_enter_scope(stamp)
            return True

    def on_shutdown(self, stamp):
        """Turn the monitor off at time `stamp`; raises if already off."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True

    def on_timer(self, stamp):
        """Periodic tick: expire trigger records older than 0.1s."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    # no pending triggers remain; nothing can be violated
                    self._state = 3
                    self.time_state = stamp
            return True

    def on_msg__b2(self, msg, stamp):
        """Process '/b2': a violation when ``y < @A.y`` for a pending '/a'.

        Returns True exactly when a violation fires.
        """
        with self._lock:
            # Age out trigger records whose 0.1s window has elapsed.
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                for rec in self._pool:
                    v_A = rec.msg
                    if (msg.y < v_A.y):
                        # forbidden response inside the window: verdict False
                        self.witness.append(rec)
                        self.witness.append(MsgRecord('/b2', stamp, msg))
                        self._pool.clear()
                        self._state = -2
                        self.time_state = stamp
                        self.on_violation(stamp, self.witness)
                        return True
            return False

    def on_msg__a(self, msg, stamp):
        """Process '/a': record it as a trigger when ``x > 0``."""
        with self._lock:
            # Age out trigger records whose 0.1s window has elapsed.
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                # already watching: add another pending trigger
                if (msg.x > 0):
                    rec = MsgRecord('/a', stamp, msg)
                    self._pool_insert(rec)
                    return True
            if self._state == 3:
                # first pending trigger: start watching for responses
                if (msg.x > 0):
                    rec = MsgRecord('/a', stamp, msg)
                    self._pool_insert(rec)
                    self._state = 2
                    self.time_state = stamp
                    return True
            return False

    def on_msg__b1(self, msg, stamp):
        """Process '/b1': a violation when ``x < @A.x`` for a pending '/a'.

        Returns True exactly when a violation fires.
        """
        with self._lock:
            # Age out trigger records whose 0.1s window has elapsed.
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                for rec in self._pool:
                    v_A = rec.msg
                    if (msg.x < v_A.x):
                        # forbidden response inside the window: verdict False
                        self.witness.append(rec)
                        self.witness.append(MsgRecord('/b1', stamp, msg))
                        self._pool.clear()
                        self._state = -2
                        self.time_state = stamp
                        self.on_violation(stamp, self.witness)
                        return True
            return False

    def _reset(self):
        """Drop recorded events and reset all bookkeeping timestamps."""
        self.witness = []
        self._pool = deque()
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1

    def _pool_insert(self, rec):
        # this method is only needed to ensure Python 2.7 compatibility
        """Insert `rec` into ``_pool`` keeping ascending timestamp order."""
        if not self._pool:
            return self._pool.append(rec)
        stamp = rec.timestamp
        if len(self._pool) == 1:
            if stamp >= self._pool[0].timestamp:
                return self._pool.append(rec)
            return self._pool.appendleft(rec)
        # scan from the right for the first record not newer than `rec`
        for i in range(len(self._pool), 0, -1):
            if stamp >= self._pool[i-1].timestamp:
                try:
                    self._pool.insert(i, rec) # Python >= 3.5
                except AttributeError as e:
                    # deque.insert unavailable (Python 2.7): pop the newer
                    # tail, append, then restore the tail
                    tmp = [self._pool.pop() for j in range(i, len(self._pool))]
                    self._pool.append(rec)
                    self._pool.extend(reversed(tmp))
                break
        else:
            # `rec` is older than everything already pooled
            self._pool.appendleft(rec)

    def _noop(self, *args):
        """Default callback placeholder: accept anything, do nothing."""
        pass
class PropertyMonitor(object):
    """Runtime verification monitor for the HPL property in ``HPL_PROPERTY``.

    Scope: entered on a '/p' message (the activator, bound as P) and left
    on a '/q' with ``x > @P.x``. Within the scope, a '/a' with
    ``x == @P.x`` (bound as A) forbids, within 0.1s, a '/b1' with
    ``x < @A.x + @P.x`` or a '/b2' with ``x in {@P.x, @A.x}``.
    ``witness[0]`` always holds the activator record while in scope.

    State encoding (as used by the transitions below):
        0:  monitor is turned off
        1:  outside the scope, waiting for the activator '/p'
        2:  inside the scope, with pending trigger records in ``_pool``
        3:  inside the scope, with no pending trigger records
        -2: final verdict False (violation observed)
        -1: final verdict True (no transition in this class assigns it)
    """
    __slots__ = (
        '_lock',  # concurrency control
        '_state',  # currently active state
        '_pool',  # MsgRecord deque to hold temporary records
        'witness',  # MsgRecord list of observed events
        'on_enter_scope',  # callback upon entering the scope
        'on_exit_scope',  # callback upon exiting the scope
        'on_violation',  # callback upon verdict of False
        'on_success',  # callback upon verdict of True
        'time_launch',  # when was the monitor launched
        'time_shutdown',  # when was the monitor shutdown
        'time_state',  # when did the last state transition occur
        'cb_map',  # mapping of topic names to callback functions
    )

    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''after /p as P { True } until /q { (x > @P.x) }: /a as A { (x = @P.x) } forbids (/b1 { (x < (@A.x + @P.x)) } or /b2 { (x in {@P.x, @A.x}) }) within 0.1s'''

    def __init__(self):
        # All callbacks default to a no-op; clients may overwrite them.
        self._lock = Lock()
        self._reset()
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0  # the monitor starts turned off
        self.cb_map = {
            '/b1': self.on_msg__b1,
            '/b2': self.on_msg__b2,
            '/a': self.on_msg__a,
            '/q': self.on_msg__q,
            '/p': self.on_msg__p,
        }

    @property
    def verdict(self):
        """Final verdict: True/False once decided, None while undecided."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None

    def on_launch(self, stamp):
        """Turn the monitor on at time `stamp`; raises if already on.

        The scope is not entered yet: state 1 waits for the '/p' activator.
        """
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 1
            self.time_state = stamp
            return True

    def on_shutdown(self, stamp):
        """Turn the monitor off at time `stamp`; raises if already off."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
            return True

    def on_timer(self, stamp):
        """Periodic tick: expire trigger records older than 0.1s."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    # no pending triggers remain; nothing can be violated
                    self._state = 3
                    self.time_state = stamp
            return True

    def on_msg__b1(self, msg, stamp):
        """Process '/b1': a violation when ``x < @A.x + @P.x``.

        Returns True exactly when a violation fires.
        """
        with self._lock:
            # Age out trigger records whose 0.1s window has elapsed.
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                assert len(self.witness) >= 1, 'missing activator event'
                v_P = self.witness[0].msg
                for rec in self._pool:
                    v_A = rec.msg
                    if (msg.x < (v_A.x + v_P.x)):
                        # forbidden response inside the window: verdict False
                        self.witness.append(rec)
                        self.witness.append(MsgRecord('/b1', stamp, msg))
                        self._pool.clear()
                        self._state = -2
                        self.time_state = stamp
                        self.on_violation(stamp, self.witness)
                        return True
            return False

    def on_msg__b2(self, msg, stamp):
        """Process '/b2': a violation when ``x in {@P.x, @A.x}``.

        Returns True exactly when a violation fires.
        """
        with self._lock:
            # Age out trigger records whose 0.1s window has elapsed.
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                assert len(self.witness) >= 1, 'missing activator event'
                v_P = self.witness[0].msg
                for rec in self._pool:
                    v_A = rec.msg
                    if (msg.x in (v_P.x, v_A.x)):
                        # forbidden response inside the window: verdict False
                        self.witness.append(rec)
                        self.witness.append(MsgRecord('/b2', stamp, msg))
                        self._pool.clear()
                        self._state = -2
                        self.time_state = stamp
                        self.on_violation(stamp, self.witness)
                        return True
            return False

    def on_msg__a(self, msg, stamp):
        """Process '/a': record a trigger when ``x`` equals the activator's."""
        with self._lock:
            # Age out trigger records whose 0.1s window has elapsed.
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                # already watching: add another pending trigger
                assert len(self.witness) >= 1, 'missing activator'
                v_P = self.witness[0].msg
                if (msg.x == v_P.x):
                    rec = MsgRecord('/a', stamp, msg)
                    self._pool_insert(rec)
                    return True
            if self._state == 3:
                # first pending trigger: start watching for responses
                assert len(self.witness) >= 1, 'missing activator'
                v_P = self.witness[0].msg
                if (msg.x == v_P.x):
                    rec = MsgRecord('/a', stamp, msg)
                    self._pool_insert(rec)
                    self._state = 2
                    self.time_state = stamp
                    return True
            return False

    def on_msg__q(self, msg, stamp):
        """Process '/q': exit the scope when ``x`` exceeds the activator's."""
        with self._lock:
            # Age out trigger records whose 0.1s window has elapsed.
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self.witness) >= 1, 'missing activator event'
                v_P = self.witness[0].msg
                if (msg.x > v_P.x):
                    # scope terminator: discard pending triggers and leave
                    self._pool.clear()
                    self.witness = []
                    self._state = 1
                    self.time_state = stamp
                    self.on_exit_scope(stamp)
                    return True
            if self._state == 3:
                assert len(self.witness) >= 1, 'missing activator event'
                v_P = self.witness[0].msg
                if (msg.x > v_P.x):
                    # scope terminator: leave the scope
                    self._pool.clear()
                    self.witness = []
                    self._state = 1
                    self.time_state = stamp
                    self.on_exit_scope(stamp)
                    return True
            return False

    def on_msg__p(self, msg, stamp):
        """Process '/p': enter the scope and record the activator."""
        with self._lock:
            # Age out trigger records whose 0.1s window has elapsed.
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 1:
                # activator observed: witness[0] binds P from here on
                self.witness.append(MsgRecord('/p', stamp, msg))
                self._state = 3
                self.time_state = stamp
                self.on_enter_scope(stamp)
                return True
            return False

    def _reset(self):
        """Drop recorded events and reset all bookkeeping timestamps."""
        self.witness = []
        self._pool = deque()
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1

    def _pool_insert(self, rec):
        # this method is only needed to ensure Python 2.7 compatibility
        """Insert `rec` into ``_pool`` keeping ascending timestamp order."""
        if not self._pool:
            return self._pool.append(rec)
        stamp = rec.timestamp
        if len(self._pool) == 1:
            if stamp >= self._pool[0].timestamp:
                return self._pool.append(rec)
            return self._pool.appendleft(rec)
        # scan from the right for the first record not newer than `rec`
        for i in range(len(self._pool), 0, -1):
            if stamp >= self._pool[i-1].timestamp:
                try:
                    self._pool.insert(i, rec) # Python >= 3.5
                except AttributeError as e:
                    # deque.insert unavailable (Python 2.7): pop the newer
                    # tail, append, then restore the tail
                    tmp = [self._pool.pop() for j in range(i, len(self._pool))]
                    self._pool.append(rec)
                    self._pool.extend(reversed(tmp))
                break
        else:
            # `rec` is older than everything already pooled
            self._pool.appendleft(rec)

    def _noop(self, *args):
        """Default callback placeholder: accept anything, do nothing."""
        pass
| 35.684019
| 177
| 0.496692
| 9,952
| 88,425
| 4.185993
| 0.015072
| 0.082191
| 0.049641
| 0.060059
| 0.996783
| 0.995727
| 0.995727
| 0.995727
| 0.995727
| 0.995727
| 0
| 0.015033
| 0.407215
| 88,425
| 2,477
| 178
| 35.698426
| 0.779728
| 0.079073
| 0
| 0.974268
| 0
| 0.006655
| 0.075354
| 0
| 0
| 0
| 0
| 0
| 0.033718
| 1
| 0.075421
| false
| 0.007542
| 0
| 0.001775
| 0.22449
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a27c5c33826f2e8ec0d032de4fd8df3852021af2
| 142
|
py
|
Python
|
gym-sapyen/gym_sapyen/envs/__init__.py
|
MRzNone/SAPIEN-Release
|
ae6662b7fb8f3c65a0eb6bb3bc3d7d53001d846e
|
[
"MIT"
] | null | null | null |
gym-sapyen/gym_sapyen/envs/__init__.py
|
MRzNone/SAPIEN-Release
|
ae6662b7fb8f3c65a0eb6bb3bc3d7d53001d846e
|
[
"MIT"
] | null | null | null |
gym-sapyen/gym_sapyen/envs/__init__.py
|
MRzNone/SAPIEN-Release
|
ae6662b7fb8f3c65a0eb6bb3bc3d7d53001d846e
|
[
"MIT"
] | null | null | null |
from gym_sapyen.envs.sapyen_env import SapyenEnv
from gym_sapyen.envs.ant import AntEnv
from gym_sapyen.envs.halfcheetah import HalfCheetahEnv
| 47.333333
| 54
| 0.880282
| 22
| 142
| 5.5
| 0.5
| 0.173554
| 0.322314
| 0.421488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077465
| 142
| 3
| 54
| 47.333333
| 0.923664
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a2b28096582d4145484b6da7a11a47227a7b2ba0
| 247
|
py
|
Python
|
ALC/__init__.py
|
SimiPixel/automatic_label_correction
|
c3e785cd9e2b185b4306e92677c1838a883eb2b3
|
[
"MIT"
] | null | null | null |
ALC/__init__.py
|
SimiPixel/automatic_label_correction
|
c3e785cd9e2b185b4306e92677c1838a883eb2b3
|
[
"MIT"
] | null | null | null |
ALC/__init__.py
|
SimiPixel/automatic_label_correction
|
c3e785cd9e2b185b4306e92677c1838a883eb2b3
|
[
"MIT"
] | null | null | null |
from .automatic_data_enhancement import AutomaticDataEnhancement
from .binary_cluster_correction import BinaryClusterCorrection
from .cluster_correction import ClusterCorrection
from .nearest_neighbour_correction import NearestNeighbourCorrection
| 49.4
| 68
| 0.919028
| 23
| 247
| 9.565217
| 0.608696
| 0.218182
| 0.209091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064777
| 247
| 4
| 69
| 61.75
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a2b714ee9389e69c22aa48c443cfc3b99f807e9c
| 60,306
|
py
|
Python
|
setup.py
|
Halftruth08/Game_AI
|
629dbe9481568b024495c64e97fff27215bd606b
|
[
"MIT"
] | 1
|
2017-09-26T13:23:34.000Z
|
2017-09-26T13:23:34.000Z
|
setup.py
|
Halftruth08/Game_AI
|
629dbe9481568b024495c64e97fff27215bd606b
|
[
"MIT"
] | 5
|
2019-06-15T17:22:32.000Z
|
2021-03-22T17:15:59.000Z
|
setup.py
|
Halftruth08/Game_AI
|
629dbe9481568b024495c64e97fff27215bd606b
|
[
"MIT"
] | 1
|
2017-09-25T01:01:09.000Z
|
2017-09-25T01:01:09.000Z
|
from setuptools import setup
setup(
name='codenames',
version='0.1.0',
packages=['virtualenv.lib.python3.6.distutils', 'virtualenv.lib.python3.6.encodings',
'virtualenv.lib.python3.6.importlib', 'virtualenv.lib.python3.6.collections',
'virtualenv.lib.python3.6.site-packages.pip', 'virtualenv.lib.python3.6.site-packages.pip.req',
'virtualenv.lib.python3.6.site-packages.pip.vcs', 'virtualenv.lib.python3.6.site-packages.pip.utils',
'virtualenv.lib.python3.6.site-packages.pip.compat', 'virtualenv.lib.python3.6.site-packages.pip.models',
'virtualenv.lib.python3.6.site-packages.pip._vendor',
'virtualenv.lib.python3.6.site-packages.pip._vendor.distlib',
'virtualenv.lib.python3.6.site-packages.pip._vendor.distlib._backport',
'virtualenv.lib.python3.6.site-packages.pip._vendor.colorama',
'virtualenv.lib.python3.6.site-packages.pip._vendor.html5lib',
'virtualenv.lib.python3.6.site-packages.pip._vendor.html5lib._trie',
'virtualenv.lib.python3.6.site-packages.pip._vendor.html5lib.filters',
'virtualenv.lib.python3.6.site-packages.pip._vendor.html5lib.treewalkers',
'virtualenv.lib.python3.6.site-packages.pip._vendor.html5lib.treeadapters',
'virtualenv.lib.python3.6.site-packages.pip._vendor.html5lib.treebuilders',
'virtualenv.lib.python3.6.site-packages.pip._vendor.lockfile',
'virtualenv.lib.python3.6.site-packages.pip._vendor.progress',
'virtualenv.lib.python3.6.site-packages.pip._vendor.requests',
'virtualenv.lib.python3.6.site-packages.pip._vendor.requests.packages',
'virtualenv.lib.python3.6.site-packages.pip._vendor.requests.packages.chardet',
'virtualenv.lib.python3.6.site-packages.pip._vendor.requests.packages.urllib3',
'virtualenv.lib.python3.6.site-packages.pip._vendor.requests.packages.urllib3.util',
'virtualenv.lib.python3.6.site-packages.pip._vendor.requests.packages.urllib3.contrib',
'virtualenv.lib.python3.6.site-packages.pip._vendor.requests.packages.urllib3.packages',
'virtualenv.lib.python3.6.site-packages.pip._vendor.requests.packages.urllib3.packages.ssl_match_hostname',
'virtualenv.lib.python3.6.site-packages.pip._vendor.packaging',
'virtualenv.lib.python3.6.site-packages.pip._vendor.cachecontrol',
'virtualenv.lib.python3.6.site-packages.pip._vendor.cachecontrol.caches',
'virtualenv.lib.python3.6.site-packages.pip._vendor.webencodings',
'virtualenv.lib.python3.6.site-packages.pip._vendor.pkg_resources',
'virtualenv.lib.python3.6.site-packages.pip.commands',
'virtualenv.lib.python3.6.site-packages.pip.operations', 'virtualenv.lib.python3.6.site-packages.enum',
'virtualenv.lib.python3.6.site-packages.idna', 'virtualenv.lib.python3.6.site-packages.lxml',
'virtualenv.lib.python3.6.site-packages.lxml.html',
'virtualenv.lib.python3.6.site-packages.lxml.includes',
'virtualenv.lib.python3.6.site-packages.lxml.isoschematron',
'virtualenv.lib.python3.6.site-packages.pytz', 'virtualenv.lib.python3.6.site-packages.numpy',
'virtualenv.lib.python3.6.site-packages.numpy.ma', 'virtualenv.lib.python3.6.site-packages.numpy.doc',
'virtualenv.lib.python3.6.site-packages.numpy.fft', 'virtualenv.lib.python3.6.site-packages.numpy.lib',
'virtualenv.lib.python3.6.site-packages.numpy.core', 'virtualenv.lib.python3.6.site-packages.numpy.f2py',
'virtualenv.lib.python3.6.site-packages.numpy.compat',
'virtualenv.lib.python3.6.site-packages.numpy.linalg',
'virtualenv.lib.python3.6.site-packages.numpy.random',
'virtualenv.lib.python3.6.site-packages.numpy.testing',
'virtualenv.lib.python3.6.site-packages.numpy.distutils',
'virtualenv.lib.python3.6.site-packages.numpy.distutils.command',
'virtualenv.lib.python3.6.site-packages.numpy.distutils.fcompiler',
'virtualenv.lib.python3.6.site-packages.numpy.matrixlib',
'virtualenv.lib.python3.6.site-packages.numpy.polynomial', 'virtualenv.lib.python3.6.site-packages.scipy',
'virtualenv.lib.python3.6.site-packages.scipy.io', 'virtualenv.lib.python3.6.site-packages.scipy.io.arff',
'virtualenv.lib.python3.6.site-packages.scipy.io.arff.tests',
'virtualenv.lib.python3.6.site-packages.scipy.io.tests',
'virtualenv.lib.python3.6.site-packages.scipy.io.matlab',
'virtualenv.lib.python3.6.site-packages.scipy.io.matlab.tests',
'virtualenv.lib.python3.6.site-packages.scipy.io.harwell_boeing',
'virtualenv.lib.python3.6.site-packages.scipy.io.harwell_boeing.tests',
'virtualenv.lib.python3.6.site-packages.scipy.odr',
'virtualenv.lib.python3.6.site-packages.scipy.odr.tests',
'virtualenv.lib.python3.6.site-packages.scipy._lib',
'virtualenv.lib.python3.6.site-packages.scipy._lib.tests',
'virtualenv.lib.python3.6.site-packages.scipy.misc',
'virtualenv.lib.python3.6.site-packages.scipy.misc.tests',
'virtualenv.lib.python3.6.site-packages.scipy.stats',
'virtualenv.lib.python3.6.site-packages.scipy.stats.tests',
'virtualenv.lib.python3.6.site-packages.scipy.linalg',
'virtualenv.lib.python3.6.site-packages.scipy.linalg.tests',
'virtualenv.lib.python3.6.site-packages.scipy.signal',
'virtualenv.lib.python3.6.site-packages.scipy.signal.tests',
'virtualenv.lib.python3.6.site-packages.scipy.sparse',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.tests',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.linalg',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.linalg.eigen',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.linalg.eigen.arpack',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.linalg.eigen.arpack.tests',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.linalg.eigen.lobpcg',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.linalg.eigen.lobpcg.tests',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.linalg.tests',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.linalg.dsolve',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.linalg.dsolve.tests',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.linalg.isolve',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.linalg.isolve.tests',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.csgraph',
'virtualenv.lib.python3.6.site-packages.scipy.sparse.csgraph.tests',
'virtualenv.lib.python3.6.site-packages.scipy.cluster',
'virtualenv.lib.python3.6.site-packages.scipy.cluster.tests',
'virtualenv.lib.python3.6.site-packages.scipy.fftpack',
'virtualenv.lib.python3.6.site-packages.scipy.fftpack.tests',
'virtualenv.lib.python3.6.site-packages.scipy.ndimage',
'virtualenv.lib.python3.6.site-packages.scipy.ndimage.tests',
'virtualenv.lib.python3.6.site-packages.scipy.spatial',
'virtualenv.lib.python3.6.site-packages.scipy.spatial.tests',
'virtualenv.lib.python3.6.site-packages.scipy.special',
'virtualenv.lib.python3.6.site-packages.scipy.special.tests',
'virtualenv.lib.python3.6.site-packages.scipy.special._precompute',
'virtualenv.lib.python3.6.site-packages.scipy.optimize',
'virtualenv.lib.python3.6.site-packages.scipy.optimize._lsq',
'virtualenv.lib.python3.6.site-packages.scipy.optimize.tests',
'virtualenv.lib.python3.6.site-packages.scipy.optimize._trlib',
'virtualenv.lib.python3.6.site-packages.scipy.constants',
'virtualenv.lib.python3.6.site-packages.scipy.constants.tests',
'virtualenv.lib.python3.6.site-packages.scipy.integrate',
'virtualenv.lib.python3.6.site-packages.scipy.integrate._ivp',
'virtualenv.lib.python3.6.site-packages.scipy.integrate.tests',
'virtualenv.lib.python3.6.site-packages.scipy.interpolate',
'virtualenv.lib.python3.6.site-packages.scipy.interpolate.tests',
'virtualenv.lib.python3.6.site-packages.scipy._build_utils',
'virtualenv.lib.python3.6.site-packages.wheel', 'virtualenv.lib.python3.6.site-packages.wheel.tool',
'virtualenv.lib.python3.6.site-packages.wheel.signatures',
'virtualenv.lib.python3.6.site-packages.bleach', 'virtualenv.lib.python3.6.site-packages.google.protobuf',
'virtualenv.lib.python3.6.site-packages.google.protobuf.util',
'virtualenv.lib.python3.6.site-packages.google.protobuf.pyext',
'virtualenv.lib.python3.6.site-packages.google.protobuf.compiler',
'virtualenv.lib.python3.6.site-packages.google.protobuf.internal',
'virtualenv.lib.python3.6.site-packages.google.protobuf.internal.import_test_package',
'virtualenv.lib.python3.6.site-packages.pandas', 'virtualenv.lib.python3.6.site-packages.pandas.io',
'virtualenv.lib.python3.6.site-packages.pandas.io.sas',
'virtualenv.lib.python3.6.site-packages.pandas.io.json',
'virtualenv.lib.python3.6.site-packages.pandas.io.formats',
'virtualenv.lib.python3.6.site-packages.pandas.io.msgpack',
'virtualenv.lib.python3.6.site-packages.pandas.io.clipboard',
'virtualenv.lib.python3.6.site-packages.pandas.api',
'virtualenv.lib.python3.6.site-packages.pandas.api.types',
'virtualenv.lib.python3.6.site-packages.pandas.core',
'virtualenv.lib.python3.6.site-packages.pandas.core.util',
'virtualenv.lib.python3.6.site-packages.pandas.core.tools',
'virtualenv.lib.python3.6.site-packages.pandas.core.dtypes',
'virtualenv.lib.python3.6.site-packages.pandas.core.sparse',
'virtualenv.lib.python3.6.site-packages.pandas.core.indexes',
'virtualenv.lib.python3.6.site-packages.pandas.core.reshape',
'virtualenv.lib.python3.6.site-packages.pandas.core.computation',
'virtualenv.lib.python3.6.site-packages.pandas.util',
'virtualenv.lib.python3.6.site-packages.pandas._libs',
'virtualenv.lib.python3.6.site-packages.pandas._libs.tslibs',
'virtualenv.lib.python3.6.site-packages.pandas.stats',
'virtualenv.lib.python3.6.site-packages.pandas.tests',
'virtualenv.lib.python3.6.site-packages.pandas.tests.io',
'virtualenv.lib.python3.6.site-packages.pandas.tests.io.sas',
'virtualenv.lib.python3.6.site-packages.pandas.tests.io.json',
'virtualenv.lib.python3.6.site-packages.pandas.tests.io.parser',
'virtualenv.lib.python3.6.site-packages.pandas.tests.io.formats',
'virtualenv.lib.python3.6.site-packages.pandas.tests.io.msgpack',
'virtualenv.lib.python3.6.site-packages.pandas.tests.api',
'virtualenv.lib.python3.6.site-packages.pandas.tests.util',
'virtualenv.lib.python3.6.site-packages.pandas.tests.frame',
'virtualenv.lib.python3.6.site-packages.pandas.tests.tools',
'virtualenv.lib.python3.6.site-packages.pandas.tests.dtypes',
'virtualenv.lib.python3.6.site-packages.pandas.tests.scalar',
'virtualenv.lib.python3.6.site-packages.pandas.tests.series',
'virtualenv.lib.python3.6.site-packages.pandas.tests.sparse',
'virtualenv.lib.python3.6.site-packages.pandas.tests.groupby',
'virtualenv.lib.python3.6.site-packages.pandas.tests.indexes',
'virtualenv.lib.python3.6.site-packages.pandas.tests.indexes.period',
'virtualenv.lib.python3.6.site-packages.pandas.tests.indexes.datetimes',
'virtualenv.lib.python3.6.site-packages.pandas.tests.indexes.timedeltas',
'virtualenv.lib.python3.6.site-packages.pandas.tests.reshape',
'virtualenv.lib.python3.6.site-packages.pandas.tests.tseries',
'virtualenv.lib.python3.6.site-packages.pandas.tests.indexing',
'virtualenv.lib.python3.6.site-packages.pandas.tests.plotting',
'virtualenv.lib.python3.6.site-packages.pandas.tests.internals',
'virtualenv.lib.python3.6.site-packages.pandas.tests.computation',
'virtualenv.lib.python3.6.site-packages.pandas.tools',
'virtualenv.lib.python3.6.site-packages.pandas.types',
'virtualenv.lib.python3.6.site-packages.pandas.compat',
'virtualenv.lib.python3.6.site-packages.pandas.compat.numpy',
'virtualenv.lib.python3.6.site-packages.pandas.errors',
'virtualenv.lib.python3.6.site-packages.pandas.formats',
'virtualenv.lib.python3.6.site-packages.pandas.tseries',
'virtualenv.lib.python3.6.site-packages.pandas.plotting',
'virtualenv.lib.python3.6.site-packages.pandas.computation',
'virtualenv.lib.python3.6.site-packages.certifi', 'virtualenv.lib.python3.6.site-packages.chardet',
'virtualenv.lib.python3.6.site-packages.chardet.cli', 'virtualenv.lib.python3.6.site-packages.sklearn',
'virtualenv.lib.python3.6.site-packages.sklearn.svm',
'virtualenv.lib.python3.6.site-packages.sklearn.svm.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.tree',
'virtualenv.lib.python3.6.site-packages.sklearn.tree.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.utils',
'virtualenv.lib.python3.6.site-packages.sklearn.utils.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.utils.sparsetools',
'virtualenv.lib.python3.6.site-packages.sklearn.utils.sparsetools.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.cluster',
'virtualenv.lib.python3.6.site-packages.sklearn.cluster.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.metrics',
'virtualenv.lib.python3.6.site-packages.sklearn.metrics.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.metrics.cluster',
'virtualenv.lib.python3.6.site-packages.sklearn.metrics.cluster.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.mixture',
'virtualenv.lib.python3.6.site-packages.sklearn.mixture.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.datasets',
'virtualenv.lib.python3.6.site-packages.sklearn.datasets.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.ensemble',
'virtualenv.lib.python3.6.site-packages.sklearn.ensemble.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.manifold',
'virtualenv.lib.python3.6.site-packages.sklearn.manifold.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.externals',
'virtualenv.lib.python3.6.site-packages.sklearn.externals.joblib',
'virtualenv.lib.python3.6.site-packages.sklearn.neighbors',
'virtualenv.lib.python3.6.site-packages.sklearn.neighbors.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.covariance',
'virtualenv.lib.python3.6.site-packages.sklearn.covariance.tests',
'virtualenv.lib.python3.6.site-packages.sklearn._build_utils',
'virtualenv.lib.python3.6.site-packages.sklearn.linear_model',
'virtualenv.lib.python3.6.site-packages.sklearn.linear_model.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.__check_build',
'virtualenv.lib.python3.6.site-packages.sklearn.decomposition',
'virtualenv.lib.python3.6.site-packages.sklearn.decomposition.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.preprocessing',
'virtualenv.lib.python3.6.site-packages.sklearn.preprocessing.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.neural_network',
'virtualenv.lib.python3.6.site-packages.sklearn.neural_network.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.model_selection',
'virtualenv.lib.python3.6.site-packages.sklearn.model_selection.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.semi_supervised',
'virtualenv.lib.python3.6.site-packages.sklearn.semi_supervised.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.gaussian_process',
'virtualenv.lib.python3.6.site-packages.sklearn.gaussian_process.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.feature_selection',
'virtualenv.lib.python3.6.site-packages.sklearn.feature_selection.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.feature_extraction',
'virtualenv.lib.python3.6.site-packages.sklearn.feature_extraction.tests',
'virtualenv.lib.python3.6.site-packages.sklearn.cross_decomposition',
'virtualenv.lib.python3.6.site-packages.sklearn.cross_decomposition.tests',
'virtualenv.lib.python3.6.site-packages.urllib3', 'virtualenv.lib.python3.6.site-packages.urllib3.util',
'virtualenv.lib.python3.6.site-packages.urllib3.contrib',
'virtualenv.lib.python3.6.site-packages.urllib3.contrib._securetransport',
'virtualenv.lib.python3.6.site-packages.urllib3.packages',
'virtualenv.lib.python3.6.site-packages.urllib3.packages.backports',
'virtualenv.lib.python3.6.site-packages.urllib3.packages.ssl_match_hostname',
'virtualenv.lib.python3.6.site-packages.dateutil', 'virtualenv.lib.python3.6.site-packages.dateutil.tz',
'virtualenv.lib.python3.6.site-packages.dateutil.zoneinfo',
'virtualenv.lib.python3.6.site-packages.external',
'virtualenv.lib.python3.6.site-packages.external.six_archive',
'virtualenv.lib.python3.6.site-packages.external.protobuf_archive',
'virtualenv.lib.python3.6.site-packages.external.protobuf_archive.python',
'virtualenv.lib.python3.6.site-packages.external.protobuf_archive.python.google',
'virtualenv.lib.python3.6.site-packages.external.protobuf_archive.python.google.protobuf',
'virtualenv.lib.python3.6.site-packages.external.protobuf_archive.python.google.protobuf.pyext',
'virtualenv.lib.python3.6.site-packages.external.protobuf_archive.python.google.protobuf.compiler',
'virtualenv.lib.python3.6.site-packages.external.protobuf_archive.python.google.protobuf.internal',
'virtualenv.lib.python3.6.site-packages.external.org_python_pypi_backports_weakref',
'virtualenv.lib.python3.6.site-packages.external.org_python_pypi_backports_weakref.backports',
'virtualenv.lib.python3.6.site-packages.html5lib',
'virtualenv.lib.python3.6.site-packages.html5lib._trie',
'virtualenv.lib.python3.6.site-packages.html5lib.filters',
'virtualenv.lib.python3.6.site-packages.html5lib.treewalkers',
'virtualenv.lib.python3.6.site-packages.html5lib.treeadapters',
'virtualenv.lib.python3.6.site-packages.html5lib.treebuilders',
'virtualenv.lib.python3.6.site-packages.markdown',
'virtualenv.lib.python3.6.site-packages.markdown.extensions',
'virtualenv.lib.python3.6.site-packages.requests', 'virtualenv.lib.python3.6.site-packages.werkzeug',
'virtualenv.lib.python3.6.site-packages.werkzeug.debug',
'virtualenv.lib.python3.6.site-packages.werkzeug.contrib',
'virtualenv.lib.python3.6.site-packages.matplotlib',
'virtualenv.lib.python3.6.site-packages.matplotlib.tri',
'virtualenv.lib.python3.6.site-packages.matplotlib.axes',
'virtualenv.lib.python3.6.site-packages.matplotlib.cbook',
'virtualenv.lib.python3.6.site-packages.matplotlib.style',
'virtualenv.lib.python3.6.site-packages.matplotlib.tests',
'virtualenv.lib.python3.6.site-packages.matplotlib.compat',
'virtualenv.lib.python3.6.site-packages.matplotlib.testing',
'virtualenv.lib.python3.6.site-packages.matplotlib.testing._nose',
'virtualenv.lib.python3.6.site-packages.matplotlib.testing._nose.plugins',
'virtualenv.lib.python3.6.site-packages.matplotlib.testing.jpl_units',
'virtualenv.lib.python3.6.site-packages.matplotlib.backends',
'virtualenv.lib.python3.6.site-packages.matplotlib.backends.qt_editor',
'virtualenv.lib.python3.6.site-packages.matplotlib.sphinxext',
'virtualenv.lib.python3.6.site-packages.matplotlib.sphinxext.tests',
'virtualenv.lib.python3.6.site-packages.matplotlib.projections',
'virtualenv.lib.python3.6.site-packages.setuptools',
'virtualenv.lib.python3.6.site-packages.setuptools.extern',
'virtualenv.lib.python3.6.site-packages.setuptools.command',
'virtualenv.lib.python3.6.site-packages.tensorflow',
'virtualenv.lib.python3.6.site-packages.tensorflow.core',
'virtualenv.lib.python3.6.site-packages.tensorflow.core.lib',
'virtualenv.lib.python3.6.site-packages.tensorflow.core.lib.core',
'virtualenv.lib.python3.6.site-packages.tensorflow.core.util',
'virtualenv.lib.python3.6.site-packages.tensorflow.core.debug',
'virtualenv.lib.python3.6.site-packages.tensorflow.core.example',
'virtualenv.lib.python3.6.site-packages.tensorflow.core.grappler',
'virtualenv.lib.python3.6.site-packages.tensorflow.core.grappler.costs',
'virtualenv.lib.python3.6.site-packages.tensorflow.core.profiler',
'virtualenv.lib.python3.6.site-packages.tensorflow.core.protobuf',
'virtualenv.lib.python3.6.site-packages.tensorflow.core.framework',
'virtualenv.lib.python3.6.site-packages.tensorflow.tools',
'virtualenv.lib.python3.6.site-packages.tensorflow.tools.dist_test',
'virtualenv.lib.python3.6.site-packages.tensorflow.tools.dist_test.server',
'virtualenv.lib.python3.6.site-packages.tensorflow.tools.pip_package',
'virtualenv.lib.python3.6.site-packages.tensorflow.tools.graph_transforms',
'virtualenv.lib.python3.6.site-packages.tensorflow.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.lib',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.lib.io',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.ops.losses',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.ops.distributions',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.data',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.data.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.data.util',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.util',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.util.protobuf',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.debug',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.debug.cli',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.debug.lib',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.debug.examples',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.debug.wrappers',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.eager',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras._impl',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras._impl.keras',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras._impl.keras.utils',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras._impl.keras.engine',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras._impl.keras.layers',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras._impl.keras.datasets',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras._impl.keras.wrappers',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras._impl.keras.applications',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras._impl.keras.preprocessing',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.utils',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.layers',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.losses',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.models',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.backend',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.metrics',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.datasets',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.datasets.imdb',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.datasets.mnist',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.datasets.cifar10',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.datasets.reuters',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.datasets.cifar100',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.datasets.boston_housing',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.wrappers',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.wrappers.scikit_learn',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.callbacks',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.estimator',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.optimizers',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.activations',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.constraints',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.applications',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.applications.vgg16',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.applications.vgg19',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.applications.resnet50',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.applications.xception',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.applications.mobilenet',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.applications.inception_v3',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.initializers',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.regularizers',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.preprocessing',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.preprocessing.text',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.preprocessing.image',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.keras.preprocessing.sequence',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.tools',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.client',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.layers',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.summary',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.summary.writer',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.grappler',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.platform',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.profiler',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.profiler.internal',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.training',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.user_ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.estimator',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.estimator.canned',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.estimator.export',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.estimator.inputs',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.estimator.inputs.queues',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.framework',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.saved_model',
'virtualenv.lib.python3.6.site-packages.tensorflow.python.feature_column',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.nn',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.nn.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.nn.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.crf',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.crf.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.crf.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.gan',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.gan.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.gan.python.eval',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.gan.python.eval.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.gan.python.losses',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.gan.python.losses.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.gan.python.features',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.gan.python.features.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.gan.python.estimator',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.gan.python.estimator.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.opt',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.opt.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.opt.python.training',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.rnn',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.rnn.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.rnn.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.rnn.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.rnn.python.tools',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tpu',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tpu.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tpu.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tpu.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tpu.python.tpu',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tpu.python.profiler',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tpu.profiler',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.data',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.data.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.data.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.kfac',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.kfac.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.kfac.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.nccl',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.nccl.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.nccl.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.nccl.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.slim',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.slim.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.slim.python.slim',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.slim.python.slim.data',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.slim.python.slim.nets',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.text',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.text.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.text.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.util',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cloud',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cloud.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cloud.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cmake',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cmake.tools',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.eager',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.eager.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.hooks',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.hooks.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.hooks.python.training',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.image',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.image.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.image.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.image.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.utils',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.layers',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.losses',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.models',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.backend',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.metrics',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.datasets',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.datasets.imdb',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.datasets.mnist',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.datasets.cifar10',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.datasets.reuters',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.datasets.cifar100',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.datasets.boston_housing',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.wrappers',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.wrappers.scikit_learn',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.callbacks',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.optimizers',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.activations',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.constraints',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.applications',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.applications.vgg16',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.applications.vgg19',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.applications.resnet50',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.applications.xception',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.applications.mobilenet',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.applications.inception_v3',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.initializers',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.regularizers',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.preprocessing',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.preprocessing.text',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.preprocessing.image',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.keras.api.keras.preprocessing.sequence',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.learn',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.learn.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.learn.python.learn',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.learn.python.learn.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.learn.python.learn.utils',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.learn.python.learn.datasets',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.learn.python.learn.learn_io',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.learn.python.learn.estimators',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.learn.python.learn.preprocessing',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.learn.python.learn.preprocessing.tests',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.specs',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.specs.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.ffmpeg',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.ffmpeg.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.layers',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.layers.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.layers.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.layers.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.layers.python.layers',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.linalg',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.linalg.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.linalg.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.lookup',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.losses',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.losses.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.losses.python.losses',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.ndlstm',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.ndlstm.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.signal',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.signal.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.signal.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.signal.python.kernel_tests',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tfprof',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.metrics',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.metrics.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.metrics.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.metrics.python.metrics',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.seq2seq',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.seq2seq.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.seq2seq.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.seq2seq.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.solvers',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.solvers.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.solvers.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.staging',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.summary',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.testing',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.testing.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.testing.python.framework',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.batching',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.batching.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.batching.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.batching.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.compiler',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.grid_rnn',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.grid_rnn.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.grid_rnn.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.quantize',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.quantize.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.training',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.training.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.training.python.training',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.bayesflow',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.bayesflow.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.bayesflow.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cudnn_rnn',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cudnn_rnn.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cudnn_rnn.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cudnn_rnn.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cudnn_rnn.python.layers',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.estimator',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.estimator.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.estimator.python.estimator',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.framework',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.framework.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.framework.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.framework.python.framework',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.integrate',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.integrate.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.integrate.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.predictor',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.resampler',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.resampler.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.resampler.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.resampler.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.sparsemax',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.sparsemax.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.sparsemax.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.stateless',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.all_reduce',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.all_reduce.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.copy_graph',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.copy_graph.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.copy_graph.python.util',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.deprecated',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.fused_conv',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.fused_conv.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.fused_conv.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.fused_conv.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.timeseries',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.timeseries.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.timeseries.python.timeseries',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.timeseries.python.timeseries.state_space_models',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.timeseries.examples',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.saved_model',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.saved_model.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.saved_model.python.saved_model',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensorboard',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensorboard.plugins',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensorboard.plugins.trace',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensorboard.plugins.projector',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensorboard.graph_explorer',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensorboard.graph_explorer.proto',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.graph_editor',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.graph_editor.tests',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.memory_stats',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.memory_stats.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.memory_stats.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.memory_stats.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.quantization',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.quantization.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.boosted_trees',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.boosted_trees.lib',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.boosted_trees.lib.learner',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.boosted_trees.lib.learner.batch',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.boosted_trees.proto',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.boosted_trees.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.boosted_trees.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.boosted_trees.python.utils',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.boosted_trees.python.training',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.boosted_trees.python.training.functions',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.boosted_trees.estimator_batch',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.distributions',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.distributions.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.distributions.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.distributions.python.ops.bijectors',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.factorization',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.factorization.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.factorization.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensor_forest',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensor_forest.proto',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensor_forest.client',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensor_forest.hybrid',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensor_forest.hybrid.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensor_forest.hybrid.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensor_forest.hybrid.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensor_forest.hybrid.python.layers',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensor_forest.hybrid.python.models',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensor_forest.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.tensor_forest.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.decision_trees',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.decision_trees.proto',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.input_pipeline',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.input_pipeline.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.input_pipeline.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.input_pipeline.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.kernel_methods',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.kernel_methods.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.kernel_methods.python.mappers',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.labeled_tensor',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.labeled_tensor.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.labeled_tensor.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.legacy_seq2seq',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.legacy_seq2seq.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.legacy_seq2seq.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.session_bundle',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.receptive_field',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.receptive_field.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.receptive_field.python.util',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.stat_summarizer',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cluster_resolver',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cluster_resolver.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.cluster_resolver.python.training',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.linear_optimizer',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.linear_optimizer.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.linear_optimizer.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.nearest_neighbor',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.nearest_neighbor.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.nearest_neighbor.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.nearest_neighbor.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.reduce_slice_ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.reduce_slice_ops.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.reduce_slice_ops.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.reduce_slice_ops.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.remote_fused_graph',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.remote_fused_graph.pylib',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.remote_fused_graph.pylib.python',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.remote_fused_graph.pylib.python.ops',
'virtualenv.lib.python3.6.site-packages.tensorflow.contrib.meta_graph_transform',
'virtualenv.lib.python3.6.site-packages.tensorflow.examples',
'virtualenv.lib.python3.6.site-packages.tensorflow.examples.tutorials',
'virtualenv.lib.python3.6.site-packages.tensorflow.examples.tutorials.mnist',
'virtualenv.lib.python3.6.site-packages.tensorboard',
'virtualenv.lib.python3.6.site-packages.tensorboard.backend',
'virtualenv.lib.python3.6.site-packages.tensorboard.backend.event_processing',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins.core',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins.text',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins.audio',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins.graph',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins.image',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins.scalar',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins.profile',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins.debugger',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins.pr_curve',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins.histogram',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins.projector',
'virtualenv.lib.python3.6.site-packages.tensorboard.plugins.distribution',
'virtualenv.lib.python3.6.site-packages.tensorboard.pip_package',
'virtualenv.lib.python3.6.site-packages.mpl_toolkits.tests',
'virtualenv.lib.python3.6.site-packages.mpl_toolkits.mplot3d',
'virtualenv.lib.python3.6.site-packages.mpl_toolkits.axes_grid',
'virtualenv.lib.python3.6.site-packages.mpl_toolkits.axes_grid1',
'virtualenv.lib.python3.6.site-packages.mpl_toolkits.axisartist',
'virtualenv.lib.python3.6.site-packages.webencodings',
'virtualenv.lib.python3.6.site-packages.pkg_resources',
'virtualenv.lib.python3.6.site-packages.pkg_resources.extern',
'virtualenv.lib.python3.6.site-packages.pkg_resources._vendor',
'virtualenv.lib.python3.6.site-packages.pkg_resources._vendor.packaging'],
url='https://github.com/Halftruth08/Game_AI',
license='MIT',
author='Halftruth08, kanwei',
author_email='Halftruth08@github.com',
description='Codenames Model + Game Playing'
)
| 86.398281
| 122
| 0.692502
| 6,949
| 60,306
| 5.976256
| 0.054252
| 0.221315
| 0.340485
| 0.357509
| 0.975896
| 0.974476
| 0.969708
| 0.910544
| 0.714007
| 0.513304
| 0
| 0.029645
| 0.171592
| 60,306
| 697
| 123
| 86.522238
| 0.801633
| 0
| 0
| 0
| 0
| 0.238506
| 0.791331
| 0.789606
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.00431
| 0
| 0.00431
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
a2d8f9980da6372870482a9f917bf39f100c872e
| 17,445
|
py
|
Python
|
dev/writeCCode.py
|
gmarupilla/libSEDML
|
3a2a207d78afccefb4389b65fea9eeb7e446d09e
|
[
"BSD-2-Clause"
] | 1
|
2020-10-21T01:41:14.000Z
|
2020-10-21T01:41:14.000Z
|
dev/writeCCode.py
|
gmarupilla/libSEDML
|
3a2a207d78afccefb4389b65fea9eeb7e446d09e
|
[
"BSD-2-Clause"
] | null | null | null |
dev/writeCCode.py
|
gmarupilla/libSEDML
|
3a2a207d78afccefb4389b65fea9eeb7e446d09e
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
#
# @file writeCCode.py
# @brief Create the C code for a class
# @author Sarah Keating
#
import sys
import fileHeaders
import generalFunctions
import strFunctions
def writeConstructors(element, package, output):
  """Write the C API create/free/clone wrappers for an element class.

  :param element: element class name (e.g. 'SedModel')
  :param package: package name (currently unused; kept for call compatibility)
  :param output: writable stream receiving the generated C code
  """
  # Hoist the object abbreviation used by every wrapper below; the unused
  # 'indent' local (strFunctions.getIndent) from the original was dropped.
  abbrev = strFunctions.objAbbrev(element)
  output.write('/**\n')
  output.write(' * write comments\n')
  output.write(' */\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('{0}_t *\n'.format(element))
  output.write('{0}_create'.format(element))
  output.write('(unsigned int level, unsigned int version)\n')
  output.write('{\n')
  output.write(' return new {0}(level, version);\n'.format(element))
  output.write('}\n\n\n')
  output.write('/**\n')
  output.write(' * write comments\n')
  output.write(' */\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('void\n')
  output.write('{0}_free'.format(element))
  output.write('({0}_t * {1})\n'.format(element, abbrev))
  output.write('{\n')
  output.write(' if ({0} != NULL)\n'.format(abbrev))
  output.write(' delete {0};\n'.format(abbrev))
  output.write('}\n\n\n')
  output.write('/**\n')
  output.write(' * write comments\n')
  output.write(' */\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('{0}_t *\n'.format(element))
  output.write('{0}_clone'.format(element))
  output.write('({0}_t * {1})\n'.format(element, abbrev))
  output.write('{\n')
  output.write(' if ({0} != NULL)\n'.format(abbrev))
  output.write(' {\n')
  output.write(' return static_cast<{0}_t*>({1}->clone());\n'.format(element, abbrev))
  output.write(' }\n')
  output.write(' else\n')
  output.write(' {\n')
  output.write(' return NULL;\n')
  output.write(' }\n')
  output.write('}\n\n\n')
def writeAttributeFunctions(attrs, output, element):
  """Write the C accessor wrappers for all attributes of an element.

  Scalar attributes receive get/isSet/set/unset wrappers, grouped by kind
  (all getters first, then all isSet wrappers, and so on); list-of child
  elements receive the full list-manipulation API.
  """
  scalars = [a for a in attrs if a['type'] != 'lo_element']
  for writer in (writeGetFunction, writeIsSetFunction,
                 writeSetFunction, writeUnsetFunction):
    for attribute in scalars:
      writer(attribute, output, element)
  for attribute in attrs:
    if attribute['type'] == 'lo_element':
      writeListOfSubElements(attribute, output, element)
def writeListOfSubElements(attrib, output, element):
  """Write the C wrappers for a list-of ('lo_element') child element.

  Emits add/create/getListOf/get/getById/getNum/remove/removeById wrappers.

  :param attrib: attribute dict describing the child ('name', 'element',
                 optionally 'abstract' and 'concrete')
  :param output: writable stream receiving the generated C code
  :param element: name of the parent element class
  """
  # Hoist the names reused by every wrapper below.
  capName = strFunctions.cap(attrib['name'])
  loname = generalFunctions.writeListOf(capName)
  abbrev = strFunctions.objAbbrev(element)
  output.write('LIBSEDML_EXTERN\n')
  output.write('int\n')
  output.write('{0}_add{1}({0}_t * {2}, '.format(element, capName, abbrev))
  output.write('{0}_t * {1})\n'.format(attrib['element'], strFunctions.objAbbrev(attrib['element'])))
  output.write('{\n')
  output.write(' return ({0} != NULL) ? {0}->add{1}({2}) : LIBSBML_INVALID_OBJECT;\n'.format(abbrev, capName, strFunctions.objAbbrev(attrib['element'])))
  output.write('}\n\n')
  # Python 3 compatibility fix: dict.has_key() was removed; use 'in'.
  # Concrete (non-abstract) children get a plain create wrapper; abstract
  # children get one wrapper per concrete subclass.
  if 'abstract' not in attrib or attrib['abstract'] == False:
    output.write('LIBSEDML_EXTERN\n')
    output.write('{0}_t *\n'.format(attrib['element']))
    output.write('{0}_create{1}({0}_t * {2})\n' .format(element, capName, abbrev))
    output.write('{\n')
    output.write(' return ({0} != NULL) ? {0}->create{1}() : NULL;\n'.format(abbrev, capName))
    output.write('}\n\n')
  elif 'concrete' in attrib:
    for elem in attrib['concrete']:
      output.write('LIBSEDML_EXTERN\n')
      output.write('{0}_t *\n'.format(elem['element']))
      output.write('{0}_create{1}({0}_t * {2})\n' .format(element, strFunctions.cap(elem['name']), abbrev))
      output.write('{\n')
      output.write(' return ({0} != NULL) ? {0}->create{1}() : NULL;\n'.format(abbrev, strFunctions.cap(elem['name'])))
      output.write('}\n\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('SedListOf_t *\n')
  output.write('{0}_get{1}({0}_t * {2})\n'.format(element, loname, abbrev))
  output.write('{\n')
  output.write(' return ({0} != NULL) ? (SedListOf_t *){0}->getListOf{1}s() : NULL;\n'.format(abbrev, capName))
  output.write('}\n\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('{0}_t *\n'.format(attrib['element']))
  output.write('{0}_get{1}({0}_t * {2}, '.format(element, capName, abbrev))
  output.write('unsigned int n)\n')
  output.write('{\n')
  output.write(' return ({0} != NULL) ? {0}->get{1}(n) : NULL;\n'.format(abbrev, capName))
  output.write('}\n\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('{0}_t *\n'.format(attrib['element']))
  output.write('{0}_get{1}ById({0}_t * {2}, '.format(element, capName, abbrev))
  output.write('const char * sid)\n')
  output.write('{\n')
  output.write(' return ({0} != NULL) ? {0}->get{1}(sid) : NULL;\n'.format(abbrev, capName))
  output.write('}\n\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('unsigned int\n')
  output.write('{0}_getNum{1}s({0}_t * {2})\n' .format(element, capName, abbrev))
  output.write('{\n')
  output.write(' return ({0} != NULL) ? {0}->getNum{1}s() : SEDML_INT_MAX;\n'.format(abbrev, capName))
  output.write('}\n\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('{0}_t *\n'.format(attrib['element']))
  output.write('{0}_remove{1}({0}_t * {2}, '.format(element, capName, abbrev))
  output.write('unsigned int n)\n')
  output.write('{\n')
  output.write(' return ({0} != NULL) ? {0}->remove{1}(n) : NULL;\n'.format(abbrev, capName))
  output.write('}\n\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('{0}_t *\n'.format(attrib['element']))
  output.write('{0}_remove{1}ById({0}_t * {2}, '.format(element, capName, abbrev))
  output.write('const char * sid)\n')
  output.write('{\n')
  output.write(' return ({0} != NULL) ? {0}->remove{1}(sid) : NULL;\n'.format(abbrev, capName))
  output.write('}\n\n')
def writeGetFunction(attrib, output, element):
  """Write the C get wrapper(s) for one attribute of `element`.

  Scalar attributes get a single getter; element-type attributes also get
  a create wrapper; 'std::vector<double>' attributes are skipped entirely.

  :param attrib: attribute dict ('name', 'type', optionally 'element')
  :param output: writable stream receiving the generated C code
  :param element: name of the owning element class
  """
  att = generalFunctions.parseAttributeForC(attrib)
  attName = att[0]
  capAttName = att[1]
  attType = att[2]
  # The C getter returns an owned copy, so drop the const qualifier.
  if att[3] == 'const char *':
    attTypeCode = 'char *'
  else:
    attTypeCode = att[3]
  num = att[4]
  varname = strFunctions.objAbbrev(element)
  # Fix: bail out before writing the comment header; the original emitted a
  # stray '/** ... */' block into the output for skipped vector attributes.
  if attrib['type'] == 'std::vector<double>':
    return
  output.write('/**\n')
  output.write(' * write comments\n')
  output.write(' */\n')
  if attrib['type'] != 'element' and attrib['type'] != 'lo_element' and attrib['type'] != 'XMLNode*':
    output.write('LIBSEDML_EXTERN\n')
    output.write('{0}\n'.format(attTypeCode))
    output.write('{0}_get{1}'.format(element, capAttName))
    output.write('({0}_t * {1})\n'.format(element, varname))
    output.write('{\n')
    if attType == 'string':
      output.write(' if ({0} == NULL)\n'.format(varname))
      output.write(' return NULL;\n\n')
      output.write(' return {0}->get{1}().empty() ? NULL : safe_strdup({0}->get{1}().c_str());\n'.format(varname, capAttName))
    elif num == True:
      if attTypeCode == 'double':
        output.write(' return ({0} != NULL) ? {0}->get{1}() : numeric_limits<double>::quiet_NaN();\n'.format(varname, capAttName))
      else:
        output.write(' return ({0} != NULL) ? {0}->get{1}() : SEDML_INT_MAX;\n'.format(varname, capAttName))
    elif attType == 'boolean':
      output.write(' return ({0} != NULL) ? static_cast<int>({0}->get{1}()) : 0;\n'.format(varname, capAttName))
    elif attType == 'DimensionDescription*':
      # Fix: the original ternary was inverted -- it returned NULL for valid
      # objects and dereferenced a NULL pointer otherwise.
      output.write(' return ({0} != NULL) ? const_cast<DimensionDescription*>({0}->get{1}()) : NULL;\n'.format(varname, capAttName))
    output.write('}\n\n\n')
  elif attrib['type'] == 'XMLNode*':
    output.write('LIBSEDML_EXTERN\n')
    output.write('XMLNode_t*\n')
    output.write('{0}_get{1}'.format(element, capAttName))
    output.write('({0}_t * {1})\n'.format(element, varname))
    output.write('{\n')
    output.write(' if ({0} == NULL)\n'.format(varname))
    output.write(' return NULL;\n\n')
    output.write(' return ({0}_t*){1}->get{2}();\n'.format('XMLNode',varname, capAttName))
    output.write('}\n\n\n')
  elif attrib['type'] == 'element':
    if attrib['name'] == 'Math' or attrib['name'] == 'math':
      output.write('LIBSEDML_EXTERN\n')
      output.write('ASTNode_t*\n')
      output.write('{0}_get{1}'.format(element, capAttName))
      output.write('({0}_t * {1})\n'.format(element, varname))
      output.write('{\n')
      output.write(' if ({0} == NULL)\n'.format(varname))
      output.write(' return NULL;\n\n')
      output.write(' return (ASTNode_t*){0}->get{1}();\n'.format(varname, capAttName))
      output.write('}\n\n\n')
    else:
      output.write('LIBSEDML_EXTERN\n')
      output.write('{0}_t*\n'.format(attrib['element']))
      output.write('{0}_get{1}'.format(element, capAttName))
      output.write('({0}_t * {1})\n'.format(element, varname))
      output.write('{\n')
      output.write(' if ({0} == NULL)\n'.format(varname))
      output.write(' return NULL;\n\n')
      output.write(' return ({0}_t*){1}->get{2}();\n'.format(attrib['element'],varname, capAttName))
      output.write('}\n\n\n')
      output.write('LIBSEDML_EXTERN\n')
      output.write('{0}_t*\n'.format(attrib['element']))
      output.write('{0}_create{1}'.format(element, capAttName))
      output.write('({0}_t * {1})\n'.format(element, varname))
      output.write('{\n')
      output.write(' if ({0} == NULL)\n'.format(varname))
      output.write(' return NULL;\n\n')
      output.write(' return ({0}_t*){1}->create{2}();\n'.format(attrib['element'],varname, capAttName))
      output.write('}\n\n\n')
def writeIsSetFunction(attrib, output, element):
  """Write the C <Element>_isSet<Attribute> wrapper for a scalar attribute."""
  parsed = generalFunctions.parseAttributeForC(attrib)
  capAttName = parsed[1]
  # List-of and vector attributes carry no isSet wrapper.
  if attrib['type'] in ('lo_element', 'std::vector<double>'):
    return
  varname = strFunctions.objAbbrev(element)
  output.write('/**\n')
  output.write(' * write comments\n')
  output.write(' */\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('int\n')
  output.write('{0}_isSet{1}'.format(element, capAttName))
  output.write('({0}_t * {1})\n'.format(element, varname))
  output.write('{\n')
  output.write(' return ({0} != NULL) ? static_cast<int>({0}->isSet{1}()) : 0;\n'.format(varname, capAttName))
  output.write('}\n\n\n')
def writeSetFunction(attrib, output, element):
  """Write the C <Element>_set<Attribute> wrapper for one attribute."""
  parsed = generalFunctions.parseAttributeForC(attrib)
  attName = parsed[0]
  capAttName = parsed[1]
  if attrib['type'] == 'lo_element':
    return
  varname = strFunctions.objAbbrev(element)
  output.write('/**\n')
  output.write(' * write comments\n')
  output.write(' */\n')
  # NOTE(review): the vector early-out happens after the comment header was
  # already emitted, mirroring the original behaviour.
  if attrib['type'] == 'std::vector<double>':
    return
  # The three original branches emitted identical wrappers differing only
  # in the C type of the value argument, so pick that type first.
  if attrib['type'] != 'element':
    value_type = parsed[3]
  elif attrib['name'] == 'Math' or attrib['name'] == 'math':
    value_type = 'ASTNode_t*'
  else:
    value_type = '{0}_t*'.format(attrib['element'])
  output.write('LIBSEDML_EXTERN\n')
  output.write('int\n')
  output.write('{0}_set{1}'.format(element, capAttName))
  output.write('({0}_t * {1},'.format(element, varname))
  output.write(' {0} {1})\n'.format(value_type, attName))
  output.write('{\n')
  output.write(' return ({0} != NULL) ? {0}->set{1}({2}) : LIBSEDML_INVALID_OBJECT;\n'.format(varname, capAttName, attName))
  output.write('}\n\n\n')
def writeUnsetFunction(attrib, output, element):
  """Write the C <Element>_unset<Attribute> wrapper for a scalar attribute."""
  parsed = generalFunctions.parseAttributeForC(attrib)
  capAttName = parsed[1]
  # List-of and vector attributes carry no unset wrapper.
  if attrib['type'] in ('lo_element', 'std::vector<double>'):
    return
  varname = strFunctions.objAbbrev(element)
  output.write('/**\n')
  output.write(' * write comments\n')
  output.write(' */\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('int\n')
  output.write('{0}_unset{1}'.format(element, capAttName))
  output.write('({0}_t * {1})\n'.format(element, varname))
  output.write('{\n')
  output.write(' return ({0} != NULL) ? {0}->unset{1}() : LIBSEDML_INVALID_OBJECT;\n'.format(varname, capAttName))
  output.write('}\n\n\n')
def writeHasReqdAttrFunction(output, element):
  """Write the C <Element>_hasRequiredAttributes wrapper."""
  obj = strFunctions.objAbbrev(element)
  output.write('/**\n')
  output.write(' * write comments\n')
  output.write(' */\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('int\n')
  output.write('{0}_hasRequiredAttributes'.format(element))
  output.write('({0}_t * {1})\n'.format(element, obj))
  output.write('{\n')
  output.write(' return ({0} != NULL) ? static_cast<int>({0}->hasRequiredAttributes()) : 0;\n'.format(obj))
  output.write('}\n\n\n')
def writeHasReqdElementsFunction(output, element):
  """Write the C <Element>_hasRequiredElements wrapper."""
  obj = strFunctions.objAbbrev(element)
  output.write('/**\n')
  output.write(' * write comments\n')
  output.write(' */\n')
  output.write('LIBSEDML_EXTERN\n')
  output.write('int\n')
  output.write('{0}_hasRequiredElements'.format(element))
  output.write('({0}_t * {1})\n'.format(element, obj))
  output.write('{\n')
  output.write(' return ({0} != NULL) ? static_cast<int>({0}->hasRequiredElements()) : 0;\n'.format(obj))
  output.write('}\n\n\n')
def writeListOfCode(output, element, type):
  """Write the C getById/removeById wrappers for the SedListOf class."""
  loelement = generalFunctions.writeListOf(element)
  # Both wrappers are identical except for the C suffix and the C++ method.
  for cpp_method, c_suffix in (('get', 'getById'), ('remove', 'removeById')):
    output.write('/**\n')
    output.write(' * write comments\n')
    output.write(' */\n')
    output.write('LIBSEDML_EXTERN\n')
    output.write('{0}_t *\n'.format(type))
    output.write('{0}_{1}'.format(loelement, c_suffix))
    output.write('(SedListOf_t * lo, const char * sid)\n')
    output.write('{\n')
    output.write(' if (lo == NULL)\n')
    output.write(' return NULL;\n\n')
    output.write(' return (sid != NULL) ? static_cast <{0} *>(lo)->{1}(sid) : NULL;\n'.format(loelement, cpp_method))
    output.write('}\n\n\n')
# write the code file
def createCode(element, code):
  """Write the complete generated C code for one element into `code`.

  :param element: dict describing the element ('name', 'package', 'attribs',
                  'hasChildren', 'hasMath', 'hasSedListOf', and optionally
                  'elementName' / 'element' overrides)
  :param code: writable stream for the generated code
  """
  # Python 3 compatibility fix: dict.has_key() was removed; use 'in'.
  # Locals renamed so the builtin 'type' is no longer shadowed.
  elem_type = element['name']
  elem_name = element['name']
  if 'elementName' in element:
    elem_name = strFunctions.cap(element['elementName'])
  if 'element' in element:
    elem_type = element['element']
  writeConstructors(element['name'], element['package'], code)
  writeAttributeFunctions(element['attribs'], code, element['name'])
  writeHasReqdAttrFunction(code, element['name'])
  if element['hasChildren'] == True or element['hasMath'] == True:
    writeHasReqdElementsFunction(code, element['name'])
  if element['hasSedListOf'] == True:
    writeListOfCode(code, elem_name, elem_type)
  code.write('\n\n')
  code.write('LIBSEDML_CPP_NAMESPACE_END\n')
  code.write('\n\n')
# to be done
| 44.27665
| 203
| 0.63961
| 2,303
| 17,445
| 4.780287
| 0.067738
| 0.249796
| 0.146062
| 0.085021
| 0.823781
| 0.801254
| 0.786902
| 0.774185
| 0.764647
| 0.743937
| 0
| 0.015597
| 0.136314
| 17,445
| 394
| 204
| 44.27665
| 0.715073
| 0.038005
| 0
| 0.686441
| 0
| 0.048023
| 0.297202
| 0.042415
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031073
| false
| 0
| 0.011299
| 0
| 0.062147
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a2f4ad23bbab3adab7c1544d3c0fa638f2eaf743
| 10,113
|
py
|
Python
|
attacks/weights/norms.py
|
davidstutz/random-bit-error-robustness
|
59d8533c8db87ba1b220a64032cf929e5d67fbfa
|
[
"Unlicense"
] | null | null | null |
attacks/weights/norms.py
|
davidstutz/random-bit-error-robustness
|
59d8533c8db87ba1b220a64032cf929e5d67fbfa
|
[
"Unlicense"
] | null | null | null |
attacks/weights/norms.py
|
davidstutz/random-bit-error-robustness
|
59d8533c8db87ba1b220a64032cf929e5d67fbfa
|
[
"Unlicense"
] | null | null | null |
"""
Norms for weight attacks.
"""
import torch
import common.torch
class Norm:
    """Base class of weight-perturbation norms between two models.

    Subclasses implement ``__call__`` and record per-layer values in
    ``self.norms``.
    """

    def __init__(self):
        """Initialize the per-layer norm record."""
        # Per-layer norms ([float]) filled in by the last __call__.
        self.norms = []

    def __call__(self, model, perturbed_model, layers, quantization=None, quantization_contexts=None):
        """Compute the norm of the perturbation between two models.

        :param model: original model
        :type model: torch.nn.Module
        :param perturbed_model: perturbed model
        :type perturbed_model: torch.nn.Module
        :param layers: layers to compute norm on
        :type layers: [int]
        :param quantization: quantization if required
        :type quantization: Quantization
        :param quantization_contexts: quantization contexts for each layer
        :type quantization_contexts: [dict]
        """
        raise NotImplementedError()
class LInfNorm(Norm):
    """Maximum absolute weight perturbation across the selected layers."""

    def __call__(self, model, perturbed_model, layers, quantization=None, quantization_contexts=None):
        """Return the L-infinity norm of the weight perturbation.

        Also stores the per-layer maxima in ``self.norms``.

        :param model: original model
        :type model: torch.nn.Module
        :param perturbed_model: perturbed model
        :type perturbed_model: torch.nn.Module
        :param layers: layers to compute norm on
        :type layers: [int]
        :param quantization: unused, accepted for interface compatibility
        :param quantization_contexts: unused, accepted for interface compatibility
        """
        originals = list(model.parameters())
        perturbeds = list(perturbed_model.parameters())
        per_layer = []
        overall = 0
        for layer in layers:
            delta = perturbeds[layer].data - originals[layer].data
            # .item() converts to a Python float, avoiding GPU memory overhead.
            layer_max = torch.max(torch.abs(delta)).item()
            overall = max(layer_max, overall)
            per_layer.append(layer_max)
        self.norms = per_layer
        return overall
class RelativeLInfNorm(Norm):
    """L-infinity perturbation norm scaled by twice the largest weight magnitude."""

    def __call__(self, model, perturbed_model, layers, quantization=None, quantization_contexts=None):
        """Return the relative L-infinity norm of the weight perturbation.

        Per-layer (absolute) maxima are stored in ``self.norms``; the return
        value is divided by 2 * max |w| over the selected layers.

        :param model: original model
        :type model: torch.nn.Module
        :param perturbed_model: perturbed model
        :type perturbed_model: torch.nn.Module
        :param layers: layers to compute norm on
        :type layers: [int]
        :param quantization: unused, accepted for interface compatibility
        :param quantization_contexts: unused, accepted for interface compatibility
        """
        originals = list(model.parameters())
        perturbeds = list(perturbed_model.parameters())
        largest_weight = 0
        per_layer = []
        overall = 0
        for layer in layers:
            largest_weight = max(largest_weight, torch.max(torch.abs(originals[layer].data)).item())
            delta = perturbeds[layer].data - originals[layer].data
            # .item() converts to a Python float, avoiding GPU memory overhead.
            layer_max = torch.max(torch.abs(delta)).item()
            overall = max(layer_max, overall)
            per_layer.append(layer_max)
        self.norms = per_layer
        return overall / (2 * largest_weight)
class L2Norm(Norm):
    """L2 norm of the concatenated weight perturbation across the selected layers."""

    def __call__(self, model, perturbed_model, layers, quantization=None, quantization_contexts=None):
        """Return the L2 norm of the full weight perturbation.

        Per-layer L2 norms are stored in ``self.norms``.

        :param model: original model
        :type model: torch.nn.Module
        :param perturbed_model: perturbed model
        :type perturbed_model: torch.nn.Module
        :param layers: layers to compute norm on
        :type layers: [int]
        :param quantization: unused, accepted for interface compatibility
        :param quantization_contexts: unused, accepted for interface compatibility
        """
        originals = list(model.parameters())
        perturbeds = list(perturbed_model.parameters())
        per_layer = []
        flattened = None
        for layer in layers:
            delta = perturbeds[layer].data - originals[layer].data
            flattened = common.torch.concatenate(flattened, delta.view(-1))
            per_layer.append(torch.norm(delta, p=2).item())
        self.norms = per_layer
        # .item() converts to a Python float, avoiding GPU memory overhead.
        return torch.norm(flattened, p=2).item()
class L1Norm(Norm):
    """L1 norm of the concatenated weight perturbation across the selected layers."""

    def __call__(self, model, perturbed_model, layers, quantization=None, quantization_contexts=None):
        """Return the L1 norm of the full weight perturbation.

        Per-layer L1 norms are stored in ``self.norms``.

        :param model: original model
        :type model: torch.nn.Module
        :param perturbed_model: perturbed model
        :type perturbed_model: torch.nn.Module
        :param layers: layers to compute norm on
        :type layers: [int]
        :param quantization: unused, accepted for interface compatibility
        :param quantization_contexts: unused, accepted for interface compatibility
        """
        norms = []
        parameters = list(model.parameters())
        perturbed_parameters = list(perturbed_model.parameters())
        perturbations = None
        for i in layers:
            perturbation = perturbed_parameters[i].data - parameters[i].data
            perturbations = common.torch.concatenate(perturbations, perturbation.view(-1))
            # Fix: per-layer norms now use p=1 to match the L1 norm this class
            # returns; the original used p=2, apparently copied from L2Norm.
            norms.append(torch.norm(perturbation, p=1).item())
        self.norms = norms
        # .item() converts to a Python float, avoiding GPU memory overhead.
        return torch.norm(perturbations, p=1).item()
class L0Norm(Norm):
    """L0 "norm": number of changed weights, summed over the selected layers."""

    def __init__(self, fraction=0.01):
        """
        Constructor.

        :param fraction: fraction of elements to keep in normalization
        :type fraction: float
        """
        # Fix: initialize the base class so self.norms exists even before the
        # first __call__ (the original skipped super().__init__()).
        super().__init__()
        assert fraction > 0
        assert fraction <= 1
        self.fraction = fraction

    def __call__(self, model, perturbed_model, layers, quantization=None, quantization_contexts=None):
        """Return the number of changed weights across the selected layers.

        Per-layer counts are stored in ``self.norms``.

        :param model: original model
        :type model: torch.nn.Module
        :param perturbed_model: perturbed model
        :type perturbed_model: torch.nn.Module
        :param layers: layers to compute norm on
        :type layers: [int]
        :param quantization: unused, accepted for interface compatibility
        :param quantization_contexts: unused, accepted for interface compatibility
        """
        norm = 0
        norms = []
        parameters = list(model.parameters())
        perturbed_parameters = list(perturbed_model.parameters())
        for i in layers:
            perturbation = perturbed_parameters[i].data - parameters[i].data
            # .item() converts to a Python float, avoiding GPU memory overhead.
            norm_i = torch.norm(perturbation, p=0).item()
            norm += norm_i
            norms.append(norm_i)
        self.norms = norms
        return norm
class HammingNorm(Norm):
    """Total Hamming distance between quantized original and perturbed weights."""

    def __call__(self, model, perturbed_model, layers, quantization=None, quantization_contexts=None):
        """Return the summed Hamming distance over the selected layers.

        Per-layer distances are stored in ``self.norms``.

        :param model: original model
        :type model: torch.nn.Module
        :param perturbed_model: perturbed model
        :type perturbed_model: torch.nn.Module
        :param layers: layers to compute norm on
        :type layers: [int]
        :param quantization: quantization applied to both models (required)
        :type quantization: Quantization
        :param quantization_contexts: per-layer quantization contexts (required)
        :type quantization_contexts: [dict]
        """
        assert quantization is not None
        assert isinstance(quantization_contexts, list)
        originals = list(model.parameters())
        perturbeds = list(perturbed_model.parameters())
        total = 0
        per_layer = []
        for layer in layers:
            context = quantization_contexts[layer]
            quantized, _ = quantization.quantize(originals[layer].data, context=context)
            quantized_perturbed, _ = quantization.quantize(perturbeds[layer].data, context=context)
            distances = common.torch.int_hamming_distance(quantized, quantized_perturbed)
            # .item() converts to a Python int, avoiding GPU memory overhead.
            layer_distance = torch.sum(distances).item()
            total += layer_distance
            per_layer.append(layer_distance)
        self.norms = per_layer
        return total
class RelativeHammingNorm(Norm):
    """Hamming distance normalized by total parameter count times bit precision."""

    def __call__(self, model, perturbed_model, layers, quantization=None, quantization_contexts=None):
        """Return the fraction of flipped bits over the selected layers.

        Per-layer absolute distances are stored in ``self.norms``.

        :param model: original model
        :type model: torch.nn.Module
        :param perturbed_model: perturbed model
        :type perturbed_model: torch.nn.Module
        :param layers: layers to compute norm on
        :type layers: [int]
        :param quantization: quantization applied to both models (required)
        :type quantization: Quantization
        :param quantization_contexts: per-layer quantization contexts
        :type quantization_contexts: [dict]
        """
        assert quantization is not None
        originals = list(model.parameters())
        perturbeds = list(perturbed_model.parameters())
        total = 0
        per_layer = []
        for layer in layers:
            context = quantization_contexts[layer]
            quantized, _ = quantization.quantize(originals[layer].data, context=context)
            quantized_perturbed, _ = quantization.quantize(perturbeds[layer].data, context=context)
            distances = common.torch.int_hamming_distance(quantized, quantized_perturbed)
            # .item() converts to a Python int, avoiding GPU memory overhead.
            layer_distance = torch.sum(distances).item()
            total += layer_distance
            per_layer.append(layer_distance)
        self.norms = per_layer
        # Normalize by the total number of bits in the model's parameters.
        n, _, _, _ = common.torch.parameter_sizes(model)
        return total / (n * quantization.repr_precision)
| 34.397959
| 119
| 0.641452
| 1,064
| 10,113
| 5.941729
| 0.085526
| 0.126542
| 0.048086
| 0.045555
| 0.909206
| 0.909206
| 0.909206
| 0.903353
| 0.890066
| 0.890066
| 0
| 0.002995
| 0.273707
| 10,113
| 293
| 120
| 34.515358
| 0.857726
| 0.35311
| 0
| 0.775862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043103
| 1
| 0.086207
| false
| 0
| 0.017241
| 0
| 0.232759
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c44919ccc746bfb862c413a0d310cce9b8d061d
| 178
|
py
|
Python
|
app/models/__init__.py
|
galbino/miocardiopediatra
|
8b60bbce45d15616058a81f6c329f6eaa063f07f
|
[
"MIT"
] | null | null | null |
app/models/__init__.py
|
galbino/miocardiopediatra
|
8b60bbce45d15616058a81f6c329f6eaa063f07f
|
[
"MIT"
] | 2
|
2019-11-06T00:37:17.000Z
|
2019-11-07T23:10:06.000Z
|
app/models/__init__.py
|
galbino/miocardiopediatra
|
8b60bbce45d15616058a81f6c329f6eaa063f07f
|
[
"MIT"
] | null | null | null |
from app.models.User import User
from app.models.FAQ import *
from app.models.Anamnese import *
from app.models.Especialidade import Especialidade
from app.models.Exame import *
| 29.666667
| 50
| 0.814607
| 27
| 178
| 5.37037
| 0.333333
| 0.241379
| 0.448276
| 0.262069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11236
| 178
| 5
| 51
| 35.6
| 0.917722
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0c56c33d49106d0a495d02e8a344adb562ac79a6
| 98
|
py
|
Python
|
src/__main__.py
|
AlbertSuarez/hackeps-pokemonwebspider
|
362502e6dd89055c9311cbf7981a8477da32dc53
|
[
"MIT"
] | null | null | null |
src/__main__.py
|
AlbertSuarez/hackeps-pokemonwebspider
|
362502e6dd89055c9311cbf7981a8477da32dc53
|
[
"MIT"
] | null | null | null |
src/__main__.py
|
AlbertSuarez/hackeps-pokemonwebspider
|
362502e6dd89055c9311cbf7981a8477da32dc53
|
[
"MIT"
] | 1
|
2019-11-10T21:27:22.000Z
|
2019-11-10T21:27:22.000Z
|
from src import pokemon_web_spider
# Script entry point: run the spider's generate() routine when executed
# directly (e.g. `python -m src`).
if __name__ == '__main__':
    pokemon_web_spider.generate()
| 16.333333
| 34
| 0.765306
| 13
| 98
| 4.846154
| 0.769231
| 0.31746
| 0.507937
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153061
| 98
| 5
| 35
| 19.6
| 0.759036
| 0
| 0
| 0
| 1
| 0
| 0.081633
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
38f22b58d7d098238b57358da5d33d853ef1c2af
| 135
|
py
|
Python
|
generic/data_provider/batchifier.py
|
ibrahimSouleiman/GuessWhat
|
60d140de1aae5ccda27e7d3eef2b9fb9548f0854
|
[
"Apache-2.0"
] | null | null | null |
generic/data_provider/batchifier.py
|
ibrahimSouleiman/GuessWhat
|
60d140de1aae5ccda27e7d3eef2b9fb9548f0854
|
[
"Apache-2.0"
] | null | null | null |
generic/data_provider/batchifier.py
|
ibrahimSouleiman/GuessWhat
|
60d140de1aae5ccda27e7d3eef2b9fb9548f0854
|
[
"Apache-2.0"
] | null | null | null |
class AbstractBatchifier(object):
    """Base batchifier whose hooks are identity functions.

    Subclasses may override ``filter`` (select which games to keep) and
    ``apply`` (transform the selected games); both default to no-ops.
    """

    def filter(self, games):
        """Return the games to keep; the default keeps all of them."""
        return games

    def apply(self, games):
        """Return the (possibly transformed) games; the default is a no-op."""
        return games
| 16.875
| 33
| 0.637037
| 15
| 135
| 5.733333
| 0.6
| 0.209302
| 0.348837
| 0.465116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.281481
| 135
| 7
| 34
| 19.285714
| 0.886598
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
ac2c91cbd1075ba7e524560567d07e5c0bf7ab9a
| 4,005
|
py
|
Python
|
helper_scripts/csv_to_items.py
|
osagha/turktools
|
389c1243e837e946080b28a5ca83faf62890f4b4
|
[
"Unlicense",
"MIT"
] | null | null | null |
helper_scripts/csv_to_items.py
|
osagha/turktools
|
389c1243e837e946080b28a5ca83faf62890f4b4
|
[
"Unlicense",
"MIT"
] | null | null | null |
helper_scripts/csv_to_items.py
|
osagha/turktools
|
389c1243e837e946080b28a5ca83faf62890f4b4
|
[
"Unlicense",
"MIT"
] | null | null | null |
import csv

# Experimental conditions: every stimulus row is crossed with these context
# variants, and (for the non-prior tasks) with these answer variants.
contexts = ['context-low-bias', 'context-positive-bias', 'context-negative-bias']
answers = ['answer-high-certainty', 'answer-low-certainty', 'answer-reductive-answer', 'answer-exhaustive-answer', 'answer-non-answer']

# Source spreadsheet holding one stimulus per row.
STIMULI_CSV = 'stimuli/relevance_stimuli_all.csv'


def _build_target_lines(make_lines, answer_set=None, csv_path=STIMULI_CSV):
    """Read the stimuli CSV and build the flat list of target item lines.

    Args:
        make_lines: callable(row, item_number, context, answer) returning the
            output lines for one condition. `answer` is None when no answer
            crossing is requested.
        answer_set: optional list of answer conditions to cross with each
            context; None means contexts only (the prior task).
        csv_path: path of the stimuli CSV (parameterized for reuse/testing).

    Returns:
        List of output lines. Item numbers count CSV rows, so all conditions
        built from one stimulus row share the same number (this matches the
        original script's behavior).
    """
    lines = []
    with open(csv_path) as csvfile:
        reader = csv.DictReader(csvfile)
        item_counter = 1
        for row in reader:
            for c in contexts:
                if answer_set is None:
                    lines.extend(make_lines(row, item_counter, c, None))
                else:
                    for a in answer_set:
                        lines.extend(make_lines(row, item_counter, c, a))
            item_counter += 1
    return lines


def _write_items(items_path, fillers_path, lines):
    """Write the fillers file verbatim, a blank line, then the target lines."""
    with open(items_path, 'w') as outfile:
        with open(fillers_path, 'r') as fillers:
            for line in fillers:
                outfile.write(line)
            # Single separator between fillers and targets, as before.
            outfile.write("\n")
            for line in lines:
                outfile.write('%s\n' % line)


def _prior_lines(row, n, c, a):
    """Lines for one prior item: header, context, likelihood prompt."""
    return ['# target ' + str(n) + ' ' + c,
            row[c],
            row["prompt-likelihood"],
            '']


def _posterior_lines(row, n, c, a):
    """Lines for one posterior item: header, context, setup, answer, prompt."""
    return ['# target ' + str(n) + ' ' + c + ' ' + a,
            row[c],
            row["setup-posterior"],
            row[a],
            row["prompt-likelihood"],
            '']


def _dialogue_lines(prompt_key):
    """Return a line-maker for the dialogue-style tasks.

    Helpfulness and relevance items are identical except for which prompt
    column is used, so the prompt key is the only parameter.
    """
    def make_lines(row, n, c, a):
        return ['# target ' + str(n) + ' ' + c + ' ' + a,
                row[c],
                row["setup-relevance-your-turn"],
                row["Question"],
                row["name"] + " responds:",
                row[a],
                row[prompt_key].format(name=row["name"]),
                '']
    return make_lines


# create prior items text file
_write_items('stimuli/prior-items.txt', 'stimuli/prior-fillers.txt',
             _build_target_lines(_prior_lines))
# create posterior items text file
_write_items('stimuli/posterior-items.txt', 'stimuli/posterior-fillers.txt',
             _build_target_lines(_posterior_lines, answers))
# create helpfulness items text file
_write_items('stimuli/helpfulness-items.txt', 'stimuli/helpfulness-fillers.txt',
             _build_target_lines(_dialogue_lines("prompt-helpfulness"), answers))
# create relevance items text file
_write_items('stimuli/relevance-items.txt', 'stimuli/relevance-fillers.txt',
             _build_target_lines(_dialogue_lines("prompt-relevance"), answers))
| 37.429907
| 135
| 0.609238
| 520
| 4,005
| 4.523077
| 0.119231
| 0.10119
| 0.173469
| 0.19898
| 0.869048
| 0.869048
| 0.869048
| 0.869048
| 0.812925
| 0.796344
| 0
| 0.002701
| 0.260424
| 4,005
| 106
| 136
| 37.783019
| 0.791357
| 0.03221
| 0
| 0.829545
| 0
| 0
| 0.200207
| 0.132266
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011364
| 0
| 0.011364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ac575718613b23892ad7b591cc5481cee6c07fa5
| 48
|
py
|
Python
|
src/blueprints/credit/__init__.py
|
ingvalles/serverless-flask-challenge
|
ff5c8d2b31fd49d5c9e39648589974dd2a39e1de
|
[
"Unlicense"
] | null | null | null |
src/blueprints/credit/__init__.py
|
ingvalles/serverless-flask-challenge
|
ff5c8d2b31fd49d5c9e39648589974dd2a39e1de
|
[
"Unlicense"
] | null | null | null |
src/blueprints/credit/__init__.py
|
ingvalles/serverless-flask-challenge
|
ff5c8d2b31fd49d5c9e39648589974dd2a39e1de
|
[
"Unlicense"
] | null | null | null |
from src.blueprints.credit.routes import credit
| 24
| 47
| 0.854167
| 7
| 48
| 5.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 48
| 1
| 48
| 48
| 0.931818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
ac8c2ee215c64ae21ae469430899615208b6d6f2
| 184
|
py
|
Python
|
colour/volume/datasets/__init__.py
|
OmarWagih1/colour
|
bdc880a2783ff523dafb19f1233212dd03a639bd
|
[
"BSD-3-Clause"
] | 2
|
2020-06-20T03:44:41.000Z
|
2020-06-20T14:08:41.000Z
|
colour/volume/datasets/__init__.py
|
OmarWagih1/colour
|
bdc880a2783ff523dafb19f1233212dd03a639bd
|
[
"BSD-3-Clause"
] | null | null | null |
colour/volume/datasets/__init__.py
|
OmarWagih1/colour
|
bdc880a2783ff523dafb19f1233212dd03a639bd
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .optimal_colour_stimuli import ILLUMINANT_OPTIMAL_COLOUR_STIMULI
# Explicit public API of this datasets sub-package.
__all__ = ['ILLUMINANT_OPTIMAL_COLOUR_STIMULI']
| 23
| 69
| 0.815217
| 22
| 184
| 6.045455
| 0.545455
| 0.293233
| 0.451128
| 0.451128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006061
| 0.103261
| 184
| 7
| 70
| 26.285714
| 0.8
| 0.11413
| 0
| 0
| 0
| 0
| 0.204969
| 0.204969
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3bd75b583dc60920dbaa3159e030e85ba8dbd63c
| 571
|
py
|
Python
|
Examples/TV_Remote/pronto_code_converter.py
|
alex-sherman/MRPC-ESP8266
|
5e7cef34657411b026fed933b84594c8a446af93
|
[
"MIT"
] | 2
|
2017-09-28T21:29:26.000Z
|
2021-02-02T05:36:58.000Z
|
Examples/TV_Remote/pronto_code_converter.py
|
alex-sherman/MRPC-ESP8266
|
5e7cef34657411b026fed933b84594c8a446af93
|
[
"MIT"
] | null | null | null |
Examples/TV_Remote/pronto_code_converter.py
|
alex-sherman/MRPC-ESP8266
|
5e7cef34657411b026fed933b84594c8a446af93
|
[
"MIT"
] | 1
|
2017-03-13T02:30:54.000Z
|
2017-03-13T02:30:54.000Z
|
# Some sites give IR codes in this weird pronto format
# Cut off the preamble and footer and find which value represents a 1 in the spacing
code = "0000 0016 0000 0016 0000 0016 0000 003F 0000 003F 0000 003F 0000 0016 0000 003F 0000 0016 0000 0016 0000 0016 0000 0016 0000 0016 0000 0016 0000 0016 0000 0016 0000 0016 0000 003F 0000 0016 0000 003F 0000 003F 0000 0016 0000 0016 0000 003F 0000 003F 0000 0016 0000 003F 0000 0016 0000 0016 0000 003F 0000 003F 0000 0016 0000"
# Every second token carries the mark length: '003F' encodes a 1 bit, the
# shorter '0016' encodes a 0 bit. Assemble the bit string and print as int.
bits = ["1" if token == '003F' else "0" for token in code.split(' ')[1::2]]
print(int(''.join(bits), 2))
| 95.166667
| 333
| 0.744308
| 110
| 571
| 3.863636
| 0.354545
| 0.376471
| 0.564706
| 0.451765
| 0.611765
| 0.611765
| 0.611765
| 0.611765
| 0.611765
| 0.489412
| 0
| 0.555076
| 0.189142
| 571
| 6
| 334
| 95.166667
| 0.362851
| 0.236427
| 0
| 0
| 0
| 0.5
| 0.762673
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
3bf9fdfba2dc1f0af5d203127895ff577f3f1ab5
| 81
|
py
|
Python
|
kevin/developing/notation/__init__.py
|
cantbeblank96/kevin_toolbox
|
a258b2a42c9b4d042decb193354ecb7419bd837c
|
[
"MIT"
] | null | null | null |
kevin/developing/notation/__init__.py
|
cantbeblank96/kevin_toolbox
|
a258b2a42c9b4d042decb193354ecb7419bd837c
|
[
"MIT"
] | null | null | null |
kevin/developing/notation/__init__.py
|
cantbeblank96/kevin_toolbox
|
a258b2a42c9b4d042decb193354ecb7419bd837c
|
[
"MIT"
] | null | null | null |
from .write import Kevin_Notation_Writer
from .read import Kevin_Notation_Reader
| 27
| 40
| 0.876543
| 12
| 81
| 5.583333
| 0.666667
| 0.328358
| 0.567164
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098765
| 81
| 2
| 41
| 40.5
| 0.917808
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0202146ed214ebaff3e8bfae0e22f1064cdd5442
| 154
|
py
|
Python
|
test/test_fan_failure.py
|
erichaase/topcoder-python
|
de285d8092a94f2ec1b5c0c33eba55b5c27a5390
|
[
"MIT"
] | 1
|
2017-03-25T17:40:57.000Z
|
2017-03-25T17:40:57.000Z
|
test/test_fan_failure.py
|
erichaase/topcoder-python
|
de285d8092a94f2ec1b5c0c33eba55b5c27a5390
|
[
"MIT"
] | null | null | null |
test/test_fan_failure.py
|
erichaase/topcoder-python
|
de285d8092a94f2ec1b5c0c33eba55b5c27a5390
|
[
"MIT"
] | null | null | null |
from test.assert_json import assert_json
from topcoder.fan_failure import solution
def test_fan_failure():
    """Check the fan_failure solution against its stored JSON fixtures."""
    assert_json('fan_failure', solution)
| 25.666667
| 44
| 0.792208
| 22
| 154
| 5.227273
| 0.454545
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 154
| 5
| 45
| 30.8
| 0.871212
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
028892e3e48151db42f67943eea0c8cb278ee0f9
| 1,764
|
py
|
Python
|
Homework/2.homework/Tests/test_add.py
|
mevljas/Quality_and_testing
|
6a39610084b1538eae270682a6842270e8971b7f
|
[
"MIT"
] | null | null | null |
Homework/2.homework/Tests/test_add.py
|
mevljas/Quality_and_testing
|
6a39610084b1538eae270682a6842270e8971b7f
|
[
"MIT"
] | null | null | null |
Homework/2.homework/Tests/test_add.py
|
mevljas/Quality_and_testing
|
6a39610084b1538eae270682a6842270e8971b7f
|
[
"MIT"
] | null | null | null |
import pexpect
def _add_patient(baza, emso, name, surname, age, expected):
    """Drive one `add` dialogue on the CLI and expect `expected` as result."""
    baza.expect("command>")
    baza.send("add")
    baza.expect("add> EMSO:")
    baza.send(emso)
    baza.expect("add> NAME:")
    baza.send(name)
    baza.expect("add> SURNAME:")
    baza.send(surname)
    baza.expect("add> AGE:")
    baza.send(age)
    baza.expect(expected)


def test_bst_print():
    """Exercise the `add` command: valid add, two invalid ages, duplicate.

    Prints PASSED/FAILED like the sibling tests instead of raising, and
    always kills the spawned process.
    """
    # NOTE(review): pexpect exposes `spawn(command)`, not a `pexpect()`
    # constructor, and no command is given here -- confirm the intended
    # process invocation before relying on this test.
    baza = pexpect.pexpect()
    try:
        # Valid patient is accepted.
        _add_patient(baza, "3105940500232", "Janez Albert", "Novak", "80",
                     ">> OK")
        # Too-short EMSO / implausible ages are rejected.
        _add_patient(baza, "310594050023", "Janez Albertt", "Novakk", "1",
                     ">> Invalid input data")
        _add_patient(baza, "3105940500235", "Janez Albertt", "Novakk", "-1",
                     ">> Invalid input data")
        # Re-adding the first patient is a duplicate.
        _add_patient(baza, "3105940500232", "Janez Albert", "Novak", "80",
                     ">> Patient already exists")
        baza.expect("command>")
        baza.send("exit")
        baza.expect("Bye")
        print("PASSED\ttest_add")
    except Exception:
        # Was a bare `except:`; narrowed so Ctrl-C/SystemExit still propagate.
        print("FAILED\ttest_add")
    finally:
        baza.kill()


if __name__ == "__main__":
    test_bst_print()
| 24.164384
| 48
| 0.524376
| 195
| 1,764
| 4.671795
| 0.220513
| 0.285401
| 0.228321
| 0.115258
| 0.787047
| 0.759605
| 0.759605
| 0.759605
| 0.759605
| 0.759605
| 0
| 0.046266
| 0.301587
| 1,764
| 72
| 49
| 24.5
| 0.693182
| 0
| 0
| 0.672414
| 0
| 0
| 0.266024
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.017241
| 0.017241
| null | null | 0.068966
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5a32a99db50efa45125e7e6383dc64180d794e73
| 39,026
|
py
|
Python
|
scripts/tf_cnn_benchmarks/leading_indicators_test.py
|
kyuucr/benchmarks
|
5054769776afbec4f920d6e4b6d4dc325a9ed1d2
|
[
"Apache-2.0"
] | null | null | null |
scripts/tf_cnn_benchmarks/leading_indicators_test.py
|
kyuucr/benchmarks
|
5054769776afbec4f920d6e4b6d4dc325a9ed1d2
|
[
"Apache-2.0"
] | null | null | null |
scripts/tf_cnn_benchmarks/leading_indicators_test.py
|
kyuucr/benchmarks
|
5054769776afbec4f920d6e4b6d4dc325a9ed1d2
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmark various leading indicators CNNs.
The purpose of these tests is to test each model as a high level baseline and
to ensure the various variable_update options have not regressing. Not all
options are tested. The tests focus on the most viable options.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ctypes
import logging
import os
import sys
from absl import flags
from absl.testing import absltest # pylint: disable=unused-import
import benchmark_cnn
from platforms import util as platforms_util
import tensorflow as tf
# Command-line override for the batch count; when left at None each benchmark
# keeps its own default (see BenchmarkBase._shared_params).
flags.DEFINE_integer('num_batches', None,
                     'number of batches to run, excluding warmup')
class BenchmarkBase(tf.test.Benchmark):
  """Base class for all benchmarks in this file."""

  def __init__(self, output_dir=None, root_data_dir=None):
    """Base class for all benchmarks in this file.

    Args:
      output_dir: directory where to output e.g. log files
      root_data_dir: directory under which to look for dataset
    """
    # Load default values if the benchmark is not run with absl.app.run()
    if not flags.FLAGS.is_parsed():
      flags.FLAGS.mark_as_parsed()
    # Synthetic TFRecord data shipped with the test harness.
    self.fake_data_dir = os.path.join(platforms_util.get_test_data_dir(),
                                      'fake_tf_record_data')
    if root_data_dir is None:
      # Default (internal) location of the ImageNet 2012 TFRecord dataset.
      self.data_dir = ('/readahead/200M/placer/prod/home/distbelief/'
                       'imagenet-tensorflow/imagenet-2012-tfrecord')
    else:
      self.data_dir = os.path.join(root_data_dir, 'imagenet')
    self.output_dir = output_dir

  def _run_benchmark(self, params):
    """Run a CNN benchmark and report its results.

    Args:
      params: Params tuple, typically created by benchmark_cnn.make_params or
        benchmark_cnn.make_params_from_flags.
    """
    logging.info('Running benchmark [%s]', self._get_name())
    params = benchmark_cnn.setup(params)
    bench = benchmark_cnn.BenchmarkCNN(params)
    bench.print_info()
    stats = bench.run()
    # Forward throughput/loss/accuracy to the benchmark report; keys absent
    # from `stats` are simply omitted.
    extras = {}
    extras['examples_per_sec'] = stats.get('images_per_sec')
    if 'last_average_loss' in stats:
      extras['last_average_loss'] = stats['last_average_loss']
    if 'top_1_accuracy' in stats:
      extras['top_1_accuracy'] = stats['top_1_accuracy']
    if 'top_5_accuracy' in stats:
      extras['top_5_accuracy'] = stats['top_5_accuracy']
    self.report_benchmark(
        iters=stats.get('num_steps'),
        wall_time=stats.get('average_wall_time'),
        extras=extras)

  def _shared_params(self):
    """Returns shared parameters for all benchmarks in this file."""
    params = {}
    # Honor the --num_batches flag when it was set on the command line.
    if flags.FLAGS.num_batches is not None:
      params['num_batches'] = flags.FLAGS.num_batches
    if self.output_dir is not None:
      params['benchmark_log_dir'] = self.output_dir
    return benchmark_cnn.make_params(**params)

  def _binary_search_batch_size(self, params, init_batch_size):
    """Find the max batch_size using binary search.

    Doubles the batch size until a run hits ResourceExhaustedError (OOM),
    then binary-searches the remaining interval; the largest batch size that
    ran successfully is reported as extras['max_batch_size'].

    Args:
      params: Params tuple to benchmark with (batch_size gets overridden).
      init_batch_size: positive batch size that starts the doubling phase.
    """
    assert init_batch_size > 0
    low_batch_size = 0
    high_batch_size = None
    batch_size = init_batch_size
    # No need to run a warmup or many batches; if it doesn't OOM after 10
    # batches, it should work in general.
    params = params._replace(num_batches=10, num_warmup_batches=0)
    # Find high_batch_size first.
    tf.logging.info(
        'Looking for upper bound to batch size, starting with %d' % batch_size)
    while high_batch_size is None:
      tf.logging.info('Trying batch_size %d' % batch_size)
      params = params._replace(batch_size=batch_size)
      bench = benchmark_cnn.BenchmarkCNN(params)
      bench.print_info()
      try:
        bench.run()
        # Still fits: keep as lower bound and double.
        low_batch_size = batch_size
        batch_size *= 2
      except tf.errors.ResourceExhaustedError:
        high_batch_size = batch_size - 1
    # Binary Search
    tf.logging.info(
        'Max batch size is in range (%d, %d]. Starting binary search to find '
        'exact max batch size.' % (low_batch_size, batch_size))
    while low_batch_size < high_batch_size:
      # Midpoint rounded up so the loop always makes progress.
      batch_size = (low_batch_size + high_batch_size + 1) // 2
      tf.logging.info('Trying batch_size %d' % batch_size)
      params = params._replace(batch_size=batch_size)
      bench = benchmark_cnn.BenchmarkCNN(params)
      bench.print_info()
      try:
        bench.run()
        low_batch_size = batch_size
      except tf.errors.ResourceExhaustedError:
        high_batch_size = batch_size - 1
    self.report_benchmark(extras={'max_batch_size': low_batch_size})
class Resnet50BenchmarksInferenceCpu(BenchmarkBase):
  """Benchmarks for ResNet50 inference on CPU."""

  def _shared_params(self):
    """Returns shared parameters for all ResNet50 CPU-inference benchmarks."""
    base_params = BenchmarkBase._shared_params(self)
    return base_params._replace(
        model='resnet50',
        device='cpu',
        data_format='NHWC',
        num_gpus=1,
        num_warmup_batches=5,
        num_batches=50,
        num_intra_threads=0,
        distortions=False,
        forward_only=True)

  def benchmark_synth_forward_batch1(self):
    """Tests 1 CPU batch size 1."""
    self._run_benchmark(self._shared_params()._replace(batch_size=1))

  def benchmark_synth_forward_batch16(self):
    """Tests 1 CPU batch size 16."""
    self._run_benchmark(self._shared_params()._replace(batch_size=16))
class FrozenResnet50BenchmarksInferenceCpu(Resnet50BenchmarksInferenceCpu):
  """Benchmarks for ResNet50 frozen graph inference on CPU."""

  def _shared_params(self):
    """CPU-inference shared params with graph freezing enabled."""
    base = super(FrozenResnet50BenchmarksInferenceCpu, self)._shared_params()
    return base._replace(freeze_when_forward_only=True)
class Resnet50BenchmarksInference(BenchmarkBase):
  """Benchmarks for ResNet50 inference."""

  def _shared_params(self):
    """Returns shared parameters for all ResNet50 inference benchmarks."""
    base_params = BenchmarkBase._shared_params(self)
    return base_params._replace(
        model='resnet50', num_gpus=1, forward_only=True, distortions=False)

  def benchmark_synth_forward_batch128(self):
    """Tests 1 GPU batch size 128."""
    self._run_benchmark(self._shared_params()._replace(batch_size=128))

  def benchmark_fp16_synth_forward_batch128(self):
    """Tests 1 GPU batch size 128 FP16."""
    self._run_benchmark(
        self._shared_params()._replace(batch_size=128, use_fp16=True))

  def benchmark_fp16_synth_forward_batch16(self):
    """Tests 1 GPU batch size 16 FP16."""
    self._run_benchmark(
        self._shared_params()._replace(batch_size=16, use_fp16=True))

  def benchmark_xla_synth_forward_batch128(self):
    """Tests 1 GPU batch size 128 with XLA."""
    self._run_benchmark(
        self._shared_params()._replace(batch_size=128, xla=True))

  def benchmark_fp16_xla_synth_forward_batch128(self):
    """Tests 1 GPU batch size 128 FP16 with XLA."""
    self._run_benchmark(
        self._shared_params()._replace(batch_size=128, use_fp16=True, xla=True))

  def benchmark_fp16_xla_synth_forward_batch16(self):
    """Tests 1 GPU batch size 16 FP16 with XLA."""
    self._run_benchmark(
        self._shared_params()._replace(batch_size=16, use_fp16=True, xla=True))
class FrozenResnet50BenchmarksInference(Resnet50BenchmarksInference):
  """Benchmarks for ResNet50 frozen graph inference."""

  def _shared_params(self):
    """Inference shared params with graph freezing enabled."""
    base = super(FrozenResnet50BenchmarksInference, self)._shared_params()
    return base._replace(freeze_when_forward_only=True)

  def benchmark_trt_synth_forward_batch128(self):
    """Tests 1 GPU batch size 128."""
    run_params = self._shared_params()._replace(batch_size=128, trt_mode='FP32')
    self._run_benchmark(run_params)

  # TODO(laigd): enable fp16 tests for TF-TRT, it's currently not supported yet.
  # def benchmark_fp16_trt_synth_forward_batch128(self):
  #   """Tests 1 GPU batch size 128 FP16."""
  #   params = self._shared_params()._replace(
  #       batch_size=128, use_fp16=True, trt_mode='FP16')
  #   self._run_benchmark(params)
  # Test with batch size 16 to compare with native TF GPU implementation and
  # XLA.
  # def benchmark_fp16_trt_synth_forward_batch16(self):
  #   """Tests 1 GPU batch size 16 FP16."""
  #   params = self._shared_params()._replace(
  #       batch_size=16, use_fp16=True, trt_mode='FP16')
  #   self._run_benchmark(params)
class Resnet50Benchmarks(BenchmarkBase):
  """Benchmark resnet50 configurations.

  Method names encode the configuration: synth/fake/real pick the data
  source, Ngpu is the GPU count, and the suffix says where variables live
  (gpuparams / cpuparams = parameter server on GPU/CPU, gpureplicated =
  replicated variables with NCCL all-reduce).
  """

  def _shared_params(self):
    """Returns shared parameters for all ResNet50 benchmarks."""
    return BenchmarkBase._shared_params(self)._replace(
        model='resnet50', batch_size=128, distortions=False)

  def _shared_params_fp16(self):
    """Returns shared parameters for all ResNet50 FP16 benchmarks."""
    return BenchmarkBase._shared_params(self)._replace(
        model='resnet50',
        batch_size=256,
        distortions=False,
        use_fp16=True,
    )

  def benchmark_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with synthetic data."""
    params = self._shared_params()._replace(num_gpus=1)
    self._run_benchmark(params)

  def benchmark_synth_1gpu_gpuparams_batch64(self):
    """Tests 1 gpu with synthetic data."""
    params = self._shared_params()._replace(num_gpus=1, batch_size=64)
    self._run_benchmark(params)

  def benchmark_fake_1gpu_gpuparams(self):
    """Tests 1 gpu with fake data."""
    params = self._shared_params()._replace(
        num_gpus=1, data_dir=self.fake_data_dir, data_name='imagenet')
    self._run_benchmark(params)

  def benchmark_synth_1gpu_max_batch_size(self):
    """Finds largest batch size that can be run with 1 gpu using synth data."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server')
    self._binary_search_batch_size(params, init_batch_size=128)

  def benchmark_synth_4gpu_gpuparams(self):
    """Tests 4 gpus with synthetic data with parameters on the gpus."""
    params = self._shared_params()._replace(
        num_gpus=4, variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_fake_4gpu_gpuparams(self):
    """Tests 4 gpus with fake data with parameters on the gpus."""
    params = self._shared_params()._replace(
        num_gpus=4,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_synth_4gpu_cpuparams(self):
    """Tests 4 gpus with synthetic data with parameters on the cpu."""
    params = self._shared_params()._replace(
        num_gpus=4,
        variable_update='parameter_server',
        local_parameter_device='cpu')
    self._run_benchmark(params)

  def benchmark_synth_8gpu_cpuparams(self):
    """Tests 8 gpus with synthetic data with parameters on the cpu."""
    params = self._shared_params()._replace(
        num_gpus=8,
        variable_update='parameter_server',
        local_parameter_device='cpu')
    self._run_benchmark(params)

  def benchmark_fake_4gpu_cpuparams(self):
    """Tests 4 gpus with fake data with parameters on the cpu."""
    params = self._shared_params()._replace(
        num_gpus=4,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        variable_update='parameter_server',
        local_parameter_device='cpu')
    self._run_benchmark(params)

  def benchmark_fake_8gpu_cpuparams(self):
    """Tests 8 gpus with fake data with parameters on the cpu."""
    params = self._shared_params()._replace(
        num_gpus=8,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        variable_update='parameter_server',
        local_parameter_device='cpu')
    self._run_benchmark(params)

  def benchmark_synth_4gpu_gpureplicated(self):
    """Tests 4 gpu with synthetic data with parameters replicated."""
    params = self._shared_params()._replace(
        num_gpus=4,
        variable_update='replicated',
        all_reduce_spec='nccl',
        compact_gradient_transfer=False,
        gradient_repacking=2)
    self._run_benchmark(params)

  def benchmark_synth_8gpu_gpureplicated(self):
    """Tests 8 gpu with synthetic data with parameters replicated."""
    params = self._shared_params()._replace(
        num_gpus=8,
        variable_update='replicated',
        all_reduce_spec='nccl',
        compact_gradient_transfer=False,
        gradient_repacking=2)
    self._run_benchmark(params)

  def benchmark_fake_8gpu_gpureplicated(self):
    """Tests 8 gpu with fake data with parameters replicated."""
    params = self._shared_params()._replace(
        num_gpus=8,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        variable_update='replicated',
        all_reduce_spec='nccl',
        compact_gradient_transfer=False,
        gradient_repacking=2)
    self._run_benchmark(params)

  # FP16 mixed-precisions tests.
  def benchmark_fp16_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with synthetic data with parameters on the gpu."""
    params = self._shared_params_fp16()._replace(
        num_gpus=1, variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_fp16_synth_1gpu_gpuparams_batch128(self):
    """Tests 1 gpu with synthetic data with parameters on the gpu."""
    params = self._shared_params_fp16()._replace(
        num_gpus=1, batch_size=128, variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_fp16_synth_1gpu_gpuparams_batch64(self):
    """Tests 1 gpu with synthetic data with parameters on the gpu."""
    params = self._shared_params_fp16()._replace(
        num_gpus=1, batch_size=64, variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_fp16_synth_4gpu_gpuparams(self):
    """Tests 4 gpus with synthetic data with parameters on the gpus."""
    params = self._shared_params_fp16()._replace(
        num_gpus=4, variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_fp16_synth_4gpu_gpureplicated(self):
    """Tests 4 gpu with synthetic data with nccl and all_reduce."""
    params = self._shared_params_fp16()._replace(
        num_gpus=4,
        variable_update='replicated',
        all_reduce_spec='nccl',
        compact_gradient_transfer=False,
        gradient_repacking=2)
    self._run_benchmark(params)

  def benchmark_fp16_synth_8gpu_gpureplicated(self):
    """Tests 8 gpu with synthetic with nccl and all_reduce."""
    params = self._shared_params_fp16()._replace(
        num_gpus=8,
        variable_update='replicated',
        all_reduce_spec='nccl',
        compact_gradient_transfer=False,
        gradient_repacking=2)
    self._run_benchmark(params)

  def benchmark_fp16_fake_1gpu_gpuparams(self):
    """Tests 1 gpus with fake data."""
    params = self._shared_params_fp16()._replace(
        num_gpus=1,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_fp16_fake_8gpu_gpureplicated(self):
    """Tests 8 gpus with fake data."""
    params = self._shared_params_fp16()._replace(
        num_gpus=8,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        variable_update='replicated',
        all_reduce_spec='nccl',
        compact_gradient_transfer=False,
        gradient_repacking=2)
    self._run_benchmark(params)

  def benchmark_fp16_fakedistort_8gpu_gpureplicated(self):
    """Tests 8 gpus with fake distorted data."""
    params = self._shared_params_fp16()._replace(
        num_gpus=8,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        distortions=True,
        variable_update='replicated',
        all_reduce_spec='nccl',
        compact_gradient_transfer=False,
        gradient_repacking=2)
    self._run_benchmark(params)

  # XLA versions of Resnet50 tests only for single GPU.
  def benchmark_xla_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with synthetic data with XLA."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server', xla=True)
    self._run_benchmark(params)

  def benchmark_fp16_xla_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with fp16, synthetic data with XLA."""
    params = self._shared_params_fp16()._replace(
        num_gpus=1, variable_update='parameter_server', xla=True, use_fp16=True)
    self._run_benchmark(params)

  # Test does not run as part of continuous testing on guitar.
  def benchmark_ng_xla_batch64_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with XLA, synth data, and batch 64."""
    params = self._shared_params()._replace(
        num_gpus=1, batch_size=64, variable_update='parameter_server', xla=True)
    self._run_benchmark(params)

  def benchmark_fp16_xla_batch64_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with fp16, XLA, synth data, and batch 64."""
    params = self._shared_params_fp16()._replace(
        num_gpus=1,
        batch_size=64,
        variable_update='parameter_server',
        xla=True,
        use_fp16=True)
    self._run_benchmark(params)

  def benchmark_fp16_xla_batch128_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with fp16, XLA, and synth data."""
    params = self._shared_params_fp16()._replace(
        num_gpus=1,
        batch_size=128,
        variable_update='parameter_server',
        xla=True,
        use_fp16=True)
    self._run_benchmark(params)

  def benchmark_xla_synth_1gpu_max_batch_size(self):
    """Finds largest batch that can be run with XLA, 1 gpu, and synth data."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server', xla=True)
    self._binary_search_batch_size(params, init_batch_size=128)

  def benchmark_xla_real_1gpu_gpuparams(self):
    """Tests 1 gpu with real data with XLA."""
    params = self._shared_params()._replace(
        num_gpus=1,
        data_dir=self.data_dir,
        variable_update='parameter_server',
        xla=True)
    self._run_benchmark(params)

  # Test does not run as part of continuous testing.
  def benchmark_xla_fake_1gpu_gpuparams(self):
    """Tests 1 gpu with fake data with XLA."""
    params = self._shared_params()._replace(
        num_gpus=1,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        variable_update='parameter_server',
        xla=True)
    self._run_benchmark(params)

  # Test does not run as part of continuous testing.
  def benchmark_xla_fakedistort_1gpu_gpuparams(self):
    """Tests 1 gpu with fake distorted data with XLA."""
    params = self._shared_params()._replace(
        num_gpus=1,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        distortions=True,
        variable_update='parameter_server',
        xla=True)
    self._run_benchmark(params)
class Resnet50v15Benchmarks(BenchmarkBase):
""""Benchmark various ResNet50V1.5 configurations.
ResNetV1.5 differs from V1 in stride 2 is used in the first 3x3 convolution of
each block instead of the first 1x1 convolution.
"""
def _shared_params_fp16(self):
"""Returns shared parameters for all ResNet50v1.5 FP16 benchmarks."""
return BenchmarkBase._shared_params(self)._replace(
model='resnet50_v1.5',
batch_size=256,
distortions=False,
use_fp16=True,
)
def benchmark_fp16_synth_1gpu_gpuparams(self):
"""Tests 1 gpu with synthetic data."""
params = self._shared_params_fp16()._replace(num_gpus=1)
self._run_benchmark(params)
def benchmark_fp16_batch256_synth_8gpu_gpuparams(self):
"""Tests 8 gpus with synthetic data at batch 256."""
params = self._shared_params_fp16()._replace(num_gpus=8, batch_size=256)
self._run_benchmark(params)
def benchmark_fp16_batch128_synth_1gpu_gpuparams(self):
"""Tests 1 gpu with synthetic data at batch 128 (useful for small GPUs)."""
params = self._shared_params_fp16()._replace(num_gpus=1, batch_size=128)
self._run_benchmark(params)
def benchmark_fp16_fake_1gpu_gpuparams(self):
"""Tests 1 gpu with fake data."""
params = self._shared_params_fp16()._replace(
num_gpus=1, data_dir=self.fake_data_dir, data_name='imagenet')
self._run_benchmark(params)
def benchmark_fp16_synth_4gpu_cpuparams(self):
"""Tests 4 gpus with synthetic data with parameters on the cpu."""
params = self._shared_params_fp16()._replace(
num_gpus=4,
variable_update='parameter_server',
local_parameter_device='cpu')
self._run_benchmark(params)
def benchmark_fp16_synth_8gpu_gpureplicated(self):
"""Tests 8 gpu with synthetic data with parameters replicated."""
params = self._shared_params_fp16()._replace(
num_gpus=8,
num_batches=200,
variable_update='replicated',
all_reduce_spec='nccl',
gradient_repacking=2)
self._run_benchmark(params)
def benchmark_fp16_fake_8gpu_gpureplicated(self):
"""Tests 8 gpu with fake data with parameters replicated."""
params = self._shared_params_fp16()._replace(
num_gpus=8,
num_batches=200,
data_dir=self.fake_data_dir,
data_name='imagenet',
variable_update='replicated',
all_reduce_spec='nccl',
gradient_repacking=2)
self._run_benchmark(params)
# XLA versions of Resnet50v1.5 tests.
def benchmark_fp16_xla_synth_1gpu_gpuparams(self):
"""Tests 1 gpu with fp16, synthetic data with XLA."""
params = self._shared_params_fp16()._replace(num_gpus=1, xla=True)
self._run_benchmark(params)
def benchmark_fp16_xla_batch128_synth_1gpu_gpuparams(self):
"""Tests 1 gpu with fp16, batch128, synthetic data with XLA."""
params = self._shared_params_fp16()._replace(
num_gpus=1, batch_size=128, xla=True)
self._run_benchmark(params)
def benchmark_fp16_xla_compile_synth_1gpu_gpuparams(self):
"""Tests 1 gpu with synthetic data."""
params = self._shared_params_fp16()._replace(num_gpus=1, xla_compile=True)
self._run_benchmark(params)
def benchmark_fp16_xla_compile_batch128_synth_1gpu_gpuparams(self):
"""Tests 1 gpu with synthetic data at batch 128 (useful for small GPUs)."""
params = self._shared_params_fp16()._replace(
num_gpus=1, num_batches=200, batch_size=128, xla_compile=True)
self._run_benchmark(params)
def benchmark_fp16_xla_batch256_synth_8gpu_gpuparams(self):
"""Tests 8 gpu with synthetic data and xla autojit."""
params = self._shared_params_fp16()._replace(
num_gpus=8, num_batches=200, batch_size=256, xla=True)
self._run_benchmark(params)
def benchmark_fp16_xla_compile_fake_1gpu_gpuparams(self):
"""Tests 1 gpu with fake data."""
params = self._shared_params_fp16()._replace(
num_gpus=1,
data_dir=self.fake_data_dir,
data_name='imagenet',
xla_compile=True)
self._run_benchmark(params)
def benchmark_fp16_xla_compile_synth_8gpu_gpureplicated(self):
"""Tests 8 gpu with synthetic data with parameters replicated."""
params = self._shared_params_fp16()._replace(
num_gpus=8,
num_batches=200,
variable_update='replicated',
all_reduce_spec='nccl',
gradient_repacking=2,
xla_compile=True)
self._run_benchmark(params)
def benchmark_fp16_xla_compile_fake_8gpu_gpureplicated(self):
"""Tests 8 gpu with fake data with parameters replicated."""
params = self._shared_params_fp16()._replace(
num_gpus=8,
num_batches=200,
data_dir=self.fake_data_dir,
data_name='imagenet',
variable_update='replicated',
all_reduce_spec='nccl',
gradient_repacking=2,
xla_compile=True)
self._run_benchmark(params)
class Vgg16Benchmarks(BenchmarkBase):
  """Benchmark various vgg16 configurations."""
  # Fixed: class docstring previously opened with four quotes, leaving a stray
  # leading '"' character in __doc__.

  def _shared_params(self):
    """Returns shared parameters for all vgg16 benchmarks."""
    return BenchmarkBase._shared_params(self)._replace(
        model='vgg16', batch_size=128, distortions=False)

  def benchmark_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with synthetic data with parameters on gpu."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_fp16_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with fp16 and synthetic data with parameters on gpu."""
    params = self._shared_params()._replace(
        num_gpus=1, use_fp16=True, variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_synth_4gpu_gpureplicated(self):
    """Tests 4 gpus with synthetic data with parameters replicated."""
    params = self._shared_params()._replace(
        num_gpus=4,
        all_reduce_spec='nccl',
        variable_update='replicated',
        compact_gradient_transfer=False,
        gradient_repacking=2)
    self._run_benchmark(params)

  def benchmark_synth_8gpu_gpureplicated(self):
    """Tests 8 gpus with synthetic data with parameters replicated."""
    params = self._shared_params()._replace(
        num_gpus=8,
        all_reduce_spec='nccl',
        variable_update='replicated',
        compact_gradient_transfer=False,
        gradient_repacking=2)
    self._run_benchmark(params)

  # XLA versions of VGG16 tests only for single GPU.
  def benchmark_xla_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with synthetic data and XLA."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server', xla=True)
    self._run_benchmark(params)

  def benchmark_fp16_xla_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with fp16, synthetic data, and XLA."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server', xla=True, use_fp16=True)
    self._run_benchmark(params)

  # Test does not run as part of continuous testing.
  def benchmark_xla_fake_1gpu_gpuparams(self):
    """Tests 1 gpu with fake data and XLA."""
    params = self._shared_params()._replace(
        num_gpus=1,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        variable_update='parameter_server',
        xla=True)
    self._run_benchmark(params)

  def benchmark_xla_real_1gpu_gpuparams(self):
    """Tests 1 gpu with real data and XLA."""
    params = self._shared_params()._replace(
        num_gpus=1,
        data_dir=self.data_dir,
        variable_update='parameter_server',
        xla=True)
    self._run_benchmark(params)
class TrivialBenchmarks(BenchmarkBase):
  """Benchmarks for the trivial model.

  The purpose of these tests is to verify the upper bound for the input
  pipeline. Fake data creates an upper bound on the input pipeline throughput.
  """
  # Fixed: docstring previously opened with four quotes (stray leading '"')
  # and misspelled "upper bound" as "upperbound".

  def _shared_params(self):
    """Returns shared parameters for all trivial benchmarks."""
    return BenchmarkBase._shared_params(self)._replace(
        model='trivial',
        num_gpus=8,
        distortions=False,
        variable_update='independent',
        data_dir=self.fake_data_dir)

  def benchmark_fake_64batch(self):
    """Tests fake imagenet data at batch 64."""
    params = self._shared_params()._replace(batch_size=64, data_name='imagenet')
    self._run_benchmark(params)

  def benchmark_fake_128batch(self):
    """Tests fake imagenet data at batch 128."""
    params = self._shared_params()._replace(
        batch_size=128, data_name='imagenet')
    self._run_benchmark(params)

  def benchmark_fake_256batch(self):
    """Tests fake imagenet data at batch 256."""
    params = self._shared_params()._replace(
        batch_size=256, data_name='imagenet')
    self._run_benchmark(params)

  def benchmark_fakedistort_128batch(self):
    """Tests fake imagenet data with distortions at batch 128."""
    params = self._shared_params()._replace(
        batch_size=128, data_name='imagenet', distortions=True)
    self._run_benchmark(params)
class AlexnetBenchmarks(BenchmarkBase):
  """Benchmarks for alexnet."""
  # Fixed: class docstring previously opened with four quotes, leaving a
  # stray leading '"' character in __doc__.

  def _shared_params(self):
    """Returns shared parameters for all alexnet benchmarks."""
    return BenchmarkBase._shared_params(self)._replace(
        model='alexnet', batch_size=512, distortions=False)

  def benchmark_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with synthetic data with parameters on gpu."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_fp16_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with fp16 and synthetic data with parameters on gpu."""
    params = self._shared_params()._replace(
        num_gpus=1, use_fp16=True, variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_synth_8gpu_gpureplicated(self):
    """Tests 8 gpus with synthetic data with parameters replicated."""
    params = self._shared_params()._replace(
        num_gpus=8,
        variable_update='replicated',
        all_reduce_spec='nccl',
        compact_gradient_transfer=False,
        gradient_repacking=2)
    self._run_benchmark(params)

  def benchmark_fake_8gpu_gpureplicated(self):
    """Tests 8 gpus with fake data with parameters replicated."""
    params = self._shared_params()._replace(
        num_gpus=8,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        variable_update='replicated',
        all_reduce_spec='nccl',
        compact_gradient_transfer=False,
        gradient_repacking=2)
    self._run_benchmark(params)

  # XLA Benchmark tests for AlexNet.
  # NOTE(review): these two method names use "_1gpuparams" rather than the
  # "_1gpu_gpuparams" pattern used elsewhere; kept as-is because benchmark
  # names are the external interface (renaming would change reported results).
  def benchmark_xla_synth_1gpuparams(self):
    """Tests 1 gpu with synthetic data and XLA."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server', xla=True)
    self._run_benchmark(params)

  def benchmark_fp16_xla_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with fp16, synthetic data and XLA."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server', xla=True, use_fp16=True)
    self._run_benchmark(params)

  # Test does not run as part of continuous testing.
  def benchmark_xla_fake_1gpuparams(self):
    """Tests 1 gpu with fake data and XLA."""
    params = self._shared_params()._replace(
        num_gpus=1,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        variable_update='parameter_server',
        xla=True)
    self._run_benchmark(params)

  def benchmark_xla_real_1gpuparams(self):
    """Tests 1 gpu with real data and XLA."""
    params = self._shared_params()._replace(
        num_gpus=1,
        data_dir=self.data_dir,
        variable_update='parameter_server',
        xla=True)
    self._run_benchmark(params)
class InceptionV3Benchmarks(BenchmarkBase):
  """Benchmark for InceptionV3."""
  # Fixed: class docstring previously opened with four quotes, leaving a
  # stray leading '"' character in __doc__.

  def _shared_params(self):
    """Returns shared parameters for all InceptionV3 benchmarks."""
    return BenchmarkBase._shared_params(self)._replace(
        model='inception3', batch_size=64, distortions=False)

  def benchmark_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with synthetic data."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_fp16_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with fp16 and synthetic data."""
    params = self._shared_params()._replace(
        num_gpus=1, use_fp16=True, variable_update='parameter_server')
    self._run_benchmark(params)

  def benchmark_synth_1gpu_max_batch_size(self):
    """Finds largest batch size that can be run with 1 gpu using synth data."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server')
    self._binary_search_batch_size(params, init_batch_size=128)

  def benchmark_xla_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with synthetic data and XLA."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server', xla=True)
    self._run_benchmark(params)

  def benchmark_fp16_xla_synth_1gpu_gpuparams(self):
    """Tests 1 gpu with fp16, XLA and synthetic data."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server', xla=True, use_fp16=True)
    self._run_benchmark(params)

  def benchmark_xla_synth_1gpu_max_batch_size(self):
    """Finds largest batch that can be run with XLA, 1 gpu, and synth data."""
    params = self._shared_params()._replace(
        num_gpus=1, variable_update='parameter_server', xla=True)
    self._binary_search_batch_size(params, init_batch_size=128)

  # Test does not run as part of continuous testing.
  def benchmark_xla_fake_1gpu_gpuparams(self):
    """Tests 1 gpu with fake data with XLA."""
    params = self._shared_params()._replace(
        num_gpus=1,
        data_dir=self.fake_data_dir,
        data_name='imagenet',
        variable_update='parameter_server',
        xla=True)
    self._run_benchmark(params)

  def benchmark_xla_real_1gpu_gpuparams(self):
    """Tests 1 gpu with real data with XLA."""
    params = self._shared_params()._replace(
        num_gpus=1,
        data_dir=self.data_dir,
        variable_update='parameter_server',
        xla=True)
    self._run_benchmark(params)
class NcfBenchmarks(BenchmarkBase):
  """Benchmarks for neural collaborative filtering."""

  def _shared_params(self):
    """Base parameter set shared by every NCF benchmark."""
    base = BenchmarkBase._shared_params(self)
    return base._replace(
        model='ncf', batch_size=64 * 1024, num_gpus=1, num_warmup_batches=1)

  def benchmark_synth_1gpu_gpuparams(self):
    """Runs NCF on one GPU with synthetic data."""
    self._run_benchmark(
        self._shared_params()._replace(variable_update='parameter_server'))

  def benchmark_fp16_synth_1gpu_gpuparams(self):
    """Runs NCF on one GPU with fp16 and synthetic data."""
    self._run_benchmark(self._shared_params()._replace(
        use_fp16=True, variable_update='parameter_server'))

  def benchmark_xla_synth_1gpu_gpuparams(self):
    """Runs NCF on one GPU with XLA autojit and synthetic data."""
    self._run_benchmark(self._shared_params()._replace(
        xla=True, variable_update='parameter_server'))

  def benchmark_fp16_xla_synth_1gpu_gpuparams(self):
    """Runs NCF on one GPU with fp16, XLA autojit and synthetic data."""
    self._run_benchmark(self._shared_params()._replace(
        use_fp16=True, xla=True, variable_update='parameter_server'))

  def benchmark_xla_compile_synth_1gpu_gpuparams(self):
    """Runs NCF on one GPU with xla.compile and synthetic data."""
    self._run_benchmark(self._shared_params()._replace(
        xla_compile=True, variable_update='parameter_server'))

  def benchmark_fp16_xla_compile_synth_1gpu_gpuparams(self):
    """Runs NCF on one GPU with fp16, xla.compile and synthetic data."""
    self._run_benchmark(self._shared_params()._replace(
        use_fp16=True, xla_compile=True, variable_update='parameter_server'))
class DeepSpeech2Benchmarks(BenchmarkBase):
  """Benchmarks for DeepSpeech2 model."""

  def _shared_params(self):
    """Base parameter set shared by every DeepSpeech2 benchmark."""
    base = BenchmarkBase._shared_params(self)
    return base._replace(
        model='deepspeech2', batch_size=32, num_gpus=1,
        data_name='librispeech')

  def benchmark_synth_1gpu_gpuparams(self):
    """Runs DeepSpeech2 on one GPU with synthetic data."""
    self._run_benchmark(
        self._shared_params()._replace(variable_update='parameter_server'))

  def benchmark_xla_synth_1gpu_gpuparams(self):
    """Runs DeepSpeech2 on one GPU with XLA autojit and synthetic data."""
    self._run_benchmark(self._shared_params()._replace(
        xla=True, variable_update='parameter_server'))

  def benchmark_xla_compile_synth_1gpu_gpuparams(self):
    """Runs DeepSpeech2 on one GPU with xla.compile and synthetic data."""
    self._run_benchmark(self._shared_params()._replace(
        xla_compile=True, variable_update='parameter_server'))
class SsdBenchmarks(BenchmarkBase):
  """Benchmarks for SSD model."""

  def _cudnn_version(self):
    """Returns the cuDNN version as an int (e.g. 7300), or None if not found.

    Probes the symbols of the current process image; assumes the cuDNN shared
    library has already been loaded into the process (presumably by
    TensorFlow) -- TODO confirm.
    """
    if sys.platform == 'win32':
      # ctypes.cdll.LoadLibrary(None) is a POSIX-only trick; bail on Windows.
      return None
    # LoadLibrary(None) returns a handle to the main program, which on POSIX
    # exposes the symbols of every shared library already loaded.
    lib = ctypes.cdll.LoadLibrary(None)
    if hasattr(lib, 'cudnnGetErrorString'):
      # Presence of any cuDNN symbol means libcudnn is loaded; query version.
      version = lib.cudnnGetVersion()
      return version
    return None

  def _shared_params(self):
    """Returns shared parameters for all SSD benchmarks.

    Raises:
      RuntimeError: if cuDNN is undetectable or older than 7.3, since fp16
        SSD needs at least cuDNN 7.3. Note this also always raises on
        Windows, where _cudnn_version() returns None.
    """
    cudnn_version = self._cudnn_version()
    if cudnn_version is None or cudnn_version < 7300:
      raise RuntimeError(
          'Needs at least cuDNN 7.3 to work with fp16 (b/112048183). '
          'Build with --define=use_experimental_cudnn=1')
    return BenchmarkBase._shared_params(self)._replace(
        # TODO(b/115672206): Replace backbone model and data dir with replicated
        # placer location for better performance.
        backbone_model_path=platforms_util.get_ssd_backborn_model_file(),  # pylint: disable=line-too-long
        data_dir=platforms_util.get_ssd_backboard_data_dir(),
        batch_size=128,
        data_name='coco',
        model='ssd300',
        num_batches=10,
        num_warmup_batches=1,
        num_gpus=1,
        optimizer='momentum',
        momentum=0.9,
        weight_decay=5e-4,
        loss_type_to_report='base_loss',
        single_l2_loss_op=True,
        compute_lr_on_cpu=True,
    )

  def benchmark_xla_compile_real_1gpu_gpuparams(self):
    """Tests 1 gpu with real data and xla.compile."""
    params = self._shared_params()._replace(
        num_gpus=1,
        xla_compile=True,
    )
    self._run_benchmark(params)

  def benchmark_real_1gpu_gpuparams(self):
    """Tests 1 gpu with real data."""
    params = self._shared_params()._replace(num_gpus=1,)
    self._run_benchmark(params)

  def benchmark_xla_compile_fp16_real_1gpu_gpuparams(self):
    """Tests 1 gpu with fp16, real data and xla.compile."""
    params = self._shared_params()._replace(
        num_gpus=1, xla_compile=True, use_fp16=True)
    self._run_benchmark(params)

  def benchmark_fp16_real_1gpu_gpuparams(self):
    """Tests 1 gpu with fp16 and real data."""
    params = self._shared_params()._replace(num_gpus=1, use_fp16=True)
    self._run_benchmark(params)

  def benchmark_xla_compile_real_8gpu_gpuparams(self):
    """Tests 8 gpus with real data, replicated variables and xla.compile."""
    params = self._shared_params()._replace(
        num_gpus=8,
        xla_compile=True,
        variable_update='replicated',
        all_reduce_spec='nccl',
        gradient_repacking=2,
        num_batches=50,
    )
    self._run_benchmark(params)

  def benchmark_real_8gpu_gpuparams(self):
    """Tests 8 gpus with real data and replicated variables."""
    params = self._shared_params()._replace(
        num_gpus=8,
        variable_update='replicated',
        all_reduce_spec='nccl',
        gradient_repacking=2,
        num_batches=50,
    )
    self._run_benchmark(params)

  def benchmark_xla_compile_fp16_real_8gpu_gpuparams(self):
    """Tests 8 gpus with fp16, real data, replicated vars and xla.compile."""
    params = self._shared_params()._replace(
        num_gpus=8,
        xla_compile=True,
        use_fp16=True,
        variable_update='replicated',
        all_reduce_spec='nccl',
        gradient_repacking=2,
        num_batches=50,
    )
    self._run_benchmark(params)

  def benchmark_fp16_real_8gpu_gpuparams(self):
    """Tests 8 gpus with fp16, real data and replicated variables."""
    params = self._shared_params()._replace(
        num_gpus=8,
        use_fp16=True,
        variable_update='replicated',
        all_reduce_spec='nccl',
        gradient_repacking=2,
        num_batches=50,
    )
    self._run_benchmark(params)
if __name__ == '__main__':
  # Delegate to the TensorFlow test runner, which discovers and executes the
  # benchmark_* methods defined above.
  tf.test.main()
| 36.269517
| 106
| 0.709527
| 5,089
| 39,026
| 5.097662
| 0.082138
| 0.060597
| 0.064143
| 0.086501
| 0.807455
| 0.792113
| 0.770989
| 0.756341
| 0.732673
| 0.715866
| 0
| 0.029255
| 0.189822
| 39,026
| 1,075
| 107
| 36.303256
| 0.791227
| 0.203198
| 0
| 0.718367
| 0
| 0
| 0.070934
| 0.003906
| 0
| 0
| 0
| 0.00186
| 0.001361
| 1
| 0.161905
| false
| 0
| 0.016327
| 0.005442
| 0.221769
| 0.005442
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a555d4a5772cfcd1bdc368d802d1889747b5363
| 7,372
|
py
|
Python
|
gcp/api/osv_service_v1_pb2_grpc.py
|
pombredanne/osv
|
1763d62d5780d20f142f79fb6c4d98b27fe4e8ca
|
[
"Apache-2.0"
] | 483
|
2021-02-05T18:54:17.000Z
|
2022-03-29T20:38:07.000Z
|
gcp/api/osv_service_v1_pb2_grpc.py
|
pombredanne/osv
|
1763d62d5780d20f142f79fb6c4d98b27fe4e8ca
|
[
"Apache-2.0"
] | 142
|
2021-02-05T23:35:58.000Z
|
2022-03-31T04:18:12.000Z
|
gcp/api/osv_service_v1_pb2_grpc.py
|
pombredanne/osv
|
1763d62d5780d20f142f79fb6c4d98b27fe4e8ca
|
[
"Apache-2.0"
] | 63
|
2021-02-06T11:50:39.000Z
|
2022-03-30T09:42:34.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from osv import vulnerability_pb2 as osv_dot_vulnerability__pb2
import osv_service_v1_pb2 as osv__service__v1__pb2
class OSVStub(object):
    """Open source vulnerability database.
    """
    # NOTE: gRPC-generated client stub ("DO NOT EDIT" file) -- regenerate from
    # the .proto rather than editing by hand.

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Each attribute is a unary-unary callable bound to one RPC of the
        # osv.v1.OSV service; the serializer/deserializer pair converts
        # between protobuf messages and the wire format.
        self.GetVulnById = channel.unary_unary(
                '/osv.v1.OSV/GetVulnById',
                request_serializer=osv__service__v1__pb2.GetVulnByIdParameters.SerializeToString,
                response_deserializer=osv_dot_vulnerability__pb2.Vulnerability.FromString,
                )
        self.QueryAffected = channel.unary_unary(
                '/osv.v1.OSV/QueryAffected',
                request_serializer=osv__service__v1__pb2.QueryAffectedParameters.SerializeToString,
                response_deserializer=osv__service__v1__pb2.VulnerabilityList.FromString,
                )
        # The "New" variants expose the same request/response types under
        # different RPC paths.
        self.GetVulnByIdNew = channel.unary_unary(
                '/osv.v1.OSV/GetVulnByIdNew',
                request_serializer=osv__service__v1__pb2.GetVulnByIdParameters.SerializeToString,
                response_deserializer=osv_dot_vulnerability__pb2.Vulnerability.FromString,
                )
        self.QueryAffectedNew = channel.unary_unary(
                '/osv.v1.OSV/QueryAffectedNew',
                request_serializer=osv__service__v1__pb2.QueryAffectedParameters.SerializeToString,
                response_deserializer=osv__service__v1__pb2.VulnerabilityList.FromString,
                )
class OSVServicer(object):
    """Open source vulnerability database.
    """
    # NOTE: gRPC-generated service base class ("DO NOT EDIT" file). A real
    # server subclasses this and overrides each method; these defaults reply
    # UNIMPLEMENTED to any caller.

    def GetVulnById(self, request, context):
        """Return a `Vulnerability` object for a given OSV ID.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def QueryAffected(self, request, context):
        """Query vulnerabilities for a particular project at a given commit or
        version.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetVulnByIdNew(self, request, context):
        """Return a `Vulnerability` object for a given OSV ID.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def QueryAffectedNew(self, request, context):
        """Query vulnerabilities for a particular project at a given commit or
        version.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_OSVServicer_to_server(servicer, server):
    """Registers an OSVServicer implementation's handlers on a grpc server.

    Args:
        servicer: An object implementing the OSVServicer methods.
        server: A grpc.Server to attach the generic handler to.
    """
    # Map each RPC name to a handler that pairs the servicer method with the
    # protobuf (de)serializers for its request/response types.
    rpc_method_handlers = {
            'GetVulnById': grpc.unary_unary_rpc_method_handler(
                    servicer.GetVulnById,
                    request_deserializer=osv__service__v1__pb2.GetVulnByIdParameters.FromString,
                    response_serializer=osv_dot_vulnerability__pb2.Vulnerability.SerializeToString,
            ),
            'QueryAffected': grpc.unary_unary_rpc_method_handler(
                    servicer.QueryAffected,
                    request_deserializer=osv__service__v1__pb2.QueryAffectedParameters.FromString,
                    response_serializer=osv__service__v1__pb2.VulnerabilityList.SerializeToString,
            ),
            'GetVulnByIdNew': grpc.unary_unary_rpc_method_handler(
                    servicer.GetVulnByIdNew,
                    request_deserializer=osv__service__v1__pb2.GetVulnByIdParameters.FromString,
                    response_serializer=osv_dot_vulnerability__pb2.Vulnerability.SerializeToString,
            ),
            'QueryAffectedNew': grpc.unary_unary_rpc_method_handler(
                    servicer.QueryAffectedNew,
                    request_deserializer=osv__service__v1__pb2.QueryAffectedParameters.FromString,
                    response_serializer=osv__service__v1__pb2.VulnerabilityList.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'osv.v1.OSV', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class OSV(object):
    """Open source vulnerability database.
    """
    # NOTE: gRPC-generated, EXPERIMENTAL connectionless API ("DO NOT EDIT"
    # file). Each static method performs a one-shot unary-unary call to the
    # given target without a pre-built stub/channel.

    @staticmethod
    def GetVulnById(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot call to /osv.v1.OSV/GetVulnById; returns a Vulnerability."""
        return grpc.experimental.unary_unary(request, target, '/osv.v1.OSV/GetVulnById',
            osv__service__v1__pb2.GetVulnByIdParameters.SerializeToString,
            osv_dot_vulnerability__pb2.Vulnerability.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def QueryAffected(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot call to /osv.v1.OSV/QueryAffected; returns a VulnerabilityList."""
        return grpc.experimental.unary_unary(request, target, '/osv.v1.OSV/QueryAffected',
            osv__service__v1__pb2.QueryAffectedParameters.SerializeToString,
            osv__service__v1__pb2.VulnerabilityList.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetVulnByIdNew(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot call to /osv.v1.OSV/GetVulnByIdNew; returns a Vulnerability."""
        return grpc.experimental.unary_unary(request, target, '/osv.v1.OSV/GetVulnByIdNew',
            osv__service__v1__pb2.GetVulnByIdParameters.SerializeToString,
            osv_dot_vulnerability__pb2.Vulnerability.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def QueryAffectedNew(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot call to /osv.v1.OSV/QueryAffectedNew; returns a VulnerabilityList."""
        return grpc.experimental.unary_unary(request, target, '/osv.v1.OSV/QueryAffectedNew',
            osv__service__v1__pb2.QueryAffectedParameters.SerializeToString,
            osv__service__v1__pb2.VulnerabilityList.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 41.886364
| 99
| 0.662235
| 677
| 7,372
| 6.849335
| 0.155096
| 0.043131
| 0.051758
| 0.064697
| 0.805693
| 0.797714
| 0.758896
| 0.726116
| 0.726116
| 0.726116
| 0
| 0.010513
| 0.264514
| 7,372
| 175
| 100
| 42.125714
| 0.844707
| 0.085323
| 0
| 0.661654
| 1
| 0
| 0.068103
| 0.030737
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075188
| false
| 0
| 0.022556
| 0.030075
| 0.150376
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce4545d81de8d9b80a2abfb8e8e25f1c2f19f7cb
| 463
|
py
|
Python
|
api/mon/tasks.py
|
klebed/esdc-ce
|
2c9e4591f344247d345a83880ba86777bb794460
|
[
"Apache-2.0"
] | 97
|
2016-11-15T14:44:23.000Z
|
2022-03-13T18:09:15.000Z
|
api/mon/tasks.py
|
klebed/esdc-ce
|
2c9e4591f344247d345a83880ba86777bb794460
|
[
"Apache-2.0"
] | 334
|
2016-11-17T19:56:57.000Z
|
2022-03-18T10:45:53.000Z
|
api/mon/tasks.py
|
klebed/esdc-ce
|
2c9e4591f344247d345a83880ba86777bb794460
|
[
"Apache-2.0"
] | 33
|
2017-01-02T16:04:13.000Z
|
2022-02-07T19:20:24.000Z
|
# noinspection PyUnresolvedReferences
from api.mon.base.tasks import * # noqa: F401,F403
# noinspection PyUnresolvedReferences
from api.mon.vm.tasks import * # noqa: F401,F403
# noinspection PyUnresolvedReferences
from api.mon.node.tasks import * # noqa: F401,F403
# noinspection PyUnresolvedReferences
from api.mon.alerting.tasks import * # noqa: F401,F403
# noinspection PyUnresolvedReferences
from api.mon.alerting.action.tasks import * # noqa: F401,F403
| 42.090909
| 62
| 0.792657
| 56
| 463
| 6.553571
| 0.267857
| 0.463215
| 0.517711
| 0.558583
| 0.956403
| 0.773842
| 0.773842
| 0.773842
| 0.773842
| 0.773842
| 0
| 0.073529
| 0.118791
| 463
| 10
| 63
| 46.3
| 0.82598
| 0.559395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
ce8be91c6ee2caf166dd7dcbb46b7c7dc59a529d
| 5,993
|
py
|
Python
|
loldib/getratings/models/NA/na_vi/na_vi_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_vi/na_vi_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_vi/na_vi_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Auto-generated placeholder rating models: one empty `Ratings` subclass per
# champion for the NA "Vi" top-lane dataset. Each class inherits all behavior
# from `Ratings` unchanged; the class name itself (region/champion/role/name)
# is the only distinguishing data -- presumably used for model lookup by name
# elsewhere in the package (TODO confirm against callers).
class NA_Vi_Top_Aatrox(Ratings):
    pass
class NA_Vi_Top_Ahri(Ratings):
    pass
class NA_Vi_Top_Akali(Ratings):
    pass
class NA_Vi_Top_Alistar(Ratings):
    pass
class NA_Vi_Top_Amumu(Ratings):
    pass
class NA_Vi_Top_Anivia(Ratings):
    pass
class NA_Vi_Top_Annie(Ratings):
    pass
class NA_Vi_Top_Ashe(Ratings):
    pass
class NA_Vi_Top_AurelionSol(Ratings):
    pass
class NA_Vi_Top_Azir(Ratings):
    pass
class NA_Vi_Top_Bard(Ratings):
    pass
class NA_Vi_Top_Blitzcrank(Ratings):
    pass
class NA_Vi_Top_Brand(Ratings):
    pass
class NA_Vi_Top_Braum(Ratings):
    pass
class NA_Vi_Top_Caitlyn(Ratings):
    pass
class NA_Vi_Top_Camille(Ratings):
    pass
class NA_Vi_Top_Cassiopeia(Ratings):
    pass
class NA_Vi_Top_Chogath(Ratings):
    pass
class NA_Vi_Top_Corki(Ratings):
    pass
class NA_Vi_Top_Darius(Ratings):
    pass
class NA_Vi_Top_Diana(Ratings):
    pass
class NA_Vi_Top_Draven(Ratings):
    pass
class NA_Vi_Top_DrMundo(Ratings):
    pass
class NA_Vi_Top_Ekko(Ratings):
    pass
class NA_Vi_Top_Elise(Ratings):
    pass
class NA_Vi_Top_Evelynn(Ratings):
    pass
class NA_Vi_Top_Ezreal(Ratings):
    pass
class NA_Vi_Top_Fiddlesticks(Ratings):
    pass
class NA_Vi_Top_Fiora(Ratings):
    pass
class NA_Vi_Top_Fizz(Ratings):
    pass
class NA_Vi_Top_Galio(Ratings):
    pass
class NA_Vi_Top_Gangplank(Ratings):
    pass
class NA_Vi_Top_Garen(Ratings):
    pass
class NA_Vi_Top_Gnar(Ratings):
    pass
class NA_Vi_Top_Gragas(Ratings):
    pass
class NA_Vi_Top_Graves(Ratings):
    pass
class NA_Vi_Top_Hecarim(Ratings):
    pass
class NA_Vi_Top_Heimerdinger(Ratings):
    pass
class NA_Vi_Top_Illaoi(Ratings):
    pass
class NA_Vi_Top_Irelia(Ratings):
    pass
class NA_Vi_Top_Ivern(Ratings):
    pass
class NA_Vi_Top_Janna(Ratings):
    pass
class NA_Vi_Top_JarvanIV(Ratings):
    pass
class NA_Vi_Top_Jax(Ratings):
    pass
class NA_Vi_Top_Jayce(Ratings):
    pass
class NA_Vi_Top_Jhin(Ratings):
    pass
class NA_Vi_Top_Jinx(Ratings):
    pass
class NA_Vi_Top_Kalista(Ratings):
    pass
class NA_Vi_Top_Karma(Ratings):
    pass
class NA_Vi_Top_Karthus(Ratings):
    pass
class NA_Vi_Top_Kassadin(Ratings):
    pass
class NA_Vi_Top_Katarina(Ratings):
    pass
class NA_Vi_Top_Kayle(Ratings):
    pass
class NA_Vi_Top_Kayn(Ratings):
    pass
class NA_Vi_Top_Kennen(Ratings):
    pass
class NA_Vi_Top_Khazix(Ratings):
    pass
class NA_Vi_Top_Kindred(Ratings):
    pass
class NA_Vi_Top_Kled(Ratings):
    pass
class NA_Vi_Top_KogMaw(Ratings):
    pass
class NA_Vi_Top_Leblanc(Ratings):
    pass
class NA_Vi_Top_LeeSin(Ratings):
    pass
class NA_Vi_Top_Leona(Ratings):
    pass
class NA_Vi_Top_Lissandra(Ratings):
    pass
class NA_Vi_Top_Lucian(Ratings):
    pass
class NA_Vi_Top_Lulu(Ratings):
    pass
class NA_Vi_Top_Lux(Ratings):
    pass
class NA_Vi_Top_Malphite(Ratings):
    pass
class NA_Vi_Top_Malzahar(Ratings):
    pass
class NA_Vi_Top_Maokai(Ratings):
    pass
class NA_Vi_Top_MasterYi(Ratings):
    pass
class NA_Vi_Top_MissFortune(Ratings):
    pass
class NA_Vi_Top_MonkeyKing(Ratings):
    pass
class NA_Vi_Top_Mordekaiser(Ratings):
    pass
class NA_Vi_Top_Morgana(Ratings):
    pass
class NA_Vi_Top_Nami(Ratings):
    pass
class NA_Vi_Top_Nasus(Ratings):
    pass
class NA_Vi_Top_Nautilus(Ratings):
    pass
class NA_Vi_Top_Nidalee(Ratings):
    pass
class NA_Vi_Top_Nocturne(Ratings):
    pass
class NA_Vi_Top_Nunu(Ratings):
    pass
class NA_Vi_Top_Olaf(Ratings):
    pass
class NA_Vi_Top_Orianna(Ratings):
    pass
class NA_Vi_Top_Ornn(Ratings):
    pass
class NA_Vi_Top_Pantheon(Ratings):
    pass
class NA_Vi_Top_Poppy(Ratings):
    pass
class NA_Vi_Top_Quinn(Ratings):
    pass
class NA_Vi_Top_Rakan(Ratings):
    pass
class NA_Vi_Top_Rammus(Ratings):
    pass
class NA_Vi_Top_RekSai(Ratings):
    pass
class NA_Vi_Top_Renekton(Ratings):
    pass
class NA_Vi_Top_Rengar(Ratings):
    pass
class NA_Vi_Top_Riven(Ratings):
    pass
class NA_Vi_Top_Rumble(Ratings):
    pass
class NA_Vi_Top_Ryze(Ratings):
    pass
class NA_Vi_Top_Sejuani(Ratings):
    pass
class NA_Vi_Top_Shaco(Ratings):
    pass
class NA_Vi_Top_Shen(Ratings):
    pass
class NA_Vi_Top_Shyvana(Ratings):
    pass
class NA_Vi_Top_Singed(Ratings):
    pass
class NA_Vi_Top_Sion(Ratings):
    pass
class NA_Vi_Top_Sivir(Ratings):
    pass
class NA_Vi_Top_Skarner(Ratings):
    pass
class NA_Vi_Top_Sona(Ratings):
    pass
class NA_Vi_Top_Soraka(Ratings):
    pass
class NA_Vi_Top_Swain(Ratings):
    pass
class NA_Vi_Top_Syndra(Ratings):
    pass
class NA_Vi_Top_TahmKench(Ratings):
    pass
class NA_Vi_Top_Taliyah(Ratings):
    pass
class NA_Vi_Top_Talon(Ratings):
    pass
class NA_Vi_Top_Taric(Ratings):
    pass
class NA_Vi_Top_Teemo(Ratings):
    pass
class NA_Vi_Top_Thresh(Ratings):
    pass
class NA_Vi_Top_Tristana(Ratings):
    pass
class NA_Vi_Top_Trundle(Ratings):
    pass
class NA_Vi_Top_Tryndamere(Ratings):
    pass
class NA_Vi_Top_TwistedFate(Ratings):
    pass
class NA_Vi_Top_Twitch(Ratings):
    pass
class NA_Vi_Top_Udyr(Ratings):
    pass
class NA_Vi_Top_Urgot(Ratings):
    pass
class NA_Vi_Top_Varus(Ratings):
    pass
class NA_Vi_Top_Vayne(Ratings):
    pass
class NA_Vi_Top_Veigar(Ratings):
    pass
class NA_Vi_Top_Velkoz(Ratings):
    pass
class NA_Vi_Top_Vi(Ratings):
    pass
class NA_Vi_Top_Viktor(Ratings):
    pass
class NA_Vi_Top_Vladimir(Ratings):
    pass
class NA_Vi_Top_Volibear(Ratings):
    pass
class NA_Vi_Top_Warwick(Ratings):
    pass
class NA_Vi_Top_Xayah(Ratings):
    pass
class NA_Vi_Top_Xerath(Ratings):
    pass
class NA_Vi_Top_XinZhao(Ratings):
    pass
class NA_Vi_Top_Yasuo(Ratings):
    pass
class NA_Vi_Top_Yorick(Ratings):
    pass
class NA_Vi_Top_Zac(Ratings):
    pass
class NA_Vi_Top_Zed(Ratings):
    pass
class NA_Vi_Top_Ziggs(Ratings):
    pass
class NA_Vi_Top_Zilean(Ratings):
    pass
class NA_Vi_Top_Zyra(Ratings):
    pass
| 14.371703
| 46
| 0.745203
| 972
| 5,993
| 4.168724
| 0.151235
| 0.238401
| 0.306515
| 0.408687
| 0.777641
| 0.777641
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185383
| 5,993
| 416
| 47
| 14.40625
| 0.829988
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
0c99c3535e0be2a2018d4456b0f4326cea4fa65f
| 28,885
|
py
|
Python
|
sdk/luminesce/api/multi_query_execution_api.py
|
finbourne/luminesce-sdk-python-preview
|
7af198cfa9c0fbd619272fb90601162fb7db0a67
|
[
"MIT"
] | null | null | null |
sdk/luminesce/api/multi_query_execution_api.py
|
finbourne/luminesce-sdk-python-preview
|
7af198cfa9c0fbd619272fb90601162fb7db0a67
|
[
"MIT"
] | null | null | null |
sdk/luminesce/api/multi_query_execution_api.py
|
finbourne/luminesce-sdk-python-preview
|
7af198cfa9c0fbd619272fb90601162fb7db0a67
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
FINBOURNE Honeycomb Web API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 1.9.129
Contact: info@finbourne.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from luminesce.api_client import ApiClient
from luminesce.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class MultiQueryExecutionApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient so the API class is
        # usable without explicit wiring; callers may inject a shared client.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def cancel_multi_query(self, execution_id, **kwargs):  # noqa: E501
        """[EXPERIMENTAL] CancelMultiQuery: Cancels (if running) or clears the data from (if completed) a previously started query-set  # noqa: E501

        Cancel the query-set (if still running) / clear the data (if already returned)  The following error codes are to be anticipated with standard Problem Detail reports: - 401 Unauthorized - 404 Not Found : The requested query result doesn't exist and is not running.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.cancel_multi_query(execution_id, async_req=True)
        >>> result = thread.get()

        :param execution_id: ExecutionId returned when starting the query (required)
        :type execution_id: str
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: BackgroundQueryCancelResponse
        """
        # Convenience wrapper: force data-only return and delegate to the
        # full-info variant below.
        kwargs['_return_http_data_only'] = True
        return self.cancel_multi_query_with_http_info(execution_id, **kwargs)  # noqa: E501

    def cancel_multi_query_with_http_info(self, execution_id, **kwargs):  # noqa: E501
        """[EXPERIMENTAL] CancelMultiQuery: Cancels (if running) or clears the data from (if completed) a previously started query-set  # noqa: E501

        Cancel the query-set (if still running) / clear the data (if already returned)  The following error codes are to be anticipated with standard Problem Detail reports: - 401 Unauthorized - 404 Not Found : The requested query result doesn't exist and is not running.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.cancel_multi_query_with_http_info(execution_id, async_req=True)
        >>> result = thread.get()

        :param execution_id: ExecutionId returned when starting the query (required)
        :type execution_id: str
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for an a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: tuple(BackgroundQueryCancelResponse, status_code(int), headers(HTTPHeaderDict))
        """
        # locals() captures the named parameters plus the raw kwargs dict;
        # the kwargs are then validated against the allowed parameter list.
        local_var_params = locals()

        all_params = [
            'execution_id'
        ]
        # Framework-level options accepted by every generated endpoint.
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method cancel_multi_query" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'execution_id' is set
        if self.api_client.client_side_validation and ('execution_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['execution_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `execution_id` when calling `cancel_multi_query`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'execution_id' in local_var_params:
            path_params['executionId'] = local_var_params['execution_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json'])  # noqa: E501

        header_params['Accept-Encoding'] = "gzip, deflate, br"

        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501

        response_types_map = {
            200: "BackgroundQueryCancelResponse",
        }

        return self.api_client.call_api(
            '/api/MultiQueryBackground/{executionId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_types_map=response_types_map,
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))

    def get_progress_of_multi_query(self, execution_id, **kwargs):  # noqa: E501
        """[EXPERIMENTAL] GetProgressOfMultiQuery: View progress information (up until this point) for the entire query-set  # noqa: E501

        View progress information (up until this point) for the entire query-set  The following error codes are to be anticipated with standard Problem Detail reports: - 401 Unauthorized - 404 Not Found : The requested query result doesn't exist and is not running. - 429 Too Many Requests : Please try your request again soon  1. The query has been executed successfully in the past yet the server-instance receiving this request (e.g. from a load balancer) doesn't yet have this data available. 1. By virtue of the request you have just placed this will have started to load from the persisted cache and will soon be available. 1. It is also the case that the original server-instance to process the original query is likely to already be able to service this request.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_progress_of_multi_query(execution_id, async_req=True)
        >>> result = thread.get()

        :param execution_id: ExecutionId returned when starting the query (required)
        :type execution_id: str
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: BackgroundMultiQueryProgressResponse
        """
        # Convenience wrapper: force data-only return and delegate to the
        # full-info variant below.
        kwargs['_return_http_data_only'] = True
        return self.get_progress_of_multi_query_with_http_info(execution_id, **kwargs)  # noqa: E501

    def get_progress_of_multi_query_with_http_info(self, execution_id, **kwargs):  # noqa: E501
        """[EXPERIMENTAL] GetProgressOfMultiQuery: View progress information (up until this point) for the entire query-set  # noqa: E501

        View progress information (up until this point) for the entire query-set  The following error codes are to be anticipated with standard Problem Detail reports: - 401 Unauthorized - 404 Not Found : The requested query result doesn't exist and is not running. - 429 Too Many Requests : Please try your request again soon  1. The query has been executed successfully in the past yet the server-instance receiving this request (e.g. from a load balancer) doesn't yet have this data available. 1. By virtue of the request you have just placed this will have started to load from the persisted cache and will soon be available. 1. It is also the case that the original server-instance to process the original query is likely to already be able to service this request.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_progress_of_multi_query_with_http_info(execution_id, async_req=True)
        >>> result = thread.get()

        :param execution_id: ExecutionId returned when starting the query (required)
        :type execution_id: str
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for an a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: tuple(BackgroundMultiQueryProgressResponse, status_code(int), headers(HTTPHeaderDict))
        """
        # locals() captures the named parameters plus the raw kwargs dict;
        # the kwargs are then validated against the allowed parameter list.
        local_var_params = locals()

        all_params = [
            'execution_id'
        ]
        # Framework-level options accepted by every generated endpoint.
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_progress_of_multi_query" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'execution_id' is set
        if self.api_client.client_side_validation and ('execution_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['execution_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `execution_id` when calling `get_progress_of_multi_query`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'execution_id' in local_var_params:
            path_params['executionId'] = local_var_params['execution_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json'])  # noqa: E501

        header_params['Accept-Encoding'] = "gzip, deflate, br"

        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501

        response_types_map = {
            200: "BackgroundMultiQueryProgressResponse",
        }

        return self.api_client.call_api(
            '/api/MultiQueryBackground/{executionId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_types_map=response_types_map,
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))

    def start_queries(self, type, body, **kwargs):  # noqa: E501
        """[EXPERIMENTAL] StartQueries: Starts to Execute the HoneycombSql statements in the background.  # noqa: E501

        Allow for starting a potentially long running query and getting back an immediate response with how to - fetch the data in various formats (if available, or if not simply being informed it is not yet ready), on a per result basis - view progress information (up until this point), for all results in one go - cancel the queries (if still running) / clear the data (if already returned)  The following error codes are to be anticipated with standard Problem Detail reports: - 400 BadRequest - there was something wrong with your query syntax (the issue was detected at parse-time) - 401 Unauthorized  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.start_queries(type, body, async_req=True)
        >>> result = thread.get()

        :param type: An enum value defining the set of statements being executed (required)
        :type type: MultiQueryDefinitionType
        :param body: A \"search\" value (e.g. 'Apple' on an instrument search, a `Finbourne.Filtering` expression of Insights, etc.)  In the cases where \"Nothing\" is valid for a `Finbourne.Filtering` expression, pass `True`. (required)
        :type body: str
        :param as_at: The AsAt time used by any bitemporal provider in the queries.
        :type as_at: datetime
        :param effective_at: The EffectiveAt time used by any bitemporal provider in the queries.
        :type effective_at: datetime
        :param limit1: A limit that is applied to first-level queries (e.g. Instruments themselves)
        :type limit1: int
        :param limit2: A limit that is applied to second-level queries (e.g. Holdings based on the set of Instruments found)
        :type limit2: int
        :param input1: A value available to queries, these vary by 'type' and are only used by some types at all. e.g. a start-date of some sort
        :type input1: str
        :param input2: A second value available to queries, these vary by 'type' and are only used by some types at all.
        :type input2: str
        :param input3: A third value available to queries, these vary by 'type' and are only used by some types at all.
        :type input3: str
        :param timeout_seconds: Maximum time the query may run for, in seconds: <0 → ∞, 0 → 1200s (20m)
        :type timeout_seconds: int
        :param keep_for_seconds: Maximum time the result may be kept for, in seconds: <0 → 1200 (20m), 0 → 28800 (8h), max = 2,678,400 (31d)
        :type keep_for_seconds: int
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: BackgroundMultiQueryResponse
        """
        # Convenience wrapper: force data-only return and delegate to the
        # full-info variant below.
        kwargs['_return_http_data_only'] = True
        return self.start_queries_with_http_info(type, body, **kwargs)  # noqa: E501

    def start_queries_with_http_info(self, type, body, **kwargs):  # noqa: E501
        """[EXPERIMENTAL] StartQueries: Starts to Execute the HoneycombSql statements in the background.  # noqa: E501

        Allow for starting a potentially long running query and getting back an immediate response with how to - fetch the data in various formats (if available, or if not simply being informed it is not yet ready), on a per result basis - view progress information (up until this point), for all results in one go - cancel the queries (if still running) / clear the data (if already returned)  The following error codes are to be anticipated with standard Problem Detail reports: - 400 BadRequest - there was something wrong with your query syntax (the issue was detected at parse-time) - 401 Unauthorized  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.start_queries_with_http_info(type, body, async_req=True)
        >>> result = thread.get()

        :param type: An enum value defining the set of statements being executed (required)
        :type type: MultiQueryDefinitionType
        :param body: A \"search\" value (e.g. 'Apple' on an instrument search, a `Finbourne.Filtering` expression of Insights, etc.)  In the cases where \"Nothing\" is valid for a `Finbourne.Filtering` expression, pass `True`. (required)
        :type body: str
        :param as_at: The AsAt time used by any bitemporal provider in the queries.
        :type as_at: datetime
        :param effective_at: The EffectiveAt time used by any bitemporal provider in the queries.
        :type effective_at: datetime
        :param limit1: A limit that is applied to first-level queries (e.g. Instruments themselves)
        :type limit1: int
        :param limit2: A limit that is applied to second-level queries (e.g. Holdings based on the set of Instruments found)
        :type limit2: int
        :param input1: A value available to queries, these vary by 'type' and are only used by some types at all. e.g. a start-date of some sort
        :type input1: str
        :param input2: A second value available to queries, these vary by 'type' and are only used by some types at all.
        :type input2: str
        :param input3: A third value available to queries, these vary by 'type' and are only used by some types at all.
        :type input3: str
        :param timeout_seconds: Maximum time the query may run for, in seconds: <0 → ∞, 0 → 1200s (20m)
        :type timeout_seconds: int
        :param keep_for_seconds: Maximum time the result may be kept for, in seconds: <0 → 1200 (20m), 0 → 28800 (8h), max = 2,678,400 (31d)
        :type keep_for_seconds: int
        :param async_req: Whether to execute the request asynchronously.
        :type async_req: bool, optional
        :param _return_http_data_only: response data without head status code
                                       and headers
        :type _return_http_data_only: bool, optional
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :type _preload_content: bool, optional
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :param _request_auth: set to override the auth_settings for an a single
                              request; this effectively ignores the authentication
                              in the spec for a single request.
        :type _request_auth: dict, optional
        :return: Returns the result object.
                 If the method is called asynchronously,
                 returns the request thread.
        :rtype: tuple(BackgroundMultiQueryResponse, status_code(int), headers(HTTPHeaderDict))
        """
        # locals() captures the named parameters plus the raw kwargs dict;
        # the kwargs are then validated against the allowed parameter list.
        local_var_params = locals()

        all_params = [
            'type',
            'body',
            'as_at',
            'effective_at',
            'limit1',
            'limit2',
            'input1',
            'input2',
            'input3',
            'timeout_seconds',
            'keep_for_seconds'
        ]
        # Framework-level options accepted by every generated endpoint.
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout',
                '_request_auth'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method start_queries" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'type' is set
        if self.api_client.client_side_validation and ('type' not in local_var_params or  # noqa: E501
                                                       local_var_params['type'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `type` when calling `start_queries`")  # noqa: E501
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                       local_var_params['body'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `body` when calling `start_queries`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        # Only non-None optional parameters are forwarded as query-string
        # entries; the server applies its own defaults for omitted ones.
        query_params = []
        if 'type' in local_var_params and local_var_params['type'] is not None:  # noqa: E501
            query_params.append(('type', local_var_params['type']))  # noqa: E501
        if 'as_at' in local_var_params and local_var_params['as_at'] is not None:  # noqa: E501
            query_params.append(('asAt', local_var_params['as_at']))  # noqa: E501
        if 'effective_at' in local_var_params and local_var_params['effective_at'] is not None:  # noqa: E501
            query_params.append(('effectiveAt', local_var_params['effective_at']))  # noqa: E501
        if 'limit1' in local_var_params and local_var_params['limit1'] is not None:  # noqa: E501
            query_params.append(('limit1', local_var_params['limit1']))  # noqa: E501
        if 'limit2' in local_var_params and local_var_params['limit2'] is not None:  # noqa: E501
            query_params.append(('limit2', local_var_params['limit2']))  # noqa: E501
        if 'input1' in local_var_params and local_var_params['input1'] is not None:  # noqa: E501
            query_params.append(('input1', local_var_params['input1']))  # noqa: E501
        if 'input2' in local_var_params and local_var_params['input2'] is not None:  # noqa: E501
            query_params.append(('input2', local_var_params['input2']))  # noqa: E501
        if 'input3' in local_var_params and local_var_params['input3'] is not None:  # noqa: E501
            query_params.append(('input3', local_var_params['input3']))  # noqa: E501
        if 'timeout_seconds' in local_var_params and local_var_params['timeout_seconds'] is not None:  # noqa: E501
            query_params.append(('timeoutSeconds', local_var_params['timeout_seconds']))  # noqa: E501
        if 'keep_for_seconds' in local_var_params and local_var_params['keep_for_seconds'] is not None:  # noqa: E501
            query_params.append(('keepForSeconds', local_var_params['keep_for_seconds']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['text/plain', 'application/json', 'text/json'])  # noqa: E501

        header_params['Accept-Encoding'] = "gzip, deflate, br"

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['text/plain'])  # noqa: E501

        # set the LUSID header
        header_params['X-LUSID-SDK-Language'] = 'Python'
        header_params['X-LUSID-SDK-Version'] = '1.9.129'

        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501

        response_types_map = {
            202: "BackgroundMultiQueryResponse",
            400: "LusidProblemDetails",
        }

        return self.api_client.call_api(
            '/api/MultiQueryBackground', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_types_map=response_types_map,
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            _request_auth=local_var_params.get('_request_auth'))
| 54.193246
| 788
| 0.635451
| 3,549
| 28,885
| 4.988166
| 0.107354
| 0.034796
| 0.056149
| 0.018302
| 0.908151
| 0.895272
| 0.887194
| 0.884709
| 0.85172
| 0.82664
| 0
| 0.019399
| 0.293301
| 28,885
| 532
| 789
| 54.295113
| 0.847352
| 0.555859
| 0
| 0.588745
| 1
| 0
| 0.195758
| 0.040446
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030303
| false
| 0
| 0.021645
| 0
| 0.082251
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c9ed086afaf750c83021054897bc97fb22f031d
| 26,990
|
py
|
Python
|
tests/test_observable/test_merge.py
|
christiansandberg/RxPY
|
036027d2858ea6c9d45839c863bd791e5bb50c36
|
[
"MIT"
] | null | null | null |
tests/test_observable/test_merge.py
|
christiansandberg/RxPY
|
036027d2858ea6c9d45839c863bd791e5bb50c36
|
[
"MIT"
] | null | null | null |
tests/test_observable/test_merge.py
|
christiansandberg/RxPY
|
036027d2858ea6c9d45839c863bd791e5bb50c36
|
[
"MIT"
] | null | null | null |
import unittest
import reactivex
from reactivex import operators as ops
from reactivex.testing import ReactiveTest, TestScheduler
# Shorthand aliases for the ReactiveTest record/message factories so the
# test bodies below stay compact.
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestMerge(unittest.TestCase):
def test_merge_never2(self):
scheduler = TestScheduler()
n1 = reactivex.never()
n2 = reactivex.never()
def create():
return reactivex.merge(n1, n2)
results = scheduler.start(create)
assert results.messages == []
def test_merge_never3(self):
scheduler = TestScheduler()
n1 = reactivex.never()
n2 = reactivex.never()
n3 = reactivex.never()
def create():
return reactivex.merge(n1, n2, n3)
results = scheduler.start(create)
assert results.messages == []
def test_merge_empty2(self):
scheduler = TestScheduler()
e1 = reactivex.empty()
e2 = reactivex.empty()
def create():
return reactivex.merge(e1, e2)
results = scheduler.start(create)
assert results.messages == [on_completed(200)]
def test_merge_empty3(self):
scheduler = TestScheduler()
e1 = reactivex.empty()
e2 = reactivex.empty()
e3 = reactivex.empty()
def create():
return reactivex.merge(e1, e2, e3)
results = scheduler.start(create)
assert results.messages == [on_completed(200)]
def test_merge_empty_delayed2_right_last(self):
scheduler = TestScheduler()
l_msgs = [on_next(150, 1), on_completed(240)]
r_msgs = [on_next(150, 1), on_completed(250)]
e1 = scheduler.create_hot_observable(l_msgs)
e2 = scheduler.create_hot_observable(r_msgs)
def create():
return reactivex.merge(e1, e2)
results = scheduler.start(create)
assert results.messages == [on_completed(250)]
def test_merge_empty_delayed2_left_last(self):
scheduler = TestScheduler()
l_msgs = [on_next(150, 1), on_completed(250)]
r_msgs = [on_next(150, 1), on_completed(240)]
e1 = scheduler.create_hot_observable(l_msgs)
e2 = scheduler.create_hot_observable(r_msgs)
def create():
return reactivex.merge(e1, e2)
results = scheduler.start(create)
assert results.messages == [on_completed(250)]
def test_merge_empty_delayed3_middle_last(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_completed(245)]
msgs2 = [on_next(150, 1), on_completed(250)]
msgs3 = [on_next(150, 1), on_completed(240)]
e1 = scheduler.create_hot_observable(msgs1)
e2 = scheduler.create_hot_observable(msgs2)
e3 = scheduler.create_hot_observable(msgs3)
def create():
return reactivex.merge(e1, e2, e3)
results = scheduler.start(create)
assert results.messages == [on_completed(250)]
def test_merge_empty_never(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_completed(245)]
e1 = scheduler.create_hot_observable(msgs1)
n1 = reactivex.never()
def create():
return reactivex.merge(e1, n1)
results = scheduler.start(create)
assert results.messages == []
def test_merge_never_empty(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_completed(245)]
e1 = scheduler.create_hot_observable(msgs1)
n1 = reactivex.never()
def create():
return reactivex.merge(n1, e1)
results = scheduler.start(create)
assert results.messages == []
def test_merge_return_never(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_completed(245)]
r1 = scheduler.create_hot_observable(msgs1)
n1 = reactivex.never()
def create():
return reactivex.merge(r1, n1)
results = scheduler.start(create)
assert results.messages == [on_next(210, 2)]
def test_merge_never_return(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_completed(245)]
r1 = scheduler.create_hot_observable(msgs1)
n1 = reactivex.never()
def create():
return reactivex.merge(n1, r1)
results = scheduler.start(create)
assert results.messages == [on_next(210, 2)]
def test_merge_error_never(self):
ex = "ex"
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_error(245, ex)]
e1 = scheduler.create_hot_observable(msgs1)
n1 = reactivex.never()
def create():
return reactivex.merge(e1, n1)
results = scheduler.start(create)
assert results.messages == [on_next(210, 2), on_error(245, ex)]
def test_merge_never_error(self):
ex = "ex"
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_error(245, ex)]
e1 = scheduler.create_hot_observable(msgs1)
n1 = reactivex.never()
def create():
return reactivex.merge(n1, e1)
results = scheduler.start(create)
assert results.messages == [on_next(210, 2), on_error(245, ex)]
def test_merge_empty_return(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_completed(245)]
msgs2 = [on_next(150, 1), on_next(210, 2), on_completed(250)]
e1 = scheduler.create_hot_observable(msgs1)
r1 = scheduler.create_hot_observable(msgs2)
def create():
return reactivex.merge(e1, r1)
results = scheduler.start(create)
assert results.messages == [on_next(210, 2), on_completed(250)]
def test_merge_return_empty(self):
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_completed(245)]
msgs2 = [on_next(150, 1), on_next(210, 2), on_completed(250)]
e1 = scheduler.create_hot_observable(msgs1)
r1 = scheduler.create_hot_observable(msgs2)
def create():
return reactivex.merge(r1, e1)
results = scheduler.start(create)
assert results.messages == [on_next(210, 2), on_completed(250)]
def test_merge_lots2(self):
scheduler = TestScheduler()
msgs1 = [
on_next(150, 1),
on_next(210, 2),
on_next(220, 4),
on_next(230, 6),
on_next(240, 8),
on_completed(245),
]
msgs2 = [
on_next(150, 1),
on_next(215, 3),
on_next(225, 5),
on_next(235, 7),
on_next(245, 9),
on_completed(250),
]
o1 = scheduler.create_hot_observable(msgs1)
o2 = scheduler.create_hot_observable(msgs2)
def create():
return reactivex.merge(o1, o2)
results = scheduler.start(create).messages
assert len(results) == 9
for i, result in enumerate(results[:-1]):
assert result.value.kind == "N"
assert result.time == 210 + i * 5
assert result.value.value == i + 2
assert results[8].value.kind == "C" and results[8].time == 250
def test_merge_lots3(self):
scheduler = TestScheduler()
msgs1 = [
on_next(150, 1),
on_next(210, 2),
on_next(225, 5),
on_next(240, 8),
on_completed(245),
]
msgs2 = [
on_next(150, 1),
on_next(215, 3),
on_next(230, 6),
on_next(245, 9),
on_completed(250),
]
msgs3 = [on_next(150, 1), on_next(220, 4), on_next(235, 7), on_completed(240)]
o1 = scheduler.create_hot_observable(msgs1)
o2 = scheduler.create_hot_observable(msgs2)
o3 = scheduler.create_hot_observable(msgs3)
def create():
return reactivex.merge(o1, o2, o3)
results = scheduler.start(create).messages
assert len(results) == 9
for i, result in enumerate(results[:-1]):
assert (
results[i].value.kind == "N"
and results[i].time == 210 + i * 5
and results[i].value.value == i + 2
)
assert results[8].value.kind == "C" and results[8].time == 250
def test_merge_error_left(self):
ex = "ex"
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_next(210, 2), on_error(245, ex)]
msgs2 = [on_next(150, 1), on_next(215, 3), on_completed(250)]
o1 = scheduler.create_hot_observable(msgs1)
o2 = scheduler.create_hot_observable(msgs2)
def create():
return reactivex.merge(o1, o2)
results = scheduler.start(create)
assert results.messages == [on_next(210, 2), on_next(215, 3), on_error(245, ex)]
def test_merge_error_causes_disposal(self):
ex = "ex"
scheduler = TestScheduler()
msgs1 = [on_next(150, 1), on_error(210, ex)]
msgs2 = [on_next(150, 1), on_next(220, 1), on_completed(250)]
source_not_disposed = [False]
o1 = scheduler.create_hot_observable(msgs1)
def action():
source_not_disposed[0] = True
o2 = scheduler.create_hot_observable(msgs2).pipe(ops.do_action(on_next=action))
def create():
return reactivex.merge(o1, o2)
results = scheduler.start(create)
assert results.messages == [on_error(210, ex)]
assert not source_not_disposed[0]
def test_merge_observable_of_observable_data(self):
    """merge_all flattens overlapping cold inner observables.

    The outer hot observable emits three cold inners at 300/400/500; each
    inner's emission times are offsets from its own subscribe time, so the
    flattened output interleaves values from all active inners.
    """
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(
            300,
            scheduler.create_cold_observable(
                on_next(10, 101),
                on_next(20, 102),
                on_next(110, 103),
                on_next(120, 104),
                on_next(210, 105),
                on_next(220, 106),
                on_completed(230),
            ),
        ),
        on_next(
            400,
            scheduler.create_cold_observable(
                on_next(10, 201),
                on_next(20, 202),
                on_next(30, 203),
                on_next(40, 200),
                on_completed(50),
            ),
        ),
        on_next(
            500,
            scheduler.create_cold_observable(
                on_next(10, 301),
                on_next(20, 302),
                on_next(30, 303),
                on_next(40, 304),
                on_next(120, 305),
                on_completed(150),
            ),
        ),
        on_completed(600),
    )

    def create():
        return xs.pipe(ops.merge_all())

    results = scheduler.start(create)
    # Each expected time = inner subscribe time + inner offset; the merged
    # stream completes only when the outer and all inners have completed (650).
    assert results.messages == [
        on_next(310, 101),
        on_next(320, 102),
        on_next(410, 103),
        on_next(410, 201),
        on_next(420, 104),
        on_next(420, 202),
        on_next(430, 203),
        on_next(440, 200),
        on_next(510, 105),
        on_next(510, 301),
        on_next(520, 106),
        on_next(520, 302),
        on_next(530, 303),
        on_next(540, 304),
        on_next(620, 305),
        on_completed(650),
    ]
def test_merge_observable_of_observable_data_non_overlapped(self):
    """merge_all with inners that finish before the next one starts.

    All inners complete quickly, so the merged stream completes together
    with the outer observable at 600.
    """
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(
            300,
            scheduler.create_cold_observable(
                on_next(10, 101), on_next(20, 102), on_completed(230)
            ),
        ),
        on_next(
            400,
            scheduler.create_cold_observable(
                on_next(10, 201),
                on_next(20, 202),
                on_next(30, 203),
                on_next(40, 200),
                on_completed(50),
            ),
        ),
        on_next(
            500,
            scheduler.create_cold_observable(
                on_next(10, 301),
                on_next(20, 302),
                on_next(30, 303),
                on_next(40, 304),
                on_completed(50),
            ),
        ),
        on_completed(600),
    )

    def create():
        return xs.pipe(ops.merge_all())

    results = scheduler.start(create)
    assert results.messages == [
        on_next(310, 101),
        on_next(320, 102),
        on_next(410, 201),
        on_next(420, 202),
        on_next(430, 203),
        on_next(440, 200),
        on_next(510, 301),
        on_next(520, 302),
        on_next(530, 303),
        on_next(540, 304),
        on_completed(600),
    ]
def test_merge_observable_of_observable_inner_throws(self):
    """An error in an inner observable terminates the merged stream.

    The second inner (subscribed at 400) errors at offset 50, so the merged
    output errors at 450 and the third inner (due at 500) is never emitted.
    """
    ex = "ex"
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(
            300,
            scheduler.create_cold_observable(
                on_next(10, 101), on_next(20, 102), on_completed(230)
            ),
        ),
        on_next(
            400,
            scheduler.create_cold_observable(
                on_next(10, 201),
                on_next(20, 202),
                on_next(30, 203),
                on_next(40, 200),
                on_error(50, ex),
            ),
        ),
        on_next(
            500,
            scheduler.create_cold_observable(
                on_next(10, 301),
                on_next(20, 302),
                on_next(30, 303),
                on_next(40, 304),
                on_completed(50),
            ),
        ),
        on_completed(600),
    )

    def create():
        return xs.pipe(ops.merge_all())

    results = scheduler.start(create)
    assert results.messages == [
        on_next(310, 101),
        on_next(320, 102),
        on_next(410, 201),
        on_next(420, 202),
        on_next(430, 203),
        on_next(440, 200),
        on_error(450, ex),
    ]
def test_merge_observable_of_observable_outer_throws(self):
    """An error on the outer observable terminates the merged stream.

    Both inners complete before the outer errors at 500, so all inner values
    are delivered and the merged stream ends with the outer's error.
    """
    ex = "ex"
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(
            300,
            scheduler.create_cold_observable(
                on_next(10, 101), on_next(20, 102), on_completed(230)
            ),
        ),
        on_next(
            400,
            scheduler.create_cold_observable(
                on_next(10, 201),
                on_next(20, 202),
                on_next(30, 203),
                on_next(40, 200),
                on_completed(50),
            ),
        ),
        on_error(500, ex),
    )

    def create():
        return xs.pipe(ops.merge_all())

    results = scheduler.start(create)
    assert results.messages == [
        on_next(310, 101),
        on_next(320, 102),
        on_next(410, 201),
        on_next(420, 202),
        on_next(430, 203),
        on_next(440, 200),
        on_error(500, ex),
    ]
def test_mergeconcat_basic(self):
    """merge(max_concurrent=2) queues inners beyond the concurrency limit.

    Four inners arrive at 210/260/270/320; only two run at a time, so the
    third is subscribed when the first completes (350) and the fourth when
    the third completes (460), shifting their emission times accordingly.
    """
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(
            210,
            scheduler.create_cold_observable(
                on_next(50, 1), on_next(100, 2), on_next(120, 3), on_completed(140)
            ),
        ),
        on_next(
            260,
            scheduler.create_cold_observable(
                on_next(20, 4), on_next(70, 5), on_completed(200)
            ),
        ),
        on_next(
            270,
            scheduler.create_cold_observable(
                on_next(10, 6), on_next(90, 7), on_next(110, 8), on_completed(130)
            ),
        ),
        on_next(
            320,
            scheduler.create_cold_observable(
                on_next(210, 9), on_next(240, 10), on_completed(300)
            ),
        ),
        on_completed(400),
    )

    def create():
        return xs.pipe(ops.merge(max_concurrent=2))

    results = scheduler.start(create)
    assert results.messages == [
        on_next(260, 1),
        on_next(280, 4),
        on_next(310, 2),
        on_next(330, 3),
        on_next(330, 5),
        on_next(360, 6),
        on_next(440, 7),
        on_next(460, 8),
        on_next(670, 9),
        on_next(700, 10),
        on_completed(760),
    ]
    # The outer is unsubscribed as soon as it completes at 400.
    assert xs.subscriptions == [subscribe(200, 400)]
def test_mergeconcat_basic_long(self):
    """Like test_mergeconcat_basic, but the second inner runs longer.

    The second inner now completes at offset 300 (instead of 200), which
    delays subscription of the queued fourth inner and shifts its values
    and the final completion later (690/720/780).
    """
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(
            210,
            scheduler.create_cold_observable(
                on_next(50, 1), on_next(100, 2), on_next(120, 3), on_completed(140)
            ),
        ),
        on_next(
            260,
            scheduler.create_cold_observable(
                on_next(20, 4), on_next(70, 5), on_completed(300)
            ),
        ),
        on_next(
            270,
            scheduler.create_cold_observable(
                on_next(10, 6), on_next(90, 7), on_next(110, 8), on_completed(130)
            ),
        ),
        on_next(
            320,
            scheduler.create_cold_observable(
                on_next(210, 9), on_next(240, 10), on_completed(300)
            ),
        ),
        on_completed(400),
    )

    def create():
        return xs.pipe(ops.merge(max_concurrent=2))

    results = scheduler.start(create)
    assert results.messages == [
        on_next(260, 1),
        on_next(280, 4),
        on_next(310, 2),
        on_next(330, 3),
        on_next(330, 5),
        on_next(360, 6),
        on_next(440, 7),
        on_next(460, 8),
        on_next(690, 9),
        on_next(720, 10),
        on_completed(780),
    ]
    assert xs.subscriptions == [subscribe(200, 400)]
def test_mergeconcat_basic_wide(self):
    """merge(max_concurrent=3): the first three inners run concurrently.

    With room for three subscriptions, only the fourth inner (arriving at
    420) has to wait; it is subscribed once a slot frees up, producing its
    values at 630/660 and completing the merge at 720.
    """
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(
            210,
            scheduler.create_cold_observable(
                on_next(50, 1), on_next(100, 2), on_next(120, 3), on_completed(140)
            ),
        ),
        on_next(
            260,
            scheduler.create_cold_observable(
                on_next(20, 4), on_next(70, 5), on_completed(300)
            ),
        ),
        on_next(
            270,
            scheduler.create_cold_observable(
                on_next(10, 6), on_next(90, 7), on_next(110, 8), on_completed(130)
            ),
        ),
        on_next(
            420,
            scheduler.create_cold_observable(
                on_next(210, 9), on_next(240, 10), on_completed(300)
            ),
        ),
        on_completed(450),
    )

    def create():
        return xs.pipe(ops.merge(max_concurrent=3))

    results = scheduler.start(create)
    assert results.messages == [
        on_next(260, 1),
        on_next(280, 4),
        on_next(280, 6),
        on_next(310, 2),
        on_next(330, 3),
        on_next(330, 5),
        on_next(360, 7),
        on_next(380, 8),
        on_next(630, 9),
        on_next(660, 10),
        on_completed(720),
    ]
    assert xs.subscriptions == [subscribe(200, 450)]
def test_mergeconcat_basic_late(self):
    """merge(max_concurrent=3) with an outer that completes late (750).

    The inner values are identical to the 'wide' case; because the outer
    stays alive until 750 (after every inner has finished), the merged
    stream completes exactly when the outer does.
    """
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(
            210,
            scheduler.create_cold_observable(
                on_next(50, 1), on_next(100, 2), on_next(120, 3), on_completed(140)
            ),
        ),
        on_next(
            260,
            scheduler.create_cold_observable(
                on_next(20, 4), on_next(70, 5), on_completed(300)
            ),
        ),
        on_next(
            270,
            scheduler.create_cold_observable(
                on_next(10, 6), on_next(90, 7), on_next(110, 8), on_completed(130)
            ),
        ),
        on_next(
            420,
            scheduler.create_cold_observable(
                on_next(210, 9), on_next(240, 10), on_completed(300)
            ),
        ),
        on_completed(750),
    )

    def create():
        return xs.pipe(ops.merge(max_concurrent=3))

    results = scheduler.start(create)
    assert results.messages == [
        on_next(260, 1),
        on_next(280, 4),
        on_next(280, 6),
        on_next(310, 2),
        on_next(330, 3),
        on_next(330, 5),
        on_next(360, 7),
        on_next(380, 8),
        on_next(630, 9),
        on_next(660, 10),
        on_completed(750),
    ]
    assert xs.subscriptions == [subscribe(200, 750)]
def test_mergeconcat_disposed(self):
    """Disposing the merged subscription at 450 truncates the output.

    Same setup as test_mergeconcat_basic, but scheduler.start is told to
    dispose at 450, so values scheduled after 450 (460/670/700) and the
    completion never appear.
    """
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(
            210,
            scheduler.create_cold_observable(
                on_next(50, 1), on_next(100, 2), on_next(120, 3), on_completed(140)
            ),
        ),
        on_next(
            260,
            scheduler.create_cold_observable(
                on_next(20, 4), on_next(70, 5), on_completed(200)
            ),
        ),
        on_next(
            270,
            scheduler.create_cold_observable(
                on_next(10, 6), on_next(90, 7), on_next(110, 8), on_completed(130)
            ),
        ),
        on_next(
            320,
            scheduler.create_cold_observable(
                on_next(210, 9), on_next(240, 10), on_completed(300)
            ),
        ),
        on_completed(400),
    )

    def create():
        return xs.pipe(ops.merge(max_concurrent=2))

    results = scheduler.start(create, disposed=450)
    assert results.messages == [
        on_next(260, 1),
        on_next(280, 4),
        on_next(310, 2),
        on_next(330, 3),
        on_next(330, 5),
        on_next(360, 6),
        on_next(440, 7),
    ]
    assert xs.subscriptions == [subscribe(200, 400)]
def test_mergeconcat_outererror(self):
    """An outer error at 400 ends the merge even with inners still queued.

    The error propagates immediately; values the active inners would have
    produced after 400 are dropped, and the queued fourth inner never runs.
    """
    ex = "ex"
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(
            210,
            scheduler.create_cold_observable(
                on_next(50, 1), on_next(100, 2), on_next(120, 3), on_completed(140)
            ),
        ),
        on_next(
            260,
            scheduler.create_cold_observable(
                on_next(20, 4), on_next(70, 5), on_completed(200)
            ),
        ),
        on_next(
            270,
            scheduler.create_cold_observable(
                on_next(10, 6), on_next(90, 7), on_next(110, 8), on_completed(130)
            ),
        ),
        on_next(
            320,
            scheduler.create_cold_observable(
                on_next(210, 9), on_next(240, 10), on_completed(300)
            ),
        ),
        on_error(400, ex),
    )

    def create():
        return xs.pipe(ops.merge(max_concurrent=2))

    results = scheduler.start(create)
    assert results.messages == [
        on_next(260, 1),
        on_next(280, 4),
        on_next(310, 2),
        on_next(330, 3),
        on_next(330, 5),
        on_next(360, 6),
        on_error(400, ex),
    ]
    assert xs.subscriptions == [subscribe(200, 400)]
def test_mergeconcat_innererror(self):
    """An inner error terminates the merge with that error.

    The third inner (subscribed at 350 once a slot frees) errors at offset
    140, so the merged stream errors at 490 after delivering the values
    emitted up to that point.
    """
    ex = "ex"
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(
            210,
            scheduler.create_cold_observable(
                on_next(50, 1), on_next(100, 2), on_next(120, 3), on_completed(140)
            ),
        ),
        on_next(
            260,
            scheduler.create_cold_observable(
                on_next(20, 4), on_next(70, 5), on_completed(200)
            ),
        ),
        on_next(
            270,
            scheduler.create_cold_observable(
                on_next(10, 6), on_next(90, 7), on_next(110, 8), on_error(140, ex)
            ),
        ),
        on_next(
            320,
            scheduler.create_cold_observable(
                on_next(210, 9), on_next(240, 10), on_completed(300)
            ),
        ),
        on_completed(400),
    )

    def create():
        return xs.pipe(ops.merge(max_concurrent=2))

    results = scheduler.start(create)
    assert results.messages == [
        on_next(260, 1),
        on_next(280, 4),
        on_next(310, 2),
        on_next(330, 3),
        on_next(330, 5),
        on_next(360, 6),
        on_next(440, 7),
        on_next(460, 8),
        on_error(490, ex),
    ]
    assert xs.subscriptions == [subscribe(200, 400)]
def test_merge_112233(self):
    """Merging two hot sources with coincident timestamps interleaves pairs.

    Values arriving at the same virtual time appear twice (once per source),
    and the merge completes when the later source completes (360).
    """
    scheduler = TestScheduler()
    first = scheduler.create_hot_observable(
        on_next(250, 1), on_next(300, 2), on_next(350, 3), on_completed(360)
    )
    second = scheduler.create_hot_observable(
        on_next(250, 1), on_next(300, 2), on_next(320, 3), on_completed(340)
    )
    results = scheduler.start(lambda: first.pipe(ops.merge(second)))
    expected = [
        on_next(250, 1),
        on_next(250, 1),
        on_next(300, 2),
        on_next(300, 2),
        on_next(320, 3),
        on_next(350, 3),
        on_completed(360),
    ]
    assert results.messages == expected
    assert first.subscriptions == [subscribe(200, 360)]
| 31.09447
| 88
| 0.496925
| 2,946
| 26,990
| 4.311948
| 0.064834
| 0.15209
| 0.065496
| 0.085964
| 0.908683
| 0.89412
| 0.87869
| 0.859167
| 0.853893
| 0.806266
| 0
| 0.113624
| 0.395443
| 26,990
| 867
| 89
| 31.130334
| 0.664889
| 0
| 0
| 0.803689
| 0
| 0
| 0.000741
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 1
| 0.083004
| false
| 0
| 0.00527
| 0.040843
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0cc2079f06f690caddc243dc13586990eb90d47f
| 14,806
|
py
|
Python
|
test/test_docker.py
|
ai-traders/dojo
|
ad5a2490ccab466f80a0840d7d98688540817239
|
[
"Apache-2.0"
] | 9
|
2019-03-13T10:46:06.000Z
|
2019-07-16T19:28:22.000Z
|
test/test_docker.py
|
ai-traders/dojo
|
ad5a2490ccab466f80a0840d7d98688540817239
|
[
"Apache-2.0"
] | 4
|
2019-02-05T16:07:42.000Z
|
2019-05-03T11:06:28.000Z
|
test/test_docker.py
|
ai-traders/dojo
|
ad5a2490ccab466f80a0840d7d98688540817239
|
[
"Apache-2.0"
] | 1
|
2019-03-05T19:38:43.000Z
|
2019-03-05T19:38:43.000Z
|
from test.support.common import *
def clean_up_docker_container():
    """Stop, then remove, any leftover container named 'testdojorunid'.

    Each shell pipeline lists matching container IDs and feeds them to the
    docker subcommand; `--no-run-if-empty` keeps xargs quiet when none exist.
    """
    for docker_subcommand in ('stop', 'rm'):
        run_shell(
            'docker ps -a -q --filter "name=testdojorunid" '
            '| xargs --no-run-if-empty docker ' + docker_subcommand
        )
def test_docker_container_is_removed():
    """Assert that no container named 'testdojorunid' is left behind."""
    result = run_command('docker', ['ps', '-a', '--filter', "name=testdojorunid"])
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    # NOTE(review): `docker ps` prints its container listing to stdout, so
    # checking stderr here may never catch a lingering container — confirm
    # whether this was meant to be `result.stdout`.
    assert not 'testdojorunid' in result.stderr, dojo_combined_output_str
    assert result.returncode == 0
def test_docker_when_zero_exit():
    """Happy path: run `whoami` in alpine; dojo and the command both exit 0.

    Dojo writes its own diagnostics to stderr; the container command's
    output ('root') arrives on stdout.
    """
    clean_up_docker_container()
    result = run_dojo('--debug=true --test=true --image=alpine:3.15 whoami'.split(' '))
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    assert 'Dojo version' in result.stderr, dojo_combined_output_str
    assert 'root' in result.stdout, dojo_combined_output_str
    assert 'alpine:3.15 whoami' in result.stderr, dojo_combined_output_str
    assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from cleaning: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from signals: 0' in result.stderr, dojo_combined_output_str
    assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
    assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
    assert result.returncode == 0
    # Verify cleanup actually removed the container.
    test_docker_container_is_removed()
def test_docker_capture_output():
    """The container command's stdout is captured exactly (here: '/root\\n')."""
    clean_up_docker_container()
    # run this one test manually: pytest --capture=fd --verbose test/test_docker.py::test_docker_capture_output
    # run this without pytest: ./bin/dojo --debug=true --test=true --image=alpine:3.15 sh -c "printenv HOME"
    result = run_dojo(['--debug=true', '--test=true', '--image=alpine:3.15', 'sh', '-c', "printenv HOME"])
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    assert 'Dojo version' in result.stderr, dojo_combined_output_str
    # Exact-equality check: stdout must contain nothing but the command output.
    assert '/root\n' == result.stdout, dojo_combined_output_str
    assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from cleaning: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from signals: 0' in result.stderr, dojo_combined_output_str
    assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
    assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
    assert result.returncode == 0
    test_docker_container_is_removed()
def test_docker_capture_output_when_unable_to_pull_image():
    """When the image cannot be pulled, dojo surfaces docker's 125 exit code."""
    clean_up_docker_container()
    # pytest --capture=fd --verbose test/test_docker.py::test_docker_capture_output_when_unable_to_pull_image
    # ./bin/dojo --debug=true --test=true --image=alpine:3.15 sh -c "printenv HOME && hostname"
    result = run_dojo(['--debug=true', '--test=true', '--image=no_such_image91291925129q783187314218194:abc111aaa.9981412', 'sh', '-c', "printenv HOME"])
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    assert 'Dojo version' in result.stderr, dojo_combined_output_str
    # this is the main reason for this test:
    assert 'Unable to find image' in result.stderr, dojo_combined_output_str
    assert 'Exit status from run command: 125' in result.stderr, dojo_combined_output_str
    assert 'Exit status from cleaning: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from signals: 0' in result.stderr, dojo_combined_output_str
    assert result.returncode == 125
    test_docker_container_is_removed()
def test_docker_when_non_existent_command():
    """A command missing inside the image yields docker's 127 exit code."""
    clean_up_docker_container()
    result = run_dojo('--debug=true --test=true --image=alpine:3.15 notexistentcommand'.split(' '))
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    assert 'Dojo version' in result.stderr, dojo_combined_output_str
    assert 'executable file not found' in result.stderr, dojo_combined_output_str
    assert 'Exit status from run command: 127' in result.stderr, dojo_combined_output_str
    # Only stdout is checked for warnings: stderr legitimately carries the error.
    assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
    assert result.returncode == 127
    test_docker_container_is_removed()
def test_docker_when_no_command():
    """Running dojo non-interactively with no command still exits cleanly."""
    clean_up_docker_container()
    result = run_dojo(['--debug=true', '--test=true', '--image=alpine:3.15', '-i=false'])
    combined_output = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    # All diagnostics are expected on stderr, in this order.
    for expected_fragment in (
        'Dojo version',
        'Exit status from run command: 0',
        'Exit status from cleaning: 0',
        'Exit status from signals: 0',
    ):
        assert expected_fragment in result.stderr, combined_output
    assert_no_warnings_or_errors(result.stderr, combined_output)
    assert_no_warnings_or_errors(result.stdout, combined_output)
    assert result.returncode == 0
    test_docker_container_is_removed()
def test_docker_when_double_dash_command_split():
    """A command given after `--` is forwarded to the container unchanged."""
    clean_up_docker_container()
    result = run_dojo('--debug=true --test=true --image=alpine:3.15 -- whoami'.split(' '))
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    assert 'Dojo version' in result.stderr, dojo_combined_output_str
    assert 'root' in result.stdout, dojo_combined_output_str
    assert 'alpine:3.15 whoami' in result.stderr, dojo_combined_output_str
    assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from cleaning: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from signals: 0' in result.stderr, dojo_combined_output_str
    assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
    assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
    assert result.returncode == 0
    test_docker_container_is_removed()
def test_docker_when_shell_command():
    """A multi-argument shell command (`sh -c 'echo hello'`) runs correctly."""
    clean_up_docker_container()
    result = run_dojo(['--debug=true', '--test=true', '--image=alpine:3.15', 'sh', '-c', 'echo hello'])
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    assert 'Dojo version' in result.stderr, dojo_combined_output_str
    assert 'hello' in result.stdout, dojo_combined_output_str
    assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from cleaning: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from signals: 0' in result.stderr, dojo_combined_output_str
    assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
    assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
    assert result.returncode == 0
    test_docker_container_is_removed()
def test_docker_preserves_env_vars():
    """An env var set on the host process is visible inside the container."""
    clean_up_docker_container()
    # Copy the host environment and add a marker variable to look for.
    envs = dict(os.environ)
    envs['ABC'] = 'custom_value'
    result = run_dojo(
        ['--debug=true', '--test=true', '--image=alpine:3.15', 'sh', '-c', 'env | grep ABC'],
        env=envs)
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    assert 'Dojo version' in result.stderr, dojo_combined_output_str
    assert 'custom_value' in result.stdout, dojo_combined_output_str
    assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
    assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
    assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
    assert result.returncode == 0
# Bash experiments:
# $ export MULTILINE1="first line\nsecond line"
# $ export MULTILINE2="first line
# > second line"
# $ read -d '' MULTILINE3 <<EOF
# > first line
# > second line
# > EOF
# $ export MULTILINE3
#
# $ echo $MULTILINE1
# first line\nsecond line
# $ echo $MULTILINE2
# first line second line
# $ echo $MULTILINE3
# first line second line
# $ echo "$MULTILINE1"
# first line\nsecond line
# $ echo "$MULTILINE2"
# first line
# second line
# $ echo "$MULTILINE3"
# first line
# second line
#
# In Dojo, only the MULTILINE2 and MULTILINE3 will be put among multiline variables.
# MULTILINE1 will be treated as oneline variable.
def test_docker_preserves_multiline_env_vars():
    """A multiline env var survives the trip into the container.

    See the bash-experiment comment block above this function for which
    kinds of multiline variables dojo treats as multiline.
    """
    clean_up_docker_container()
    envs = dict(os.environ)
    # Literal newline inside the value (kept flush-left so the value is
    # exactly "first line\nsecond line").
    envs['ABC'] = """first line
second line"""
    result = run_dojo(
        # We need to source the file: /etc/dojo.d/variables/01-bash-functions.sh
        # explicitly, because the alpine docker image is not a Dojo image, i.e.
        # it does not have the Dojo entrypoint.sh.
        ['--debug=true', '--test=true', '--image=alpine:3.15', 'sh', '-c', '"source /etc/dojo.d/variables/00-multiline-vars.sh && env | grep -A 1 ABC"'],
        env=envs)
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    assert 'Dojo version' in result.stderr, dojo_combined_output_str
    assert '/etc/dojo.d/variables/00-multiline-vars.sh' in result.stderr, dojo_combined_output_str
    assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
    assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
    assert result.returncode == 0
    assert 'Exit status from run command:' in result.stderr, dojo_combined_output_str
    assert """first line
second line""" in result.stdout, dojo_combined_output_str
def test_docker_preserves_bash_functions_from_env_vars():
    """An exported bash function reaches the container and can be invoked.

    Uses the run_dojo_and_set_bash_func helper (which defines the function in
    the child environment) rather than envs, because setting
    BASH_FUNC_...%% directly does not influence the dojo process.
    """
    clean_up_docker_container()
    envs = dict(os.environ)
    # the following does not influence the dojo process
    # envs['BASH_FUNC_my_bash_func%%'] = """()) { echo "hello"
    # }"""
    proc = run_dojo_and_set_bash_func(
        # We need to source the file: /etc/dojo.d/variables/01-bash-functions.sh
        # explicitly, because the alpine docker image is not a Dojo image, i.e.
        # it does not have the Dojo entrypoint.sh. Even if it had,
        # we'd still have to source the file explicitly, because
        # sudo does not preserve bash functions.
        # Dojo entrypoint sources this file too, but then it runs sudo.
        # https://unix.stackexchange.com/questions/549140/why-doesnt-sudo-e-preserve-the-function-environment-variables-exported-by-ex
        # https://unix.stackexchange.com/a/233097
        ['--debug=true', '--test=true', '--image=alpine:3.15', 'sh', '-c', '"apk add -U bash && bash -c \'source /etc/dojo.d/variables/01-bash-functions.sh && my_bash_func\'"'],
        env=envs)
    stdout_value_bytes, stderr_value_bytes = proc.communicate()
    stdout = stdout_value_bytes.decode("utf-8")
    stderr = stderr_value_bytes.decode("utf-8")
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(stdout, stderr)
    assert 'Dojo version' in stderr, dojo_combined_output_str
    assert 'Written file /tmp/test-dojo-environment-bash-functions-testdojorunid, contents:' in stderr, dojo_combined_output_str
    assert 'my_bash_func() { echo "hello"' in stderr, dojo_combined_output_str
    assert '/etc/dojo.d/variables/01-bash-functions.sh' in stderr, dojo_combined_output_str
    assert_no_warnings_or_errors(stderr, dojo_combined_output_str)
    assert_no_warnings_or_errors(stdout, dojo_combined_output_str)
    # the bash function was invoked
    assert 'hello' in stdout, dojo_combined_output_str
    assert 'Exit status from run command: 0' in stderr, dojo_combined_output_str
def test_docker_when_custom_relative_directory():
    """A Dojofile given via a relative `-c` path is honored."""
    clean_up_docker_container()
    result = run_dojo(['-c', 'test/test-files/Dojofile', '--debug=true', '--test=true', '--image=alpine:3.15', 'whoami'])
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    assert 'Dojo version' in result.stderr, dojo_combined_output_str
    assert 'root' in result.stdout, dojo_combined_output_str
    assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from cleaning: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from signals: 0' in result.stderr, dojo_combined_output_str
    assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
    assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
    assert result.returncode == 0
    test_docker_container_is_removed()
def test_docker_when_nonexistent_custom_relative_directory():
    """A Dojofile pointing at a missing work directory warns but still runs."""
    clean_up_docker_container()
    # Ensure the directory really is absent so the WARN path is exercised.
    # NOTE(review): os.removedirs only removes *empty* directories (and walks
    # up the parents); if a previous run left files inside, this raises
    # OSError uncaught — confirm whether shutil.rmtree was intended.
    try:
        os.removedirs(os.path.join(project_root, 'test/test-files/not-existent'))
    except FileNotFoundError:
        pass
    result = run_dojo(['-c', 'test/test-files/Dojofile.work_not_exists', '--debug=true', '--test=true', '--image=alpine:3.15', 'whoami'])
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    assert 'Dojo version' in result.stderr, dojo_combined_output_str
    assert 'root' in result.stdout, dojo_combined_output_str
    assert "test/test-files/not-existent does not exist" in result.stderr, dojo_combined_output_str
    assert 'WARN' in result.stderr, dojo_combined_output_str
    assert 'Exit status from run command: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from cleaning: 0' in result.stderr, dojo_combined_output_str
    assert 'Exit status from signals: 0' in result.stderr, dojo_combined_output_str
    assert result.returncode == 0
    test_docker_container_is_removed()
def test_docker_pull_when_image_can_be_pulled():
    """`--action=pull` succeeds for an image that exists on Docker Hub."""
    result = run_dojo('--debug=true --action=pull --image=alpine:3.15'.split(' '))
    dojo_combined_output_str = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    assert "Dojo version" in result.stderr, dojo_combined_output_str
    assert "Pulling from library/alpine" in result.stdout, dojo_combined_output_str
    assert result.returncode == 0
    assert_no_warnings_or_errors(result.stderr, dojo_combined_output_str)
    assert_no_warnings_or_errors(result.stdout, dojo_combined_output_str)
def test_docker_pull_when_no_such_image_exists():
    """`--action=pull` on a nonexistent image fails with exit code 1."""
    cli_args = '--debug=true --action=pull --image=no_such_image91291925129q783187314218194:abc111aaa.9981412'.split(' ')
    result = run_dojo(cli_args)
    combined_output = "stdout:\n{0}\nstderror:\n{1}".format(result.stdout, result.stderr)
    # Nothing is pulled, so stdout must be completely empty.
    assert "" == result.stdout, combined_output
    assert "Dojo version" in result.stderr, combined_output
    assert "repository does not exist or may require 'docker login'" in result.stderr, combined_output
    assert result.returncode == 1
| 54.634686
| 177
| 0.749088
| 2,150
| 14,806
| 4.879535
| 0.104186
| 0.115528
| 0.173291
| 0.202173
| 0.856925
| 0.823849
| 0.806691
| 0.787818
| 0.753026
| 0.73482
| 0
| 0.018794
| 0.141091
| 14,806
| 271
| 178
| 54.634686
| 0.806165
| 0.128326
| 0
| 0.610526
| 0
| 0.015789
| 0.246558
| 0.066423
| 0
| 0
| 0
| 0
| 0.526316
| 1
| 0.084211
| false
| 0.005263
| 0.005263
| 0
| 0.089474
| 0.010526
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0cd6679e257b3859ec55fb663a117a00ac93c258
| 8,972
|
py
|
Python
|
scripts/achived/post_plotting.py
|
nmningmei/metacognition
|
734082e247cc7fc9d277563e2676e10692617a3f
|
[
"MIT"
] | 3
|
2019-07-09T15:37:46.000Z
|
2019-07-17T16:28:02.000Z
|
scripts/achived/post_plotting.py
|
nmningmei/metacognition
|
734082e247cc7fc9d277563e2676e10692617a3f
|
[
"MIT"
] | null | null | null |
scripts/achived/post_plotting.py
|
nmningmei/metacognition
|
734082e247cc7fc9d277563e2676e10692617a3f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 23 22:48:24 2018

@author: ning

Post-hoc plotting of decoding results: loads CSVs from ../results/ and saves
figures to ../figures/.
"""
import os
working_dir = ''  # presumably a placeholder; never read in this script — TODO confirm
import pandas as pd
pd.options.mode.chained_assignment = None  # silence SettingWithCopyWarning globally
import seaborn as sns
import numpy as np
# NOTE(review): the sklearn imports below are unused in the visible plotting
# code — possibly leftovers from the analysis script this was split from.
from sklearn.model_selection import StratifiedKFold,permutation_test_score
from sklearn.ensemble import RandomForestClassifier
from sklearn.multiclass import OneVsOneClassifier,OneVsRestClassifier
sns.set_style('whitegrid')
sns.set_context('poster')
from utils import post_processing
saving_dir = '../figures/'  # all figures are written here
##############################################################################
############################## plotting part #################################
##############################################################################
############################## using all 6 features ##########################
if __name__ == '__main__':
    # Load cross-validated decoding results produced by the analysis scripts.
    pos = pd.read_csv('../results/Pos.csv')
    att = pd.read_csv('../results/ATT.csv')

    # ---- Probability of Success: model-comparison score plot ----
    # don't work on the loaded data frame, make a copy of it
    df = pos.copy()
    g = sns.factorplot(x = 'window',
                       y = 'score',
                       hue = 'model',
                       data = df,
                       hue_order = ['DecisionTreeClassifier','LogisticRegression'],
                       aspect = 3,
                       dodge = 0.1)
    # for seaborn 0.9.0
    #g = sns.catplot( x = 'window',
    #                 y = 'score',
    #                 hue = 'model',
    #                 data = df,
    #                 aspect = 3,
    #                 kind = 'point',
    #                 hue_order = ['DecisionTreeClassifier','LogisticRegression'],
    #                 ci = 95)
    (g.set_axis_labels('Trials look back',
                       'Clasifi.Score (AUC ROC)')
     .fig.suptitle('Model Comparison of Decoding Probability of Success'))
    g.fig.savefig(os.path.join(saving_dir,
                               'Model Comparison of Decoding Probability of Success.png'),
                  dpi = 500,
                  bbox_inches = 'tight')

    # ---- Probability of Success: per-feature weight plots ----
    # post_processing reshapes the wide results into long form (Window /
    # Values / Attributes / Models columns).
    df_post = post_processing(df)
    g = sns.factorplot(x = 'Window',
                       y = 'Values',
                       hue = 'Attributes',
                       row = 'Models',
                       row_order=['DecisionTreeClassifier','LogisticRegression'],
                       data = df_post,
                       aspect = 3,
                       sharey = False,
                       dodge = 0.1)
    # for seaborn 0.9.0
    #g = sns.catplot( x = 'window',
    #                 y = 'value',
    #                 hue = 'Attributions',
    #                 row = 'model',
    #                 data = df,
    #                 aspect = 3,
    #                 sharey = False,
    #                 kind = 'point',
    #                 ci = 95)
    (g.set_axis_labels('Trials look back',
                       '')
     .set_titles('{row_name}')
     .fig.suptitle('Probability of Success',
                   y = 1.0))
    # Row 0 is the tree (feature importances), row 1 the logistic model
    # (coefficients), matching row_order above.
    g.fig.axes[0].set(ylabel='Feature Importance')
    g.fig.axes[1].set(ylabel='Coefficients')
    g.savefig(os.path.join(saving_dir,'Weights plot of Probability of Success.png'),
              dpi = 500,
              bbox_inches = 'tight',)

    # ---- Attention: model-comparison score plot ----
    df = att.copy()
    g = sns.factorplot( x = 'window',
                        y = 'score',
                        hue = 'model',
                        data = df,
                        hue_order = ['DecisionTreeClassifier','LogisticRegression'],
                        aspect = 3,
                        dodge = 0.1)
    # for seaborn 0.9.0
    #g = sns.catplot( x = 'window',
    #                 y = 'score',
    #                 hue = 'model',
    #                 data = df,
    #                 aspect = 3,
    #                 kind = 'point',
    #                 hue_order = ['DecisionTreeClassifier','LogisticRegression'],
    #                 ci = 95)
    (g.set_axis_labels('Trials look back',
                       'Clasifi.Score (AUC ROC)')
     .fig.suptitle('Model Comparison of Decoding Attention'))
    g.savefig(os.path.join(saving_dir,'Model Comparison of Decoding Attention.png'),
              dpi = 500,
              bbox_inches = 'tight')

    # ---- Attention: per-feature weight plots ----
    df_post = post_processing(df)
    g = sns.factorplot( x = 'Window',
                        y = 'Values',
                        hue = 'Attributes',
                        row = 'Models',
                        row_order=['DecisionTreeClassifier','LogisticRegression'],
                        data = df_post,
                        aspect = 3,
                        sharey = False,
                        dodge = 0.1)
    # for seaborn 0.9.0
    #g = sns.catplot( x = 'window',
    #                 y = 'value',
    #                 hue = 'Attributions',
    #                 row = 'model',
    #                 data = df,
    #                 aspect = 3,
    #                 sharey = False,
    #                 kind = 'point',
    #                 ci = 95)
    (g.set_axis_labels('Trials look back',
                       '')
     .set_titles('{row_name}')
     .fig.suptitle('Attention',
                   y = 1.0))
    g.fig.axes[0].set(ylabel='Feature Importance')
    g.fig.axes[1].set(ylabel='Coefficients')
    g.savefig(os.path.join(saving_dir,'Weights plot of Attention.png'),
              dpi = 500,
              bbox_inches = 'tight',)
    ###############################################################################
    ###################### plot the normalized weights ############################
    ###############################################################################
    # ---- Significance (t-test) bar plots, with the 0.05 threshold line ----
    pos_ttest = pd.read_csv('../results/Pos_ttest.csv')
    att_ttest = pd.read_csv('../results/ATT_ttest.csv')
    df = pos_ttest.copy()
    g = sns.factorplot(x = 'window',
                       y = 'ps_mean',
                       hue = 'model',
                       ci = None,
                       kind = 'bar',
                       data = df,
                       hue_order = ['DecisionTreeClassifier','LogisticRegression'],
                       aspect = 2.5)
    # for seaborn 0.9.0
    #g = sns.catplot( x = 'window',
    #                 y = 'ps_mean',
    #                 hue = 'model',
    #                 ci = None,
    #                 kind = 'bar',
    #                 data = df,
    #                 aspect = 2.5,
    #                 )
    g.set_axis_labels('Trials look back',
                      'Mean of P values (corrected)')
    # Dashed red line marks the alpha = 0.05 significance threshold.
    g.fig.axes[0].axhline(0.05,
                          color = 'red',
                          linestyle = '--',
                          alpha = 0.6)
    g.fig.suptitle('Probability of Success\nBonferroni corrected P values',y=1.07)
    # NOTE(review): 'Proabability' typo is preserved — the filename is part of
    # the pipeline's observable output.
    g.savefig(os.path.join(saving_dir,'Significance test of Proabability of Success.png'),
              dpi = 500,
              bbox_inches = 'tight')
    df = att_ttest.copy()
    g = sns.factorplot(x = 'window',
                       y = 'ps_mean',
                       hue ='model',
                       ci = None,
                       kind = 'bar',
                       data = df,
                       hue_order = ['DecisionTreeClassifier','LogisticRegression'],
                       aspect = 2.5)
    # for seaborn 0.9.0
    #g = sns.catplot( x = 'window',
    #                 y = 'ps_mean',
    #                 hue = 'model',
    #                 ci = None,
    #                 kind = 'bar',
    #                 data =df,
    #                 aspect = 2.5,
    #                 )
    g.set_axis_labels('Trials look back',
                      'Mean of P values (corrected)')
    g.fig.axes[0].axhline(0.05,
                          color = 'red',
                          linestyle = '--',
                          alpha = 0.6)
    g.fig.suptitle('Attention\nBonferroni corrected P values',
                   y = 1.05)
    g.savefig(os.path.join(saving_dir,'Significance test of Attention.png'),
              dpi = 500,
              bbox_inches = 'tight')
| 40.414414
| 90
| 0.372938
| 731
| 8,972
| 4.477428
| 0.218878
| 0.014665
| 0.029331
| 0.027498
| 0.81821
| 0.776963
| 0.759242
| 0.750076
| 0.729911
| 0.729911
| 0
| 0.022852
| 0.47325
| 8,972
| 221
| 91
| 40.597285
| 0.6697
| 0.244204
| 0
| 0.714286
| 0
| 0
| 0.203594
| 0.032248
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.079365
| 0
| 0.079365
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b8db954c70b52760627e847eac6c2834e104e9e
| 879
|
py
|
Python
|
unit_test/sklearn_like_toolkit/search_param/test_ParamOpt.py
|
demetoir/MLtools
|
8c42fcd4cc71728333d9c116ade639fe57d50d37
|
[
"MIT"
] | null | null | null |
unit_test/sklearn_like_toolkit/search_param/test_ParamOpt.py
|
demetoir/MLtools
|
8c42fcd4cc71728333d9c116ade639fe57d50d37
|
[
"MIT"
] | null | null | null |
unit_test/sklearn_like_toolkit/search_param/test_ParamOpt.py
|
demetoir/MLtools
|
8c42fcd4cc71728333d9c116ade639fe57d50d37
|
[
"MIT"
] | null | null | null |
from script.sklearn_like_toolkit.ClassifierPack import ClassifierPack
from script.sklearn_like_toolkit.param_search.ParamOpt import ParamOpt
def test_ParamOpt():
    """Smoke-test ParamOpt hyper-parameter search on the iris dataset.

    For each ``n_jobs`` setting a fresh ``skDecisionTreeClf`` is built,
    tuned with 10-fold CV over 10 search iterations, and scored on the
    training data.  The original body duplicated the whole setup/fit/score
    sequence verbatim with only ``n_jobs`` differing; the loop below keeps
    the behavior (same calls, same order, same prints) without the copy.
    """
    from sklearn.datasets import load_iris

    data = load_iris()
    y = data.target
    x = data.data

    # Identical search on identical data; only the parallelism level changes.
    for n_jobs in (1, 2):
        clf_pack = ClassifierPack(['skDecisionTreeClf'])
        dtree = clf_pack['skDecisionTreeClf']
        opt = ParamOpt(cv=10, n_iter=10, n_jobs=n_jobs)
        dtree = opt.fit(dtree, x, y)
        train_score = dtree.score(x, y)
        print(train_score)
| 26.636364
| 71
| 0.671217
| 117
| 879
| 4.854701
| 0.299145
| 0.049296
| 0.059859
| 0.073944
| 0.852113
| 0.753521
| 0.753521
| 0.753521
| 0.753521
| 0.753521
| 0
| 0.014706
| 0.226394
| 879
| 32
| 72
| 27.46875
| 0.820588
| 0
| 0
| 0.782609
| 0
| 0
| 0.080283
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.173913
| 0
| 0.217391
| 0.086957
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b9ab4f2b169b1bcf3ff597e2b67e4659d98c4a4
| 2,297
|
py
|
Python
|
tests/terraform/checks/provider/aws/test_credentials.py
|
jamesholland-uk/checkov
|
d73fd4bd7096d48ab3434a92a177bcc55605460a
|
[
"Apache-2.0"
] | 1
|
2022-02-20T21:20:39.000Z
|
2022-02-20T21:20:39.000Z
|
tests/terraform/checks/provider/aws/test_credentials.py
|
jamesholland-uk/checkov
|
d73fd4bd7096d48ab3434a92a177bcc55605460a
|
[
"Apache-2.0"
] | 3
|
2022-03-07T20:37:31.000Z
|
2022-03-21T20:20:14.000Z
|
tests/terraform/checks/provider/aws/test_credentials.py
|
jamesholland-uk/checkov
|
d73fd4bd7096d48ab3434a92a177bcc55605460a
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import hcl2
from checkov.terraform.checks.provider.aws.credentials import check
from checkov.common.models.enums import CheckResult
class TestCredentials(unittest.TestCase):
    """Tests for the check that flags hard-coded AWS provider credentials."""

    @staticmethod
    def _scan(hcl_snippet):
        """Parse *hcl_snippet* and run the credentials check on its aws provider."""
        parsed = hcl2.loads(hcl_snippet)
        return check.scan_provider_conf(conf=parsed["provider"][0]["aws"])

    def test_success_empty(self):
        # An empty provider block carries no credentials at all.
        scan_result = self._scan(
            """
        provider "aws" {}
        """
        )
        self.assertEqual(CheckResult.PASSED, scan_result)

    def test_success_region(self):
        # A region on its own is configuration, not a credential.
        scan_result = self._scan(
            """
        provider "aws" {
            region = "us-west-2"
        }
        """
        )
        self.assertEqual(CheckResult.PASSED, scan_result)

    def test_failure_both_keys(self):
        # Access key and secret key hard-coded together must fail.
        scan_result = self._scan(
            """
        provider "aws" {
            region = "us-west-2"
            access_key = "AKIAIOSFODNN7EXAMPLE"
            secret_key = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"
        }
        """
        )
        self.assertEqual(CheckResult.FAILED, scan_result)

    def test_failure_access_key(self):
        # A lone hard-coded access key is enough to fail the check.
        scan_result = self._scan(
            """
        provider "aws" {
            region = "us-west-2"
            access_key = "AKIAIOSFODNN7EXAMPLE"
        }
        """
        )
        self.assertEqual(CheckResult.FAILED, scan_result)

    def test_failure_secret_key(self):
        # A lone hard-coded secret key is equally fatal.
        scan_result = self._scan(
            """
        provider "aws" {
            region = "us-west-2"
            secret_key = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"
        }
        """
        )
        self.assertEqual(CheckResult.FAILED, scan_result)
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()
| 30.626667
| 71
| 0.569003
| 229
| 2,297
| 5.423581
| 0.218341
| 0.144928
| 0.040258
| 0.056361
| 0.808374
| 0.802738
| 0.802738
| 0.778583
| 0.778583
| 0.778583
| 0
| 0.012234
| 0.323901
| 2,297
| 74
| 72
| 31.040541
| 0.787508
| 0
| 0
| 0.540541
| 0
| 0
| 0.039573
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 1
| 0.135135
| false
| 0.054054
| 0.108108
| 0
| 0.27027
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
e7fc9b20884411350ccdb2d7459239dd9946a222
| 2,282
|
py
|
Python
|
tests/test_scoring.py
|
Menelau/pymfe
|
4e43c9210a19e3123d9d24a22efa4e65099ed129
|
[
"MIT"
] | 1
|
2021-11-26T08:17:00.000Z
|
2021-11-26T08:17:00.000Z
|
tests/test_scoring.py
|
Menelau/pymfe
|
4e43c9210a19e3123d9d24a22efa4e65099ed129
|
[
"MIT"
] | null | null | null |
tests/test_scoring.py
|
Menelau/pymfe
|
4e43c9210a19e3123d9d24a22efa4e65099ed129
|
[
"MIT"
] | null | null | null |
"""Test module for General class metafeatures."""
import pytest
import numpy as np
from pymfe.scoring import accuracy
from pymfe.scoring import balanced_accuracy
from pymfe.scoring import f1
def test_accuracy():
    """Check plain accuracy against hand-built binary predictions."""
    y_true = np.array([1, 1, 1, 1, 1, 0, 0, 0, 0, 0])

    # (predictions, expected accuracy) against the fixed ground truth above.
    cases = [
        ([1, 1, 1, 1, 1, 0, 0, 0, 0, 0], 1.0),  # perfect prediction
        ([0, 0, 0, 0, 0, 1, 1, 1, 1, 1], 0.0),  # fully inverted
        ([1, 1, 1, 1, 1, 1, 1, 1, 1, 1], 0.5),  # constant positive
        ([1, 1, 1, 0, 0, 1, 1, 0, 0, 0], 0.6),  # 6 of 10 correct
    ]
    for preds, expected in cases:
        assert accuracy(y_true, np.array(preds)) == expected
def test_balanced_accuracy():
    """Check balanced accuracy on a balanced three-class toy problem."""
    y_true = np.array([2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0])

    # (predictions, expected balanced accuracy) pairs.
    cases = [
        ([2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0], 1.0),  # perfect
        ([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1], 0.0),  # every class wrong
        ([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], 1 / 3),  # one class right
    ]
    for preds, expected in cases:
        assert np.isclose(balanced_accuracy(y_true, np.array(preds)), expected)
def test_f1():
    """Check f1 on binary predictions, plus one multiclass case."""
    y_true = np.array([1, 1, 1, 1, 1, 0, 0, 0, 0, 0])

    # Binary cases: (predictions, expected f1) against the truth above.
    cases = [
        ([1, 1, 1, 1, 1, 0, 0, 0, 0, 0], 1.0),  # perfect
        ([0, 0, 0, 0, 0, 1, 1, 1, 1, 1], 0.0),  # fully inverted
        ([1, 1, 1, 1, 1, 1, 1, 1, 1, 1], 1 / 3),  # constant positive
        ([1, 1, 1, 0, 0, 1, 1, 0, 0, 0], 0.6),  # partially correct
    ]
    for preds, expected in cases:
        assert np.isclose(f1(y_true, np.array(preds)), expected)

    # Multiclass: constantly predicting class 1 out of {0, 1, 2}.
    y_true_multi = np.array([2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0])
    y_pred_multi = np.array([1] * 15)
    assert np.isclose(f1(y_true_multi, y_pred_multi), 1 / 6)
| 36.806452
| 68
| 0.528046
| 529
| 2,282
| 2.172023
| 0.056711
| 0.21584
| 0.255875
| 0.257615
| 0.887728
| 0.821584
| 0.818103
| 0.817232
| 0.817232
| 0.800696
| 0
| 0.181659
| 0.249781
| 2,282
| 61
| 69
| 37.409836
| 0.489486
| 0.018843
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 1
| 0.068182
| false
| 0
| 0.113636
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
f0182d93e564b82ed836b9ec1acb22b5c294c5f9
| 1,545
|
py
|
Python
|
problem-008/main.py
|
stevancorre/project-euler
|
abf99969f0235e7740840bcbf4b23a3f8470fa2b
|
[
"MIT"
] | null | null | null |
problem-008/main.py
|
stevancorre/project-euler
|
abf99969f0235e7740840bcbf4b23a3f8470fa2b
|
[
"MIT"
] | null | null | null |
problem-008/main.py
|
stevancorre/project-euler
|
abf99969f0235e7740840bcbf4b23a3f8470fa2b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Stévan Corre <stevancorre@protonmail.com>
# Date: 04/12/2021
import math
def product_of(ns):
    """Return the product of all numbers in the iterable *ns*.

    Returns 1 for an empty iterable, matching the original accumulator
    initialisation.  Uses ``math.prod`` (the module is already imported at
    the top of this file) instead of the hand-rolled loop.
    """
    return math.prod(ns)
def solve(window=13):
    """Project Euler 8: greatest product of ``window`` adjacent digits.

    Args:
        window: number of adjacent digits to multiply.  Defaults to 13, the
            value the original code hard-coded.

    Returns:
        The largest product over every run of ``window`` adjacent digits in
        the 1000-digit series.

    Fixes an off-by-one in the original scan: ``range(0, len(source) - 13)``
    stopped one position early and never examined the final window; the scan
    below covers all ``len(source) - window + 1`` starting positions.
    """
    source = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"
    # Convert once; slicing a list of ints avoids re-parsing per window.
    digits = [int(ch) for ch in source]
    return max(
        math.prod(digits[i:i + window])
        for i in range(len(digits) - window + 1)
    )
# Script entry point: print the puzzle answer when run directly.
if __name__ == "__main__":
    print("ANSWER: %d" % solve())
| 45.441176
| 1,015
| 0.846602
| 73
| 1,545
| 17.780822
| 0.643836
| 0.013867
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.734635
| 0.104854
| 1,545
| 33
| 1,016
| 46.818182
| 0.203905
| 0.071845
| 0
| 0
| 0
| 0
| 0.711888
| 0.699301
| 0
| 1
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.058824
| 0
| 0.294118
| 0.058824
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f06fb72a5a2252ee8edb3ffb1bde0cd70b8a2ba4
| 81,820
|
py
|
Python
|
sdk/python/pulumi_newrelic/nrql_alert_condition.py
|
bob-bins/pulumi-newrelic
|
f8a121fb7d6e6ad979d3ccf72467b9e89769e305
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_newrelic/nrql_alert_condition.py
|
bob-bins/pulumi-newrelic
|
f8a121fb7d6e6ad979d3ccf72467b9e89769e305
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_newrelic/nrql_alert_condition.py
|
bob-bins/pulumi-newrelic
|
f8a121fb7d6e6ad979d3ccf72467b9e89769e305
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['NrqlAlertConditionArgs', 'NrqlAlertCondition']
@pulumi.input_type
class NrqlAlertConditionArgs:
def __init__(__self__, *,
nrql: pulumi.Input['NrqlAlertConditionNrqlArgs'],
policy_id: pulumi.Input[int],
account_id: Optional[pulumi.Input[int]] = None,
aggregation_window: Optional[pulumi.Input[int]] = None,
baseline_direction: Optional[pulumi.Input[str]] = None,
close_violations_on_expiration: Optional[pulumi.Input[bool]] = None,
critical: Optional[pulumi.Input['NrqlAlertConditionCriticalArgs']] = None,
description: Optional[pulumi.Input[str]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
expected_groups: Optional[pulumi.Input[int]] = None,
expiration_duration: Optional[pulumi.Input[int]] = None,
fill_option: Optional[pulumi.Input[str]] = None,
fill_value: Optional[pulumi.Input[float]] = None,
ignore_overlap: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
open_violation_on_expiration: Optional[pulumi.Input[bool]] = None,
open_violation_on_group_overlap: Optional[pulumi.Input[bool]] = None,
runbook_url: Optional[pulumi.Input[str]] = None,
terms: Optional[pulumi.Input[Sequence[pulumi.Input['NrqlAlertConditionTermArgs']]]] = None,
type: Optional[pulumi.Input[str]] = None,
value_function: Optional[pulumi.Input[str]] = None,
violation_time_limit: Optional[pulumi.Input[str]] = None,
violation_time_limit_seconds: Optional[pulumi.Input[int]] = None,
warning: Optional[pulumi.Input['NrqlAlertConditionWarningArgs']] = None):
"""
The set of arguments for constructing a NrqlAlertCondition resource.
:param pulumi.Input['NrqlAlertConditionNrqlArgs'] nrql: A NRQL query. See NRQL below for details.
:param pulumi.Input[int] policy_id: The ID of the policy where this condition should be used.
:param pulumi.Input[int] account_id: The New Relic account ID of the account you wish to create the condition. Defaults to the account ID set in your environment variable `NEW_RELIC_ACCOUNT_ID`.
:param pulumi.Input[int] aggregation_window: The duration of the time window used to evaluate the NRQL query, in seconds. The value must be at least 30 seconds, and no more than 15 minutes (900 seconds). Default is 60 seconds.
:param pulumi.Input[str] baseline_direction: The baseline direction of a _baseline_ NRQL alert condition. Valid values are: `lower_only`, `upper_and_lower`, `upper_only` (case insensitive).
:param pulumi.Input[bool] close_violations_on_expiration: Whether to close all open violations when the signal expires.
:param pulumi.Input['NrqlAlertConditionCriticalArgs'] critical: A list containing the `critical` threshold values. See Terms below for details.
:param pulumi.Input[str] description: The description of the NRQL alert condition.
:param pulumi.Input[bool] enabled: Whether to enable the alert condition. Valid values are `true` and `false`. Defaults to `true`.
:param pulumi.Input[int] expected_groups: Number of expected groups when using `outlier` detection.
:param pulumi.Input[int] expiration_duration: The amount of time (in seconds) to wait before considering the signal expired.
:param pulumi.Input[str] fill_option: Which strategy to use when filling gaps in the signal. Possible values are `none`, `last_value` or `static`. If `static`, the `fill_value` field will be used for filling gaps in the signal.
:param pulumi.Input[float] fill_value: This value will be used for filling gaps in the signal.
:param pulumi.Input[bool] ignore_overlap: **DEPRECATED:** Use `open_violation_on_group_overlap` instead, but use the inverse value of your boolean - e.g. if `ignore_overlap = false`, use `open_violation_on_group_overlap = true`. This argument sets whether to trigger a violation when groups overlap. If set to `true` overlapping groups will not trigger a violation. This argument is only applicable in `outlier` conditions.
:param pulumi.Input[str] name: The title of the condition.
:param pulumi.Input[bool] open_violation_on_expiration: Whether to create a new violation to capture that the signal expired.
:param pulumi.Input[bool] open_violation_on_group_overlap: Whether or not to trigger a violation when groups overlap. Set to `true` if you want to trigger a violation when groups overlap. This argument is only applicable in `outlier` conditions.
:param pulumi.Input[str] runbook_url: Runbook URL to display in notifications.
:param pulumi.Input[Sequence[pulumi.Input['NrqlAlertConditionTermArgs']]] terms: **DEPRECATED** Use `critical`, and `warning` instead. A list of terms for this condition. See Terms below for details.
:param pulumi.Input[str] type: The type of the condition. Valid values are `static`, `baseline`, or `outlier`. Defaults to `static`.
:param pulumi.Input[str] value_function: Possible values are `single_value`, `sum` (case insensitive).
:param pulumi.Input[str] violation_time_limit: **DEPRECATED:** Use `violation_time_limit_seconds` instead. Sets a time limit, in hours, that will automatically force-close a long-lasting violation after the time limit you select. Possible values are `ONE_HOUR`, `TWO_HOURS`, `FOUR_HOURS`, `EIGHT_HOURS`, `TWELVE_HOURS`, `TWENTY_FOUR_HOURS`, `THIRTY_DAYS` (case insensitive).<br>
<small>\***Note**: One of `violation_time_limit` _or_ `violation_time_limit_seconds` must be set, but not both.</small>
:param pulumi.Input[int] violation_time_limit_seconds: Sets a time limit, in seconds, that will automatically force-close a long-lasting violation after the time limit you select. The value must be between 300 seconds (5 minutes) to 2592000 seconds (30 days) (inclusive). <br>
<small>\***Note**: One of `violation_time_limit` _or_ `violation_time_limit_seconds` must be set, but not both.</small>
:param pulumi.Input['NrqlAlertConditionWarningArgs'] warning: A list containing the `warning` threshold values. See Terms below for details.
"""
pulumi.set(__self__, "nrql", nrql)
pulumi.set(__self__, "policy_id", policy_id)
if account_id is not None:
pulumi.set(__self__, "account_id", account_id)
if aggregation_window is not None:
pulumi.set(__self__, "aggregation_window", aggregation_window)
if baseline_direction is not None:
pulumi.set(__self__, "baseline_direction", baseline_direction)
if close_violations_on_expiration is not None:
pulumi.set(__self__, "close_violations_on_expiration", close_violations_on_expiration)
if critical is not None:
pulumi.set(__self__, "critical", critical)
if description is not None:
pulumi.set(__self__, "description", description)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if expected_groups is not None:
pulumi.set(__self__, "expected_groups", expected_groups)
if expiration_duration is not None:
pulumi.set(__self__, "expiration_duration", expiration_duration)
if fill_option is not None:
pulumi.set(__self__, "fill_option", fill_option)
if fill_value is not None:
pulumi.set(__self__, "fill_value", fill_value)
if ignore_overlap is not None:
warnings.warn("""use `open_violation_on_group_overlap` attribute instead, but use the inverse of your boolean - e.g. if ignore_overlap = false, use open_violation_on_group_overlap = true""", DeprecationWarning)
pulumi.log.warn("""ignore_overlap is deprecated: use `open_violation_on_group_overlap` attribute instead, but use the inverse of your boolean - e.g. if ignore_overlap = false, use open_violation_on_group_overlap = true""")
if ignore_overlap is not None:
pulumi.set(__self__, "ignore_overlap", ignore_overlap)
if name is not None:
pulumi.set(__self__, "name", name)
if open_violation_on_expiration is not None:
pulumi.set(__self__, "open_violation_on_expiration", open_violation_on_expiration)
if open_violation_on_group_overlap is not None:
pulumi.set(__self__, "open_violation_on_group_overlap", open_violation_on_group_overlap)
if runbook_url is not None:
pulumi.set(__self__, "runbook_url", runbook_url)
if terms is not None:
warnings.warn("""use `critical` and `warning` attributes instead""", DeprecationWarning)
pulumi.log.warn("""terms is deprecated: use `critical` and `warning` attributes instead""")
if terms is not None:
pulumi.set(__self__, "terms", terms)
if type is not None:
pulumi.set(__self__, "type", type)
if value_function is not None:
pulumi.set(__self__, "value_function", value_function)
if violation_time_limit is not None:
warnings.warn("""use `violation_time_limit_seconds` attribute instead""", DeprecationWarning)
pulumi.log.warn("""violation_time_limit is deprecated: use `violation_time_limit_seconds` attribute instead""")
if violation_time_limit is not None:
pulumi.set(__self__, "violation_time_limit", violation_time_limit)
if violation_time_limit_seconds is not None:
pulumi.set(__self__, "violation_time_limit_seconds", violation_time_limit_seconds)
if warning is not None:
pulumi.set(__self__, "warning", warning)
@property
@pulumi.getter
def nrql(self) -> pulumi.Input['NrqlAlertConditionNrqlArgs']:
"""
A NRQL query. See NRQL below for details.
"""
return pulumi.get(self, "nrql")
@nrql.setter
def nrql(self, value: pulumi.Input['NrqlAlertConditionNrqlArgs']):
pulumi.set(self, "nrql", value)
@property
@pulumi.getter(name="policyId")
def policy_id(self) -> pulumi.Input[int]:
"""
The ID of the policy where this condition should be used.
"""
return pulumi.get(self, "policy_id")
@policy_id.setter
def policy_id(self, value: pulumi.Input[int]):
pulumi.set(self, "policy_id", value)
@property
@pulumi.getter(name="accountId")
def account_id(self) -> Optional[pulumi.Input[int]]:
"""
The New Relic account ID of the account you wish to create the condition. Defaults to the account ID set in your environment variable `NEW_RELIC_ACCOUNT_ID`.
"""
return pulumi.get(self, "account_id")
@account_id.setter
def account_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "account_id", value)
@property
@pulumi.getter(name="aggregationWindow")
def aggregation_window(self) -> Optional[pulumi.Input[int]]:
"""
The duration of the time window used to evaluate the NRQL query, in seconds. The value must be at least 30 seconds, and no more than 15 minutes (900 seconds). Default is 60 seconds.
"""
return pulumi.get(self, "aggregation_window")
@aggregation_window.setter
def aggregation_window(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "aggregation_window", value)
@property
@pulumi.getter(name="baselineDirection")
def baseline_direction(self) -> Optional[pulumi.Input[str]]:
"""
The baseline direction of a _baseline_ NRQL alert condition. Valid values are: `lower_only`, `upper_and_lower`, `upper_only` (case insensitive).
"""
return pulumi.get(self, "baseline_direction")
@baseline_direction.setter
def baseline_direction(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "baseline_direction", value)
@property
@pulumi.getter(name="closeViolationsOnExpiration")
def close_violations_on_expiration(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to close all open violations when the signal expires.
"""
return pulumi.get(self, "close_violations_on_expiration")
@close_violations_on_expiration.setter
def close_violations_on_expiration(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "close_violations_on_expiration", value)
@property
@pulumi.getter
def critical(self) -> Optional[pulumi.Input['NrqlAlertConditionCriticalArgs']]:
"""
A list containing the `critical` threshold values. See Terms below for details.
"""
return pulumi.get(self, "critical")
@critical.setter
def critical(self, value: Optional[pulumi.Input['NrqlAlertConditionCriticalArgs']]):
pulumi.set(self, "critical", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the NRQL alert condition.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to enable the alert condition. Valid values are `true` and `false`. Defaults to `true`.
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="expectedGroups")
def expected_groups(self) -> Optional[pulumi.Input[int]]:
"""
Number of expected groups when using `outlier` detection.
"""
return pulumi.get(self, "expected_groups")
@expected_groups.setter
def expected_groups(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "expected_groups", value)
@property
@pulumi.getter(name="expirationDuration")
def expiration_duration(self) -> Optional[pulumi.Input[int]]:
"""
The amount of time (in seconds) to wait before considering the signal expired.
"""
return pulumi.get(self, "expiration_duration")
@expiration_duration.setter
def expiration_duration(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "expiration_duration", value)
@property
@pulumi.getter(name="fillOption")
def fill_option(self) -> Optional[pulumi.Input[str]]:
"""
Which strategy to use when filling gaps in the signal. Possible values are `none`, `last_value` or `static`. If `static`, the `fill_value` field will be used for filling gaps in the signal.
"""
return pulumi.get(self, "fill_option")
@fill_option.setter
def fill_option(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fill_option", value)
@property
@pulumi.getter(name="fillValue")
def fill_value(self) -> Optional[pulumi.Input[float]]:
"""
This value will be used for filling gaps in the signal.
"""
return pulumi.get(self, "fill_value")
@fill_value.setter
def fill_value(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "fill_value", value)
@property
@pulumi.getter(name="ignoreOverlap")
def ignore_overlap(self) -> Optional[pulumi.Input[bool]]:
"""
**DEPRECATED:** Use `open_violation_on_group_overlap` instead, but use the inverse value of your boolean - e.g. if `ignore_overlap = false`, use `open_violation_on_group_overlap = true`. This argument sets whether to trigger a violation when groups overlap. If set to `true` overlapping groups will not trigger a violation. This argument is only applicable in `outlier` conditions.
"""
return pulumi.get(self, "ignore_overlap")
@ignore_overlap.setter
def ignore_overlap(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "ignore_overlap", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The title of the condition.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="openViolationOnExpiration")
def open_violation_on_expiration(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to create a new violation to capture that the signal expired.
"""
return pulumi.get(self, "open_violation_on_expiration")
@open_violation_on_expiration.setter
def open_violation_on_expiration(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "open_violation_on_expiration", value)
@property
@pulumi.getter(name="openViolationOnGroupOverlap")
def open_violation_on_group_overlap(self) -> Optional[pulumi.Input[bool]]:
"""
Whether or not to trigger a violation when groups overlap. Set to `true` if you want to trigger a violation when groups overlap. This argument is only applicable in `outlier` conditions.
"""
return pulumi.get(self, "open_violation_on_group_overlap")
@open_violation_on_group_overlap.setter
def open_violation_on_group_overlap(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "open_violation_on_group_overlap", value)
@property
@pulumi.getter(name="runbookUrl")
def runbook_url(self) -> Optional[pulumi.Input[str]]:
"""
Runbook URL to display in notifications.
"""
return pulumi.get(self, "runbook_url")
@runbook_url.setter
def runbook_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "runbook_url", value)
@property
@pulumi.getter
def terms(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NrqlAlertConditionTermArgs']]]]:
"""
**DEPRECATED** Use `critical`, and `warning` instead. A list of terms for this condition. See Terms below for details.
"""
return pulumi.get(self, "terms")
@terms.setter
def terms(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NrqlAlertConditionTermArgs']]]]):
pulumi.set(self, "terms", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
The type of the condition. Valid values are `static`, `baseline`, or `outlier`. Defaults to `static`.
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@property
@pulumi.getter(name="valueFunction")
def value_function(self) -> Optional[pulumi.Input[str]]:
"""
Possible values are `single_value`, `sum` (case insensitive).
"""
return pulumi.get(self, "value_function")
@value_function.setter
def value_function(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "value_function", value)
@property
@pulumi.getter(name="violationTimeLimit")
def violation_time_limit(self) -> Optional[pulumi.Input[str]]:
"""
**DEPRECATED:** Use `violation_time_limit_seconds` instead. Sets a time limit, in hours, that will automatically force-close a long-lasting violation after the time limit you select. Possible values are `ONE_HOUR`, `TWO_HOURS`, `FOUR_HOURS`, `EIGHT_HOURS`, `TWELVE_HOURS`, `TWENTY_FOUR_HOURS`, `THIRTY_DAYS` (case insensitive).<br>
<small>\***Note**: One of `violation_time_limit` _or_ `violation_time_limit_seconds` must be set, but not both.</small>
"""
return pulumi.get(self, "violation_time_limit")
@violation_time_limit.setter
def violation_time_limit(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "violation_time_limit", value)
@property
@pulumi.getter(name="violationTimeLimitSeconds")
def violation_time_limit_seconds(self) -> Optional[pulumi.Input[int]]:
"""
Sets a time limit, in seconds, that will automatically force-close a long-lasting violation after the time limit you select. The value must be between 300 seconds (5 minutes) to 2592000 seconds (30 days) (inclusive). <br>
<small>\***Note**: One of `violation_time_limit` _or_ `violation_time_limit_seconds` must be set, but not both.</small>
"""
return pulumi.get(self, "violation_time_limit_seconds")
@violation_time_limit_seconds.setter
def violation_time_limit_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "violation_time_limit_seconds", value)
@property
@pulumi.getter
def warning(self) -> Optional[pulumi.Input['NrqlAlertConditionWarningArgs']]:
"""
A list containing the `warning` threshold values. See Terms below for details.
"""
return pulumi.get(self, "warning")
@warning.setter
def warning(self, value: Optional[pulumi.Input['NrqlAlertConditionWarningArgs']]):
pulumi.set(self, "warning", value)
@pulumi.input_type
class _NrqlAlertConditionState:
    # Internal Pulumi state container for `NrqlAlertCondition`: every field is
    # optional, only values that were actually supplied are recorded via
    # `pulumi.set`, and each field is re-exposed through a matching
    # property getter/setter pair below. The `@pulumi.input_type` decorator
    # introspects these properties, so names and `pulumi.getter(name=...)`
    # camelCase mappings must stay exactly as generated.
    def __init__(__self__, *,
                 account_id: Optional[pulumi.Input[int]] = None,
                 aggregation_window: Optional[pulumi.Input[int]] = None,
                 baseline_direction: Optional[pulumi.Input[str]] = None,
                 close_violations_on_expiration: Optional[pulumi.Input[bool]] = None,
                 critical: Optional[pulumi.Input['NrqlAlertConditionCriticalArgs']] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 expected_groups: Optional[pulumi.Input[int]] = None,
                 expiration_duration: Optional[pulumi.Input[int]] = None,
                 fill_option: Optional[pulumi.Input[str]] = None,
                 fill_value: Optional[pulumi.Input[float]] = None,
                 ignore_overlap: Optional[pulumi.Input[bool]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 nrql: Optional[pulumi.Input['NrqlAlertConditionNrqlArgs']] = None,
                 open_violation_on_expiration: Optional[pulumi.Input[bool]] = None,
                 open_violation_on_group_overlap: Optional[pulumi.Input[bool]] = None,
                 policy_id: Optional[pulumi.Input[int]] = None,
                 runbook_url: Optional[pulumi.Input[str]] = None,
                 terms: Optional[pulumi.Input[Sequence[pulumi.Input['NrqlAlertConditionTermArgs']]]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 value_function: Optional[pulumi.Input[str]] = None,
                 violation_time_limit: Optional[pulumi.Input[str]] = None,
                 violation_time_limit_seconds: Optional[pulumi.Input[int]] = None,
                 warning: Optional[pulumi.Input['NrqlAlertConditionWarningArgs']] = None):
        """
        Input properties used for looking up and filtering NrqlAlertCondition resources.
        :param pulumi.Input[int] account_id: The New Relic account ID of the account you wish to create the condition. Defaults to the account ID set in your environment variable `NEW_RELIC_ACCOUNT_ID`.
        :param pulumi.Input[int] aggregation_window: The duration of the time window used to evaluate the NRQL query, in seconds. The value must be at least 30 seconds, and no more than 15 minutes (900 seconds). Default is 60 seconds.
        :param pulumi.Input[str] baseline_direction: The baseline direction of a _baseline_ NRQL alert condition. Valid values are: `lower_only`, `upper_and_lower`, `upper_only` (case insensitive).
        :param pulumi.Input[bool] close_violations_on_expiration: Whether to close all open violations when the signal expires.
        :param pulumi.Input['NrqlAlertConditionCriticalArgs'] critical: A list containing the `critical` threshold values. See Terms below for details.
        :param pulumi.Input[str] description: The description of the NRQL alert condition.
        :param pulumi.Input[bool] enabled: Whether to enable the alert condition. Valid values are `true` and `false`. Defaults to `true`.
        :param pulumi.Input[int] expected_groups: Number of expected groups when using `outlier` detection.
        :param pulumi.Input[int] expiration_duration: The amount of time (in seconds) to wait before considering the signal expired.
        :param pulumi.Input[str] fill_option: Which strategy to use when filling gaps in the signal. Possible values are `none`, `last_value` or `static`. If `static`, the `fill_value` field will be used for filling gaps in the signal.
        :param pulumi.Input[float] fill_value: This value will be used for filling gaps in the signal.
        :param pulumi.Input[bool] ignore_overlap: **DEPRECATED:** Use `open_violation_on_group_overlap` instead, but use the inverse value of your boolean - e.g. if `ignore_overlap = false`, use `open_violation_on_group_overlap = true`. This argument sets whether to trigger a violation when groups overlap. If set to `true` overlapping groups will not trigger a violation. This argument is only applicable in `outlier` conditions.
        :param pulumi.Input[str] name: The title of the condition.
        :param pulumi.Input['NrqlAlertConditionNrqlArgs'] nrql: A NRQL query. See NRQL below for details.
        :param pulumi.Input[bool] open_violation_on_expiration: Whether to create a new violation to capture that the signal expired.
        :param pulumi.Input[bool] open_violation_on_group_overlap: Whether or not to trigger a violation when groups overlap. Set to `true` if you want to trigger a violation when groups overlap. This argument is only applicable in `outlier` conditions.
        :param pulumi.Input[int] policy_id: The ID of the policy where this condition should be used.
        :param pulumi.Input[str] runbook_url: Runbook URL to display in notifications.
        :param pulumi.Input[Sequence[pulumi.Input['NrqlAlertConditionTermArgs']]] terms: **DEPRECATED** Use `critical`, and `warning` instead. A list of terms for this condition. See Terms below for details.
        :param pulumi.Input[str] type: The type of the condition. Valid values are `static`, `baseline`, or `outlier`. Defaults to `static`.
        :param pulumi.Input[str] value_function: Possible values are `single_value`, `sum` (case insensitive).
        :param pulumi.Input[str] violation_time_limit: **DEPRECATED:** Use `violation_time_limit_seconds` instead. Sets a time limit, in hours, that will automatically force-close a long-lasting violation after the time limit you select. Possible values are `ONE_HOUR`, `TWO_HOURS`, `FOUR_HOURS`, `EIGHT_HOURS`, `TWELVE_HOURS`, `TWENTY_FOUR_HOURS`, `THIRTY_DAYS` (case insensitive).<br>
               <small>\***Note**: One of `violation_time_limit` _or_ `violation_time_limit_seconds` must be set, but not both.</small>
        :param pulumi.Input[int] violation_time_limit_seconds: Sets a time limit, in seconds, that will automatically force-close a long-lasting violation after the time limit you select. The value must be between 300 seconds (5 minutes) to 2592000 seconds (30 days) (inclusive). <br>
               <small>\***Note**: One of `violation_time_limit` _or_ `violation_time_limit_seconds` must be set, but not both.</small>
        :param pulumi.Input['NrqlAlertConditionWarningArgs'] warning: A list containing the `warning` threshold values. See Terms below for details.
        """
        if account_id is not None:
            pulumi.set(__self__, "account_id", account_id)
        if aggregation_window is not None:
            pulumi.set(__self__, "aggregation_window", aggregation_window)
        if baseline_direction is not None:
            pulumi.set(__self__, "baseline_direction", baseline_direction)
        if close_violations_on_expiration is not None:
            pulumi.set(__self__, "close_violations_on_expiration", close_violations_on_expiration)
        if critical is not None:
            pulumi.set(__self__, "critical", critical)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if expected_groups is not None:
            pulumi.set(__self__, "expected_groups", expected_groups)
        if expiration_duration is not None:
            pulumi.set(__self__, "expiration_duration", expiration_duration)
        if fill_option is not None:
            pulumi.set(__self__, "fill_option", fill_option)
        if fill_value is not None:
            pulumi.set(__self__, "fill_value", fill_value)
        # Deprecated input: warn (stderr DeprecationWarning + Pulumi engine log)
        # before storing; the value is still honored for backward compatibility.
        if ignore_overlap is not None:
            warnings.warn("""use `open_violation_on_group_overlap` attribute instead, but use the inverse of your boolean - e.g. if ignore_overlap = false, use open_violation_on_group_overlap = true""", DeprecationWarning)
            pulumi.log.warn("""ignore_overlap is deprecated: use `open_violation_on_group_overlap` attribute instead, but use the inverse of your boolean - e.g. if ignore_overlap = false, use open_violation_on_group_overlap = true""")
        if ignore_overlap is not None:
            pulumi.set(__self__, "ignore_overlap", ignore_overlap)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if nrql is not None:
            pulumi.set(__self__, "nrql", nrql)
        if open_violation_on_expiration is not None:
            pulumi.set(__self__, "open_violation_on_expiration", open_violation_on_expiration)
        if open_violation_on_group_overlap is not None:
            pulumi.set(__self__, "open_violation_on_group_overlap", open_violation_on_group_overlap)
        if policy_id is not None:
            pulumi.set(__self__, "policy_id", policy_id)
        if runbook_url is not None:
            pulumi.set(__self__, "runbook_url", runbook_url)
        # Deprecated input: warn before storing (same pattern as `ignore_overlap`).
        if terms is not None:
            warnings.warn("""use `critical` and `warning` attributes instead""", DeprecationWarning)
            pulumi.log.warn("""terms is deprecated: use `critical` and `warning` attributes instead""")
        if terms is not None:
            pulumi.set(__self__, "terms", terms)
        if type is not None:
            pulumi.set(__self__, "type", type)
        if value_function is not None:
            pulumi.set(__self__, "value_function", value_function)
        # Deprecated input: warn before storing (same pattern as `ignore_overlap`).
        if violation_time_limit is not None:
            warnings.warn("""use `violation_time_limit_seconds` attribute instead""", DeprecationWarning)
            pulumi.log.warn("""violation_time_limit is deprecated: use `violation_time_limit_seconds` attribute instead""")
        if violation_time_limit is not None:
            pulumi.set(__self__, "violation_time_limit", violation_time_limit)
        if violation_time_limit_seconds is not None:
            pulumi.set(__self__, "violation_time_limit_seconds", violation_time_limit_seconds)
        if warning is not None:
            pulumi.set(__self__, "warning", warning)

    @property
    @pulumi.getter(name="accountId")
    def account_id(self) -> Optional[pulumi.Input[int]]:
        """
        The New Relic account ID of the account you wish to create the condition. Defaults to the account ID set in your environment variable `NEW_RELIC_ACCOUNT_ID`.
        """
        return pulumi.get(self, "account_id")

    @account_id.setter
    def account_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "account_id", value)

    @property
    @pulumi.getter(name="aggregationWindow")
    def aggregation_window(self) -> Optional[pulumi.Input[int]]:
        """
        The duration of the time window used to evaluate the NRQL query, in seconds. The value must be at least 30 seconds, and no more than 15 minutes (900 seconds). Default is 60 seconds.
        """
        return pulumi.get(self, "aggregation_window")

    @aggregation_window.setter
    def aggregation_window(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "aggregation_window", value)

    @property
    @pulumi.getter(name="baselineDirection")
    def baseline_direction(self) -> Optional[pulumi.Input[str]]:
        """
        The baseline direction of a _baseline_ NRQL alert condition. Valid values are: `lower_only`, `upper_and_lower`, `upper_only` (case insensitive).
        """
        return pulumi.get(self, "baseline_direction")

    @baseline_direction.setter
    def baseline_direction(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "baseline_direction", value)

    @property
    @pulumi.getter(name="closeViolationsOnExpiration")
    def close_violations_on_expiration(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to close all open violations when the signal expires.
        """
        return pulumi.get(self, "close_violations_on_expiration")

    @close_violations_on_expiration.setter
    def close_violations_on_expiration(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "close_violations_on_expiration", value)

    @property
    @pulumi.getter
    def critical(self) -> Optional[pulumi.Input['NrqlAlertConditionCriticalArgs']]:
        """
        A list containing the `critical` threshold values. See Terms below for details.
        """
        return pulumi.get(self, "critical")

    @critical.setter
    def critical(self, value: Optional[pulumi.Input['NrqlAlertConditionCriticalArgs']]):
        pulumi.set(self, "critical", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the NRQL alert condition.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to enable the alert condition. Valid values are `true` and `false`. Defaults to `true`.
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter(name="expectedGroups")
    def expected_groups(self) -> Optional[pulumi.Input[int]]:
        """
        Number of expected groups when using `outlier` detection.
        """
        return pulumi.get(self, "expected_groups")

    @expected_groups.setter
    def expected_groups(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "expected_groups", value)

    @property
    @pulumi.getter(name="expirationDuration")
    def expiration_duration(self) -> Optional[pulumi.Input[int]]:
        """
        The amount of time (in seconds) to wait before considering the signal expired.
        """
        return pulumi.get(self, "expiration_duration")

    @expiration_duration.setter
    def expiration_duration(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "expiration_duration", value)

    @property
    @pulumi.getter(name="fillOption")
    def fill_option(self) -> Optional[pulumi.Input[str]]:
        """
        Which strategy to use when filling gaps in the signal. Possible values are `none`, `last_value` or `static`. If `static`, the `fill_value` field will be used for filling gaps in the signal.
        """
        return pulumi.get(self, "fill_option")

    @fill_option.setter
    def fill_option(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "fill_option", value)

    @property
    @pulumi.getter(name="fillValue")
    def fill_value(self) -> Optional[pulumi.Input[float]]:
        """
        This value will be used for filling gaps in the signal.
        """
        return pulumi.get(self, "fill_value")

    @fill_value.setter
    def fill_value(self, value: Optional[pulumi.Input[float]]):
        pulumi.set(self, "fill_value", value)

    @property
    @pulumi.getter(name="ignoreOverlap")
    def ignore_overlap(self) -> Optional[pulumi.Input[bool]]:
        """
        **DEPRECATED:** Use `open_violation_on_group_overlap` instead, but use the inverse value of your boolean - e.g. if `ignore_overlap = false`, use `open_violation_on_group_overlap = true`. This argument sets whether to trigger a violation when groups overlap. If set to `true` overlapping groups will not trigger a violation. This argument is only applicable in `outlier` conditions.
        """
        return pulumi.get(self, "ignore_overlap")

    @ignore_overlap.setter
    def ignore_overlap(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "ignore_overlap", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The title of the condition.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def nrql(self) -> Optional[pulumi.Input['NrqlAlertConditionNrqlArgs']]:
        """
        A NRQL query. See NRQL below for details.
        """
        return pulumi.get(self, "nrql")

    @nrql.setter
    def nrql(self, value: Optional[pulumi.Input['NrqlAlertConditionNrqlArgs']]):
        pulumi.set(self, "nrql", value)

    @property
    @pulumi.getter(name="openViolationOnExpiration")
    def open_violation_on_expiration(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to create a new violation to capture that the signal expired.
        """
        return pulumi.get(self, "open_violation_on_expiration")

    @open_violation_on_expiration.setter
    def open_violation_on_expiration(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "open_violation_on_expiration", value)

    @property
    @pulumi.getter(name="openViolationOnGroupOverlap")
    def open_violation_on_group_overlap(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether or not to trigger a violation when groups overlap. Set to `true` if you want to trigger a violation when groups overlap. This argument is only applicable in `outlier` conditions.
        """
        return pulumi.get(self, "open_violation_on_group_overlap")

    @open_violation_on_group_overlap.setter
    def open_violation_on_group_overlap(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "open_violation_on_group_overlap", value)

    @property
    @pulumi.getter(name="policyId")
    def policy_id(self) -> Optional[pulumi.Input[int]]:
        """
        The ID of the policy where this condition should be used.
        """
        return pulumi.get(self, "policy_id")

    @policy_id.setter
    def policy_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "policy_id", value)

    @property
    @pulumi.getter(name="runbookUrl")
    def runbook_url(self) -> Optional[pulumi.Input[str]]:
        """
        Runbook URL to display in notifications.
        """
        return pulumi.get(self, "runbook_url")

    @runbook_url.setter
    def runbook_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "runbook_url", value)

    @property
    @pulumi.getter
    def terms(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NrqlAlertConditionTermArgs']]]]:
        """
        **DEPRECATED** Use `critical`, and `warning` instead. A list of terms for this condition. See Terms below for details.
        """
        return pulumi.get(self, "terms")

    @terms.setter
    def terms(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NrqlAlertConditionTermArgs']]]]):
        pulumi.set(self, "terms", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of the condition. Valid values are `static`, `baseline`, or `outlier`. Defaults to `static`.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="valueFunction")
    def value_function(self) -> Optional[pulumi.Input[str]]:
        """
        Possible values are `single_value`, `sum` (case insensitive).
        """
        return pulumi.get(self, "value_function")

    @value_function.setter
    def value_function(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "value_function", value)

    @property
    @pulumi.getter(name="violationTimeLimit")
    def violation_time_limit(self) -> Optional[pulumi.Input[str]]:
        """
        **DEPRECATED:** Use `violation_time_limit_seconds` instead. Sets a time limit, in hours, that will automatically force-close a long-lasting violation after the time limit you select. Possible values are `ONE_HOUR`, `TWO_HOURS`, `FOUR_HOURS`, `EIGHT_HOURS`, `TWELVE_HOURS`, `TWENTY_FOUR_HOURS`, `THIRTY_DAYS` (case insensitive).<br>
        <small>\***Note**: One of `violation_time_limit` _or_ `violation_time_limit_seconds` must be set, but not both.</small>
        """
        return pulumi.get(self, "violation_time_limit")

    @violation_time_limit.setter
    def violation_time_limit(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "violation_time_limit", value)

    @property
    @pulumi.getter(name="violationTimeLimitSeconds")
    def violation_time_limit_seconds(self) -> Optional[pulumi.Input[int]]:
        """
        Sets a time limit, in seconds, that will automatically force-close a long-lasting violation after the time limit you select. The value must be between 300 seconds (5 minutes) to 2592000 seconds (30 days) (inclusive). <br>
        <small>\***Note**: One of `violation_time_limit` _or_ `violation_time_limit_seconds` must be set, but not both.</small>
        """
        return pulumi.get(self, "violation_time_limit_seconds")

    @violation_time_limit_seconds.setter
    def violation_time_limit_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "violation_time_limit_seconds", value)

    @property
    @pulumi.getter
    def warning(self) -> Optional[pulumi.Input['NrqlAlertConditionWarningArgs']]:
        """
        A list containing the `warning` threshold values. See Terms below for details.
        """
        return pulumi.get(self, "warning")

    @warning.setter
    def warning(self, value: Optional[pulumi.Input['NrqlAlertConditionWarningArgs']]):
        pulumi.set(self, "warning", value)
class NrqlAlertCondition(pulumi.CustomResource):
@overload
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             account_id: Optional[pulumi.Input[int]] = None,
             aggregation_window: Optional[pulumi.Input[int]] = None,
             baseline_direction: Optional[pulumi.Input[str]] = None,
             close_violations_on_expiration: Optional[pulumi.Input[bool]] = None,
             critical: Optional[pulumi.Input[pulumi.InputType['NrqlAlertConditionCriticalArgs']]] = None,
             description: Optional[pulumi.Input[str]] = None,
             enabled: Optional[pulumi.Input[bool]] = None,
             expected_groups: Optional[pulumi.Input[int]] = None,
             expiration_duration: Optional[pulumi.Input[int]] = None,
             fill_option: Optional[pulumi.Input[str]] = None,
             fill_value: Optional[pulumi.Input[float]] = None,
             ignore_overlap: Optional[pulumi.Input[bool]] = None,
             name: Optional[pulumi.Input[str]] = None,
             nrql: Optional[pulumi.Input[pulumi.InputType['NrqlAlertConditionNrqlArgs']]] = None,
             open_violation_on_expiration: Optional[pulumi.Input[bool]] = None,
             open_violation_on_group_overlap: Optional[pulumi.Input[bool]] = None,
             policy_id: Optional[pulumi.Input[int]] = None,
             runbook_url: Optional[pulumi.Input[str]] = None,
             terms: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NrqlAlertConditionTermArgs']]]]] = None,
             type: Optional[pulumi.Input[str]] = None,
             value_function: Optional[pulumi.Input[str]] = None,
             violation_time_limit: Optional[pulumi.Input[str]] = None,
             violation_time_limit_seconds: Optional[pulumi.Input[int]] = None,
             warning: Optional[pulumi.Input[pulumi.InputType['NrqlAlertConditionWarningArgs']]] = None,
             __props__=None):
    """
    Use this resource to create and manage NRQL alert conditions in New Relic.
    ## Example Usage
    ## NRQL
    The `nrql` block supports the following arguments:
    - `query` - (Required) The NRQL query to execute for the condition.
    - `evaluation_offset` - (Optional*) Represented in minutes and must be within 1-20 minutes (inclusive). NRQL queries are evaluated in one-minute time windows. The start time depends on this value. It's recommended to set this to 3 minutes. An offset of less than 3 minutes will trigger violations sooner, but you may see more false positives and negatives due to data latency. With `evaluation_offset` set to 3 minutes, the NRQL time window applied to your query will be: `SINCE 3 minutes ago UNTIL 2 minutes ago`.<br>
    <small>\***Note**: One of `evaluation_offset` _or_ `since_value` must be set, but not both.</small>
    - `since_value` - (Optional*) **DEPRECATED:** Use `evaluation_offset` instead. The value to be used in the `SINCE <X> minutes ago` clause for the NRQL query. Must be between 1-20 (inclusive). <br>
    <small>\***Note**: One of `evaluation_offset` _or_ `since_value` must be set, but not both.</small>
    ## Terms
    > **NOTE:** The direct use of the `term` has been deprecated, and users should use `critical` and `warning` instead. What follows now applies to the named priority attributes for `critical` and `warning`, but for those attributes the priority is not allowed.
    NRQL alert conditions support up to two terms. At least one `term` must have `priority` set to `critical` and the second optional `term` must have `priority` set to `warning`.
    The `term` block supports the following arguments:
    - `operator` - (Optional) Valid values are `above`, `below`, or `equals` (case insensitive). Defaults to `equals`. Note that when using a `type` of `outlier`, the only valid option here is `above`.
    - `priority` - (Optional) `critical` or `warning`. Defaults to `critical`.
    - `threshold` - (Required) The value which will trigger a violation. Must be `0` or greater.
    - `threshold_duration` - (Optional) The duration, in seconds, that the threshold must violate in order to create a violation. Value must be a multiple of the `aggregation_window` (which has a default of 60 seconds).
    <br>For _baseline_ and _outlier_ NRQL alert conditions, the value must be within 120-3600 seconds (inclusive).
    <br>For _static_ NRQL alert conditions with the `sum` value function, the value must be within 120-7200 seconds (inclusive).
    <br>For _static_ NRQL alert conditions with the `single_value` value function, the value must be within 60-7200 seconds (inclusive).
    - `threshold_occurrences` - (Optional) The criteria for how many data points must be in violation for the specified threshold duration. Valid values are: `all` or `at_least_once` (case insensitive).
    - `duration` - (Optional) **DEPRECATED:** Use `threshold_duration` instead. The duration of time, in _minutes_, that the threshold must violate for in order to create a violation. Must be within 1-120 (inclusive).
    - `time_function` - (Optional) **DEPRECATED:** Use `threshold_occurrences` instead. The criteria for how many data points must be in violation for the specified threshold duration. Valid values are: `all` or `any`.
    ## Import
    Alert conditions can be imported using a composite ID of `<policy_id>:<condition_id>:<conditionType>`, e.g. // For `baseline` conditions
    ```sh
    $ pulumi import newrelic:index/nrqlAlertCondition:NrqlAlertCondition foo 538291:6789035:baseline
    ```
    // For `static` conditions
    ```sh
    $ pulumi import newrelic:index/nrqlAlertCondition:NrqlAlertCondition foo 538291:6789035:static
    ```
    // For `outlier` conditions
    ```sh
    $ pulumi import newrelic:index/nrqlAlertCondition:NrqlAlertCondition foo 538291:6789035:outlier
    ```
    The actual values for `policy_id` and `condition_id` can be retrieved from the following New Relic URL when viewing the NRQL alert condition you want to import: <small>alerts.newrelic.com/accounts/**\<account_id\>**/policies/**\<policy_id\>**/conditions/**\<condition_id\>**/edit</small>
    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[int] account_id: The New Relic account ID of the account you wish to create the condition. Defaults to the account ID set in your environment variable `NEW_RELIC_ACCOUNT_ID`.
    :param pulumi.Input[int] aggregation_window: The duration of the time window used to evaluate the NRQL query, in seconds. The value must be at least 30 seconds, and no more than 15 minutes (900 seconds). Default is 60 seconds.
    :param pulumi.Input[str] baseline_direction: The baseline direction of a _baseline_ NRQL alert condition. Valid values are: `lower_only`, `upper_and_lower`, `upper_only` (case insensitive).
    :param pulumi.Input[bool] close_violations_on_expiration: Whether to close all open violations when the signal expires.
    :param pulumi.Input[pulumi.InputType['NrqlAlertConditionCriticalArgs']] critical: A list containing the `critical` threshold values. See Terms below for details.
    :param pulumi.Input[str] description: The description of the NRQL alert condition.
    :param pulumi.Input[bool] enabled: Whether to enable the alert condition. Valid values are `true` and `false`. Defaults to `true`.
    :param pulumi.Input[int] expected_groups: Number of expected groups when using `outlier` detection.
    :param pulumi.Input[int] expiration_duration: The amount of time (in seconds) to wait before considering the signal expired.
    :param pulumi.Input[str] fill_option: Which strategy to use when filling gaps in the signal. Possible values are `none`, `last_value` or `static`. If `static`, the `fill_value` field will be used for filling gaps in the signal.
    :param pulumi.Input[float] fill_value: This value will be used for filling gaps in the signal.
    :param pulumi.Input[bool] ignore_overlap: **DEPRECATED:** Use `open_violation_on_group_overlap` instead, but use the inverse value of your boolean - e.g. if `ignore_overlap = false`, use `open_violation_on_group_overlap = true`. This argument sets whether to trigger a violation when groups overlap. If set to `true` overlapping groups will not trigger a violation. This argument is only applicable in `outlier` conditions.
    :param pulumi.Input[str] name: The title of the condition.
    :param pulumi.Input[pulumi.InputType['NrqlAlertConditionNrqlArgs']] nrql: A NRQL query. See NRQL below for details.
    :param pulumi.Input[bool] open_violation_on_expiration: Whether to create a new violation to capture that the signal expired.
    :param pulumi.Input[bool] open_violation_on_group_overlap: Whether or not to trigger a violation when groups overlap. Set to `true` if you want to trigger a violation when groups overlap. This argument is only applicable in `outlier` conditions.
    :param pulumi.Input[int] policy_id: The ID of the policy where this condition should be used.
    :param pulumi.Input[str] runbook_url: Runbook URL to display in notifications.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NrqlAlertConditionTermArgs']]]] terms: **DEPRECATED** Use `critical`, and `warning` instead. A list of terms for this condition. See Terms below for details.
    :param pulumi.Input[str] type: The type of the condition. Valid values are `static`, `baseline`, or `outlier`. Defaults to `static`.
    :param pulumi.Input[str] value_function: Possible values are `single_value`, `sum` (case insensitive).
    :param pulumi.Input[str] violation_time_limit: **DEPRECATED:** Use `violation_time_limit_seconds` instead. Sets a time limit, in hours, that will automatically force-close a long-lasting violation after the time limit you select. Possible values are `ONE_HOUR`, `TWO_HOURS`, `FOUR_HOURS`, `EIGHT_HOURS`, `TWELVE_HOURS`, `TWENTY_FOUR_HOURS`, `THIRTY_DAYS` (case insensitive).<br>
           <small>\***Note**: One of `violation_time_limit` _or_ `violation_time_limit_seconds` must be set, but not both.</small>
    :param pulumi.Input[int] violation_time_limit_seconds: Sets a time limit, in seconds, that will automatically force-close a long-lasting violation after the time limit you select. The value must be between 300 seconds (5 minutes) to 2592000 seconds (30 days) (inclusive). <br>
           <small>\***Note**: One of `violation_time_limit` _or_ `violation_time_limit_seconds` must be set, but not both.</small>
    :param pulumi.Input[pulumi.InputType['NrqlAlertConditionWarningArgs']] warning: A list containing the `warning` threshold values. See Terms below for details.
    """
    # Typing-only overload stub: the real dispatch happens in the plain
    # __init__ defined after the overloads.
    ...
@overload
def __init__(__self__,
             resource_name: str,
             args: NrqlAlertConditionArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Use this resource to create and manage NRQL alert conditions in New Relic.
    ## Example Usage
    ## NRQL
    The `nrql` block supports the following arguments:
    - `query` - (Required) The NRQL query to execute for the condition.
    - `evaluation_offset` - (Optional*) Represented in minutes and must be within 1-20 minutes (inclusive). NRQL queries are evaluated in one-minute time windows. The start time depends on this value. It's recommended to set this to 3 minutes. An offset of less than 3 minutes will trigger violations sooner, but you may see more false positives and negatives due to data latency. With `evaluation_offset` set to 3 minutes, the NRQL time window applied to your query will be: `SINCE 3 minutes ago UNTIL 2 minutes ago`.<br>
    <small>\***Note**: One of `evaluation_offset` _or_ `since_value` must be set, but not both.</small>
    - `since_value` - (Optional*) **DEPRECATED:** Use `evaluation_offset` instead. The value to be used in the `SINCE <X> minutes ago` clause for the NRQL query. Must be between 1-20 (inclusive). <br>
    <small>\***Note**: One of `evaluation_offset` _or_ `since_value` must be set, but not both.</small>
    ## Terms
    > **NOTE:** The direct use of the `term` has been deprecated, and users should use `critical` and `warning` instead. What follows now applies to the named priority attributes for `critical` and `warning`, but for those attributes the priority is not allowed.
    NRQL alert conditions support up to two terms. At least one `term` must have `priority` set to `critical` and the second optional `term` must have `priority` set to `warning`.
    The `term` block supports the following arguments:
    - `operator` - (Optional) Valid values are `above`, `below`, or `equals` (case insensitive). Defaults to `equals`. Note that when using a `type` of `outlier`, the only valid option here is `above`.
    - `priority` - (Optional) `critical` or `warning`. Defaults to `critical`.
    - `threshold` - (Required) The value which will trigger a violation. Must be `0` or greater.
    - `threshold_duration` - (Optional) The duration, in seconds, that the threshold must violate in order to create a violation. Value must be a multiple of the `aggregation_window` (which has a default of 60 seconds).
    <br>For _baseline_ and _outlier_ NRQL alert conditions, the value must be within 120-3600 seconds (inclusive).
    <br>For _static_ NRQL alert conditions with the `sum` value function, the value must be within 120-7200 seconds (inclusive).
    <br>For _static_ NRQL alert conditions with the `single_value` value function, the value must be within 60-7200 seconds (inclusive).
    - `threshold_occurrences` - (Optional) The criteria for how many data points must be in violation for the specified threshold duration. Valid values are: `all` or `at_least_once` (case insensitive).
    - `duration` - (Optional) **DEPRECATED:** Use `threshold_duration` instead. The duration of time, in _minutes_, that the threshold must violate for in order to create a violation. Must be within 1-120 (inclusive).
    - `time_function` - (Optional) **DEPRECATED:** Use `threshold_occurrences` instead. The criteria for how many data points must be in violation for the specified threshold duration. Valid values are: `all` or `any`.
    ## Import
    Alert conditions can be imported using a composite ID of `<policy_id>:<condition_id>:<conditionType>`, e.g. // For `baseline` conditions
    ```sh
    $ pulumi import newrelic:index/nrqlAlertCondition:NrqlAlertCondition foo 538291:6789035:baseline
    ```
    // For `static` conditions
    ```sh
    $ pulumi import newrelic:index/nrqlAlertCondition:NrqlAlertCondition foo 538291:6789035:static
    ```
    // For `outlier` conditions
    ```sh
    $ pulumi import newrelic:index/nrqlAlertCondition:NrqlAlertCondition foo 538291:6789035:outlier
    ```
    The actual values for `policy_id` and `condition_id` can be retrieved from the following New Relic URL when viewing the NRQL alert condition you want to import: <small>alerts.newrelic.com/accounts/**\<account_id\>**/policies/**\<policy_id\>**/conditions/**\<condition_id\>**/edit</small>
    :param str resource_name: The name of the resource.
    :param NrqlAlertConditionArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    # Typing-only overload stub: the real dispatch happens in the plain
    # __init__ defined after the overloads.
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    # Accept either a typed NrqlAlertConditionArgs bundle or loose keyword
    # arguments, then forward both forms to the shared _internal_init.
    parsed_args, parsed_opts = _utilities.get_resource_args_opts(NrqlAlertConditionArgs, pulumi.ResourceOptions, *args, **kwargs)
    if parsed_args is None:
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        __self__._internal_init(resource_name, parsed_opts, **parsed_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   account_id: Optional[pulumi.Input[int]] = None,
                   aggregation_window: Optional[pulumi.Input[int]] = None,
                   baseline_direction: Optional[pulumi.Input[str]] = None,
                   close_violations_on_expiration: Optional[pulumi.Input[bool]] = None,
                   critical: Optional[pulumi.Input[pulumi.InputType['NrqlAlertConditionCriticalArgs']]] = None,
                   description: Optional[pulumi.Input[str]] = None,
                   enabled: Optional[pulumi.Input[bool]] = None,
                   expected_groups: Optional[pulumi.Input[int]] = None,
                   expiration_duration: Optional[pulumi.Input[int]] = None,
                   fill_option: Optional[pulumi.Input[str]] = None,
                   fill_value: Optional[pulumi.Input[float]] = None,
                   ignore_overlap: Optional[pulumi.Input[bool]] = None,
                   name: Optional[pulumi.Input[str]] = None,
                   nrql: Optional[pulumi.Input[pulumi.InputType['NrqlAlertConditionNrqlArgs']]] = None,
                   open_violation_on_expiration: Optional[pulumi.Input[bool]] = None,
                   open_violation_on_group_overlap: Optional[pulumi.Input[bool]] = None,
                   policy_id: Optional[pulumi.Input[int]] = None,
                   runbook_url: Optional[pulumi.Input[str]] = None,
                   terms: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NrqlAlertConditionTermArgs']]]]] = None,
                   type: Optional[pulumi.Input[str]] = None,
                   value_function: Optional[pulumi.Input[str]] = None,
                   violation_time_limit: Optional[pulumi.Input[str]] = None,
                   violation_time_limit_seconds: Optional[pulumi.Input[int]] = None,
                   warning: Optional[pulumi.Input[pulumi.InputType['NrqlAlertConditionWarningArgs']]] = None,
                   __props__=None):
    """Provider-generated initializer shared by ``__init__`` and ``get``.

    Validates resource options, emits deprecation warnings for legacy
    arguments, enforces required properties, and registers the resource
    with the Pulumi engine.  Not intended to be called directly.
    """
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        # Pin the provider plugin version when the caller did not choose one.
        opts.version = _utilities.get_version()
    if opts.id is None:
        # No provider ID: this is a fresh create, so callers must not pass
        # a pre-built __props__ bag (that path is reserved for get()).
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = NrqlAlertConditionArgs.__new__(NrqlAlertConditionArgs)

        __props__.__dict__["account_id"] = account_id
        __props__.__dict__["aggregation_window"] = aggregation_window
        __props__.__dict__["baseline_direction"] = baseline_direction
        __props__.__dict__["close_violations_on_expiration"] = close_violations_on_expiration
        __props__.__dict__["critical"] = critical
        __props__.__dict__["description"] = description
        __props__.__dict__["enabled"] = enabled
        __props__.__dict__["expected_groups"] = expected_groups
        __props__.__dict__["expiration_duration"] = expiration_duration
        __props__.__dict__["fill_option"] = fill_option
        __props__.__dict__["fill_value"] = fill_value
        # `not opts.urn` skips the warning during engine-internal rehydration.
        if ignore_overlap is not None and not opts.urn:
            warnings.warn("""use `open_violation_on_group_overlap` attribute instead, but use the inverse of your boolean - e.g. if ignore_overlap = false, use open_violation_on_group_overlap = true""", DeprecationWarning)
            pulumi.log.warn("""ignore_overlap is deprecated: use `open_violation_on_group_overlap` attribute instead, but use the inverse of your boolean - e.g. if ignore_overlap = false, use open_violation_on_group_overlap = true""")
        __props__.__dict__["ignore_overlap"] = ignore_overlap
        __props__.__dict__["name"] = name
        if nrql is None and not opts.urn:
            raise TypeError("Missing required property 'nrql'")
        __props__.__dict__["nrql"] = nrql
        __props__.__dict__["open_violation_on_expiration"] = open_violation_on_expiration
        __props__.__dict__["open_violation_on_group_overlap"] = open_violation_on_group_overlap
        if policy_id is None and not opts.urn:
            raise TypeError("Missing required property 'policy_id'")
        __props__.__dict__["policy_id"] = policy_id
        __props__.__dict__["runbook_url"] = runbook_url
        if terms is not None and not opts.urn:
            warnings.warn("""use `critical` and `warning` attributes instead""", DeprecationWarning)
            pulumi.log.warn("""terms is deprecated: use `critical` and `warning` attributes instead""")
        __props__.__dict__["terms"] = terms
        __props__.__dict__["type"] = type
        __props__.__dict__["value_function"] = value_function
        if violation_time_limit is not None and not opts.urn:
            warnings.warn("""use `violation_time_limit_seconds` attribute instead""", DeprecationWarning)
            pulumi.log.warn("""violation_time_limit is deprecated: use `violation_time_limit_seconds` attribute instead""")
        __props__.__dict__["violation_time_limit"] = violation_time_limit
        __props__.__dict__["violation_time_limit_seconds"] = violation_time_limit_seconds
        __props__.__dict__["warning"] = warning
    # Hand off to the Pulumi engine with the fully-populated property bag.
    super(NrqlAlertCondition, __self__).__init__(
        'newrelic:index/nrqlAlertCondition:NrqlAlertCondition',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        account_id: Optional[pulumi.Input[int]] = None,
        aggregation_window: Optional[pulumi.Input[int]] = None,
        baseline_direction: Optional[pulumi.Input[str]] = None,
        close_violations_on_expiration: Optional[pulumi.Input[bool]] = None,
        critical: Optional[pulumi.Input[pulumi.InputType['NrqlAlertConditionCriticalArgs']]] = None,
        description: Optional[pulumi.Input[str]] = None,
        enabled: Optional[pulumi.Input[bool]] = None,
        expected_groups: Optional[pulumi.Input[int]] = None,
        expiration_duration: Optional[pulumi.Input[int]] = None,
        fill_option: Optional[pulumi.Input[str]] = None,
        fill_value: Optional[pulumi.Input[float]] = None,
        ignore_overlap: Optional[pulumi.Input[bool]] = None,
        name: Optional[pulumi.Input[str]] = None,
        nrql: Optional[pulumi.Input[pulumi.InputType['NrqlAlertConditionNrqlArgs']]] = None,
        open_violation_on_expiration: Optional[pulumi.Input[bool]] = None,
        open_violation_on_group_overlap: Optional[pulumi.Input[bool]] = None,
        policy_id: Optional[pulumi.Input[int]] = None,
        runbook_url: Optional[pulumi.Input[str]] = None,
        terms: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NrqlAlertConditionTermArgs']]]]] = None,
        type: Optional[pulumi.Input[str]] = None,
        value_function: Optional[pulumi.Input[str]] = None,
        violation_time_limit: Optional[pulumi.Input[str]] = None,
        violation_time_limit_seconds: Optional[pulumi.Input[int]] = None,
        warning: Optional[pulumi.Input[pulumi.InputType['NrqlAlertConditionWarningArgs']]] = None) -> 'NrqlAlertCondition':
    """
    Get an existing NrqlAlertCondition resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[int] account_id: The New Relic account ID of the account you wish to create the condition. Defaults to the account ID set in your environment variable `NEW_RELIC_ACCOUNT_ID`.
    :param pulumi.Input[int] aggregation_window: The duration of the time window used to evaluate the NRQL query, in seconds. The value must be at least 30 seconds, and no more than 15 minutes (900 seconds). Default is 60 seconds.
    :param pulumi.Input[str] baseline_direction: The baseline direction of a _baseline_ NRQL alert condition. Valid values are: `lower_only`, `upper_and_lower`, `upper_only` (case insensitive).
    :param pulumi.Input[bool] close_violations_on_expiration: Whether to close all open violations when the signal expires.
    :param pulumi.Input[pulumi.InputType['NrqlAlertConditionCriticalArgs']] critical: A list containing the `critical` threshold values. See Terms below for details.
    :param pulumi.Input[str] description: The description of the NRQL alert condition.
    :param pulumi.Input[bool] enabled: Whether to enable the alert condition. Valid values are `true` and `false`. Defaults to `true`.
    :param pulumi.Input[int] expected_groups: Number of expected groups when using `outlier` detection.
    :param pulumi.Input[int] expiration_duration: The amount of time (in seconds) to wait before considering the signal expired.
    :param pulumi.Input[str] fill_option: Which strategy to use when filling gaps in the signal. Possible values are `none`, `last_value` or `static`. If `static`, the `fill_value` field will be used for filling gaps in the signal.
    :param pulumi.Input[float] fill_value: This value will be used for filling gaps in the signal.
    :param pulumi.Input[bool] ignore_overlap: **DEPRECATED:** Use `open_violation_on_group_overlap` instead, but use the inverse value of your boolean - e.g. if `ignore_overlap = false`, use `open_violation_on_group_overlap = true`. This argument sets whether to trigger a violation when groups overlap. If set to `true` overlapping groups will not trigger a violation. This argument is only applicable in `outlier` conditions.
    :param pulumi.Input[str] name: The title of the condition.
    :param pulumi.Input[pulumi.InputType['NrqlAlertConditionNrqlArgs']] nrql: A NRQL query. See NRQL below for details.
    :param pulumi.Input[bool] open_violation_on_expiration: Whether to create a new violation to capture that the signal expired.
    :param pulumi.Input[bool] open_violation_on_group_overlap: Whether or not to trigger a violation when groups overlap. Set to `true` if you want to trigger a violation when groups overlap. This argument is only applicable in `outlier` conditions.
    :param pulumi.Input[int] policy_id: The ID of the policy where this condition should be used.
    :param pulumi.Input[str] runbook_url: Runbook URL to display in notifications.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NrqlAlertConditionTermArgs']]]] terms: **DEPRECATED** Use `critical`, and `warning` instead. A list of terms for this condition. See Terms below for details.
    :param pulumi.Input[str] type: The type of the condition. Valid values are `static`, `baseline`, or `outlier`. Defaults to `static`.
    :param pulumi.Input[str] value_function: Possible values are `single_value`, `sum` (case insensitive).
    :param pulumi.Input[str] violation_time_limit: **DEPRECATED:** Use `violation_time_limit_seconds` instead. Sets a time limit, in hours, that will automatically force-close a long-lasting violation after the time limit you select. Possible values are `ONE_HOUR`, `TWO_HOURS`, `FOUR_HOURS`, `EIGHT_HOURS`, `TWELVE_HOURS`, `TWENTY_FOUR_HOURS`, `THIRTY_DAYS` (case insensitive).<br>
           <small>\***Note**: One of `violation_time_limit` _or_ `violation_time_limit_seconds` must be set, but not both.</small>
    :param pulumi.Input[int] violation_time_limit_seconds: Sets a time limit, in seconds, that will automatically force-close a long-lasting violation after the time limit you select. The value must be between 300 seconds (5 minutes) to 2592000 seconds (30 days) (inclusive). <br>
           <small>\***Note**: One of `violation_time_limit` _or_ `violation_time_limit_seconds` must be set, but not both.</small>
    :param pulumi.Input[pulumi.InputType['NrqlAlertConditionWarningArgs']] warning: A list containing the `warning` threshold values. See Terms below for details.
    """
    # Binding the provider ID makes the engine read existing state rather
    # than create a new resource.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    __props__ = _NrqlAlertConditionState.__new__(_NrqlAlertConditionState)

    # Seed the state bag with any caller-supplied overrides (unset ones stay None).
    __props__.__dict__["account_id"] = account_id
    __props__.__dict__["aggregation_window"] = aggregation_window
    __props__.__dict__["baseline_direction"] = baseline_direction
    __props__.__dict__["close_violations_on_expiration"] = close_violations_on_expiration
    __props__.__dict__["critical"] = critical
    __props__.__dict__["description"] = description
    __props__.__dict__["enabled"] = enabled
    __props__.__dict__["expected_groups"] = expected_groups
    __props__.__dict__["expiration_duration"] = expiration_duration
    __props__.__dict__["fill_option"] = fill_option
    __props__.__dict__["fill_value"] = fill_value
    __props__.__dict__["ignore_overlap"] = ignore_overlap
    __props__.__dict__["name"] = name
    __props__.__dict__["nrql"] = nrql
    __props__.__dict__["open_violation_on_expiration"] = open_violation_on_expiration
    __props__.__dict__["open_violation_on_group_overlap"] = open_violation_on_group_overlap
    __props__.__dict__["policy_id"] = policy_id
    __props__.__dict__["runbook_url"] = runbook_url
    __props__.__dict__["terms"] = terms
    __props__.__dict__["type"] = type
    __props__.__dict__["value_function"] = value_function
    __props__.__dict__["violation_time_limit"] = violation_time_limit
    __props__.__dict__["violation_time_limit_seconds"] = violation_time_limit_seconds
    __props__.__dict__["warning"] = warning
    return NrqlAlertCondition(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="accountId")
def account_id(self) -> pulumi.Output[int]:
    """New Relic account ID the condition belongs to; defaults to the `NEW_RELIC_ACCOUNT_ID` environment variable."""
    resolved = pulumi.get(self, "account_id")
    return resolved
@property
@pulumi.getter(name="aggregationWindow")
def aggregation_window(self) -> pulumi.Output[int]:
    """Evaluation time window for the NRQL query, in seconds (30-900; default 60)."""
    resolved = pulumi.get(self, "aggregation_window")
    return resolved
@property
@pulumi.getter(name="baselineDirection")
def baseline_direction(self) -> pulumi.Output[Optional[str]]:
    """Baseline direction for a _baseline_ condition: `lower_only`, `upper_and_lower`, or `upper_only` (case insensitive)."""
    resolved = pulumi.get(self, "baseline_direction")
    return resolved
@property
@pulumi.getter(name="closeViolationsOnExpiration")
def close_violations_on_expiration(self) -> pulumi.Output[Optional[bool]]:
    """Whether all open violations are closed when the signal expires."""
    resolved = pulumi.get(self, "close_violations_on_expiration")
    return resolved
@property
@pulumi.getter
def critical(self) -> pulumi.Output[Optional['outputs.NrqlAlertConditionCritical']]:
    """The `critical` threshold values; see the Terms documentation for details."""
    resolved = pulumi.get(self, "critical")
    return resolved
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
    """Free-form description of the NRQL alert condition."""
    resolved = pulumi.get(self, "description")
    return resolved
@property
@pulumi.getter
def enabled(self) -> pulumi.Output[Optional[bool]]:
    """Whether the alert condition is active; defaults to `true`."""
    resolved = pulumi.get(self, "enabled")
    return resolved
@property
@pulumi.getter(name="expectedGroups")
def expected_groups(self) -> pulumi.Output[Optional[int]]:
    """Expected group count when `outlier` detection is used."""
    resolved = pulumi.get(self, "expected_groups")
    return resolved
@property
@pulumi.getter(name="expirationDuration")
def expiration_duration(self) -> pulumi.Output[Optional[int]]:
    """Seconds to wait before the signal is considered expired."""
    resolved = pulumi.get(self, "expiration_duration")
    return resolved
@property
@pulumi.getter(name="fillOption")
def fill_option(self) -> pulumi.Output[Optional[str]]:
    """Gap-filling strategy: `none`, `last_value`, or `static` (the latter uses `fill_value`)."""
    resolved = pulumi.get(self, "fill_option")
    return resolved
@property
@pulumi.getter(name="fillValue")
def fill_value(self) -> pulumi.Output[Optional[float]]:
    """Value used to fill gaps in the signal when `fill_option` is `static`."""
    resolved = pulumi.get(self, "fill_value")
    return resolved
@property
@pulumi.getter(name="ignoreOverlap")
def ignore_overlap(self) -> pulumi.Output[Optional[bool]]:
    """**DEPRECATED:** use the inverse `open_violation_on_group_overlap` instead. When `true`, overlapping groups do not trigger a violation; `outlier` conditions only."""
    resolved = pulumi.get(self, "ignore_overlap")
    return resolved
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """Title of the condition."""
    resolved = pulumi.get(self, "name")
    return resolved
@property
@pulumi.getter
def nrql(self) -> pulumi.Output['outputs.NrqlAlertConditionNrql']:
    """The NRQL query block; see the NRQL documentation for details."""
    resolved = pulumi.get(self, "nrql")
    return resolved
@property
@pulumi.getter(name="openViolationOnExpiration")
def open_violation_on_expiration(self) -> pulumi.Output[Optional[bool]]:
    """Whether a new violation is opened to record that the signal expired."""
    resolved = pulumi.get(self, "open_violation_on_expiration")
    return resolved
@property
@pulumi.getter(name="openViolationOnGroupOverlap")
def open_violation_on_group_overlap(self) -> pulumi.Output[Optional[bool]]:
    """Whether overlapping groups trigger a violation; `outlier` conditions only."""
    resolved = pulumi.get(self, "open_violation_on_group_overlap")
    return resolved
@property
@pulumi.getter(name="policyId")
def policy_id(self) -> pulumi.Output[int]:
    """ID of the alert policy this condition is attached to."""
    resolved = pulumi.get(self, "policy_id")
    return resolved
@property
@pulumi.getter(name="runbookUrl")
def runbook_url(self) -> pulumi.Output[Optional[str]]:
    """Runbook URL shown in notifications."""
    resolved = pulumi.get(self, "runbook_url")
    return resolved
@property
@pulumi.getter
def terms(self) -> pulumi.Output[Optional[Sequence['outputs.NrqlAlertConditionTerm']]]:
    """**DEPRECATED:** use `critical` and `warning` instead. List of terms for this condition; see the Terms documentation."""
    resolved = pulumi.get(self, "terms")
    return resolved
@property
@pulumi.getter
def type(self) -> pulumi.Output[Optional[str]]:
    """Condition type: `static`, `baseline`, or `outlier`; defaults to `static`."""
    resolved = pulumi.get(self, "type")
    return resolved
@property
@pulumi.getter(name="valueFunction")
def value_function(self) -> pulumi.Output[Optional[str]]:
    """Value function: `single_value` or `sum` (case insensitive)."""
    resolved = pulumi.get(self, "value_function")
    return resolved
@property
@pulumi.getter(name="violationTimeLimit")
def violation_time_limit(self) -> pulumi.Output[str]:
    """**DEPRECATED:** use `violation_time_limit_seconds` instead. Hour-based auto-close limit for long-lasting violations; one of `ONE_HOUR`, `TWO_HOURS`, `FOUR_HOURS`, `EIGHT_HOURS`, `TWELVE_HOURS`, `TWENTY_FOUR_HOURS`, `THIRTY_DAYS` (case insensitive). Exactly one of `violation_time_limit` or `violation_time_limit_seconds` may be set."""
    resolved = pulumi.get(self, "violation_time_limit")
    return resolved
@property
@pulumi.getter(name="violationTimeLimitSeconds")
def violation_time_limit_seconds(self) -> pulumi.Output[Optional[int]]:
    """Second-based auto-close limit for long-lasting violations (300-2592000, inclusive). Exactly one of `violation_time_limit` or `violation_time_limit_seconds` may be set."""
    resolved = pulumi.get(self, "violation_time_limit_seconds")
    return resolved
@property
@pulumi.getter
def warning(self) -> pulumi.Output[Optional['outputs.NrqlAlertConditionWarning']]:
    """The `warning` threshold values; see the Terms documentation for details."""
    resolved = pulumi.get(self, "warning")
    return resolved
| 59.204052
| 526
| 0.686201
| 10,166
| 81,820
| 5.324415
| 0.037576
| 0.06686
| 0.073714
| 0.029264
| 0.962552
| 0.955919
| 0.948862
| 0.945795
| 0.943781
| 0.933436
| 0
| 0.004798
| 0.212906
| 81,820
| 1,381
| 527
| 59.246923
| 0.835699
| 0.434674
| 0
| 0.890306
| 1
| 0.007653
| 0.166116
| 0.072849
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16199
| false
| 0.001276
| 0.008929
| 0
| 0.267857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6501353b0ccda822b1119f34ca9acccdf3fc9f18
| 121
|
py
|
Python
|
04higher_order_functions_modules_std_lambdas_inheritance/my_module.py
|
DrSloth/first_steps_in_python
|
36e56bfd4da89bc644d2adbc2ea5010a071f2c4c
|
[
"MIT"
] | 2
|
2022-02-18T09:03:07.000Z
|
2022-02-18T09:03:16.000Z
|
04higher_order_functions_modules_std_lambdas_inheritance/my_module.py
|
DrSloth/first_steps_in_python
|
36e56bfd4da89bc644d2adbc2ea5010a071f2c4c
|
[
"MIT"
] | null | null | null |
04higher_order_functions_modules_std_lambdas_inheritance/my_module.py
|
DrSloth/first_steps_in_python
|
36e56bfd4da89bc644d2adbc2ea5010a071f2c4c
|
[
"MIT"
] | null | null | null |
# This module contains a function to print Hello, World!

def hello():
    """Print the classic greeting to stdout."""
    greeting = "Hello, World!"
    print(greeting)
| 20.166667
| 56
| 0.68595
| 17
| 121
| 4.882353
| 0.647059
| 0.361446
| 0.361446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190083
| 121
| 5
| 57
| 24.2
| 0.846939
| 0.619835
| 0
| 0
| 0
| 0
| 0.302326
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
3324581aa82e2b9e1bb643ed6389a5890676034b
| 6,028
|
py
|
Python
|
tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2011] 1.py
|
ghlecl/holidata
|
1db24d4aecab7ec7a007720987d84ffb0988b6db
|
[
"MIT"
] | null | null | null |
tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2011] 1.py
|
ghlecl/holidata
|
1db24d4aecab7ec7a007720987d84ffb0988b6db
|
[
"MIT"
] | null | null | null |
tests/snapshots/snap_test_holidata/test_holidata_produces_holidays_for_locale_and_year[de_DE-2011] 1.py
|
ghlecl/holidata
|
1db24d4aecab7ec7a007720987d84ffb0988b6db
|
[
"MIT"
] | null | null | null |
[
{
'date': '2011-01-01',
'description': 'Neujahr',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NF'
},
{
'date': '2011-01-06',
'description': 'Heilige drei Könige',
'locale': 'de-DE',
'notes': '',
'region': 'BW',
'type': 'NRF'
},
{
'date': '2011-01-06',
'description': 'Heilige drei Könige',
'locale': 'de-DE',
'notes': '',
'region': 'BY',
'type': 'NRF'
},
{
'date': '2011-01-06',
'description': 'Heilige drei Könige',
'locale': 'de-DE',
'notes': '',
'region': 'ST',
'type': 'NRF'
},
{
'date': '2011-04-22',
'description': 'Karfreitag',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NRV'
},
{
'date': '2011-04-24',
'description': 'Ostern',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NRV'
},
{
'date': '2011-04-25',
'description': 'Ostermontag',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NRV'
},
{
'date': '2011-05-01',
'description': 'Erster Maifeiertag',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NF'
},
{
'date': '2011-06-02',
'description': 'Christi Himmelfahrt',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NRV'
},
{
'date': '2011-06-12',
'description': 'Pfingstsonntag',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NRV'
},
{
'date': '2011-06-13',
'description': 'Pfingstmontag',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NRV'
},
{
'date': '2011-06-23',
'description': 'Fronleichnam',
'locale': 'de-DE',
'notes': '',
'region': 'BW',
'type': 'NRV'
},
{
'date': '2011-06-23',
'description': 'Fronleichnam',
'locale': 'de-DE',
'notes': '',
'region': 'BY',
'type': 'NRV'
},
{
'date': '2011-06-23',
'description': 'Fronleichnam',
'locale': 'de-DE',
'notes': '',
'region': 'HE',
'type': 'NRV'
},
{
'date': '2011-06-23',
'description': 'Fronleichnam',
'locale': 'de-DE',
'notes': '',
'region': 'NW',
'type': 'NRV'
},
{
'date': '2011-06-23',
'description': 'Fronleichnam',
'locale': 'de-DE',
'notes': '',
'region': 'RP',
'type': 'NRV'
},
{
'date': '2011-06-23',
'description': 'Fronleichnam',
'locale': 'de-DE',
'notes': '',
'region': 'SL',
'type': 'NRV'
},
{
'date': '2011-08-15',
'description': 'Mariä Himmelfahrt',
'locale': 'de-DE',
'notes': '',
'region': 'SL',
'type': 'NRF'
},
{
'date': '2011-10-03',
'description': 'Tag der Deutschen Einheit',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2011-10-31',
'description': 'Reformationstag',
'locale': 'de-DE',
'notes': '',
'region': 'BB',
'type': 'NRF'
},
{
'date': '2011-10-31',
'description': 'Reformationstag',
'locale': 'de-DE',
'notes': '',
'region': 'MV',
'type': 'NRF'
},
{
'date': '2011-10-31',
'description': 'Reformationstag',
'locale': 'de-DE',
'notes': '',
'region': 'SN',
'type': 'NRF'
},
{
'date': '2011-10-31',
'description': 'Reformationstag',
'locale': 'de-DE',
'notes': '',
'region': 'ST',
'type': 'NRF'
},
{
'date': '2011-10-31',
'description': 'Reformationstag',
'locale': 'de-DE',
'notes': '',
'region': 'TH',
'type': 'NRF'
},
{
'date': '2011-11-01',
'description': 'Allerheiligen',
'locale': 'de-DE',
'notes': '',
'region': 'BW',
'type': 'NRF'
},
{
'date': '2011-11-01',
'description': 'Allerheiligen',
'locale': 'de-DE',
'notes': '',
'region': 'BY',
'type': 'NRF'
},
{
'date': '2011-11-01',
'description': 'Allerheiligen',
'locale': 'de-DE',
'notes': '',
'region': 'NW',
'type': 'NRF'
},
{
'date': '2011-11-01',
'description': 'Allerheiligen',
'locale': 'de-DE',
'notes': '',
'region': 'RP',
'type': 'NRF'
},
{
'date': '2011-11-01',
'description': 'Allerheiligen',
'locale': 'de-DE',
'notes': '',
'region': 'SL',
'type': 'NRF'
},
{
'date': '2011-11-16',
'description': 'Buß- und Bettag',
'locale': 'de-DE',
'notes': '',
'region': 'SN',
'type': 'NRV'
},
{
'date': '2011-12-24',
'description': 'Heilig Abend',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2011-12-25',
'description': 'Weihnachtstag',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2011-12-26',
'description': 'Zweiter Weihnachtstag',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NRF'
},
{
'date': '2011-12-31',
'description': 'Silvester',
'locale': 'de-DE',
'notes': '',
'region': '',
'type': 'NF'
}
]
| 22
| 51
| 0.37077
| 479
| 6,028
| 4.665971
| 0.146138
| 0.1217
| 0.152125
| 0.228188
| 0.822819
| 0.821029
| 0.811186
| 0.778076
| 0.778076
| 0.724832
| 0
| 0.07491
| 0.397644
| 6,028
| 274
| 52
| 22
| 0.540622
| 0
| 0
| 0.620438
| 0
| 0
| 0.390446
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6825b92437b2decac156c71d71cfbd6456858669
| 108
|
py
|
Python
|
spec_version.py
|
antonvh/ev3dev-lang-python
|
7dd6a657d998476c3258863998cae34ea161baa1
|
[
"MIT"
] | 1
|
2020-12-01T23:23:33.000Z
|
2020-12-01T23:23:33.000Z
|
spec_version.py
|
antonvh/ev3dev-lang-python
|
7dd6a657d998476c3258863998cae34ea161baa1
|
[
"MIT"
] | null | null | null |
spec_version.py
|
antonvh/ev3dev-lang-python
|
7dd6a657d998476c3258863998cae34ea161baa1
|
[
"MIT"
] | 1
|
2020-12-01T23:23:34.000Z
|
2020-12-01T23:23:34.000Z
|
# ~autogen spec_version
# NOTE(review): the ~autogen markers suggest this constant is tool-generated —
# confirm before editing by hand. Records the ev3dev spec/kernel versions.
spec_version = "spec: 0.9.3-pre-r2, kernel: v3.16.7-ckt16-7-ev3dev-ev3"
# ~autogen
| 21.6
| 71
| 0.703704
| 20
| 108
| 3.7
| 0.75
| 0.297297
| 0.405405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135417
| 0.111111
| 108
| 4
| 72
| 27
| 0.635417
| 0.277778
| 0
| 0
| 1
| 1
| 0.72
| 0.346667
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d7a9feddc8c1b28c75d579ce48b9fe064b15ddec
| 119
|
py
|
Python
|
python_20740628/chapter_one/6.py
|
vimm0/python_exercise
|
7773d95b4c25b82a9d014f7a814ac83df9ebac17
|
[
"MIT"
] | null | null | null |
python_20740628/chapter_one/6.py
|
vimm0/python_exercise
|
7773d95b4c25b82a9d014f7a814ac83df9ebac17
|
[
"MIT"
] | null | null | null |
python_20740628/chapter_one/6.py
|
vimm0/python_exercise
|
7773d95b4c25b82a9d014f7a814ac83df9ebac17
|
[
"MIT"
] | 1
|
2018-01-04T16:27:31.000Z
|
2018-01-04T16:27:31.000Z
|
def check_palindrome(text):
    """Return True if *text* reads the same forwards and backwards.

    The parameter was renamed from ``str`` to ``text`` because the old name
    shadowed the builtin ``str``; positional callers are unaffected.
    """
    return text == text[::-1]


# Demo output: False for 'palpa', True for 'radar'.
print(check_palindrome('palpa'))
print(check_palindrome('radar'))
| 19.833333
| 32
| 0.731092
| 16
| 119
| 5.25
| 0.5625
| 0.535714
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009174
| 0.084034
| 119
| 6
| 33
| 19.833333
| 0.761468
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 7
|
d7f9937de321ef353dde5adfe61aa9c9997e6117
| 3,190
|
py
|
Python
|
build/lib/pypse/operators.py
|
HankelBao/PyPse
|
0022226703592ef030f8d4cdb94cdb089d36edb5
|
[
"MIT"
] | 9
|
2018-11-25T15:31:21.000Z
|
2021-03-17T20:24:57.000Z
|
build/lib/pypse/operators.py
|
HankelBao/PyPse
|
0022226703592ef030f8d4cdb94cdb089d36edb5
|
[
"MIT"
] | null | null | null |
build/lib/pypse/operators.py
|
HankelBao/PyPse
|
0022226703592ef030f8d4cdb94cdb089d36edb5
|
[
"MIT"
] | 2
|
2019-02-02T17:42:35.000Z
|
2020-12-08T16:25:50.000Z
|
from lark import Tree
from .values import Value, ValueType
from .debug import DebugOutput
class Operator():
    """Base class for binary operators in the interpreter.

    Subclasses set `priority` (higher binds tighter) and override
    `operate`.  NOTE(review): `operate` and `debug_output` take no `self`
    and appear to be called directly on the class — confirm with callers.
    """
    # Lowest precedence; subclasses override with 1-3.
    priority = 0
    def operate(value1: Value, value2: Value) -> Value:
        # Base implementation: no operation defined.
        pass
    def debug_output():
        DebugOutput.output("anonymous operator")
class OperatorAdd(Operator):
    """Binary `+`: defined only for INT operands; yields None otherwise."""
    priority = 2

    def operate(value1: Value, value2: Value) -> Value:
        if value1.value_type == ValueType.INT and value2.value_type == ValueType.INT:
            total = int(value1.value_in_python + value2.value_in_python)
            result = Value(ValueType.INT)
            result.assign_value_in_python(total)
            return result
        return None

    def debug_output():
        DebugOutput.output("+")
class OperatorMinus(Operator):
    """Binary `-`: the result carries the left operand's type."""
    priority = 2

    def operate(value1: Value, value2: Value) -> Value:
        difference = value1.value_in_python - value2.value_in_python
        result = Value(value1.value_type)
        result.assign_value_in_python(difference)
        return result

    def debug_output():
        DebugOutput.output("-")
class OperatorMultiple(Operator):
    """Binary `*`: the result carries the left operand's type."""
    priority = 3

    def operate(value1: Value, value2: Value) -> Value:
        product = value1.value_in_python * value2.value_in_python
        result = Value(value1.value_type)
        result.assign_value_in_python(product)
        return result

    def debug_output():
        DebugOutput.output("*")
class OperatorDivide(Operator):
    """Binary `/` (true division): the result carries the left operand's type."""
    priority = 3

    def operate(value1: Value, value2: Value) -> Value:
        quotient = value1.value_in_python / value2.value_in_python
        result = Value(value1.value_type)
        result.assign_value_in_python(quotient)
        return result

    def debug_output():
        DebugOutput.output("/")
class OperatorEqual(Operator):
    """Binary `=` comparison: BOOL result for same-typed operands, else None."""
    priority = 1

    def operate(value1: Value, value2: Value) -> Value:
        if value1.value_type != value2.value_type:
            return None
        is_equal = value1.value_in_python == value2.value_in_python
        result = Value(ValueType.BOOL)
        result.assign_value_in_python(is_equal)
        return result

    def debug_output():
        DebugOutput.output("=")
class OperatorLargerThan(Operator):
    """Binary `>` comparison: BOOL result for same-typed operands, else None."""
    priority = 1

    def operate(value1: Value, value2: Value) -> Value:
        if value1.value_type != value2.value_type:
            return None
        is_larger = value1.value_in_python > value2.value_in_python
        result = Value(ValueType.BOOL)
        result.assign_value_in_python(is_larger)
        return result

    def debug_output():
        DebugOutput.output(">")
class OperatorSmallerThan(Operator):
    """Less-than comparison; operands must share a type, otherwise None."""
    priority = 1

    def operate(value1: Value, value2: Value) -> Value:
        # Guard clause: cross-type comparison is undefined.
        if value1.value_type != value2.value_type:
            return None
        result = Value(ValueType.BOOL)
        result.assign_value_in_python(value1.value_in_python < value2.value_in_python)
        return result

    def debug_output():
        DebugOutput.output("<")
# from .converters import convert_token_to_operator
| 27.982456
| 85
| 0.663636
| 378
| 3,190
| 5.338624
| 0.116402
| 0.121407
| 0.225471
| 0.124876
| 0.827552
| 0.81219
| 0.81219
| 0.793855
| 0.793855
| 0.793855
| 0
| 0.020476
| 0.249843
| 3,190
| 113
| 86
| 28.230089
| 0.822817
| 0.015361
| 0
| 0.625
| 0
| 0
| 0.007964
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.0125
| 0.0375
| 0
| 0.575
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
cc1ac9d48b670d93f673abd781103ddf70895605
| 1,584
|
py
|
Python
|
dash_fcast/distributions/utils.py
|
dsbowen/dash-fcast
|
b589dfa370e8170f76893ba2f484bf03eaca7522
|
[
"MIT"
] | null | null | null |
dash_fcast/distributions/utils.py
|
dsbowen/dash-fcast
|
b589dfa370e8170f76893ba2f484bf03eaca7522
|
[
"MIT"
] | 1
|
2021-05-26T12:55:28.000Z
|
2021-05-27T12:15:48.000Z
|
dash_fcast/distributions/utils.py
|
dsbowen/dash-fcast
|
b589dfa370e8170f76893ba2f484bf03eaca7522
|
[
"MIT"
] | null | null | null |
from scipy.stats._continuous_distns import expon_gen, gamma_gen
class reflect():
    """Mixin that reflects a continuous distribution about the origin (X -> -X).

    Intended to precede an ``rv_continuous`` subclass in the MRO; each method
    delegates to ``super()`` with the argument negated or the tail swapped.
    """

    def pdf(self, x, *args, **kwargs):
        # Density of -X at x is the density of X at -x.
        return super().pdf(-x, *args, **kwargs)

    def logpdf(self, x, *args, **kwargs):
        return super().logpdf(-x, *args, **kwargs)

    def cdf(self, x, *args, **kwargs):
        # P(-X <= x) = P(X >= -x) = 1 - F(-x) for continuous X.
        return 1 - super().cdf(-x, *args, **kwargs)

    # def logcdf(self, x, *args, **kwargs):
    #     return super().logcdf(-x, *args, **kwargs)

    def sf(self, x, *args, **kwargs):
        # P(-X > x) = P(X < -x) = 1 - S(-x).
        return 1 - super().sf(-x, *args, **kwargs)

    # def logsf(self, x, *args, **kwargs):
    #     return super().logsf(-x, *args, **kwargs)

    def ppf(self, q, *args, **kwargs):
        # q-quantile of -X is the negated (1-q)-quantile of X.
        return -super().ppf(1-q, *args, **kwargs)

    def isf(self, q, *args, **kwargs):
        return -super().isf(1-q, *args, **kwargs)

    def moment(self, n, *args, **kwargs):
        # E[(-X)^n] = (-1)^n E[X^n]: only odd moments flip sign.
        # Bug fix: the previous version negated every moment, which is wrong
        # for even n (e.g. the second moment must stay positive).
        return (-1) ** n * super().moment(n, *args, **kwargs)

    def mean(self, *args, **kwargs):
        return -super().mean(*args, **kwargs)

    def median(self, *args, **kwargs):
        return -super().median(*args, **kwargs)

    # TODO
    # stats
    # entropy
    # expect
    # interval
    # fit
    # fit_loc_scale
    # nnlf
    # support
class rexpon_gen(reflect, expon_gen):
    """Exponential distribution reflected about the origin."""
    def __call__(self, loc=0, *args, **kwargs):
        # Negate loc so freezing shifts in the reflected coordinate system.
        return super().__call__(-loc, *args, **kwargs)
# NOTE(review): a=0. is the lower support bound of the *unreflected* expon;
# the reflected distribution lives on (-inf, 0] — confirm support handling.
rexpon = rexpon_gen(a=0., name='rexpon')
class rgamma_gen(reflect, gamma_gen):
    """Gamma distribution reflected about the origin."""
    def __call__(self, a=1, loc=0, *args, **kwargs):
        # Pass the shape parameter through unchanged; negate only loc.
        return super().__call__(a, -loc, *args, **kwargs)
# NOTE(review): a=0 here is the support bound argument of rv_continuous, not
# the gamma shape parameter of __call__ above — confirm this is intended.
rgamma = rgamma_gen(a=0, name='rgamma')
| 25.967213
| 63
| 0.569444
| 210
| 1,584
| 4.161905
| 0.22381
| 0.297483
| 0.237986
| 0.264302
| 0.398169
| 0.306636
| 0.128146
| 0
| 0
| 0
| 0
| 0.007432
| 0.23548
| 1,584
| 61
| 64
| 25.967213
| 0.714286
| 0.147096
| 0
| 0
| 0
| 0
| 0.008969
| 0
| 0
| 0
| 0
| 0.016393
| 0
| 1
| 0.392857
| false
| 0
| 0.035714
| 0.392857
| 0.928571
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
0be132164364ba05a50c33f7d7f0ce57460713e7
| 11,745
|
py
|
Python
|
rlkit/demos/source/encoder_dict_to_mdp_path_loader.py
|
anair13/rlkit
|
028885a6528b9d871d1946671f84ad93d90eded1
|
[
"MIT"
] | 11
|
2020-01-24T17:16:42.000Z
|
2022-01-30T23:07:06.000Z
|
rlkit/demos/source/encoder_dict_to_mdp_path_loader.py
|
anair13/rlkit
|
028885a6528b9d871d1946671f84ad93d90eded1
|
[
"MIT"
] | null | null | null |
rlkit/demos/source/encoder_dict_to_mdp_path_loader.py
|
anair13/rlkit
|
028885a6528b9d871d1946671f84ad93d90eded1
|
[
"MIT"
] | 3
|
2020-04-13T09:17:25.000Z
|
2021-10-16T08:03:10.000Z
|
from collections import OrderedDict
import pickle
import numpy as np
import torch
import torch.optim as optim
from torch import nn as nn
import torch.nn.functional as F
import copy
import rlkit.torch.pytorch_util as ptu
from rlkit.core.eval_util import create_stats_ordered_dict
from rlkit.torch.torch_rl_algorithm import TorchTrainer
from rlkit.demos.source.dict_to_mdp_path_loader import DictToMDPPathLoader
from rlkit.util.io import (
load_local_or_remote_file, sync_down_folder, get_absolute_path, sync_down
)
import random
from rlkit.torch.core import np_to_pytorch_batch
from rlkit.data_management.path_builder import PathBuilder
from rlkit.launchers.conf import LOCAL_LOG_DIR, AWS_S3_PATH
from rlkit.core import logger
import glob
class EncoderDictToMDPPathLoader(DictToMDPPathLoader):
    """Demo path loader that encodes image observations into a latent space
    with `model` before adding trajectories to the replay buffers.
    """

    def __init__(
            self,
            trainer,
            replay_buffer,
            demo_train_buffer,
            demo_test_buffer,
            model=None,
            model_path=None,
            reward_fn=None,
            env=None,
            demo_paths=None,  # list of dicts
            normalize=False,
            demo_train_split=0.9,
            demo_data_split=1,
            add_demos_to_replay_buffer=True,
            condition_encoding=False,
            bc_num_pretrain_steps=0,
            bc_batch_size=64,
            bc_weight=1.0,
            rl_weight=1.0,
            q_num_pretrain_steps=0,
            weight_decay=0,
            eval_policy=None,
            recompute_reward=False,
            object_list=None,
            env_info_key=None,
            obs_key=None,
            load_terminals=True,
            delete_after_loading=False,
            data_filter_fn=lambda x: True,  # Return true to add path, false to ignore it
            **kwargs
    ):
        # Bug fix: `demo_paths` previously defaulted to a shared mutable [],
        # which is reused across all instances; None + local [] is equivalent
        # for callers but safe.
        if demo_paths is None:
            demo_paths = []
        super().__init__(trainer,
                         replay_buffer,
                         demo_train_buffer,
                         demo_test_buffer,
                         demo_paths,
                         demo_train_split,
                         demo_data_split,
                         add_demos_to_replay_buffer,
                         bc_num_pretrain_steps,
                         bc_batch_size,
                         bc_weight,
                         rl_weight,
                         q_num_pretrain_steps,
                         weight_decay,
                         eval_policy,
                         recompute_reward,
                         env_info_key,
                         obs_key,
                         load_terminals,
                         delete_after_loading,
                         data_filter_fn,
                         **kwargs)
        if model is None:
            # No instance supplied: load from a local/remote serialized file.
            self.model = load_local_or_remote_file(model_path, delete_after_loading=delete_after_loading)
        else:
            self.model = model
        self.condition_encoding = condition_encoding
        self.reward_fn = reward_fn
        self.normalize = normalize
        self.object_list = object_list
        self.env = env

    def preprocess(self, observation):
        """Encode a trajectory of dict observations into latent entries.

        Each element's 'image_observation' is replaced with latent keys
        (initial state, current, achieved goal, desired goal).
        """
        observation = copy.deepcopy(observation)
        images = np.stack([observation[i]['image_observation'] for i in range(len(observation))])
        if self.normalize:
            # Assumes uint8 images in [0, 255] — TODO confirm.
            images = images / 255.0
        if self.condition_encoding:
            # Condition every frame's encoding on the trajectory's first frame.
            cond = images[0].repeat(len(observation), axis=0)
            latents = self.model.encode_np(images, cond)
        else:
            latents = self.model.encode_np(images)
        for i in range(len(observation)):
            observation[i]["initial_latent_state"] = latents[0]
            observation[i]["latent_observation"] = latents[i]
            observation[i]["latent_achieved_goal"] = latents[i]
            # The final frame serves as the desired goal for every step.
            observation[i]["latent_desired_goal"] = latents[-1]
            del observation[i]['image_observation']
        return observation

    def preprocess_array_obs(self, observation):
        """Wrap raw array observations into dicts; no encoding is applied."""
        new_observations = []
        for i in range(len(observation)):
            new_observations.append(dict(observation=observation[i]))
        return new_observations

    def encode(self, obs):
        """Encode a numpy batch with the model, normalizing first if configured."""
        if self.normalize:
            return ptu.get_numpy(self.model.encode(ptu.from_numpy(obs) / 255.0))
        return ptu.get_numpy(self.model.encode(ptu.from_numpy(obs)))

    def load_path(self, path, replay_buffer, obs_dict=None):
        """Preprocess one demo `path` and add it to `replay_buffer`."""
        # Filter data #
        if not self.data_filter_fn(path): return
        rewards = []
        path_builder = PathBuilder()
        # Truncate to the shorter of observations/actions so pairs line up.
        H = min(len(path["observations"]), len(path["actions"]))
        if obs_dict:
            traj_obs = self.preprocess(path["observations"])
            next_traj_obs = self.preprocess(path["next_observations"])
        else:
            traj_obs = self.preprocess_array_obs(path["observations"])
            next_traj_obs = self.preprocess_array_obs(path["next_observations"])
        for i in range(H):
            ob = traj_obs[i]
            next_ob = next_traj_obs[i]
            action = path["actions"][i]
            reward = path["rewards"][i]
            terminal = path["terminals"][i]
            if not self.load_terminals:
                terminal = np.zeros(terminal.shape)
            agent_info = path["agent_infos"][i]
            env_info = path["env_infos"][i]
            if self.recompute_reward:
                # NOTE(review): next_ob is passed twice — presumably
                # (obs, action, next_obs, goal); confirm reward_fn's signature.
                reward = self.reward_fn(ob, action, next_ob, next_ob)
            reward = np.array([reward]).flatten()
            rewards.append(reward)
            terminal = np.array([terminal]).reshape((1,))
            path_builder.add_all(
                observations=ob,
                actions=action,
                rewards=reward,
                next_observations=next_ob,
                terminals=terminal,
                agent_infos=agent_info,
                env_infos=env_info,
            )
        self.demo_trajectory_rewards.append(rewards)
        path = path_builder.get_all_stacked()
        replay_buffer.add_path(path)
        print("rewards", np.min(rewards), np.max(rewards))
        print("loading path, length", len(path["observations"]), len(path["actions"]))
        print("actions", np.min(path["actions"]), np.max(path["actions"]))
        print("path sum rewards", sum(rewards), len(rewards))
class DualEncoderDictToMDPPathLoader(DictToMDPPathLoader):
    """Variant of the encoder path loader that uses two models: `model` for
    goal/observation latents and `input_model` for an extra 'input_latent'.
    """

    def __init__(
            self,
            trainer,
            replay_buffer,
            demo_train_buffer,
            demo_test_buffer,
            model=None,
            model_path=None,
            input_model=None,
            input_model_path=None,
            reward_fn=None,
            env=None,
            demo_paths=None,  # list of dicts
            normalize=False,
            demo_train_split=0.9,
            demo_data_split=1,
            add_demos_to_replay_buffer=True,
            condition_input_encoding=False,
            bc_num_pretrain_steps=0,
            bc_batch_size=64,
            bc_weight=1.0,
            rl_weight=1.0,
            q_num_pretrain_steps=0,
            weight_decay=0,
            eval_policy=None,
            recompute_reward=False,
            object_list=None,
            env_info_key=None,
            obs_key=None,
            load_terminals=True,
            delete_after_loading=False,
            data_filter_fn=lambda x: True,  # Return true to add path, false to ignore it
            **kwargs
    ):
        # Bug fix: `demo_paths` previously defaulted to a shared mutable [],
        # which is reused across all instances; None + local [] is equivalent
        # for callers but safe.
        if demo_paths is None:
            demo_paths = []
        super().__init__(trainer,
                         replay_buffer,
                         demo_train_buffer,
                         demo_test_buffer,
                         demo_paths,
                         demo_train_split,
                         demo_data_split,
                         add_demos_to_replay_buffer,
                         bc_num_pretrain_steps,
                         bc_batch_size,
                         bc_weight,
                         rl_weight,
                         q_num_pretrain_steps,
                         weight_decay,
                         eval_policy,
                         recompute_reward,
                         env_info_key,
                         obs_key,
                         load_terminals,
                         delete_after_loading,
                         data_filter_fn,
                         **kwargs)
        if model is None:
            self.model = load_local_or_remote_file(model_path, delete_after_loading=delete_after_loading)
        else:
            self.model = model
        if input_model is None:
            self.input_model = load_local_or_remote_file(input_model_path, delete_after_loading=delete_after_loading)
        else:
            self.input_model = input_model
        self.condition_input_encoding = condition_input_encoding
        self.reward_fn = reward_fn
        self.normalize = normalize
        self.object_list = object_list
        self.env = env

    def preprocess(self, observation):
        """Encode a trajectory of dict observations with both models.

        `model` produces the goal/observation latents; `input_model` produces
        the per-step 'input_latent'.
        """
        observation = copy.deepcopy(observation)
        images = np.stack([observation[i]['image_observation'] for i in range(len(observation))])
        if self.normalize:
            # Assumes uint8 images in [0, 255] — TODO confirm.
            images = images / 255.0
        if self.condition_input_encoding:
            # Condition the input encoding on the trajectory's first frame.
            cond = images[0].repeat(len(observation), axis=0)
            input_latents = self.input_model.encode_np(images, cond)
        else:
            input_latents = self.input_model.encode_np(images)
        latents = self.model.encode_np(images)
        for i in range(len(observation)):
            observation[i]["initial_latent_state"] = latents[0]
            observation[i]["latent_observation"] = latents[i]
            observation[i]["latent_achieved_goal"] = latents[i]
            observation[i]["input_latent"] = input_latents[i]
            # The final frame serves as the desired goal for every step.
            observation[i]["latent_desired_goal"] = latents[-1]
            del observation[i]['image_observation']
        return observation

    def preprocess_array_obs(self, observation):
        """Wrap raw array observations into dicts; no encoding is applied."""
        new_observations = []
        for i in range(len(observation)):
            new_observations.append(dict(observation=observation[i]))
        return new_observations

    def encode(self, obs):
        """Encode a numpy batch with the primary model, normalizing if configured."""
        if self.normalize:
            return ptu.get_numpy(self.model.encode(ptu.from_numpy(obs) / 255.0))
        return ptu.get_numpy(self.model.encode(ptu.from_numpy(obs)))

    def load_path(self, path, replay_buffer, obs_dict=None):
        """Preprocess one demo `path` and add it to `replay_buffer`."""
        # Filter data #
        if not self.data_filter_fn(path): return
        rewards = []
        path_builder = PathBuilder()
        # Truncate to the shorter of observations/actions so pairs line up.
        H = min(len(path["observations"]), len(path["actions"]))
        if obs_dict:
            traj_obs = self.preprocess(path["observations"])
            next_traj_obs = self.preprocess(path["next_observations"])
        else:
            traj_obs = self.preprocess_array_obs(path["observations"])
            next_traj_obs = self.preprocess_array_obs(path["next_observations"])
        for i in range(H):
            ob = traj_obs[i]
            next_ob = next_traj_obs[i]
            action = path["actions"][i]
            reward = path["rewards"][i]
            terminal = path["terminals"][i]
            if not self.load_terminals:
                terminal = np.zeros(terminal.shape)
            agent_info = path["agent_infos"][i]
            env_info = path["env_infos"][i]
            if self.recompute_reward:
                # NOTE(review): next_ob is passed twice — presumably
                # (obs, action, next_obs, goal); confirm reward_fn's signature.
                reward = self.reward_fn(ob, action, next_ob, next_ob)
            reward = np.array([reward]).flatten()
            rewards.append(reward)
            terminal = np.array([terminal]).reshape((1,))
            path_builder.add_all(
                observations=ob,
                actions=action,
                rewards=reward,
                next_observations=next_ob,
                terminals=terminal,
                agent_infos=agent_info,
                env_infos=env_info,
            )
        self.demo_trajectory_rewards.append(rewards)
        path = path_builder.get_all_stacked()
        replay_buffer.add_path(path)
        print("rewards", np.min(rewards), np.max(rewards))
        print("loading path, length", len(path["observations"]), len(path["actions"]))
        print("actions", np.min(path["actions"]), np.max(path["actions"]))
        print("path sum rewards", sum(rewards), len(rewards))
| 35.376506
| 117
| 0.595317
| 1,354
| 11,745
| 4.878139
| 0.127031
| 0.027252
| 0.027252
| 0.013323
| 0.875852
| 0.872672
| 0.861771
| 0.861771
| 0.849659
| 0.836336
| 0
| 0.006324
| 0.313325
| 11,745
| 332
| 118
| 35.376506
| 0.812647
| 0.01192
| 0
| 0.863481
| 0
| 0
| 0.055191
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03413
| false
| 0
| 0.064846
| 0
| 0.133106
| 0.027304
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f097efe23e1c24c07bb3add977603ab04023f660
| 5,522
|
py
|
Python
|
printacc/accounts/models.py
|
rozumalex/printacc
|
419f080ce91a737077c0b3d1e98f961fc41e2850
|
[
"MIT"
] | null | null | null |
printacc/accounts/models.py
|
rozumalex/printacc
|
419f080ce91a737077c0b3d1e98f961fc41e2850
|
[
"MIT"
] | null | null | null |
printacc/accounts/models.py
|
rozumalex/printacc
|
419f080ce91a737077c0b3d1e98f961fc41e2850
|
[
"MIT"
] | 1
|
2021-04-08T04:31:19.000Z
|
2021-04-08T04:31:19.000Z
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import AbstractUser
from .managers import AccountManager
class Account(AbstractUser):
    """Base user model; proxy subclasses (User, Dealer, Admin) refine behavior."""
    objects = AccountManager()
    # Role flags; the proxy subclasses below interpret these.
    is_admin = models.BooleanField(default=False, verbose_name='Admin')
    is_dealer = models.BooleanField(default=False, verbose_name='Dealer')

    def last_used_time(self):
        # Placeholder; proxy subclasses override with real statistics queries.
        return 'n/a'

    def total_usages(self):
        return 'n/a'

    def daily_usage(self):
        return 'n/a'

    def usages_for_last_week(self):
        return 'n/a'

    class Meta:
        ordering = ['username']
        # NOTE(review): single-field unique_together — unique=True on the email
        # field (or a UniqueConstraint) is the conventional spelling; confirm.
        unique_together = ('email',)
class User(Account):
    """Proxy account for end users; usage data is linked via Statistics.user."""

    class Meta:
        proxy = True

    def last_used_time(self):
        """Return the most recent Statistics row for this user, or 'never'."""
        last_usage = self.statistics_set.filter(
            user=self.id).order_by('-date_used').first()
        if not last_usage:
            return 'never'
        # Reuse the row already fetched instead of re-running the same query
        # (the original issued the identical query a second time).
        return last_usage

    def total_usages(self):
        """Total number of recorded usages for this user."""
        return self.statistics_set.filter(user=self.id).count()

    def daily_usage(self):
        """Average usages per day since the account was created."""
        # +1 day guards against division by zero on the account's first day.
        delta = timezone.now() - self.date_joined + timezone.timedelta(days=1)
        return int(self.total_usages() / delta.days)

    def usages_for_last_week(self):
        """Number of usages in the trailing 7-day window."""
        period = timezone.now() - timezone.timedelta(days=7)
        return self.statistics_set.filter(user=self.id,
                                          date_used__gte=period).count()
class Dealer(Account):
    """Proxy account for dealers; usage is reached through owned plotters."""

    class Meta:
        proxy = True

    def last_used_time(self):
        # NOTE(review): queries this account's own statistics_set (rows where
        # Statistics.user is this dealer) filtered by plotter ownership —
        # confirm Statistics.objects.filter(...) was not intended instead.
        return self.statistics_set.filter(
            plotter__dealer__id=self.id).order_by('-date_used').first()

    def total_usages(self):
        return self.statistics_set.filter(
            plotter__dealer__id=self.id).count()

    def daily_usage(self):
        # +1 day guards against division by zero on the account's first day.
        delta = timezone.now() - self.date_joined + timezone.timedelta(days=1)
        return int(self.total_usages() / delta.days)

    def usages_for_last_week(self):
        # Trailing 7-day window.
        period = timezone.now() - timezone.timedelta(days=7)
        return self.statistics_set.filter(plotter__dealer__id=self.id,
                                          date_used__gte=period).count()
class Plotter(models.Model):
    """A plotting device owned by a dealer and usable by many users."""
    dealer = models.ForeignKey(Dealer, on_delete=models.CASCADE,
                               related_name='owner')
    model = models.CharField(max_length=255)
    description = models.CharField(max_length=255)
    date_added = models.DateTimeField(default=timezone.now)
    users = models.ManyToManyField(User, blank=True)

    def last_used_time(self):
        # Most recent Statistics row for this plotter (None if never used).
        return self.statistics_set.filter(
            plotter=self.id).order_by('-date_used').first()

    def total_usages(self):
        return self.statistics_set.filter(
            plotter=self.id).all().count()

    def daily_usage(self):
        # +1 day guards against division by zero on the plotter's first day.
        delta = timezone.now() - self.date_added + timezone.timedelta(days=1)
        return int(self.total_usages() / delta.days)

    def usages_for_last_week(self):
        # Trailing 7-day window; plotter__id is equivalent to plotter=self.id.
        period = timezone.now() - timezone.timedelta(days=7)
        return self.statistics_set.filter(plotter__id=self.id,
                                          date_used__gte=period).count()

    def dealers_email(self):
        # Convenience accessor for admin/list displays.
        return self.dealer.email

    def __str__(self):
        return self.model
class Pattern(models.Model):
    """A printable pattern associated with a specific plotter."""
    name = models.CharField(max_length=255)
    plotter = models.ForeignKey(Plotter, on_delete=models.CASCADE)
    description = models.CharField(max_length=255)
    date_added = models.DateTimeField(default=timezone.now)

    def last_used_time(self):
        # Most recent Statistics row for this pattern (None if never used).
        return self.statistics_set.filter(
            pattern=self.id).order_by('-date_used').first()

    def total_usages(self):
        return self.statistics_set.filter(pattern=self.id).all().count()

    def daily_usage(self):
        # +1 day guards against division by zero on the pattern's first day.
        delta = timezone.now() - self.date_added + timezone.timedelta(days=1)
        return int(self.total_usages() / delta.days)

    def usages_for_last_week(self):
        # Trailing 7-day window; pattern__id is equivalent to pattern=self.id.
        period = timezone.now() - timezone.timedelta(days=7)
        return self.statistics_set.filter(pattern__id=self.id,
                                          date_used__gte=period).count()

    def __str__(self):
        return self.name
class Clients(models.Model):
    """Link between a User and a Dealer with a usage limit."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    dealer = models.ForeignKey(Dealer, on_delete=models.CASCADE,
                               related_name='dealer')
    limit = models.IntegerField()

    def last_used_time(self):
        # NOTE(review): Clients has no statistics_set reverse relation
        # (Statistics carries no FK to Clients) — confirm whether this should
        # query via self.user instead.
        return self.statistics_set.filter(
            plotter=self.id).order_by('-date_used').first()

    def total_usages(self):
        return self.statistics_set.filter(
            plotter=self.id).all().count()

    def daily_usage(self):
        """Average usages per day, matching the sibling models' convention."""
        # Bug fix: timedelta.days is an attribute; the original called it as
        # delta.days(), which always raises TypeError. Also aligned with every
        # sibling implementation: +1 day avoids division by zero and the
        # result is truncated to int.
        # NOTE(review): Clients declares no date_joined field — presumably the
        # linked user's join date is intended; confirm.
        delta = timezone.now() - self.date_joined + timezone.timedelta(days=1)
        return int(self.total_usages() / delta.days)

    def usages_in_period(self, days):
        """Queryset of this client's usages within the last `days` days."""
        period = timezone.now() - timezone.timedelta(days=days)
        return self.statistics_set.filter(user=self.id,
                                          date_used__gte=period)
class Admin(Account):
    """Proxy account for administrators."""
    # NOTE(review): these class attributes shadow the Account model *fields* of
    # the same name on this proxy class; they do not alter stored values —
    # confirm the intent (a manager filtering on the flags is the usual idiom).
    is_admin = True
    is_dealer = False

    class Meta:
        proxy = True
class Statistics(models.Model):
    """One usage event: which user ran which pattern on which plotter, and when."""
    plotter = models.ForeignKey(Plotter, on_delete=models.CASCADE)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    pattern = models.ForeignKey(Pattern, on_delete=models.CASCADE)
    date_used = models.DateTimeField(default=timezone.now)
| 31.375
| 78
| 0.646143
| 677
| 5,522
| 5.057607
| 0.138848
| 0.055491
| 0.079439
| 0.107477
| 0.804322
| 0.754381
| 0.702979
| 0.693341
| 0.62354
| 0.600175
| 0
| 0.004786
| 0.243209
| 5,522
| 175
| 79
| 31.554286
| 0.814549
| 0
| 0
| 0.626984
| 0
| 0
| 0.020283
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0.031746
| 0.126984
| 0.730159
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
f099a93691f9c459d75553970d0bfda3051f9e34
| 10,915
|
py
|
Python
|
tests/test_exclusions.py
|
zreichert/pytest_mark_checker
|
990a579635b3ba394a9780f6ab934e4983b00ef1
|
[
"Apache-2.0"
] | null | null | null |
tests/test_exclusions.py
|
zreichert/pytest_mark_checker
|
990a579635b3ba394a9780f6ab934e4983b00ef1
|
[
"Apache-2.0"
] | 6
|
2018-04-24T14:00:08.000Z
|
2018-09-14T15:06:13.000Z
|
tests/test_exclusions.py
|
zreichert/pytest_mark_checker
|
990a579635b3ba394a9780f6ab934e4983b00ef1
|
[
"Apache-2.0"
] | 1
|
2018-04-26T15:51:00.000Z
|
2018-04-26T15:51:00.000Z
|
# -*- coding: utf-8 -*-
"""Tests for validating rules processing exclusions for test methods and functions. (Driven by the 'exclude_methods'
and 'exclude_functions' options.)
"""
# ======================================================================================================================
# Imports
# ======================================================================================================================
import pytest
# ======================================================================================================================
# Globals
# ======================================================================================================================
# args to only use checks that raise an 'M' prefixed error
# (passed to every flake8dir.run_flake8 call below so unrelated checks stay silent)
extra_args = ['--select', 'M']
# ======================================================================================================================
# Test Suites
# ======================================================================================================================
class TestExcludeClasses(object):
    """Tests for validating the 'exclude_classes' option works as expected."""

    # Test suite variables
    # Shared flake8 config enabling exclude_classes for the 'test' mark rule.
    config = """
    [flake8]
    pytest_mark1 = name=test,exclude_classes=true
    """

    def test_enable_exclude_classes_configuration(self, flake8dir):
        """Verify rules that explicitly enable the 'exclude_classes' configuration will ignore classes that are
        missing required marks.
        """

        # Setup
        flake8dir.make_setup_cfg(self.config)
        flake8dir.make_example_py("""
            class TestDisabledConfiguration(object):
                pass
        """)

        # Test
        result = flake8dir.run_flake8(extra_args)
        assert [] == result.out_lines

    def test_enabled_with_class_containing_tests(self, flake8dir):
        """Verify rules that explicitly enable the 'exclude_classes' configuration will ignore classes that are
        missing required marks, but will still trigger violations on test functions contained within the class.
        """

        # Setup
        flake8dir.make_setup_cfg(self.config)
        flake8dir.make_example_py("""
            class TestUnconfigured(object):
                def test_function(self):
                    pass
        """)

        # Test
        result = flake8dir.run_flake8(extra_args)
        assert ['./example.py:2:1: M501 test definition not marked with test'] == result.out_lines

    def test_unconfigured_with_class_containing_tests(self, flake8dir):
        """Verify rules that leave the 'exclude_classes' option unconfigured will trigger violations on test functions
        contained within the class as well as the class itself.
        """

        # Setup
        flake8dir.make_setup_cfg("""
            [flake8]
            pytest_mark1 = name=test
        """)
        flake8dir.make_example_py("""
            class TestUnconfigured(object):
                def test_function(self):
                    pass
        """)

        # Expectations
        exp_out_lines = ['./example.py:1:1: M501 test definition not marked with test',
                         './example.py:2:1: M501 test definition not marked with test']

        # Test
        result = flake8dir.run_flake8(extra_args)

        # noinspection PyUnresolvedReferences
        pytest.helpers.assert_lines(exp_out_lines, result.out_lines)
class TestExcludeFunctions(object):
    """Tests for validating the 'exclude_functions' option works as expected."""

    # Test suite variables
    # Shared flake8 config enabling exclude_functions for the 'test' mark rule.
    config = """
    [flake8]
    pytest_mark1 = name=test,exclude_functions=true
    """

    def test_enable_exclude_functions_configuration(self, flake8dir):
        """Verify that a violation is not triggered on a function that is missing a required mark with the
        'exclude_functions' option set to 'true'.
        """

        # Setup
        flake8dir.make_setup_cfg(self.config)
        flake8dir.make_example_py("""
            def test_exclude_function():
                pass
        """)

        # Test
        result = flake8dir.run_flake8(extra_args)
        assert [] == result.out_lines
class TestExcludeMethods(object):
    """Tests for validating the 'exclude_methods' option works as expected."""

    # Test suite variables
    # Shared flake8 config enabling exclude_methods for the 'test' mark rule.
    config = """
    [flake8]
    pytest_mark1 = name=test,exclude_methods=true
    """

    def test_enable_exclude_methods_configuration(self, flake8dir):
        """Verify that a violation is not triggered on a method that is missing a required mark with the
        'exclude_methods' option set to 'true'. (The unmarked class still triggers a violation.)
        """

        # Setup
        flake8dir.make_setup_cfg(self.config)
        flake8dir.make_example_py("""
            class TestExclusion(object):
                def test_exclude_method(self):
                    pass
        """)

        # Test
        result = flake8dir.run_flake8(extra_args)
        assert ['./example.py:1:1: M501 test definition not marked with test'] == result.out_lines

    def test_exclude_method_with_class_properly_marked(self, flake8dir):
        """Verify that no violation is triggered when the class carries the required mark and the contained
        method is ignored via the 'exclude_methods' option set to 'true'.
        """

        # Setup
        flake8dir.make_setup_cfg(self.config)
        flake8dir.make_example_py("""
            @pytest.mark.test('Classy!')
            class TestExclusion(object):
                def test_exclude_method(self):
                    pass
        """)

        # Test
        result = flake8dir.run_flake8(extra_args)
        assert [] == result.out_lines

    def test_mangled_method_signature(self, flake8dir):
        """Verify that a violation is triggered on a method that is missing a required mark with the 'exclude_methods'
        option set to 'true' and the signature is mangled. (First element is not 'self')
        """

        # Setup
        flake8dir.make_setup_cfg(self.config)
        flake8dir.make_example_py("""
            @pytest.mark.test('Classy!')
            class TestExclusion(object):
                def test_exclude_method(me):
                    pass
        """)

        # Test
        result = flake8dir.run_flake8(extra_args)
        assert ['./example.py:3:1: M501 test definition not marked with test'] == result.out_lines
class TestMixedExclusions(object):
    """Tests for validating that the various 'exclude_*' options can be combined in different fashions without
    causing surprising behavior.
    """

    # Example module containing a class (line 1), a method (line 2) and a
    # free function (line 5) — the three definition kinds the checker covers.
    example_tests = """
    class TestClass(object):
        def test_method(self):
            pass

    def test_function():
        pass
    """

    def test_no_exclusions_configured(self, flake8dir):
        """Verify that violations are triggered on all supported test definition types when no exclusions are configured
        and all test definitions are missing required marks.
        """

        # Setup
        flake8dir.make_setup_cfg("""
            [flake8]
            pytest_mark1 = name=test
        """)
        flake8dir.make_example_py(self.example_tests)

        # Expectations
        exp_out_lines = ['./example.py:1:1: M501 test definition not marked with test',
                         './example.py:2:1: M501 test definition not marked with test',
                         './example.py:5:1: M501 test definition not marked with test']

        # Test
        result = flake8dir.run_flake8(extra_args)

        # noinspection PyUnresolvedReferences
        pytest.helpers.assert_lines(exp_out_lines, result.out_lines)

    def test_all_exclusions_configured(self, flake8dir):
        """Verify that no violations are triggered on all supported test definition types when all exclusions are
        configured and all test definitions are missing required marks.
        """

        # Setup
        flake8dir.make_setup_cfg("""
            [flake8]
            pytest_mark1 = name=test,
                           exclude_classes=true,
                           exclude_methods=true,
                           exclude_functions=true
        """)
        flake8dir.make_example_py(self.example_tests)

        # Test
        result = flake8dir.run_flake8(extra_args)
        assert [] == result.out_lines

    def test_classes_and_methods_excluded(self, flake8dir):
        """Verify that only test function definition violations are triggered when 'exclude_classes' and
        'exclude_methods' are configured and all test definitions are missing required marks.
        """

        # Setup
        flake8dir.make_setup_cfg("""
            [flake8]
            pytest_mark1 = name=test,
                           exclude_classes=true,
                           exclude_methods=true,
                           exclude_functions=false
        """)
        flake8dir.make_example_py(self.example_tests)

        # Test
        result = flake8dir.run_flake8(extra_args)
        assert ['./example.py:5:1: M501 test definition not marked with test'] == result.out_lines

    def test_classes_and_methods_excluded_functions_take_args(self, flake8dir):
        """Verify that two test function definition violations are triggered when 'exclude_classes' and
        'exclude_methods' are configured and all test definitions are missing required marks.

        One function has two args and one takes no args.
        """

        # Setup
        flake8dir.make_setup_cfg("""
            [flake8]
            pytest_mark1 = name=test,
                           exclude_classes=true,
                           exclude_methods=true,
                           exclude_functions=false
        """)
        flake8dir.make_example_py("""
            class TestClass(object):
                def test_method(self):
                    pass

            def test_function_with_args(fixture_one, fixture_two):
                pass

            def test_function_without_args():
                pass
        """)

        # Test
        result = flake8dir.run_flake8(extra_args)
        assert ['./example.py:5:1: M501 test definition not marked with test',
                './example.py:8:1: M501 test definition not marked with test'] == result.out_lines

    def test_classes_and_functions_excluded(self, flake8dir):
        """Verify that only test method definition violations are triggered when 'exclude_classes' and
        'exclude_functions' are configured and all test definitions are missing required marks.
        """

        # Setup
        flake8dir.make_setup_cfg("""
            [flake8]
            pytest_mark1 = name=test,
                           exclude_classes=true,
                           exclude_methods=false,
                           exclude_functions=true
        """)
        flake8dir.make_example_py(self.example_tests)

        # Test
        result = flake8dir.run_flake8(extra_args)
        assert ['./example.py:2:1: M501 test definition not marked with test'] == result.out_lines

    def test_methods_and_functions_excluded(self, flake8dir):
        """Verify that only test class definition violations are triggered when 'exclude_methods' and
        'exclude_functions' are configured and all test definitions are missing required marks.
        """

        # Setup
        flake8dir.make_setup_cfg("""
            [flake8]
            pytest_mark1 = name=test,
                           exclude_classes=false,
                           exclude_methods=true,
                           exclude_functions=true
        """)
        flake8dir.make_example_py(self.example_tests)

        # Test
        result = flake8dir.run_flake8(extra_args)
        assert ['./example.py:1:1: M501 test definition not marked with test'] == result.out_lines
| 32.777778
| 120
| 0.622721
| 1,215
| 10,915
| 5.401646
| 0.123457
| 0.051501
| 0.037635
| 0.045558
| 0.856163
| 0.838641
| 0.809995
| 0.802225
| 0.800549
| 0.761237
| 0
| 0.017957
| 0.229592
| 10,915
| 332
| 121
| 32.876506
| 0.762516
| 0.348694
| 0
| 0.77381
| 0
| 0
| 0.38248
| 0.116852
| 0
| 0
| 0
| 0
| 0.077381
| 1
| 0.077381
| false
| 0.071429
| 0.005952
| 0
| 0.130952
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
f0f21e632729a2193e3acef46ed979a5688547f9
| 8,326
|
py
|
Python
|
hiddenfigures/tests/stats/dropshot_test.py
|
millcityrunner/carball
|
c94cb8caf2f00e616409d2c22b73475e6268fffa
|
[
"Apache-2.0"
] | null | null | null |
hiddenfigures/tests/stats/dropshot_test.py
|
millcityrunner/carball
|
c94cb8caf2f00e616409d2c22b73475e6268fffa
|
[
"Apache-2.0"
] | null | null | null |
hiddenfigures/tests/stats/dropshot_test.py
|
millcityrunner/carball
|
c94cb8caf2f00e616409d2c22b73475e6268fffa
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from hiddenfigures.analysis.analysis_manager import AnalysisManager
from hiddenfigures.tests.utils import run_analysis_test_on_replay, get_raw_replays
from hiddenfigures.generated.api.stats.dropshot_pb2 import DAMAGED, DESTROYED
class Test_Dropshot():
    def test_single_damage(self, replay_cache):
        """Single-damage dropshot replay: one damage event with one DAMAGED tile."""
        def test(analysis: AnalysisManager):
            assertions = unittest.TestCase('__init__')
            proto_game = analysis.get_protobuf_data()
            # Player- and team-level damage totals and efficiency.
            assert proto_game.players[0].stats.dropshot_stats.total_damage == 1
            assert proto_game.players[0].stats.dropshot_stats.damage_efficiency == 1.0
            assert proto_game.teams[0].stats.dropshot_stats.total_damage == 1
            assert proto_game.teams[0].stats.dropshot_stats.damage_efficiency == 1.0
            dropshot_ball_proto = proto_game.game_stats.ball_stats.extra_mode
            # Ball phase stats: only phase 0 has time recorded in this replay.
            assert dropshot_ball_proto.dropshot_phase_stats[0].phase == 0
            assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[0].average, 9.508282, 5)
            assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[0].max, 9.508282, 5)
            assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[0].total, 9.508282, 5)
            assert dropshot_ball_proto.dropshot_phase_stats[1].phase == 1
            assert dropshot_ball_proto.dropshot_phase_stats[1].average == 0
            assert dropshot_ball_proto.dropshot_phase_stats[1].max == 0
            assert dropshot_ball_proto.dropshot_phase_stats[1].total == 0
            assert dropshot_ball_proto.dropshot_phase_stats[2].phase == 2
            assert dropshot_ball_proto.dropshot_phase_stats[2].average == 0
            assert dropshot_ball_proto.dropshot_phase_stats[2].max == 0
            assert dropshot_ball_proto.dropshot_phase_stats[2].total == 0
            game_dropshot_stats = proto_game.game_stats.dropshot_stats
            # Exactly one damage event, affecting a single tile (id 101).
            assert len(game_dropshot_stats.damage_events) == 1
            assert len(game_dropshot_stats.damage_events[0].tiles) == 1
            assert game_dropshot_stats.damage_events[0].tiles[0].id == 101
            assert game_dropshot_stats.damage_events[0].tiles[0].state == DAMAGED
        run_analysis_test_on_replay(test, get_raw_replays()["DROPSHOT_SINGLE_DAMAGE"], cache=replay_cache)
def test_double_damage(self, replay_cache):
def test(analysis: AnalysisManager):
proto_game = analysis.get_protobuf_data()
assertions = unittest.TestCase('__init__')
assert proto_game.players[0].stats.dropshot_stats.total_damage == 3
assert proto_game.players[0].stats.dropshot_stats.damage_efficiency == 1.0
assert proto_game.teams[1].stats.dropshot_stats.total_damage == 3
assert proto_game.teams[1].stats.dropshot_stats.damage_efficiency == 1.0
dropshot_ball_proto = proto_game.game_stats.ball_stats.extra_mode
assert dropshot_ball_proto.dropshot_phase_stats[0].phase == 0
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[0].average, 10.189136, 5)
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[0].max, 10.189136, 5)
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[0].total, 10.189136, 5)
assert dropshot_ball_proto.dropshot_phase_stats[1].phase == 1
assert dropshot_ball_proto.dropshot_phase_stats[1].average == 0
assert dropshot_ball_proto.dropshot_phase_stats[1].max == 0
assert dropshot_ball_proto.dropshot_phase_stats[1].total == 0
assert dropshot_ball_proto.dropshot_phase_stats[2].phase == 2
assert dropshot_ball_proto.dropshot_phase_stats[2].average == 0
assert dropshot_ball_proto.dropshot_phase_stats[2].max == 0
assert dropshot_ball_proto.dropshot_phase_stats[2].total == 0
game_dropshot_stats = proto_game.game_stats.dropshot_stats
assert len(game_dropshot_stats.damage_events) == 3
assert game_dropshot_stats.damage_events[0].tiles[0].state == DAMAGED
assert game_dropshot_stats.damage_events[1].tiles[0].state == DESTROYED
assert len(game_dropshot_stats.tile_stats.damage_stats) == 2
assert game_dropshot_stats.tile_stats.damage_stats[0].total_damage == 2
run_analysis_test_on_replay(test, get_raw_replays()["DROPSHOT_DOUBLE_DAMAGE"], cache=replay_cache)
def test_phase1_ball(self, replay_cache):
def test(analysis: AnalysisManager):
proto_game = analysis.get_protobuf_data()
assertions = unittest.TestCase('__init__')
dropshot_ball_proto = proto_game.game_stats.ball_stats.extra_mode
assert dropshot_ball_proto.dropshot_phase_stats[0].phase == 0
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[0].average, 3.72452275, 5)
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[0].max, 6.9999535, 5)
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[0].total, 7.4490455, 5)
assert dropshot_ball_proto.dropshot_phase_stats[1].phase == 1
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[1].average, 0.1, 5)
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[1].max, 0.1, 5)
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[1].total, 0.1, 5)
assert dropshot_ball_proto.dropshot_phase_stats[2].phase == 2
assert dropshot_ball_proto.dropshot_phase_stats[2].average == 0
assert dropshot_ball_proto.dropshot_phase_stats[2].max == 0
assert dropshot_ball_proto.dropshot_phase_stats[2].total == 0
game_dropshot_stats = proto_game.game_stats.dropshot_stats
assert len(game_dropshot_stats.damage_events[0].tiles) == 7
run_analysis_test_on_replay(test, get_raw_replays()["DROPSHOT_PHASE1_BALL"], cache=replay_cache)
def test_phase2_ball(self, replay_cache):
def test(analysis: AnalysisManager):
proto_game = analysis.get_protobuf_data()
assertions = unittest.TestCase('__init__')
dropshot_ball_proto = proto_game.game_stats.ball_stats.extra_mode
assert dropshot_ball_proto.dropshot_phase_stats[0].phase == 0
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[0].average, 7.12466465, 5)
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[0].max, 12.1493613, 5)
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[0].total, 14.2493293, 5)
assert dropshot_ball_proto.dropshot_phase_stats[1].phase == 1
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[1].average, 19.000713, 5)
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[1].max, 19.000713, 5)
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[1].total, 19.000713, 5)
assert dropshot_ball_proto.dropshot_phase_stats[2].phase == 2
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[2].average, 4.901173, 5)
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[2].max, 4.901173, 5)
assertions.assertAlmostEqual(dropshot_ball_proto.dropshot_phase_stats[2].total, 4.901173, 5)
game_dropshot_stats = proto_game.game_stats.dropshot_stats
assert len(game_dropshot_stats.damage_events[0].tiles) == 12
run_analysis_test_on_replay(test, get_raw_replays()["DROPSHOT_PHASE2_BALL"], cache=replay_cache)
def test_goal(self, replay_cache):
def test(analysis: AnalysisManager):
proto_game = analysis.get_protobuf_data()
assert proto_game.game_metadata.goals[0].extra_mode_info.dropshot_tile.id == 89
assert proto_game.game_metadata.goals[0].extra_mode_info.phase_1_tiles == 0
assert proto_game.game_metadata.goals[0].extra_mode_info.phase_2_tiles == 1
run_analysis_test_on_replay(test, get_raw_replays()["DROPSHOT_GOAL"], cache=replay_cache)
| 55.139073
| 108
| 0.725198
| 1,078
| 8,326
| 5.219852
| 0.079777
| 0.110894
| 0.1571
| 0.213258
| 0.915052
| 0.909366
| 0.882886
| 0.869735
| 0.854629
| 0.814111
| 0
| 0.044487
| 0.187365
| 8,326
| 150
| 109
| 55.506667
| 0.787171
| 0
| 0
| 0.495327
| 1
| 0
| 0.015494
| 0.005285
| 0
| 0
| 0
| 0
| 0.691589
| 1
| 0.093458
| false
| 0
| 0.037383
| 0
| 0.140187
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
50077d2b9c95c392a29e8c3a8da944c166fcc7ba
| 3,574
|
py
|
Python
|
users/forms.py
|
omzi/django-blog
|
560799a5d5624ea4a50ed8b8cc60c8bcbc651ab7
|
[
"MIT"
] | 1
|
2021-05-05T13:57:36.000Z
|
2021-05-05T13:57:36.000Z
|
users/forms.py
|
omzi/django-blog
|
560799a5d5624ea4a50ed8b8cc60c8bcbc651ab7
|
[
"MIT"
] | null | null | null |
users/forms.py
|
omzi/django-blog
|
560799a5d5624ea4a50ed8b8cc60c8bcbc651ab7
|
[
"MIT"
] | null | null | null |
from django.contrib.auth.forms import UserCreationForm, UserChangeForm, PasswordChangeForm, PasswordResetForm
from django.contrib.auth.models import User
from django import forms
from django.core.exceptions import ValidationError
class RegistrationForm(UserCreationForm):
    """Account sign-up form: username, email, full name and a confirmed password."""

    username = forms.CharField(
        label='Username',
        min_length=4,
        max_length=100,
        widget=forms.TextInput(attrs={'placeholder': 'Enter A Username'}),
    )
    email = forms.EmailField(
        label='Email Address',
        widget=forms.EmailInput(attrs={'placeholder': 'Enter Your Email Address'}),
    )
    first_name = forms.CharField(
        label='First Name',
        min_length=4,
        max_length=100,
        widget=forms.TextInput(attrs={'placeholder': 'Enter Your First Name'}),
    )
    last_name = forms.CharField(
        label='Last Name',
        min_length=4,
        max_length=100,
        widget=forms.TextInput(attrs={'placeholder': 'Enter Your Last Name'}),
    )
    password1 = forms.CharField(
        label='Password',
        widget=forms.PasswordInput(attrs={'placeholder': 'Enter Your Password'}),
    )
    password2 = forms.CharField(
        label='Confirm Password',
        widget=forms.PasswordInput(attrs={'placeholder': 'Confirm Your Password'}),
    )

    class Meta:
        model = User
        fields = ('username', 'email', 'first_name', 'last_name', 'password1', 'password2')

    def __init__(self, *args, **kwargs):
        """Build the form, then stop the username field from grabbing focus on load."""
        super().__init__(*args, **kwargs)
        self.fields['username'].widget.attrs['autofocus'] = False
class UserEditForm(UserChangeForm):
    """Profile edit form for the basic account fields."""

    username = forms.CharField(
        label='Username',
        min_length=4,
        max_length=100,
        widget=forms.TextInput(attrs={'placeholder': 'Enter A Username', 'class': 'input-form trans-bg'}),
    )
    email = forms.EmailField(
        label='Email Address',
        widget=forms.EmailInput(attrs={'placeholder': 'Enter Your Email Address', 'class': 'input-form trans-bg'}),
    )
    first_name = forms.CharField(
        label='First Name',
        min_length=4,
        max_length=100,
        widget=forms.TextInput(attrs={'placeholder': 'Enter Your First Name', 'class': 'input-form trans-bg'}),
    )
    last_name = forms.CharField(
        label='Last Name',
        min_length=4,
        max_length=100,
        widget=forms.TextInput(attrs={'placeholder': 'Enter Your Last Name', 'class': 'input-form trans-bg'}),
    )

    class Meta:
        model = User
        fields = ('username', 'email', 'first_name', 'last_name')

    def __init__(self, *args, **kwargs):
        """Build the form, then drop the password field UserChangeForm adds by default."""
        super().__init__(*args, **kwargs)
        del self.fields['password']
class PasswordChangeUserForm(PasswordChangeForm):
    """Password change form: old password plus a confirmed new password."""

    old_password = forms.CharField(
        label='Old Password',
        widget=forms.PasswordInput(attrs={'placeholder': 'Enter Your Password'}),
    )
    new_password1 = forms.CharField(
        label='New Password',
        widget=forms.PasswordInput(attrs={'placeholder': 'Enter Your New Password'}),
    )
    new_password2 = forms.CharField(
        label='Confirm Password',
        widget=forms.PasswordInput(attrs={'placeholder': 'Confirm Your New Password'}),
    )

    # NOTE(review): PasswordChangeForm is not a ModelForm, so this Meta is
    # presumably inert — kept for symmetry with the other forms; verify.
    class Meta:
        model = User
        fields = ('old_password', 'new_password1', 'new_password2')
class PasswordResetUserForm(PasswordResetForm):
    """Password-reset request form: collects the account's email address."""

    email = forms.EmailField(
        label='Email Address',
        widget=forms.EmailInput(attrs={'placeholder': 'Enter Your Email Address'}),
    )

    class Meta:
        model = User
        # Bug fix: ('email') is just the string 'email' — a one-element
        # tuple needs the trailing comma.
        fields = ('email',)
class PasswordConfirmUserForm(PasswordResetForm):
    """Form for entering and confirming a new password after a reset.

    NOTE(review): this declares new_password1/new_password2 yet subclasses
    PasswordResetForm (whose own field is the email); SetPasswordForm looks
    like the intended base class — confirm against the view using this form.
    """

    new_password1 = forms.CharField(
        label='New Password',
        widget=forms.PasswordInput(attrs={'placeholder': 'Enter Your New Password'}),
    )
    new_password2 = forms.CharField(
        label='Confirm Password',
        widget=forms.PasswordInput(attrs={'placeholder': 'Confirm Your New Password'}),
    )

    class Meta:
        model = User
        # Bug fix: ('email') was a plain string, not a one-element tuple.
        fields = ('email',)
| 56.730159
| 186
| 0.72188
| 416
| 3,574
| 6.096154
| 0.161058
| 0.069401
| 0.097397
| 0.108438
| 0.766167
| 0.751577
| 0.72358
| 0.72358
| 0.72358
| 0.640773
| 0
| 0.010964
| 0.132345
| 3,574
| 63
| 187
| 56.730159
| 0.806837
| 0
| 0
| 0.478261
| 0
| 0
| 0.266573
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0.304348
| 0.086957
| 0
| 0.695652
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
502a9d053857ba0e436d96a3af14b408438af774
| 10,095
|
py
|
Python
|
octopus/modules/es/tests/unit/test_sanitisation.py
|
richard-jones/magnificent-octopus
|
4a65f5a3e919af61887302d2911849233347f18f
|
[
"Apache-2.0"
] | 2
|
2016-02-22T04:31:30.000Z
|
2021-08-03T23:58:36.000Z
|
octopus/modules/es/tests/unit/test_sanitisation.py
|
richard-jones/magnificent-octopus
|
4a65f5a3e919af61887302d2911849233347f18f
|
[
"Apache-2.0"
] | 9
|
2015-01-04T14:00:05.000Z
|
2021-12-13T19:35:07.000Z
|
octopus/modules/es/tests/unit/test_sanitisation.py
|
richard-jones/magnificent-octopus
|
4a65f5a3e919af61887302d2911849233347f18f
|
[
"Apache-2.0"
] | 3
|
2016-09-09T13:39:45.000Z
|
2018-02-19T14:23:12.000Z
|
from unittest import TestCase
from octopus.modules.es import sanitise
class TestModels(TestCase):
    """Unit tests for octopus.modules.es.sanitise.

    Each test pushes a raw query dict through sanitise.sanitise() with the
    EDGES_STRUCT whitelist and checks that allowed parts survive, disallowed
    parts are stripped, and malformed input raises
    QuerySanitisationException.
    """

    def setUp(self):
        super(TestModels, self).setUp()

    def tearDown(self):
        super(TestModels, self).tearDown()

    def test_01_basic_success(self):
        # A minimal, fully-allowed query comes back unchanged.
        query = {
            "query" : {"match_all" : {}},
            "size" : 10,
            "from" : 9
        }
        sane = sanitise.sanitise(query, sanitise.EDGES_STRUCT)
        assert sane == query

    def test_02_no_query(self):
        # A missing "query" key is rejected outright.
        query = {
            "size" : 10,
            "from" : 9
        }
        with self.assertRaises(sanitise.QuerySanitisationException):
            sane = sanitise.sanitise(query, sanitise.EDGES_STRUCT)

    def test_03_disallowed_fields(self):
        # Unknown keys are dropped silently; aggs/sort need the optional
        # arguments (aggs_type_field_map / sortable) to survive at all.
        query = {
            "query" : {"filtered" : {"filter" : {"terms" : {"index.field" : ["one", "two"]}}}},
            "size" : 10,
            "from" : 9,
            "random" : "field",
            "aggs" : {}, # without the type_field_map this should be stripped out
            "sort" : [] # without the type_field_map this should be stripped out
        }
        sane = sanitise.sanitise(query, sanitise.EDGES_STRUCT)
        expected = {
            "query" : {"filtered" : {"filter" : {"terms" : {"index.field" : ["one", "two"]}}}},
            "size" : 10,
            "from" : 9
        }
        assert sane == expected

    def test_04_aggregations_success(self):
        # Aggregations (including one level of nesting) that are fully
        # covered by the aggs_type_field_map pass through intact.
        query = {
            "query" : {"filtered" : {"filter" : {"terms" : {"index.field" : ["one", "two"]}}}},
            "size" : 10,
            "from" : 9,
            "aggs" : {
                "first" : {
                    "terms" : {"field" : "aaaa"}
                },
                "second" : {
                    "terms" : {"field" : "bbbb"},
                    "aggs" : {
                        "aleph" : {
                            "date_histogram" : {"field" : "dddd"}
                        }
                    }
                },
                "third" : {
                    "date_histogram" : {"field" : "cccc"}
                }
            }
        }
        type_field_map = {
            "terms" : {
                "aaaa" : {
                    "aggs" : False
                },
                "bbbb" : {
                    "aggs" : True,
                    "type_field_map" : {
                        "date_histogram" : {
                            "dddd" : {
                                "aggs" : False
                            }
                        }
                    }
                }
            },
            "date_histogram" : {
                "cccc" : {
                    "aggs" : False
                }
            }
        }
        sane = sanitise.sanitise(query, sanitise.EDGES_STRUCT, aggs_type_field_map=type_field_map)
        assert sane == query

    def test_05_aggregations_stripped(self):
        # Aggregations not covered by the map are stripped: "first" is not
        # allowed nested aggs, "second"'s nested field doesn't match ("dddd"
        # vs mapped "ffff"), and "third"'s type isn't mapped at all.
        query = {
            "query" : {"filtered" : {"filter" : {"terms" : {"index.field" : ["one", "two"]}}}},
            "size" : 10,
            "from" : 9,
            "aggregations" : {
                "first" : {
                    "terms" : {"field" : "aaaa"},
                    "aggregations" : {
                        "alpha" : {
                            "date_histogram" : {"field" : "eeee"}
                        }
                    }
                },
                "second" : {
                    "terms" : {"field" : "bbbb"},
                    "aggregations" : {
                        "aleph" : {
                            "date_histogram" : {"field" : "dddd"}
                        }
                    }
                },
                "third" : {
                    "date_histogram" : {"field" : "cccc"}
                }
            }
        }
        type_field_map = {
            "terms" : {
                "aaaa" : {
                    "aggs" : False
                },
                "bbbb" : {
                    "aggs" : True,
                    "type_field_map" : {
                        "date_histogram" : {
                            "ffff" : {
                                "aggs" : False
                            }
                        }
                    }
                }
            }
        }
        sane = sanitise.sanitise(query, sanitise.EDGES_STRUCT, aggs_type_field_map=type_field_map)
        expected = {
            "query" : {"filtered" : {"filter" : {"terms" : {"index.field" : ["one", "two"]}}}},
            "size" : 10,
            "from" : 9,
            "aggregations" : {
                "first" : {
                    "terms" : {"field" : "aaaa"},
                },
                "second" : {
                    "terms" : {"field" : "bbbb"},
                }
            }
        }
        assert sane == expected

    def test_06_coerce_fail(self):
        # Values that cannot be coerced to the expected type ("size"/"from"
        # given as words instead of ints) are rejected.
        query = {
            "query" : {"match_all" : {}},
            "size" : "ten",
            "from" : 9
        }
        with self.assertRaises(sanitise.QuerySanitisationException):
            sane = sanitise.sanitise(query, sanitise.EDGES_STRUCT)
        query = {
            "query" : {"match_all" : {}},
            "size" : 10,
            "from" : "nine"
        }
        with self.assertRaises(sanitise.QuerySanitisationException):
            sane = sanitise.sanitise(query, sanitise.EDGES_STRUCT)

    def test_07_unfiltered(self):
        # A bool/must query (not wrapped in "filtered") is rejected.
        # NOTE(review): "size" : "ten" here would also fail coercion on its
        # own (see test_06), so this may not isolate the unfiltered case —
        # verify the intent.
        query = {
            "query" : {"bool" : {"must" : [{"term" : {"index.field" : "one"}}]}},
            "size" : "ten",
            "from" : 9
        }
        with self.assertRaises(sanitise.QuerySanitisationException):
            sane = sanitise.sanitise(query, sanitise.EDGES_STRUCT)

    def test_08_sources(self):
        # Caller-specified source includes are replaced wholesale by the
        # server-side allowed list.
        query = {
            "source" : {
                "includes" : ["whatever"]
            },
            "query" : {"match_all" : {}},
            "size" : 10,
            "from" : 9
        }
        sane = sanitise.sanitise(query, sanitise.EDGES_STRUCT, source_includes=["id", "created_date", "last_updated"])
        expected = {
            "source" : {
                "includes" : ["id", "created_date", "last_updated"]
            },
            "query" : {"match_all" : {}},
            "size" : 10,
            "from" : 9
        }
        assert sane == expected

    def test_09_sorting(self):
        # Only whitelisted sort fields survive ("two" is dropped); a sort
        # list of bare strings (not {field: {order}} dicts) raises.
        query = {
            "query" : {"match_all" : {}},
            "size" : 10,
            "from" : 9,
            "sort" : [
                {"one" : {"order" : "asc"}},
                {"two" : {"order" : "desc"}},
                {"three" : {"order" : "asc"}}
            ]
        }
        sane = sanitise.sanitise(query, sanitise.EDGES_STRUCT, sortable=["one", "three"])
        expected = {
            "query" : {"match_all" : {}},
            "size" : 10,
            "from" : 9,
            "sort" : [
                {"one" : {"order" : "asc"}},
                {"three" : {"order" : "asc"}}
            ]
        }
        assert sane == expected
        query = {
            "query" : {"match_all" : {}},
            "size" : 10,
            "from" : 9,
            "sort" : [
                "field", "here"
            ]
        }
        with self.assertRaises(sanitise.QuerySanitisationException):
            sane = sanitise.sanitise(query, sanitise.EDGES_STRUCT, sortable=["field", "here"])

    def test_10_all_together(self):
        # Combined case: source replacement, unknown-key stripping,
        # aggregation stripping and sort whitelisting all at once.
        query = {
            "source" : {
                "includes" : ["whatever"]
            },
            "query" : {"filtered" : {"filter" : {"terms" : {"index.field" : ["one", "two"]}}}},
            "size" : 10,
            "from" : 9,
            "random" : "field",
            "aggregations" : {
                "first" : {
                    "terms" : {"field" : "aaaa"},
                    "aggs" : {
                        "alpha" : {
                            "date_histogram" : {"field" : "eeee"}
                        }
                    }
                },
                "second" : {
                    "terms" : {"field" : "bbbb"},
                    "aggs" : {
                        "aleph" : {
                            "date_histogram" : {"field" : "dddd"}
                        }
                    }
                },
                "third" : {
                    "date_histogram" : {"field" : "cccc"}
                }
            },
            "sort" : [
                {"one" : {"order" : "asc"}},
                {"two" : {"order" : "desc"}},
                {"three" : {"order" : "asc"}}
            ]
        }
        type_field_map = {
            "terms" : {
                "aaaa" : {
                    "aggs" : False
                },
                "bbbb" : {
                    "aggs" : True,
                    "type_field_map" : {
                        "date_histogram" : {
                            "ffff" : {
                                "aggs" : False
                            }
                        }
                    }
                }
            }
        }
        sane = sanitise.sanitise(query, sanitise.EDGES_STRUCT,
                                 source_includes=["id", "created_date", "last_updated"],
                                 sortable=["one", "three"],
                                 aggs_type_field_map=type_field_map)
        expected = {
            "source" : {
                "includes" : ["id", "created_date", "last_updated"]
            },
            "query" : {"filtered" : {"filter" : {"terms" : {"index.field" : ["one", "two"]}}}},
            "size" : 10,
            "from" : 9,
            "aggregations" : {
                "first" : {
                    "terms" : {"field" : "aaaa"},
                },
                "second" : {
                    "terms" : {"field" : "bbbb"},
                }
            },
            "sort" : [
                {"one" : {"order" : "asc"}},
                {"three" : {"order" : "asc"}}
            ]
        }
        assert sane == expected
| 30.044643
| 118
| 0.346409
| 662
| 10,095
| 5.13142
| 0.155589
| 0.02355
| 0.044157
| 0.045334
| 0.855166
| 0.825729
| 0.801001
| 0.777451
| 0.745952
| 0.662644
| 0
| 0.013269
| 0.507281
| 10,095
| 336
| 119
| 30.044643
| 0.669682
| 0.010797
| 0
| 0.630508
| 0
| 0
| 0.165865
| 0
| 0
| 0
| 0
| 0
| 0.040678
| 1
| 0.040678
| false
| 0
| 0.00678
| 0
| 0.050847
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
acc4d4274a16ea826c0dbe038c70d8dc38be764a
| 350,426
|
py
|
Python
|
lbpketappipig/Data_11_MagUp_lbpketappipig.py
|
Williams224/davinci-scripts
|
730642d2ff13543eca4073a4ce0932631195de56
|
[
"MIT"
] | null | null | null |
lbpketappipig/Data_11_MagUp_lbpketappipig.py
|
Williams224/davinci-scripts
|
730642d2ff13543eca4073a4ce0932631195de56
|
[
"MIT"
] | null | null | null |
lbpketappipig/Data_11_MagUp_lbpketappipig.py
|
Williams224/davinci-scripts
|
730642d2ff13543eca4073a4ce0932631195de56
|
[
"MIT"
] | null | null | null |
#-- GAUDI jobOptions generated on Sun Feb 7 01:33:09 2016
#-- Contains event types :
#-- 90000000 - 3886 files - 297532269 events - 4488.35 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass Step-127013
#-- StepId : 127013
#-- StepName : Stripping21r1-Merging-DV-v36r1
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v36r1
#-- OptionFiles : $APPCONFIGOPTS/Merging/DV-Stripping-Merging.py
#-- DDDB : dddb-20130929
#-- CONDDB : cond-20141107
#-- ExtraPackages : AppConfig.v3r203;SQLDDDB.v7r10
#-- Visible : N
from Gaudi.Configuration import *
from GaudiConf import IOHelper
IOHelper('ROOT').inputFiles(['LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000001_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000020_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000060_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000061_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000102_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000137_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000153_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000167_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000185_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000209_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000210_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000258_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000259_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000297_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000322_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000339_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000362_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000383_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000397_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000398_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000399_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000442_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000458_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000459_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000460_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000461_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000462_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000463_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000464_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000556_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000570_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000589_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000604_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000618_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000641_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000642_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000669_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000670_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000708_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000736_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000737_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000772_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000790_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000791_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000818_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000819_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000846_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000847_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000874_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000875_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000876_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000916_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000930_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000931_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000958_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000972_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00000986_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001000_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001001_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001028_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001042_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001043_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001044_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001045_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001046_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001112_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001126_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001140_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001154_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001168_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001182_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001183_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001184_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001185_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001238_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001239_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001266_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001267_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001268_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001269_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001322_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001336_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001350_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001364_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001378_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001396_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001419_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001433_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001447_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001476_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001492_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001517_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001531_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001532_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001533_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001576_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001577_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001605_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001619_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001644_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001658_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001674_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001688_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001703_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001704_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001705_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001706_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001766_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001767_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001768_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001769_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001824_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001825_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001864_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001865_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001894_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001895_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001922_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001936_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001937_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001964_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001965_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00001992_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002012_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002026_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002040_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002054_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002055_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002082_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002096_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002110_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002124_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002138_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002152_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002166_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002180_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002194_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002208_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002221_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002235_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002249_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002250_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002285_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002299_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002300_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002327_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002343_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002359_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002360_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002361_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002401_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002423_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002424_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002452_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002467_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002481_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002504_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002518_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002519_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002549_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002566_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002582_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002600_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002618_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002632_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002646_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002647_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002675_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002691_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002706_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002707_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002733_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002748_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002762_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002763_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002788_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002803_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002821_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002822_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002823_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002863_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002877_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002878_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002904_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002921_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002935_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002951_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002972_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002973_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002974_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002975_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00002976_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003043_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003057_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003081_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003082_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003110_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003111_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003137_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003151_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003165_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003179_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003180_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003207_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003221_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003235_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003249_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003263_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003277_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003291_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003305_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003319_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003333_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003347_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003361_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003375_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003390_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003404_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003420_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003421_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003447_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003464_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003478_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003479_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003480_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003526_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003542_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003563_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003577_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003578_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003579_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003580_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003630_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003644_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003645_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003646_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003688_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003689_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003714_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003715_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003716_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003717_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003718_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003719_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003800_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003801_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003802_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003845_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003859_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003860_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003889_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003903_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003904_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003940_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003953_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003954_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003955_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00003999_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004000_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004001_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004002_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004003_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004072_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004073_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004118_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004119_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004143_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004155_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004169_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004170_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004197_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004211_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004212_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004238_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004239_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004240_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004288_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004302_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004303_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004304_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004344_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004359_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004373_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004395_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004396_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004425_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004426_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004427_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004428_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004429_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004430_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004431_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004531_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004532_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004559_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004560_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004561_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004562_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004563_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004564_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004565_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004665_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004666_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004667_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004668_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004669_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004737_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004738_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004739_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004740_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004741_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004804_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004805_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004806_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004807_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004861_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004862_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004863_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004864_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004865_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004866_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004867_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004957_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004971_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004985_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00004986_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005006_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005016_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005029_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005038_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005044_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005045_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005046_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005047_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005048_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005112_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005113_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005114_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005115_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005116_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005184_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005203_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005215_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005216_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005217_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005218_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005275_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005276_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005301_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005302_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005303_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005304_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005360_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005381_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005395_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005411_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005423_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005424_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005425_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005475_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005481_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005482_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005483_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005484_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005485_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005486_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005565_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005566_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005567_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005568_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005629_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005630_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005631_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005671_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005685_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005699_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005714_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005738_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005739_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005767_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005781_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005782_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005783_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005784_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005785_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005786_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005874_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005875_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005876_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005877_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005945_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005952_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005957_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005971_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005972_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00005973_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006010_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006011_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006012_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006013_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006014_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006015_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006016_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006107_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006108_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006109_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006110_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006111_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006184_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006185_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006186_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006187_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006239_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006240_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006241_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006295_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006296_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006297_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006298_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006299_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006300_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006301_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006392_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006429_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006443_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006444_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006480_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006496_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006497_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006532_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006533_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006563_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006564_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006565_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006613_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006614_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006641_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006642_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006643_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006644_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006707_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006708_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006709_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006710_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006763_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006764_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006765_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006766_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006826_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006841_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006842_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006843_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006889_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006890_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006891_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006892_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006948_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006949_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006977_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00006992_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007014_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007015_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007050_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007078_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007079_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007080_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007120_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007121_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007122_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007123_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007124_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007125_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007202_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007203_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007204_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007240_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007254_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007268_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007282_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007283_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007284_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007285_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007335_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007336_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007337_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007338_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007392_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007393_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007394_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007436_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007437_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007477_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007491_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007492_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007536_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007551_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007552_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007553_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007554_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007621_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007622_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007623_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007624_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007625_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007626_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007627_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007730_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007731_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007732_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007733_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007734_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007735_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007736_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007737_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007858_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007859_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007860_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007861_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007862_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007863_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007950_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007951_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007952_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007953_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007954_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007955_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007956_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00007957_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008067_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008096_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008097_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008098_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008099_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008149_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008150_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008151_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008152_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008153_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008154_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008155_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008156_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008258_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008259_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008260_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008261_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008262_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008263_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008339_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008353_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008354_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008355_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008356_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008357_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008358_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008359_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008360_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008361_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008481_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008482_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008483_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008484_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008485_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008552_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008553_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008554_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008555_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008556_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008557_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008634_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008648_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008649_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008677_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008678_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008679_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008680_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008732_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008733_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008756_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008757_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008758_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008759_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008760_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008761_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008762_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008872_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008873_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008874_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008875_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008944_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008959_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008973_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008987_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008988_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00008989_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009038_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009052_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009053_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009054_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009055_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009056_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009057_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009136_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009150_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009151_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009181_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009195_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009196_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009197_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009198_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009265_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009266_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009267_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009268_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009269_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009270_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009364_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009380_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009395_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009396_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009397_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009439_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009458_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009478_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009501_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009502_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009503_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009504_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009558_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009577_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009578_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009579_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009619_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009634_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009649_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009650_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009651_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009652_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009653_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009721_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009735_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009752_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009753_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009754_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009755_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009817_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009818_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009854_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009855_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009891_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009892_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009928_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009957_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009958_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0000/00041838_00009960_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010013_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010014_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010015_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010016_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010017_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010018_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010121_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010122_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010155_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010156_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010157_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010158_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010159_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010160_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010263_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010280_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010296_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010297_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010298_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010343_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010344_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010345_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010384_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010385_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010386_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010387_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010388_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010389_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010390_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010490_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010491_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010492_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010542_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010543_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010544_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010545_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010546_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010547_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010649_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010686_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010687_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010688_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010726_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010727_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010728_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010729_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010730_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010731_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010732_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010828_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010852_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010853_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010854_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010855_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010856_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010857_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010858_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010959_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010960_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010961_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010962_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010963_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00010964_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011040_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011041_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011071_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011085_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011103_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011104_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011105_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011106_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011107_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011108_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011185_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011206_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011207_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011208_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011209_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011210_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011211_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011212_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011306_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011307_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011308_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011348_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011349_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011386_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011387_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011388_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011389_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011390_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011391_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011392_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011487_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011503_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011504_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011506_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011557_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011558_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011559_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011560_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011561_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011562_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011651_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011666_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011667_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011668_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011706_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011707_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011708_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011709_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011710_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011711_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011800_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011801_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011802_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011803_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011804_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011872_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011873_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011900_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011901_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011929_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011930_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011931_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011932_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011983_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00011984_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012013_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012014_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012015_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012016_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012079_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012093_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012094_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012095_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012096_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012097_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012098_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012184_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012185_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012221_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012222_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012223_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012224_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012225_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012226_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012228_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012348_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012349_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012350_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012393_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012407_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012408_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012409_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012410_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012412_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012497_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012499_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012500_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012501_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012503_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012504_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012505_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012625_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012626_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012627_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012628_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012629_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012630_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012631_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012740_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012757_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012773_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012795_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012796_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012825_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012826_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012827_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012828_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012829_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012831_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012925_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012926_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012927_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012928_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012929_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012930_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00012931_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013024_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013025_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013026_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013027_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013028_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013029_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013130_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013148_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013149_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013150_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013151_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013152_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013217_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013245_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013246_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013247_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013248_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013301_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013302_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013303_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013304_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013305_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013306_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013385_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013386_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013387_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013455_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013456_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013457_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013458_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013459_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013460_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013564_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013565_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013566_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013567_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013568_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013647_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013648_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013649_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013650_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013651_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013652_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013653_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013746_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013747_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013780_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013781_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013782_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013783_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013784_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013785_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013786_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013880_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013881_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013882_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013938_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013939_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013940_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013941_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00013942_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014024_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014025_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014026_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014027_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014079_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014080_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014081_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014124_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014125_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014126_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014127_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014128_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014129_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014214_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014242_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014275_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014305_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014318_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014331_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014336_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014347_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014358_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014365_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014366_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014380_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014406_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014409_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014413_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014427_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014434_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014443_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014448_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014455_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014474_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014475_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014476_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014477_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014478_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014552_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014566_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014576_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014599_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014601_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014613_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014614_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014615_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014616_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014617_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014618_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014702_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014703_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014704_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014705_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014706_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014707_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014708_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014805_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014812_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014824_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014825_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014826_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014827_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014828_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014894_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014917_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014925_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014926_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014927_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014928_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014929_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00014930_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015018_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015019_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015020_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015021_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015022_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015104_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015105_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015134_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015135_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015136_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015137_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015138_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015139_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015140_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015141_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015259_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015260_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015261_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015262_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015263_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015264_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015266_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015375_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015376_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015379_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015380_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015390_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015391_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015392_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015442_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015456_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015457_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015458_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015459_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015460_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015461_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015462_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015463_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015464_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015581_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015586_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015600_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015609_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015610_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015611_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015612_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015613_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015614_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015712_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015713_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015714_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015715_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015716_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015717_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015718_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015812_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015813_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015843_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015844_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015845_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015846_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015847_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015848_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015849_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015943_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015959_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015960_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015989_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015990_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015991_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00015992_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016045_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016060_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016061_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016062_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016063_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016064_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016065_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016162_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016163_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016164_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016165_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016226_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016227_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016228_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016229_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016296_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016297_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016298_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016299_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016300_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016301_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016380_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016381_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016408_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016409_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016410_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016411_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016412_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016413_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016414_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016528_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016529_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016530_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016531_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016532_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016609_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016610_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016611_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016612_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016613_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016614_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016697_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016698_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016699_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016700_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016701_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016702_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016809_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016810_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016811_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016812_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016813_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016814_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016815_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016929_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016930_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016931_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016932_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00016933_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017001_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017002_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017003_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017004_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017065_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017066_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017067_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017106_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017107_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017108_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017109_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017110_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017111_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017197_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017233_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017247_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017248_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017249_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017250_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017251_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017252_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017253_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017254_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017365_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017366_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017367_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017368_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017369_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017370_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017474_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017475_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017476_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017477_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017478_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017479_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017480_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017481_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017584_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017585_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017586_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017687_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017688_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017689_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017690_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017752_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017753_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017754_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017755_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017810_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017811_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017812_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017813_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017814_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017815_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017816_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017919_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017920_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017921_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017922_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017923_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00017924_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018003_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018027_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018044_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018045_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018046_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018086_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018087_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018115_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018116_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018117_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018118_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018119_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018120_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018121_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018231_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018232_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018233_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018234_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018235_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018236_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018237_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018238_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018239_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018362_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018363_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018364_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018408_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018409_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018444_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018445_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018446_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018492_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018493_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018494_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018542_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018543_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018544_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018545_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018546_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018547_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018644_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018645_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018646_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018647_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018648_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018649_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018728_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018752_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018753_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018782_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018783_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018784_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018830_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018831_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018832_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018833_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018884_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018898_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018899_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018932_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018947_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018948_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018949_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018950_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018951_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00018952_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019045_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019046_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019047_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019048_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019049_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019123_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019124_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019125_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019174_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019175_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019211_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019212_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019213_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019214_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019215_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019216_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019217_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019218_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019219_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019343_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019344_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019345_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019346_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019347_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019348_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019349_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019350_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019351_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019466_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019467_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019468_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019516_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019517_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019518_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019519_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019575_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019576_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019577_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019578_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019579_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019580_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019581_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019582_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019693_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019716_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019717_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019718_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019719_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019720_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019721_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019722_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019820_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019821_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019822_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019823_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019824_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019825_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019826_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019827_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019828_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019949_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019950_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019951_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019952_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019953_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019954_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019955_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019956_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0001/00041838_00019957_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020077_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020095_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020096_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020097_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020098_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020099_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020100_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020101_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020102_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020204_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020205_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020206_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020207_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020208_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020280_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020281_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020282_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020283_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020284_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020285_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020372_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020387_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020388_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020389_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020430_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020453_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020454_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020482_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020483_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020484_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020485_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020486_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020487_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020587_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020588_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020614_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020615_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020616_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020657_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020658_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020659_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020660_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020661_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020662_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020663_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020664_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020767_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020768_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020797_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020798_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020799_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020838_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020839_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020840_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020841_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020842_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020843_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020919_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020920_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020937_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020938_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020939_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020940_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00020941_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021005_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021019_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021034_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021035_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021061_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021062_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021088_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021089_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021090_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021133_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021134_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021135_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021175_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021176_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021207_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021208_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021209_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021210_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021211_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021212_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021213_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021310_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021311_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021312_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021313_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021314_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021379_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021395_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021396_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021397_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021398_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021447_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021448_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021475_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021476_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021477_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021521_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021535_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021552_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021566_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021582_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021601_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021602_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021603_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021604_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021605_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021606_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021685_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021699_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021700_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021701_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021702_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021756_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021757_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021758_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021791_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021810_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021811_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021812_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021857_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021858_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021859_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021860_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021861_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021927_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021942_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021943_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021944_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021945_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021946_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00021947_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022046_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022060_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022075_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022076_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022103_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022104_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022133_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022134_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022135_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022177_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022178_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022179_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022180_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022226_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022227_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022228_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022275_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022289_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022290_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022291_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022324_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022338_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022352_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022353_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022354_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022355_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022414_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022415_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022416_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022462_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022463_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022464_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022465_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022466_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022467_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022570_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022584_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022598_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022599_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022600_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022601_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022602_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022603_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022604_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022605_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022606_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022727_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022728_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022729_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022730_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022789_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022803_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022804_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022834_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022848_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022849_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022850_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022851_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022904_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022905_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022906_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022907_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022908_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00022909_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023003_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023004_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023005_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023006_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023007_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023008_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023103_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023118_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023119_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023120_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023153_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023189_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023205_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023221_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023222_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023223_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023224_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023225_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023226_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023332_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023346_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023361_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023375_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023389_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023403_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023418_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023419_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023420_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023465_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023480_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023499_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023500_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023527_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023528_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023555_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023556_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023557_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023558_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023559_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023560_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023637_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023638_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023667_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023668_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023694_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023695_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023696_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023697_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023698_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023764_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023765_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023766_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023767_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023768_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023769_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023848_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023849_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023881_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023895_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023896_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023897_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023898_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023899_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023900_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023901_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00023993_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024015_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024016_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024046_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024061_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024062_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024063_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024109_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024110_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024136_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024137_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024138_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024178_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024239_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024240_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024268_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024269_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024293_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024294_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024295_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024338_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024339_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024340_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024341_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024342_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024343_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024344_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024455_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024456_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024478_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024479_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024480_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024481_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024482_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024555_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024584_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024585_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024612_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024613_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024614_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024615_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024616_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024617_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024618_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024619_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024620_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024739_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024755_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024756_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024778_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024779_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024780_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024819_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024820_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024821_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024889_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024903_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024917_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024934_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024949_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024950_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024951_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024984_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00024998_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025012_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025026_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025027_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025049_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025050_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025051_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025085_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025099_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025114_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025129_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025143_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025144_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025191_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025192_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025223_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025237_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025253_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025267_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025281_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025296_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025297_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025323_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025324_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025325_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025371_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025372_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025411_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025430_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025431_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025432_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025433_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025482_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025483_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025484_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025528_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025529_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025530_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025531_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025532_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025594_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025595_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025596_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025634_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025635_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025636_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025637_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025686_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025687_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025688_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025724_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025738_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025739_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025770_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025771_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025772_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025773_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025774_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025836_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025837_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025838_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025839_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025888_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025904_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025905_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025906_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025907_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025908_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025970_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025971_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025972_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025973_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00025974_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026036_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026037_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026038_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026039_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026040_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026102_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026103_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026136_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026137_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026138_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026139_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026200_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026214_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026230_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026244_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026245_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026246_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026247_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026296_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026297_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026298_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026343_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026344_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026345_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026346_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026347_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026409_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026410_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026439_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026440_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026475_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026490_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026491_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026492_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026493_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026494_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026556_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026557_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026558_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026597_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026613_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026614_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026642_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026656_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026672_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026689_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026690_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026719_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026720_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026750_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026764_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026765_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026796_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026797_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026798_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026799_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026859_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026860_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026861_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026862_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026925_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026926_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026953_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026969_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026970_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00026971_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027033_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027047_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027068_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027069_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027070_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027119_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027120_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027148_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027164_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027165_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027194_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027195_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027217_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027218_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027219_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027220_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027267_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027281_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027282_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027283_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027325_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027326_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027327_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027328_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027329_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027387_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027401_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027418_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027432_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027446_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027447_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027448_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027492_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027507_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027521_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027535_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027549_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027564_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027580_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027581_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027582_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027623_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027637_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027638_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027667_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027681_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027682_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027683_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027684_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027685_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027748_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027749_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027776_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027793_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027794_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027795_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027796_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027797_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027861_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027862_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027891_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027892_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027893_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027894_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027895_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027960_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027976_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00027990_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028005_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028019_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028033_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028047_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028061_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028075_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028089_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028090_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028091_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028126_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028140_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028141_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028142_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028182_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028196_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028210_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028225_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028226_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028227_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028266_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028280_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028281_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028282_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028283_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028333_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028334_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028335_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028380_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028381_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028382_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028383_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028436_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028450_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028451_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028452_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028453_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028503_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028504_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028505_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028506_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028566_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028581_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028582_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028615_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028616_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028646_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028647_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028673_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028689_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028690_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028717_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028734_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028735_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028736_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028737_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028787_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028801_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028802_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028803_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028804_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028854_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028855_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028856_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028890_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028891_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028892_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028893_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028943_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028958_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028972_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028986_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028987_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028988_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00028989_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029047_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029048_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029049_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029089_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029103_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029119_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029120_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029149_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029150_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029172_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029190_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029191_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029192_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029193_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029194_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029251_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029266_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029267_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029294_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029295_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029296_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029297_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029345_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029346_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029370_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029371_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029392_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029406_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029407_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029439_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029453_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029454_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029478_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029495_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029496_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029519_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029534_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029548_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029564_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029578_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029592_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029593_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029594_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029595_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029645_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029663_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029664_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029665_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029666_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029667_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029728_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029729_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029759_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029760_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029761_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029807_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029808_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029809_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029810_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029860_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029874_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029875_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029876_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029877_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029878_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029940_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029941_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029942_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029943_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0002/00041838_00029944_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030006_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030024_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030025_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030026_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030027_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030077_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030099_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030113_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030128_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030129_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030130_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030174_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030188_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030202_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030203_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030204_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030205_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030254_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030268_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030282_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030296_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030297_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030298_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030299_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030349_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030350_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030383_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030384_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030385_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030386_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030436_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030437_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030463_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030477_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030492_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030493_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030494_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030495_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030496_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030558_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030559_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030560_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030561_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030609_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030610_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030611_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030660_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030661_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030662_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030710_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030711_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030743_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030745_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030796_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030797_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030798_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030799_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030850_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030851_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030852_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030891_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030892_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030893_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030894_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030945_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030960_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030961_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030962_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00030963_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031012_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031013_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031039_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031055_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031056_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031057_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031098_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031113_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031131_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031132_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031167_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031168_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031191_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031192_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031193_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031194_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031244_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031261_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031275_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031276_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031302_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031316_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031317_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031318_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031319_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031367_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031368_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031391_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031405_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031406_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031429_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031443_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031457_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031473_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031493_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031509_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031543_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031557_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031558_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031582_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031597_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031598_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031599_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031600_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031657_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031658_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031659_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031700_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031701_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031702_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031703_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031759_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031773_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031774_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031775_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031776_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031777_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031778_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031861_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031862_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031886_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031887_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031918_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031919_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031920_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031921_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031969_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031970_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00031994_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032009_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032010_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032011_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032012_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032060_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032081_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032100_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032101_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032102_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032135_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032136_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032137_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032138_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032187_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032188_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032189_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032190_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032191_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032255_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032327_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032328_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032329_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032369_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032370_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032371_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032372_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032373_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032448_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032449_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032450_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032451_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032452_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032453_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032570_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032571_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032572_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032573_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032574_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032575_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032576_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032577_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032688_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032689_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032690_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032691_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032692_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032693_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032694_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032789_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032790_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032791_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032792_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032793_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032794_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032795_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032796_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032903_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032906_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032907_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032908_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032909_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032910_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00032911_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033011_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033013_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033014_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033015_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033016_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033017_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033018_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033116_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033117_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033118_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033119_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033120_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033121_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033122_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033123_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033124_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033254_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033255_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033256_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033257_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033258_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033259_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033363_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033364_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033365_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033366_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033367_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033464_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033465_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033466_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033467_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033468_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033538_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033539_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033540_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033592_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033649_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033650_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033651_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033652_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033653_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033654_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033655_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033656_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033760_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033773_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033776_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033799_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033813_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033814_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033842_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033858_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033859_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033890_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033910_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033924_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033936_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033945_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033947_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033960_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00033986_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034001_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034013_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034021_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034033_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034043_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034060_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034065_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034066_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034067_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034068_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034134_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034135_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034136_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034137_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034195_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034203_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034248_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034249_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034279_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034297_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034311_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034312_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034342_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034343_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034367_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034383_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034398_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034412_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034432_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034433_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034461_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034479_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034493_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034505_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034506_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034507_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034553_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034554_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034555_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034556_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034557_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034621_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034635_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034636_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034637_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034638_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034639_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034703_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034718_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034729_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034730_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034731_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034780_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034781_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034814_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034847_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034852_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034868_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034870_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034897_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034908_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034913_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034918_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034927_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034965_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034975_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034996_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00034997_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035035_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035036_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035037_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035038_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035039_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035040_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035146_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035155_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035159_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035169_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035179_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035199_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035205_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035242_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035257_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035260_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035267_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035308_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035314_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035347_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035360_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035361_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035362_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035363_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035426_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035429_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035440_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035448_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035472_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035485_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035503_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035511_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035553_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035567_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035571_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035622_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035644_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035651_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035721_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035723_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035733_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035741_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035748_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035770_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035777_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035782_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035792_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035817_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035826_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035849_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035851_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035861_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035866_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035878_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035904_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035911_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035918_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035948_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035949_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035950_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035951_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035952_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00035953_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036074_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036075_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036076_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036077_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036131_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036149_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036160_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036182_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036193_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036194_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036195_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036196_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036197_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036198_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036320_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036335_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036336_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036340_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036345_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036383_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036387_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036396_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036413_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036437_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036455_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036495_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036496_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036497_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036498_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036499_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036587_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036588_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036589_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036590_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036659_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036673_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036677_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036678_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036711_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036730_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036739_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036741_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036742_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036798_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036799_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036800_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036801_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036802_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036803_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036804_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036932_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036941_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036942_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036943_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036944_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036945_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00036946_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037048_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037049_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037050_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037051_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037052_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037053_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037054_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037055_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037056_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037177_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037210_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037211_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037212_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037213_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037214_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037215_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037216_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037217_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037332_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037333_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037334_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037335_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037336_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037407_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037408_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037409_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037410_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037411_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037484_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037518_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037519_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037520_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037521_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037522_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037523_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037648_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037652_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037665_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037693_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037717_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037731_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037735_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037736_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037809_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037840_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037847_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037848_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037849_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037850_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037851_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037852_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037853_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037854_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037954_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037971_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00037985_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038042_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038045_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038098_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038144_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038174_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038177_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038178_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038179_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038189_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038233_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038241_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038254_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038255_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038256_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038257_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038258_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038339_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038342_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038358_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038383_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038398_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038399_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038400_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038401_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038402_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038403_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038502_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038503_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038504_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038505_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038506_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038507_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038508_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038626_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038642_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038663_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038676_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038681_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038689_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038692_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038704_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038720_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038733_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038738_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038744_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038756_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038788_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038789_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038790_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038791_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038792_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038793_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038898_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038900_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038934_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038935_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038936_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00038937_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039001_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039006_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039032_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039049_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039053_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039054_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039055_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039056_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039112_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039113_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039114_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039115_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039183_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039196_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039275_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039281_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039293_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039300_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039320_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039334_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039357_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039365_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039366_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039367_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039368_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039369_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039437_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039451_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039477_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039480_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039486_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039501_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039518_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039519_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039520_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039521_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039572_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039602_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039614_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039659_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039667_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039680_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039682_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039741_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039753_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039754_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039755_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039756_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039757_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039758_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039761_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039888_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039929_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039934_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039957_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039958_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0003/00041838_00039959_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040016_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040022_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040064_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040080_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040081_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040082_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040083_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040140_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040144_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040192_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040248_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040251_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040265_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040273_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040283_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040332_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040392_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040409_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040410_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040411_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040412_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040413_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040478_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040479_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040480_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040481_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040534_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040561_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040568_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040580_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040584_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040587_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040615_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040692_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040751_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040753_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040770_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040785_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040786_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040803_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040836_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040839_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040841_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040842_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040843_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040844_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040845_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040846_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040847_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040848_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040958_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040959_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040960_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040961_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040962_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040963_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00040964_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041070_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041071_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041072_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041073_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041140_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041147_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041148_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041150_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041152_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041153_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041154_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041217_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041219_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041236_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041237_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041238_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041239_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041240_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041241_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041242_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041243_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041375_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041377_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041378_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041379_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041380_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041381_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041382_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041383_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041494_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041499_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041500_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041501_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041502_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041560_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041563_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041566_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041572_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041588_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041589_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041590_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041591_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041592_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041593_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041790_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041792_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041795_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041805_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041806_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041807_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041808_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041809_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041810_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041811_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041941_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041959_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041966_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00041996_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042058_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042070_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042072_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042085_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042086_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042087_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042088_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042141_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042195_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042206_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042207_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042208_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042209_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042210_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042286_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042298_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042303_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042306_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042365_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042372_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042391_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042407_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042438_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042443_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042444_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042445_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042446_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042447_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042448_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042449_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042567_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042576_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042590_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042592_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042600_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042605_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042609_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042610_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042669_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042686_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042702_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042779_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042784_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042791_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042796_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042805_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042809_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042813_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042821_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042825_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042833_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042851_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042877_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042879_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042889_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00042977_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043019_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043041_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043042_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043043_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043134_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043141_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043144_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043153_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043281_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043320_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043327_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043329_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043334_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043395_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043404_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043418_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043419_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043440_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043445_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043451_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043452_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043456_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043484_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043490_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043499_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043513_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043515_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043539_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043544_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043545_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043546_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043547_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043548_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043549_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043550_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043551_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043653_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043678_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043693_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043733_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043751_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043775_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043791_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043797_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043801_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043835_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043837_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043852_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043884_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043885_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043887_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043896_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043916_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043944_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00043986_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044052_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044067_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044124_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044156_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044184_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044187_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044252_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044265_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044268_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044275_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044282_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044303_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044331_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044352_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044369_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044383_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044397_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044398_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044429_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044436_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044461_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044484_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044529_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044542_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044549_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044570_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044583_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044585_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044591_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044592_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044614_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044625_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044631_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044632_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044646_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044649_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044655_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044666_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044676_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044715_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044717_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044718_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044719_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044735_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044769_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044780_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044798_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044831_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044870_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044883_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044939_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044956_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044985_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00044986_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045015_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045016_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045029_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045053_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045065_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045071_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045085_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045120_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045145_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045155_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045161_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045189_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045212_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045216_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045235_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045250_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045251_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045265_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045285_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045287_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045305_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045369_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045385_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045391_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045392_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045402_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045404_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045419_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045420_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045421_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045423_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045424_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045425_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045426_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045450_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045494_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045522_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045558_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045575_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045592_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045620_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045629_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045632_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045643_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045669_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045670_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045685_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045686_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045709_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045710_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045763_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045767_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045787_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045805_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045806_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045814_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045839_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045863_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045880_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045885_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045940_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045965_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045966_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00045980_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046004_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046025_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046026_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046039_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046053_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046068_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046071_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046083_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046140_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046141_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046142_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046194_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046213_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046225_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046230_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046240_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046244_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046275_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046276_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046291_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046292_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046315_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046316_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046323_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046326_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046327_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046336_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046340_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046344_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046388_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046389_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046415_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046417_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046423_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046438_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046443_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046449_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046453_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046456_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046499_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046524_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046526_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046535_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046549_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046557_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046572_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046601_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046602_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046626_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046627_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046632_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046643_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046648_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046655_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046669_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046706_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046713_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046722_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046735_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046737_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046742_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046750_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046832_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046855_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046877_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046893_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046896_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046901_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046922_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046947_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046958_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046960_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046969_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046971_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046976_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046985_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00046996_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047011_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047037_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047039_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047042_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047047_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047051_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047070_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047080_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047113_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047125_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047132_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047137_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047142_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047152_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047157_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047166_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047260_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047282_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047284_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047309_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047334_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047335_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047336_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047337_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047338_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047339_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047340_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047341_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047453_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047454_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047455_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047501_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047502_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047530_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047531_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047532_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047533_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047534_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047535_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047536_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047645_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047646_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047647_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047648_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047708_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047723_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047739_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047740_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047741_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047742_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047743_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047744_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047819_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047820_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047821_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047822_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047869_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047870_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047871_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047913_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047914_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047915_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047916_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047963_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047964_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047965_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00047966_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048014_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048015_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048016_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048062_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048063_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048095_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048096_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048120_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048121_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048122_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048166_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048198_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048199_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048200_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048201_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048202_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048290_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048307_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048325_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048326_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048327_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048372_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048386_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048390_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048406_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048409_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048436_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048447_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048449_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048478_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048491_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048502_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048513_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048520_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048525_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048544_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048548_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048551_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048564_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048574_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048577_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048580_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048629_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048652_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048668_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048673_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048679_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048683_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048688_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048689_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048697_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048701_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048702_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048716_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048720_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048737_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048741_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048743_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048754_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048767_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048781_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048789_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048791_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048803_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048824_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048840_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048856_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048861_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048863_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048871_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048882_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048884_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048886_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048896_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048909_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048926_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048939_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048942_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048958_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048959_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048989_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048990_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048992_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048993_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048996_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00048998_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049022_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049046_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049052_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049073_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049104_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049108_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049113_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049126_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049199_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049239_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049244_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049257_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049267_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049274_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049300_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049305_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049306_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049354_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049403_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049417_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049450_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049456_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049459_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049470_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049499_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049508_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049526_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049529_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049538_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049574_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049581_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049625_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049630_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049643_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049646_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049647_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049661_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049665_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049690_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049693_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049695_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049718_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049726_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049730_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049756_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049762_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049781_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049790_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049796_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049797_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049816_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049822_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049832_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049850_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049853_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049854_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049864_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049884_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049887_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049922_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049923_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049929_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049936_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049940_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049948_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049953_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0004/00041838_00049999_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050001_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050032_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050041_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050081_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050091_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050101_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050104_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050108_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050111_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050118_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050125_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050145_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050154_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050165_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050176_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050187_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050221_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050230_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050247_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050263_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050264_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050265_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050266_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050267_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050268_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050269_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050270_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050393_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050394_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050406_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050407_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050408_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050409_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050410_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050411_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050412_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050509_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050510_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050511_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050512_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050569_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050583_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050606_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050634_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050656_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050671_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050672_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050682_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050683_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050684_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050685_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050735_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050741_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050755_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050775_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050781_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050786_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050788_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050790_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050791_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050792_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050793_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050794_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050795_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050796_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050893_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050909_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050938_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050973_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050978_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050979_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050980_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00050981_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051056_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051082_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051093_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051098_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051101_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051122_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051136_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051142_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051143_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051144_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051238_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051248_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051259_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051286_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051294_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051302_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051326_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051331_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051341_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051352_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051375_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051389_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051401_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051413_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051432_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051462_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051520_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051528_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051547_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051555_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051581_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051592_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051598_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051607_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051618_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051627_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051629_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051639_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051667_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051668_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051669_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051670_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051729_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051730_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051785_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051787_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051788_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051802_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051806_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051826_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051827_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051828_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051829_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051830_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051831_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051832_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051833_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051834_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051958_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051959_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051960_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051961_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051962_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051963_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00051964_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052059_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052115_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052161_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052165_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052183_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052191_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052195_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052227_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052228_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052252_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052253_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052254_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052304_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052322_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052323_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052328_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052329_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052330_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052331_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052405_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052412_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052422_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052425_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052455_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052462_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052480_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052483_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052527_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052568_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052574_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052600_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052605_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052615_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052654_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052671_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052674_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052685_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052713_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052730_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052763_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052770_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052782_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052786_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052802_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052806_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052810_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052822_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052839_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052862_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052863_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052864_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052893_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052894_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052918_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052924_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052935_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052946_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052948_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052955_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052969_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052970_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052971_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00052979_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053001_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053008_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053016_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053033_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053049_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053058_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053071_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053076_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053084_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053102_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053106_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053112_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053135_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053136_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053159_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053160_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053170_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053172_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053201_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053202_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053203_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053240_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053259_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053273_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053309_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053313_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053323_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053329_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053332_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053347_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053369_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053376_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053388_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053389_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053390_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053391_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053392_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053393_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053394_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053546_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053547_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053549_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053555_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053587_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053595_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053604_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053607_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053609_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053633_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053640_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053642_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053669_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053676_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053677_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053681_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053692_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053704_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053705_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053736_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053742_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053760_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053761_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053762_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053763_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053764_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053829_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053830_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053835_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053886_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053889_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053926_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053930_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053932_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053947_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053965_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053967_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053989_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00053996_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054002_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054004_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054015_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054030_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054031_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054054_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054107_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054109_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054122_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054140_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054161_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054163_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054181_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054183_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054196_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054197_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054222_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054229_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054240_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054241_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054260_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054270_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054276_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054279_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054280_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054289_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054290_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054291_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054292_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054354_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054380_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054384_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054385_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054386_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054449_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054465_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054466_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054467_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054468_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054469_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054538_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054539_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054542_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054561_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054575_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054587_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054588_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054589_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054590_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054591_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054592_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054689_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054690_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054691_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054692_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054756_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054765_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054780_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054792_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054793_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054794_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054819_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054820_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054821_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054873_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054874_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054875_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054919_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054920_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054921_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054969_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054975_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054976_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054977_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00054978_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055044_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055045_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055046_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055047_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055107_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055125_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055156_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055158_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055159_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055160_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055161_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055162_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055163_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055164_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055269_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055280_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055307_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055325_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055360_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055361_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055362_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055363_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055399_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055400_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055401_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055402_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055464_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055465_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055466_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055522_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055523_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055524_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055525_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055526_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055527_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055528_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055529_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055640_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055641_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055642_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055643_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055644_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055748_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055784_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055796_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055797_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055798_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055860_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055862_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055901_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055922_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055923_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055924_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055925_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055926_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055989_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055990_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055991_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055992_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00055993_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056068_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056070_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056071_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056072_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056073_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056121_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056126_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056131_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056132_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056133_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056184_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056195_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056196_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056197_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056198_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056250_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056251_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056283_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056294_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056335_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056336_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056337_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056338_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056406_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056424_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056425_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056451_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056452_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056453_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056499_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056500_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056501_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056502_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056553_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056554_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056555_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056556_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056557_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056622_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056643_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056644_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056645_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056694_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056695_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056696_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056697_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056747_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056748_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056749_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056750_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056751_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056814_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056815_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056816_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056817_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056874_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056893_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056894_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056895_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056896_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056897_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056986_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00056988_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057000_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057032_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057055_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057070_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057143_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057149_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057159_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057183_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057207_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057216_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057232_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057237_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057258_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057261_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057282_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057287_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057297_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057316_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057326_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057334_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057337_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057356_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057363_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057368_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057383_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057393_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057401_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057405_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057432_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057447_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057473_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057475_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057480_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057491_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057494_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057523_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057552_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057555_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057558_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057599_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057600_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057773_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00057774_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058058_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058082_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058133_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058134_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058176_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058177_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058198_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058199_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058200_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058236_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058237_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058238_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058287_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058288_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058310_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058311_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058312_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058360_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058361_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058362_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058408_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058409_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058451_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058452_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058453_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058501_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058502_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058549_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058550_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058551_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058592_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058593_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058594_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058649_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058650_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058651_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058698_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058699_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058700_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058741_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058742_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058786_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058787_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058788_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058789_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058790_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058791_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058792_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058793_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058794_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058795_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058796_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058797_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058798_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058799_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058800_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058801_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058802_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058803_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058804_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058805_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058806_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058807_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058808_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058809_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058810_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058811_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058812_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058813_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058814_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058815_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058816_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058817_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058818_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058819_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058820_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058828_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058831_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058832_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058833_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058834_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058836_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058837_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00058838_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059009_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059010_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059037_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059051_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059052_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059081_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059094_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059108_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059122_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059136_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059150_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059164_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059180_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059194_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059225_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059239_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059253_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059281_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059296_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059321_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059336_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059351_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059367_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059368_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059382_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059383_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059403_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059417_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059431_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059461_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059506_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059520_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059557_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059571_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059597_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059619_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059647_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059650_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059664_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059678_1.bhadron.mdst',
'LFN:/lhcb/LHCb/Collision11/BHADRON.MDST/00041838/0005/00041838_00059714_1.bhadron.mdst'
], clear=True)
| 89.623018
| 118
| 0.833083
| 50,595
| 350,426
| 5.616405
| 0.078367
| 0.300857
| 0.150428
| 0.300857
| 0.888852
| 0.888852
| 0.888852
| 0.888852
| 0.888852
| 0.888666
| 0
| 0.347978
| 0.01142
| 350,426
| 3,909
| 119
| 89.645945
| 0.472294
| 0.001547
| 0
| 0
| 1
| 0.999229
| 0.955215
| 0.955203
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.000514
| 0
| 0.000514
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 15
|
acd79e3533bcc4a536c6c3b3257c1d3e5fb2c4e4
| 437
|
py
|
Python
|
tests/test_context.py
|
ningyixue/AIPI530_Final_Project
|
b95353ffd003692a37a59042dfcd744a18b7e802
|
[
"MIT"
] | 565
|
2020-08-01T02:44:28.000Z
|
2022-03-30T15:00:54.000Z
|
tests/test_context.py
|
ningyixue/AIPI530_Final_Project
|
b95353ffd003692a37a59042dfcd744a18b7e802
|
[
"MIT"
] | 144
|
2020-08-01T03:45:10.000Z
|
2022-03-30T14:51:16.000Z
|
tests/test_context.py
|
ningyixue/AIPI530_Final_Project
|
b95353ffd003692a37a59042dfcd744a18b7e802
|
[
"MIT"
] | 103
|
2020-08-26T13:27:34.000Z
|
2022-03-31T12:24:27.000Z
|
from d3rlpy.context import disable_parallel, get_parallel_flag, parallel
def test_parallel():
assert not get_parallel_flag()
with parallel():
assert get_parallel_flag()
with disable_parallel():
assert not get_parallel_flag()
assert get_parallel_flag()
assert not get_parallel_flag()
with disable_parallel():
assert not get_parallel_flag()
assert not get_parallel_flag()
| 27.3125
| 72
| 0.704805
| 54
| 437
| 5.333333
| 0.240741
| 0.305556
| 0.416667
| 0.347222
| 0.715278
| 0.715278
| 0.590278
| 0.590278
| 0.444444
| 0.444444
| 0
| 0.002967
| 0.228833
| 437
| 15
| 73
| 29.133333
| 0.851632
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.583333
| 1
| 0.083333
| true
| 0
| 0.083333
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4a17ae374709b77704deca0baa98401f9412a475
| 23,347
|
py
|
Python
|
hdf5index.py
|
CodingNowNow/jiaoyi
|
57513f8cf0d282fa70ac9e8e76ff785d7a2a019c
|
[
"MIT"
] | 1
|
2019-03-22T06:36:56.000Z
|
2019-03-22T06:36:56.000Z
|
hdf5index.py
|
nvsnvyu/hikyuu
|
57513f8cf0d282fa70ac9e8e76ff785d7a2a019c
|
[
"MIT"
] | null | null | null |
hdf5index.py
|
nvsnvyu/hikyuu
|
57513f8cf0d282fa70ac9e8e76ff785d7a2a019c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf8 -*-
# cp936
"""
建立HDF5索引
"""
import datetime
import tables
class IndexRecord(tables.IsDescription):
    """Row schema for an index table: period start date -> row offset in the data table."""
    datetime = tables.UInt64Col() #IGNORE:E1101 -- period date, encoded YYYYMMDDHHMM
    start = tables.UInt64Col() #IGNORE:E1101 -- offset of the period's first row in the source table
def UpdateWeekIndex(h5file):
    """Create or refresh the "/week" index tables for every table under "/data".

    Each index row maps a week-start date (Monday, encoded YYYYMMDDHHMM as
    uint64) to the row offset where that week begins in the source table.
    Only rows appended since the previous run are re-scanned.
    """
    try:
        group = h5file.getNode("/", "week")
    except Exception:  # group missing on first run (NoSuchNodeError)
        group = h5file.createGroup("/", "week")

    def getNewDate(olddate):
        # Decode YYYYMMDDHHMM, then snap to the Monday of that week.
        # // keeps the Python-2 integer-division behavior and works on Python 3.
        y = olddate // 100000000
        m = olddate // 1000000 - y * 100
        d = olddate // 10000 - (y * 10000 + m * 100)
        day = datetime.date(y, m, d)
        monday = day - datetime.timedelta(day.weekday())
        return monday.year * 100000000 + monday.month * 1000000 + monday.day * 10000

    for table in h5file.walkNodes("/data"):
        if type(table) != tables.table.Table:
            continue
        try:
            index_table = h5file.getNode(group, table.name)
        except Exception:  # index table missing on first run
            index_table = h5file.createTable(group, table.name, IndexRecord)
        if 0 == table.nrows:
            continue
        index_row = index_table.row
        if index_table.nrows:
            # Incremental update: resume from the start of the last indexed period.
            last_date = getNewDate(int(table[-1]['datetime']))
            pre_index_date = int(index_table[-1]['datetime'])
            if pre_index_date == last_date:
                continue  # index already covers the newest data
            startix = int(index_table[-1]['start'])
        else:
            # Fresh index: the first data row opens the first period.
            startix = 0
            pre_index_date = getNewDate(int(table[0]['datetime']))
            index_row['datetime'] = pre_index_date
            index_row['start'] = 0
            index_row.append()
        index = startix
        for row in table[startix:]:
            cur_index_date = getNewDate(int(row['datetime']))
            if cur_index_date != pre_index_date:
                # A new period begins at this row: record its offset.
                index_row['datetime'] = cur_index_date
                index_row['start'] = index
                index_row.append()
                pre_index_date = cur_index_date
            index += 1
        index_table.flush()
def UpdateMonthIndex(h5file):
    """Create or refresh the "/month" index tables for every table under "/data".

    Each index row maps the first day of a month (encoded YYYYMMDDHHMM as
    uint64) to the row offset where that month begins in the source table.
    Only rows appended since the previous run are re-scanned.
    """
    try:
        group = h5file.getNode("/", "month")
    except Exception:  # group missing on first run (NoSuchNodeError)
        group = h5file.createGroup("/", "month")

    def getNewDate(olddate):
        # Snap to the first day of the month: YYYYMM01 0000.
        # // keeps the Python-2 integer-division behavior and works on Python 3.
        y = olddate // 100000000
        m = olddate // 1000000 - y * 100
        return y * 100000000 + m * 1000000 + 10000

    for table in h5file.walkNodes("/data"):
        if type(table) != tables.table.Table:
            continue
        try:
            index_table = h5file.getNode(group, table.name)
        except Exception:  # index table missing on first run
            index_table = h5file.createTable(group, table.name, IndexRecord)
        if 0 == table.nrows:
            continue
        index_row = index_table.row
        if index_table.nrows:
            # Incremental update: resume from the start of the last indexed period.
            last_date = getNewDate(int(table[-1]['datetime']))
            pre_index_date = int(index_table[-1]['datetime'])
            if pre_index_date == last_date:
                continue  # index already covers the newest data
            startix = int(index_table[-1]['start'])
        else:
            # Fresh index: the first data row opens the first period.
            startix = 0
            pre_index_date = getNewDate(int(table[0]['datetime']))
            index_row['datetime'] = pre_index_date
            index_row['start'] = 0
            index_row.append()
        index = startix
        for row in table[startix:]:
            cur_index_date = getNewDate(int(row['datetime']))
            if cur_index_date != pre_index_date:
                # A new period begins at this row: record its offset.
                index_row['datetime'] = cur_index_date
                index_row['start'] = index
                index_row.append()
                pre_index_date = cur_index_date
            index += 1
        index_table.flush()
def UpdateYearIndex(h5file):
    """Create or refresh the "/year" index tables for every table under "/data".

    Each index row maps Jan 1st of a year (encoded YYYYMMDDHHMM as uint64)
    to the row offset where that year begins in the source table.
    Only rows appended since the previous run are re-scanned.
    """
    try:
        group = h5file.getNode("/", "year")
    except Exception:  # group missing on first run (NoSuchNodeError)
        group = h5file.createGroup("/", "year")

    def getNewDate(olddate):
        # Snap to Jan 1st of the year: YYYY0101 0000.
        # // keeps the Python-2 integer-division behavior and works on Python 3.
        return olddate // 100000000 * 100000000 + 1010000

    for table in h5file.walkNodes("/data"):
        if type(table) != tables.table.Table:
            continue
        try:
            index_table = h5file.getNode(group, table.name)
        except Exception:  # index table missing on first run
            index_table = h5file.createTable(group, table.name, IndexRecord)
        if 0 == table.nrows:
            continue
        index_row = index_table.row
        if index_table.nrows:
            # Incremental update: resume from the start of the last indexed period.
            last_date = getNewDate(int(table[-1]['datetime']))
            pre_index_date = int(index_table[-1]['datetime'])
            if pre_index_date == last_date:
                continue  # index already covers the newest data
            startix = int(index_table[-1]['start'])
        else:
            # Fresh index: the first data row opens the first period.
            startix = 0
            pre_index_date = getNewDate(int(table[0]['datetime']))
            index_row['datetime'] = pre_index_date
            index_row['start'] = 0
            index_row.append()
        index = startix
        for row in table[startix:]:
            cur_index_date = getNewDate(int(row['datetime']))
            if cur_index_date != pre_index_date:
                # A new period begins at this row: record its offset.
                index_row['datetime'] = cur_index_date
                index_row['start'] = index
                index_row.append()
                pre_index_date = cur_index_date
            index += 1
        index_table.flush()
def UpdateHalfYearIndex(h5file):
    """Create or refresh the "/halfyear" index tables for every table under "/data".

    Each index row maps the first day of a half-year (Jan 1st or Jul 1st,
    encoded YYYYMMDDHHMM as uint64) to the row offset where that half-year
    begins in the source table.  Only newly appended rows are re-scanned.
    """
    try:
        group = h5file.getNode("/", "halfyear")
    except Exception:  # group missing on first run (NoSuchNodeError)
        group = h5file.createGroup("/", "halfyear")

    def getNewDate(olddate):
        # Snap to the first day of the half-year: Jan 1st (m<=6) or Jul 1st.
        # // keeps the Python-2 integer-division behavior and works on Python 3.
        y = olddate // 100000000
        m = olddate // 1000000 - y * 100
        first_month = 1 if m <= 6 else 7
        return y * 100000000 + first_month * 1000000 + 10000

    for table in h5file.walkNodes("/data"):
        if type(table) != tables.table.Table:
            continue
        try:
            index_table = h5file.getNode(group, table.name)
        except Exception:  # index table missing on first run
            index_table = h5file.createTable(group, table.name, IndexRecord)
        if 0 == table.nrows:
            continue
        index_row = index_table.row
        if index_table.nrows:
            # Incremental update: resume from the start of the last indexed period.
            last_date = getNewDate(int(table[-1]['datetime']))
            pre_index_date = int(index_table[-1]['datetime'])
            if pre_index_date == last_date:
                continue  # index already covers the newest data
            startix = int(index_table[-1]['start'])
        else:
            # Fresh index: the first data row opens the first period.
            startix = 0
            pre_index_date = getNewDate(int(table[0]['datetime']))
            index_row['datetime'] = pre_index_date
            index_row['start'] = 0
            index_row.append()
        index = startix
        for row in table[startix:]:
            cur_index_date = getNewDate(int(row['datetime']))
            if cur_index_date != pre_index_date:
                # A new period begins at this row: record its offset.
                index_row['datetime'] = cur_index_date
                index_row['start'] = index
                index_row.append()
                pre_index_date = cur_index_date
            index += 1
        index_table.flush()
def UpdateQuarterIndex(h5file):
    """Create or refresh the "/quarter" index tables for every table under "/data".

    Each index row maps the first day of a quarter (Jan/Apr/Jul/Oct 1st,
    encoded YYYYMMDDHHMM as uint64) to the row offset where that quarter
    begins in the source table.  Only newly appended rows are re-scanned.
    """
    try:
        group = h5file.getNode("/", "quarter")
    except Exception:  # group missing on first run (NoSuchNodeError)
        group = h5file.createGroup("/", "quarter")

    def getNewDate(olddate):
        # Snap to the first day of the quarter: months 1-3 -> 1, 4-6 -> 4, ...
        # // keeps the Python-2 integer-division behavior and works on Python 3.
        y = olddate // 100000000
        m = olddate // 1000000 - y * 100
        first_month = (m - 1) // 3 * 3 + 1
        return y * 100000000 + first_month * 1000000 + 10000

    for table in h5file.walkNodes("/data"):
        if type(table) != tables.table.Table:
            continue
        try:
            index_table = h5file.getNode(group, table.name)
        except Exception:  # index table missing on first run
            index_table = h5file.createTable(group, table.name, IndexRecord)
        if 0 == table.nrows:
            continue
        index_row = index_table.row
        if index_table.nrows:
            # Incremental update: resume from the start of the last indexed period.
            last_date = getNewDate(int(table[-1]['datetime']))
            pre_index_date = int(index_table[-1]['datetime'])
            if pre_index_date == last_date:
                continue  # index already covers the newest data
            startix = int(index_table[-1]['start'])
        else:
            # Fresh index: the first data row opens the first period.
            startix = 0
            pre_index_date = getNewDate(int(table[0]['datetime']))
            index_row['datetime'] = pre_index_date
            index_row['start'] = 0
            index_row.append()
        index = startix
        for row in table[startix:]:
            cur_index_date = getNewDate(int(row['datetime']))
            if cur_index_date != pre_index_date:
                # A new period begins at this row: record its offset.
                index_row['datetime'] = cur_index_date
                index_row['start'] = index
                index_row.append()
                pre_index_date = cur_index_date
            index += 1
        index_table.flush()
def UpdateDayIndex(h5file):
    """Create or refresh the "/day" index tables for every table under "/data".

    A period is one calendar day: timestamps are truncated to YYYYMMDD0000.
    Only rows appended since the previous run are re-scanned.
    """
    try:
        group = h5file.getNode("/", "day")
    except Exception:  # group missing on first run (NoSuchNodeError)
        group = h5file.createGroup("/", "day")

    def getNewDate(olddate):
        # Drop the HHMM part of a YYYYMMDDHHMM timestamp.
        # // keeps the Python-2 integer-division behavior and works on Python 3.
        return olddate // 10000 * 10000

    for table in h5file.walkNodes("/data"):
        if type(table) != tables.table.Table:
            continue
        try:
            index_table = h5file.getNode(group, table.name)
        except Exception:  # index table missing on first run
            index_table = h5file.createTable(group, table.name, IndexRecord)
        if 0 == table.nrows:
            continue
        index_row = index_table.row
        if index_table.nrows:
            # Incremental update: resume from the start of the last indexed period.
            last_date = getNewDate(int(table[-1]['datetime']))
            pre_index_date = int(index_table[-1]['datetime'])
            if pre_index_date == last_date:
                continue  # index already covers the newest data
            startix = int(index_table[-1]['start'])
        else:
            # Fresh index: the first data row opens the first period.
            startix = 0
            pre_index_date = getNewDate(int(table[0]['datetime']))
            index_row['datetime'] = pre_index_date
            index_row['start'] = 0
            index_row.append()
        index = startix
        for row in table[startix:]:
            cur_index_date = getNewDate(int(row['datetime']))
            if cur_index_date != pre_index_date:
                # A new period begins at this row: record its offset.
                index_row['datetime'] = cur_index_date
                index_row['start'] = index
                index_row.append()
                pre_index_date = cur_index_date
            index += 1
        index_table.flush()
def UpdateHourIndex(h5file):
    """Create or refresh the "/min60" index tables for every table under "/data".

    A period is one 60-minute trading bar; timestamps are snapped up to the
    bar-end times 10:30, 11:30, 14:00 and 15:00 (the session bar ends used
    by the source data).  Only newly appended rows are re-scanned.
    """
    try:
        group = h5file.getNode("/", "min60")
    except Exception:  # group missing on first run (NoSuchNodeError)
        group = h5file.createGroup("/", "min60")

    def getNewDate(olddate):
        # Split YYYYMMDDHHMM into day and HHMM parts, then snap the time up
        # to the end of its hourly trading bar.  (// keeps the Python-2
        # integer-division behavior and works on Python 3; the original's
        # local name "min" shadowed the builtin and was renamed.)
        day = olddate // 10000 * 10000
        hhmm = olddate - day
        if hhmm <= 1030:
            return day + 1030
        elif hhmm <= 1130:
            return day + 1130
        elif hhmm <= 1400:
            return day + 1400
        return day + 1500

    for table in h5file.walkNodes("/data"):
        if type(table) != tables.table.Table:
            continue
        try:
            index_table = h5file.getNode(group, table.name)
        except Exception:  # index table missing on first run
            index_table = h5file.createTable(group, table.name, IndexRecord)
        if 0 == table.nrows:
            continue
        index_row = index_table.row
        if index_table.nrows:
            # Incremental update: resume from the start of the last indexed period.
            last_date = getNewDate(int(table[-1]['datetime']))
            pre_index_date = int(index_table[-1]['datetime'])
            if pre_index_date == last_date:
                continue  # index already covers the newest data
            startix = int(index_table[-1]['start'])
        else:
            # Fresh index: the first data row opens the first period.
            startix = 0
            pre_index_date = getNewDate(int(table[0]['datetime']))
            index_row['datetime'] = pre_index_date
            index_row['start'] = 0
            index_row.append()
        index = startix
        for row in table[startix:]:
            cur_index_date = getNewDate(int(row['datetime']))
            if cur_index_date != pre_index_date:
                # A new period begins at this row: record its offset.
                index_row['datetime'] = cur_index_date
                index_row['start'] = index
                index_row.append()
                pre_index_date = cur_index_date
            index += 1
        index_table.flush()
def UpdateFifteenMinIndex(h5file):
    """Create or refresh the "/min15" index tables for every table under "/data".

    A period is one 15-minute trading bar; timestamps are snapped up to the
    next bar-end time.  There are no bars between 11:30 and 13:15 (the
    midday break in the source data's sessions).  Only newly appended rows
    are re-scanned.
    """
    try:
        group = h5file.getNode("/", "min15")
    except Exception:  # group missing on first run (NoSuchNodeError)
        group = h5file.createGroup("/", "min15")

    # Bar-end times (HHMM); anything later than the last entry maps to 15:00.
    _BAR_ENDS = (945, 1000, 1015, 1030, 1045, 1100, 1115, 1130,
                 1315, 1330, 1345, 1400, 1415, 1430, 1445)

    def getNewDate(olddate):
        # Snap the HHMM part up to the end of its 15-minute trading bar.
        # // keeps the Python-2 integer-division behavior and works on Python 3.
        day = olddate // 10000 * 10000
        hhmm = olddate - day
        for end in _BAR_ENDS:
            if hhmm <= end:
                return day + end
        return day + 1500

    for table in h5file.walkNodes("/data"):
        if type(table) != tables.table.Table:
            continue
        try:
            index_table = h5file.getNode(group, table.name)
        except Exception:  # index table missing on first run
            index_table = h5file.createTable(group, table.name, IndexRecord)
        if 0 == table.nrows:
            continue
        index_row = index_table.row
        if index_table.nrows:
            # Incremental update: resume from the start of the last indexed period.
            last_date = getNewDate(int(table[-1]['datetime']))
            pre_index_date = int(index_table[-1]['datetime'])
            if pre_index_date == last_date:
                continue  # index already covers the newest data
            startix = int(index_table[-1]['start'])
        else:
            # Fresh index: the first data row opens the first period.
            startix = 0
            pre_index_date = getNewDate(int(table[0]['datetime']))
            index_row['datetime'] = pre_index_date
            index_row['start'] = 0
            index_row.append()
        index = startix
        for row in table[startix:]:
            cur_index_date = getNewDate(int(row['datetime']))
            if cur_index_date != pre_index_date:
                # A new period begins at this row: record its offset.
                index_row['datetime'] = cur_index_date
                index_row['start'] = index
                index_row.append()
                pre_index_date = cur_index_date
            index += 1
        index_table.flush()
def UpdateHalfHourIndex(h5file):
    """Create or refresh the "/min30" index tables for every table under "/data".

    A period is one 30-minute trading bar; timestamps are snapped up to the
    next bar-end time (no bars between 11:30 and 13:30, the midday break).
    Only newly appended rows are re-scanned.
    """
    try:
        group = h5file.getNode("/", "min30")
    except Exception:  # group missing on first run (NoSuchNodeError)
        group = h5file.createGroup("/", "min30")

    # Bar-end times (HHMM); anything later than the last entry maps to 15:00.
    _BAR_ENDS = (1000, 1030, 1100, 1130, 1330, 1400, 1430)

    def getNewDate(olddate):
        # Snap the HHMM part up to the end of its 30-minute trading bar.
        # // keeps the Python-2 integer-division behavior and works on Python 3.
        day = olddate // 10000 * 10000
        hhmm = olddate - day
        for end in _BAR_ENDS:
            if hhmm <= end:
                return day + end
        return day + 1500

    for table in h5file.walkNodes("/data"):
        if type(table) != tables.table.Table:
            continue
        try:
            index_table = h5file.getNode(group, table.name)
        except Exception:  # index table missing on first run
            index_table = h5file.createTable(group, table.name, IndexRecord)
        if 0 == table.nrows:
            continue
        index_row = index_table.row
        if index_table.nrows:
            # Incremental update: resume from the start of the last indexed period.
            last_date = getNewDate(int(table[-1]['datetime']))
            pre_index_date = int(index_table[-1]['datetime'])
            if pre_index_date == last_date:
                continue  # index already covers the newest data
            startix = int(index_table[-1]['start'])
        else:
            # Fresh index: the first data row opens the first period.
            startix = 0
            pre_index_date = getNewDate(int(table[0]['datetime']))
            index_row['datetime'] = pre_index_date
            index_row['start'] = 0
            index_row.append()
        index = startix
        for row in table[startix:]:
            cur_index_date = getNewDate(int(row['datetime']))
            if cur_index_date != pre_index_date:
                # A new period begins at this row: record its offset.
                index_row['datetime'] = cur_index_date
                index_row['start'] = index
                index_row.append()
                pre_index_date = cur_index_date
            index += 1
        index_table.flush()
def UpdateFiveMinIndex(h5file):
    """Create or refresh the "/min5" index tables for every table under "/data".

    A period is one 5-minute bar: the minute part of a YYYYMMDDHHMM
    timestamp is rounded up to the next multiple of 5; minutes past :55
    roll over into the next hour slot (+100 in HHMM encoding).
    Only newly appended rows are re-scanned.
    """
    try:
        group = h5file.getNode("/", "min5")
    except Exception:  # group missing on first run (NoSuchNodeError)
        group = h5file.createGroup("/", "min5")

    def getNewDate(olddate):
        # Round the minute part up to the next 5-minute boundary.
        # // keeps the Python-2 integer-division behavior and works on Python 3.
        base = olddate // 100 * 100
        m = olddate - base
        if m == 0:
            return base
        if m > 55:
            return base + 100  # rolls into the next hour slot
        return base + (m + 4) // 5 * 5

    for table in h5file.walkNodes("/data"):
        if type(table) != tables.table.Table:
            continue
        try:
            index_table = h5file.getNode(group, table.name)
        except Exception:  # index table missing on first run
            index_table = h5file.createTable(group, table.name, IndexRecord)
        if 0 == table.nrows:
            continue
        index_row = index_table.row
        if index_table.nrows:
            # Incremental update: resume from the start of the last indexed period.
            last_date = getNewDate(int(table[-1]['datetime']))
            pre_index_date = int(index_table[-1]['datetime'])
            if pre_index_date == last_date:
                continue  # index already covers the newest data
            startix = int(index_table[-1]['start'])
        else:
            # Fresh index: the first data row opens the first period.
            startix = 0
            pre_index_date = getNewDate(int(table[0]['datetime']))
            index_row['datetime'] = pre_index_date
            index_row['start'] = 0
            index_row.append()
        index = startix
        for row in table[startix:]:
            cur_index_date = getNewDate(int(row['datetime']))
            if cur_index_date != pre_index_date:
                # A new period begins at this row: record its offset.
                index_row['datetime'] = cur_index_date
                index_row['start'] = index
                index_row.append()
                pre_index_date = cur_index_date
            index += 1
        index_table.flush()
def UpdateDayDataAllIndex(h5file):
    """Build every derived-period index (week, month, quarter, half-year, year)
    for a day-level HDF5 data file."""
    for updater in (UpdateWeekIndex, UpdateMonthIndex, UpdateQuarterIndex,
                    UpdateHalfYearIndex, UpdateYearIndex):
        updater(h5file)
def Update5MinDataAllIndex(h5file):
    """Build the 15-minute, 30-minute and 60-minute indexes for a 5-minute
    HDF5 data file.

    Day-and-longer period indexes were intentionally left disabled here
    (they were commented out in the original source).
    """
    for updater in (UpdateFifteenMinIndex, UpdateHalfHourIndex, UpdateHourIndex):
        updater(h5file)
def Update1MinDataAllIndex(h5file):
    """Build only the 5-minute index for a 1-minute HDF5 data file.

    All longer-period indexes were intentionally left disabled here
    (they were commented out in the original source).
    """
    UpdateFiveMinIndex(h5file)
if __name__ == "__main__":
import time
starttime = time.time()
print "\nUpdate SH Day Data index ================> "
h5file = tables.openFile('d:/workspace/hikyuu/test/data/sh_day.h5', mode='a',
filters=tables.Filters(complevel=9,complib='zlib', shuffle=True))
UpdateDayDataAllIndex(h5file)
h5file.close()
print "\nUpdate SH 5min Data index ================> "
h5file = tables.openFile('d:/workspace/hikyuu/test/data/sh_5min.h5', mode='a',
filters=tables.Filters(complevel=9,complib='zlib', shuffle=True))
Update5MinDataAllIndex(h5file)
h5file.close()
print "\nUpdate SZ Day Data index ================> "
h5file = tables.openFile('d:/workspace/hikyuu/test/data/sz_day.h5', mode='a',
filters=tables.Filters(complevel=9,complib='zlib', shuffle=True))
UpdateDayDataAllIndex(h5file)
h5file.close()
print "\nUpdate SZ 5min Data index ================> "
h5file = tables.openFile('d:/workspace/hikyuu/test/data/sz_5min.h5', mode='a',
filters=tables.Filters(complevel=9,complib='zlib', shuffle=True))
Update5MinDataAllIndex(h5file)
h5file.close()
endtime = time.time()
print "\nTotal time:"
print "%.2fs" % (endtime-starttime)
print "%.2fm" % ((endtime-starttime)/60)
| 32.381415
| 103
| 0.544524
| 2,488
| 23,347
| 4.937299
| 0.062701
| 0.065939
| 0.048844
| 0.034191
| 0.854038
| 0.826522
| 0.822208
| 0.819766
| 0.811381
| 0.802752
| 0
| 0.070457
| 0.345269
| 23,347
| 720
| 104
| 32.426389
| 0.733155
| 0.031096
| 0
| 0.795222
| 0
| 0
| 0.056319
| 0.007001
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.001706
| 0.005119
| null | null | 0.011945
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c5abdcdf0011c5eff02c7431597d1902c2e051a9
| 31,060
|
py
|
Python
|
Codigo/Method.py
|
NicholasAlmeidaPinto/SimuladoresPythonIC
|
f0e1b072823b89095d8d75b78e6cb09414fc5d76
|
[
"MIT"
] | null | null | null |
Codigo/Method.py
|
NicholasAlmeidaPinto/SimuladoresPythonIC
|
f0e1b072823b89095d8d75b78e6cb09414fc5d76
|
[
"MIT"
] | null | null | null |
Codigo/Method.py
|
NicholasAlmeidaPinto/SimuladoresPythonIC
|
f0e1b072823b89095d8d75b78e6cb09414fc5d76
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 9 09:03:01 2017
@author: Nicholas de Almeida Pinto
"""
#import numpy as np
#Function that defines and calculates the TVD's method
def First_TVD(Sw,N, mio, miw, j, porosity, deltt, deltx, delty):
    """Advance the water-saturation field one time step with a TVD scheme.

    Sw       : 3-D array, Sw[i, k, t] = water saturation on an N x N grid at step t
    N        : grid size in each spatial direction
    mio, miw : oil and water viscosities (enter via the ratio miw/mio in the
               fractional-flow expression)
    j        : current time index; the result is written into Sw[:, :, j+1]
    porosity : 2-D array of cell porosities
    deltt, deltx, delty : time and space step sizes

    Returns Sw (also modified in place).  Relies on a helper ``Safe`` defined
    elsewhere in the project -- presumably a division guarded against a zero
    denominator (TODO confirm).
    """
    for i in range(N):
        for k in range(N):
            # Limited face values in x: Fax = upstream face, Fbx = downstream
            # face.  Cells at/near the boundary fall back to first-order
            # values because the full limiter stencil would leave the grid.
            if i == 0:
                Fax=Sw[i,k,j]
                Fbx=Sw[i,k,j]
            elif i == 1:
                Fax=Sw[i-1,k,j]
                Fbx=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
            elif i == N-1:
                Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                Fbx=Sw[i,k,j]
            else:
                Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                Fbx=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
            # Same construction in y.
            if k == 0:
                Fay=Sw[i,k,j]
                Fby=Sw[i,k,j]
            elif k == 1:
                Fay=Sw[i,k-1,j]
                Fby=Sw[i,k,j]+.5*(Sw[i,k+1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k-1,j],Sw[i,k+1,j]-Sw[i,k,j])))
            elif k == N-1:
                Fay=Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*max(0,min(1,Safe(Sw[i,k-1,j]-Sw[i,k-2,j],Sw[i,k,j]-Sw[i,k-1,j])))
                Fby=Sw[i,k,j]
            else:
                Fay=Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*max(0,min(1,Safe(Sw[i,k-1,j]-Sw[i,k-2,j],Sw[i,k,j]-Sw[i,k-1,j])))
                Fby=Sw[i,k,j]+.5*(Sw[i,k+1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k-1,j],Sw[i,k+1,j]-Sw[i,k,j])))
            # Net fractional-flow difference across the cell in each direction:
            # f(S) = S^2 / (S^2 + (miw/mio)(1-S)^2), evaluated via Safe.
            Fx = Safe((Fax**2),(Fax**2 + (miw/mio)*((1-Fax)**2))) - Safe((Fbx**2),(Fbx**2 + (miw/mio)*((1-Fbx)**2)))
            Fy = Safe((Fay**2),(Fay**2 + (miw/mio)*((1-Fay)**2))) - Safe((Fby**2),(Fby**2 + (miw/mio)*((1-Fby)**2)))
            # Explicit update of the next time level.
            Sw[i,k,j+1] = Sw[i,k,j] + ((Fx/deltx) + (Fy/delty))*deltt/porosity[i,k]
    return Sw
#-----------------------------------------------------------------------------#
#Function that defines and calculates the TVD's method
def TVD(Sw,N, mio, miw, j, porosity, deltt, deltx, delty):
    """Advance the saturation field one step with a bidirectional TVD scheme.

    Differs from 'First_TVD' in that every limited face value is also
    computed in the reverse sweep direction (the *n-suffixed variables,
    built with the opposite-signed indices), and the flow-direction tests
    near the end zero out whichever set does not apply for interior cells.
    Writes the result into Sw[:, :, j+1] and returns Sw.  Relies on helpers
    ``Safe`` and ``Conta`` defined elsewhere in the project (Conta
    presumably evaluates the fractional-flow function -- TODO confirm).
    """
    for i in range(N):
        for k in range(N):
            # Limited face values in x: forward (Fax/Fbx) and reverse
            # (Faxn/Fbxn) sweeps; boundary cells use first-order values.
            if i == 0:
                Fax=Sw[i,k,j]
                Fbx=Sw[i,k,j]
                Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                Fbxn=Sw[i,k,j]
            elif i == 1:
                Fax=Sw[i-1,k,j]
                Fbx=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                Fbxn=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
            elif i == N-2:
                Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                Fbx=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                Faxn=Sw[i+1,k,j]
                Fbxn=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
            elif i == N-1:
                Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                Fbx=Sw[i,k,j]
                Faxn=Sw[i,k,j]
                Fbxn=Sw[i,k,j]
            else:
                Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                Fbx=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                Fbxn=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
            # Same construction in y.
            if k == 0:
                Fay=Sw[i,k,j]
                Fby=Sw[i,k,j]
                Fayn=Sw[i,k+1,j]+.5*(Sw[i,k,j]-Sw[i,k+1,j])*max(0,min(1,Safe(Sw[i,k+1,j]-Sw[i,k+2,j],Sw[i,k,j]-Sw[i,k+1,j])))
                Fbyn=Sw[i,k,j]
            elif k == 1:
                Fay=Sw[i,k-1,j]
                Fby=Sw[i,k,j]+.5*(Sw[i,k+1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k-1,j],Sw[i,k+1,j]-Sw[i,k,j])))
                Fayn=Sw[i,k+1,j]+.5*(Sw[i,k,j]-Sw[i,k+1,j])*max(0,min(1,Safe(Sw[i,k+1,j]-Sw[i,k+2,j],Sw[i,k,j]-Sw[i,k+1,j])))
                Fbyn=Sw[i,k,j]+.5*(Sw[i,k-1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k+1,j],Sw[i,k-1,j]-Sw[i,k,j])))
            elif k == N-2:
                Fay=Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*max(0,min(1,Safe(Sw[i,k-1,j]-Sw[i,k-2,j],Sw[i,k,j]-Sw[i,k-1,j])))
                Fby=Sw[i,k,j]+.5*(Sw[i,k+1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k-1,j],Sw[i,k+1,j]-Sw[i,k,j])))
                Fayn=Sw[i,k+1,j]
                Fbyn=Sw[i,k,j]+.5*(Sw[i,k-1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k+1,j],Sw[i,k-1,j]-Sw[i,k,j])))
            elif k == N-1:
                Fay=Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*max(0,min(1,Safe(Sw[i,k-1,j]-Sw[i,k-2,j],Sw[i,k,j]-Sw[i,k-1,j])))
                Fby=Sw[i,k,j]
                Fayn=Sw[i,k,j]
                Fbyn=Sw[i,k,j]
            else:
                Fay=Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*max(0,min(1,Safe(Sw[i,k-1,j]-Sw[i,k-2,j],Sw[i,k,j]-Sw[i,k-1,j])))
                Fby=Sw[i,k,j]+.5*(Sw[i,k+1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k-1,j],Sw[i,k+1,j]-Sw[i,k,j])))
                Fayn=Sw[i,k+1,j]+.5*(Sw[i,k,j]-Sw[i,k+1,j])*max(0,min(1,Safe(Sw[i,k+1,j]-Sw[i,k+2,j],Sw[i,k,j]-Sw[i,k+1,j])))
                Fbyn=Sw[i,k,j]+.5*(Sw[i,k-1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k+1,j],Sw[i,k-1,j]-Sw[i,k,j])))
            # For interior cells keep only the sweep matching the local flow
            # direction (comparison with the upstream neighbour); zero the other.
            if i < N-1 and i > 0 and Sw[i,k,j] < Sw[i-1,k,j]:
                Faxn=0
                Fbxn=0
            elif i > 0 and i < N-1:
                Fax=0
                Fbx=0
            if k < N-1 and k > 0 and Sw[i,k,j] < Sw[i,k-1,j]:
                Fayn=0
                Fbyn=0
            elif k > 0 and k < N-1:
                Fay=0
                Fby=0
            # Net flux differences from both sweeps, then explicit update.
            Fx = Conta(Fax,miw,mio) - Conta(Fbx,miw,mio) + Conta(Faxn,miw,mio) - Conta(Fbxn,miw,mio)
            Fy = Conta(Fay,miw,mio) - Conta(Fby,miw,mio) + Conta(Fayn,miw,mio) - Conta(Fbyn,miw,mio)
            Sw[i,k,j+1]=Sw[i,k,j]+((Fx/deltx)+(Fy/delty))*deltt/porosity[i,k]
    return Sw
#-----------------------------------------------------------------------------#
def Original_NonStandard(Sw,N, mio, miw, j, porosity, deltt, deltx, delty):
    """Advance the saturation field one step with the original non-standard
    finite-difference scheme (exponential weighting of the time step).

    Writes the result into Sw[:, :, j+1] and returns Sw.
    NOTE(review): under Python 2 the sub-expression 1/(2*alpha) is integer
    division and evaluates to 0, which would zero phix/phiy; this code
    presumably targets Python 3 -- confirm.
    """
    import math #uses to calculate exp
    alpha = 2
    # Exponential time-step weights for each direction.
    phix=(1/(2*alpha))*(1-math.exp(-alpha*deltt/deltx))
    phiy=(1/(2*alpha))*(1-math.exp(-alpha*deltt/delty))
    for i in range(N):
        for k in range(N):
            # x-direction term: fractional flow Fax plus a diffusion-like
            # stencil; boundary cells (i==0, i==N-1) use a one-sided
            # neighbour so the stencil stays inside the grid.
            if i==0:
                Fax=(Sw[i+1,k,j]**2)/((Sw[i+1,k,j]**2)+(miw/mio)*((1-Sw[i+1,k,j])**2))
                Gx=phix*(alpha*(Sw[i+1,k,j]-2*Sw[i,k,j]+Sw[i,k,j])-Fax)*.5/porosity[i,k]
            elif i==N-1:
                Fax=(Sw[i,k,j]**2)/((Sw[i,k,j]**2)+(miw/mio)*((1-Sw[i,k,j])**2))
                Gx=phix*(alpha*(Sw[i,k,j]-2*Sw[i,k,j]+Sw[i-1,k,j])+Fax)*.5/porosity[i,k]
            else:
                Fax=(Sw[i+1,k,j]**2)/((Sw[i+1,k,j]**2)+(miw/mio)*((1-Sw[i+1,k,j])**2))
                Gx=phix*(alpha*(Sw[i+1,k,j]-2*Sw[i,k,j]+Sw[i-1,k,j])-Fax)*.5/porosity[i,k]
            # y-direction term, same construction.
            if k==0:
                Fay=(Sw[i,k+1,j]**2)/((Sw[i,k+1,j]**2)+(miw/mio)*((1-Sw[i,k+1,j])**2))
                Gy=phiy*(alpha*(Sw[i,k+1,j]-2*Sw[i,k,j]+Sw[i,k,j])-Fay)*.5/porosity[i,k]
            elif k==N-1:
                Fay=(Sw[i,k,j]**2)/((Sw[i,k,j]**2)+(miw/mio)*((1-Sw[i,k,j])**2))
                Gy=phiy*(alpha*(Sw[i,k,j]-2*Sw[i,k,j]+Sw[i,k-1,j])+Fay)*.5/porosity[i,k]
            else:
                Fay=(Sw[i,k+1,j]**2)/((Sw[i,k+1,j]**2)+(miw/mio)*((1-Sw[i,k+1,j])**2))
                Gy=phiy*(alpha*(Sw[i,k+1,j]-2*Sw[i,k,j]+Sw[i,k-1,j])-Fay)*.5/porosity[i,k]
            # Explicit update of the next time level.
            Sw[i,k,j+1]=Sw[i,k,j]+Gx+Gy
    return Sw
#-----------------------------------------------------------------------------#
def NonStandard(Sw,N, mio, miw, j, porosity, deltt, deltx, delty):
    """Advance the saturation field one step with the NonStandard scheme.

    Compared with Original_NonStandard, each direction uses BOTH a forward
    (Fax/Fay) and backward (Fbx/Fby) fractional-flow value, and interior
    cells pick the sign of the flux pair from the local saturation gradient.
    Writes the result into Sw[:, :, j+1] and returns Sw.
    NOTE(review): under Python 2 the sub-expression 1/(2*alpha) is integer
    division and evaluates to 0, which would zero phix/phiy; this code
    presumably targets Python 3 -- confirm.
    """
    import math #uses to calculate exp
    alpha = 2
    # Exponential time-step weights for each direction.
    phix=(1/(2*alpha))*(1-math.exp(-alpha*deltt/deltx))
    phiy=(1/(2*alpha))*(1-math.exp(-alpha*deltt/delty))
    for i in range(N):
        for k in range(N):
            if i==0:
                Fax=(Sw[i+1,k,j]**2)/((Sw[i+1,k,j]**2)+(miw/mio)*((1-Sw[i+1,k,j])**2))
                Fbx=(Sw[i,k,j]**2)/((Sw[i,k,j]**2)+(miw/mio)*((1-Sw[i,k,j])**2)) #Pay attention to lines below with comments, probabily the correct value is 0
                Gx=phix*(alpha*(Sw[i+1,k,j]-2*Sw[i,k,j]+Sw[i,k,j])-Fax+Fbx)*.5/porosity[i,k]
            elif i==N-1:
                Fax=(Sw[i,k,j]**2)/((Sw[i,k,j]**2)+(miw/mio)*((1-Sw[i,k,j])**2)) #this one
                Fbx=(Sw[i-1,k,j]**2)/((Sw[i-1,k,j]**2)+(miw/mio)*((1-Sw[i-1,k,j])**2))
                Gx=phix*(alpha*(Sw[i,k,j]-2*Sw[i,k,j]+Sw[i-1,k,j])+Fax-Fbx)*.5/porosity[i,k]
            else:
                Fax=(Sw[i+1,k,j]**2)/((Sw[i+1,k,j]**2)+(miw/mio)*((1-Sw[i+1,k,j])**2))
                Fbx=(Sw[i-1,k,j]**2)/((Sw[i-1,k,j]**2)+(miw/mio)*((1-Sw[i-1,k,j])**2))
                # Sign of the flux pair follows the local x-gradient of Sw.
                if Sw[i,k,j]>Sw[i+1,k,j]:
                    Gx=phix*(alpha*(Sw[i+1,k,j]-2*Sw[i,k,j]+Sw[i-1,k,j])-Fax+Fbx)*.5/porosity[i,k]
                else:
                    Gx=phix*(alpha*(Sw[i+1,k,j]-2*Sw[i,k,j]+Sw[i-1,k,j])+Fax-Fbx)*.5/porosity[i,k]
            if k==0:
                Fay=(Sw[i,k+1,j]**2)/((Sw[i,k+1,j]**2)+(miw/mio)*((1-Sw[i,k+1,j])**2))
                Fby=(Sw[i,k,j]**2)/((Sw[i,k,j]**2)+(miw/mio)*((1-Sw[i,k,j])**2)) # this one
                Gy=phiy*(alpha*(Sw[i,k+1,j]-2*Sw[i,k,j]+Sw[i,k,j])-Fay+Fby)*.5/porosity[i,k]
            elif k==N-1:
                Fay=(Sw[i,k,j]**2)/((Sw[i,k,j]**2)+(miw/mio)*((1-Sw[i,k,j])**2)) #this one too
                Fby=(Sw[i,k-1,j]**2)/((Sw[i,k-1,j]**2)+(miw/mio)*((1-Sw[i,k-1,j])**2))
                Gy=phiy*(alpha*(Sw[i,k,j]-2*Sw[i,k,j]+Sw[i,k-1,j])+Fay-Fby)*.5/porosity[i,k]
            else:
                Fay=(Sw[i,k+1,j]**2)/((Sw[i,k+1,j]**2)+(miw/mio)*((1-Sw[i,k+1,j])**2))
                Fby=(Sw[i,k-1,j]**2)/((Sw[i,k-1,j]**2)+(miw/mio)*((1-Sw[i,k-1,j])**2))
                # Sign of the flux pair follows the local y-gradient of Sw.
                if Sw[i,k,j]>Sw[i,k+1,j]:
                    Gy=phiy*(alpha*(Sw[i,k+1,j]-2*Sw[i,k,j]+Sw[i,k-1,j])-Fay+Fby)*.5/porosity[i,k]
                else:
                    Gy=phiy*(alpha*(Sw[i,k+1,j]-2*Sw[i,k,j]+Sw[i,k-1,j])+Fay-Fby)*.5/porosity[i,k]
            # Explicit update of the next time level.
            Sw[i,k,j+1]=Sw[i,k,j]+Gx+Gy
    return Sw
#-----------------------------------------------------------------------------#
def Test_New_Simulator(Sw,N, mio, miw, j, porosity, deltt, deltx, delty):
    """Experimental hybrid step: TVD-style limited face values combined with
    the NonStandard scheme's exponential weights phix/phiy.

    Writes the result into Sw[:, :, j+1] and returns Sw.  Relies on a helper
    ``Safe`` defined elsewhere in the project -- presumably a division
    guarded against a zero denominator (TODO confirm).
    """
    # Face values are pre-zeroed so every branch leaves them defined.
    Fax = 0.
    Fbx = 0.
    Fay = 0.
    Fby = 0.
    Faxn = 0.
    Fbxn = 0.
    Fayn = 0.
    Fbyn = 0.
    Fy = 0.
    Fx = 0.
    import math #uses to calculate exp
    alpha = 2
    # Exponential time-step weights for each direction.
    phix=(1/(2*alpha))*(1-math.exp(-alpha*deltt/deltx))
    phiy=(1/(2*alpha))*(1-math.exp(-alpha*deltt/delty))
    for i in range(N):
        for k in range(N):
            # Limited face values in x: forward (Fax/Fbx) and reverse
            # (Faxn/Fbxn) sweeps; boundary cells use first-order values.
            if i == 0:
                Fax=Sw[i,k,j]
                Fbx=Sw[i,k,j]
                Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                Fbxn=Sw[i,k,j]
            elif i == 1:
                Fax=Sw[i-1,k,j]
                Fbx=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                Fbxn=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
            elif i == N-2:
                Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                Fbx=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                Faxn=Sw[i+1,k,j]
                Fbxn=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
            elif i == N-1:
                Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                Fbx=Sw[i,k,j]
                Faxn=Sw[i,k,j]
                Fbxn=Sw[i,k,j]
            else:
                Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                Fbx=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                Fbxn=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
            # Same construction in y.
            if k == 0:
                Fay=Sw[i,k,j]
                Fby=Sw[i,k,j]
                Fayn=Sw[i,k+1,j]+.5*(Sw[i,k,j]-Sw[i,k+1,j])*max(0,min(1,Safe(Sw[i,k+1,j]-Sw[i,k+2,j],Sw[i,k,j]-Sw[i,k+1,j])))
                Fbyn=Sw[i,k,j]
            elif k == 1:
                Fay=Sw[i,k-1,j]
                Fby=Sw[i,k,j]+.5*(Sw[i,k+1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k-1,j],Sw[i,k+1,j]-Sw[i,k,j])))
                Fayn=Sw[i,k+1,j]+.5*(Sw[i,k,j]-Sw[i,k+1,j])*max(0,min(1,Safe(Sw[i,k+1,j]-Sw[i,k+2,j],Sw[i,k,j]-Sw[i,k+1,j])))
                Fbyn=Sw[i,k,j]+.5*(Sw[i,k-1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k+1,j],Sw[i,k-1,j]-Sw[i,k,j])))
            elif k == N-2:
                Fay=Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*max(0,min(1,Safe(Sw[i,k-1,j]-Sw[i,k-2,j],Sw[i,k,j]-Sw[i,k-1,j])))
                Fby=Sw[i,k,j]+.5*(Sw[i,k+1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k-1,j],Sw[i,k+1,j]-Sw[i,k,j])))
                Fayn=Sw[i,k+1,j]
                Fbyn=Sw[i,k,j]+.5*(Sw[i,k-1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k+1,j],Sw[i,k-1,j]-Sw[i,k,j])))
            elif k == N-1:
                Fay=Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*max(0,min(1,Safe(Sw[i,k-1,j]-Sw[i,k-2,j],Sw[i,k,j]-Sw[i,k-1,j])))
                Fby=Sw[i,k,j]
                Fayn=Sw[i,k,j]
                Fbyn=Sw[i,k,j]
            else:
                Fay=Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*max(0,min(1,Safe(Sw[i,k-1,j]-Sw[i,k-2,j],Sw[i,k,j]-Sw[i,k-1,j])))
                Fby=Sw[i,k,j]+.5*(Sw[i,k+1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k-1,j],Sw[i,k+1,j]-Sw[i,k,j])))
                Fayn=Sw[i,k+1,j]+.5*(Sw[i,k,j]-Sw[i,k+1,j])*max(0,min(1,Safe(Sw[i,k+1,j]-Sw[i,k+2,j],Sw[i,k,j]-Sw[i,k+1,j])))
                Fbyn=Sw[i,k,j]+.5*(Sw[i,k-1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k+1,j],Sw[i,k-1,j]-Sw[i,k,j])))
            # For interior cells keep only the sweep matching the local flow
            # direction; zero the other set of face values.
            if i < N-1 and i > 0 and Sw[i,k,j] < Sw[i-1,k,j]:
                Faxn=0
                Fbxn=0
            elif i > 0 and i < N-1:
                Fax=0
                Fbx=0
            if k < N-1 and k > 0 and Sw[i,k,j] < Sw[i,k-1,j]:
                Fayn=0
                Fbyn=0
            elif k > 0 and k < N-1:
                Fay=0
                Fby=0
            # Net fractional-flow differences from both sweeps (f(S) = S^2 /
            # (S^2 + (miw/mio)(1-S)^2) via Safe), then the weighted update.
            Fx=(Safe((Faxn**2),(Faxn**2+(miw/mio)*((1-Faxn)**2)))-Safe((Fbxn**2),(Fbxn**2+(miw/mio)*((1-Fbxn)**2)))) + (Safe((Fax**2),(Fax**2+(miw/mio)*((1-Fax)**2)))-Safe((Fbx**2),(Fbx**2+(miw/mio)*((1-Fbx)**2))))
            Fy=(Safe((Fayn**2),(Fayn**2+(miw/mio)*((1-Fayn)**2)))-Safe((Fbyn**2),(Fbyn**2+(miw/mio)*((1-Fbyn)**2)))) + (Safe((Fay**2),(Fay**2+(miw/mio)*((1-Fay)**2)))-Safe((Fby**2),(Fby**2+(miw/mio)*((1-Fby)**2))))
            Sw[i,k,j+1]=Sw[i,k,j]+(phix*Fx+phiy*Fy)/porosity[i,k]
    return Sw
#-----------------------------------------------------------------------------#
def NewTVD(Sw,N, mio, miw, j, porosity, deltt, deltx, delty):
    '''Advance the water-saturation field one time step with a TVD scheme.

    Second-order upwind face values are built in both grid directions and
    slope-limited with the max(0, min(1, ratio)) factor (a minmod-style
    limiter); the limited saturations are turned into fluxes by Conta and
    used in an explicit update of time slice j+1.

    Fa means the water is increasing, Fb means the water is going out;
    the 'n' suffix (Faxn, Fbxn, ...) marks the faces for a front moving in
    the negative direction.

    Parameters:
        Sw       -- 3-D saturation array indexed Sw[i, k, j] (x, y, time)
        N        -- number of cells in each spatial direction
        mio, miw -- oil and water viscosities (only the ratio miw/mio is used)
        j        -- current time index; the result is written to slice j+1
        porosity -- 2-D array of cell porosities indexed [i, k]
        deltt    -- time step
        deltx, delty -- grid spacings in x and y

    Returns:
        Sw, with time slice j+1 filled in (modified in place and returned).
    '''
    for i in range(N):
        for k in range(N):
            # Default all face values to zero; boundary branches below only
            # fill the faces that exist at that boundary.
            Fax = 0.
            Fbx = 0.
            Faxn = 0.
            Fbxn = 0.
            Fay = 0.
            Fby = 0.
            Fayn = 0.
            Fbyn = 0.
            # --- x direction -------------------------------------------------
            # Cells at i = 0, 1, N-2, N-1 need special stencils because the
            # full limiter references i-2 / i+2 neighbours that fall outside
            # the grid; the commented-out lines show the full-stencil form
            # that was dropped at each boundary.
            if i == 0:
                if Sw[i,k,j]>Sw[i+1,k,j]:
                    #Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                    Fbx=Sw[i,k,j]#0=+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                elif Sw[i,k,j]<Sw[i+1,k,j]:
                    Fax=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                    #Fbx=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
            elif i == 1:
                if Sw[i,k,j]>Sw[i+1,k,j]:
                    #Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                    Fbx=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                elif Sw[i,k,j]<Sw[i+1,k,j]:
                    Fax=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                    #Fbx=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
                # Negative-direction faces: i-2 is out of range here, so the
                # limiter on Faxn degenerates to the first-order value.
                if Sw[i-1,k,j]>Sw[i,k,j]:
                    Faxn=Sw[i-1,k,j]#0=+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-1,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                    #Fbxn=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                elif Sw[i-1,k,j]<Sw[i,k,j]:
                    #Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                    Fbxn=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
            elif i == N-1:
                # Last cell: only negative-direction faces exist.
                if Sw[i-1,k,j]>Sw[i,k,j]:
                    Faxn=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                    #Fbxn=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                elif Sw[i-1,k,j]<Sw[i,k,j]:
                    #Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                    Fbxn=Sw[i,k,j]#0=+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
            elif i == N-2:
                # Next-to-last cell: i+2 is out of range, so the limiter on
                # Fax degenerates to the first-order value.
                if Sw[i,k,j]>Sw[i+1,k,j]:
                    #Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                    Fbx=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                elif Sw[i,k,j]<Sw[i+1,k,j]:
                    Fax=Sw[i+1,k,j]#0=+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+1,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                    #Fbx=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
                if Sw[i-1,k,j]>Sw[i,k,j]:
                    Faxn=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                    #Fbxn=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                elif Sw[i-1,k,j]<Sw[i,k,j]:
                    #Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                    Fbxn=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
            else:
                # Interior cell: full limited stencil in both directions.
                if Sw[i,k,j]>Sw[i+1,k,j]:
                    #Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                    Fbx=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                elif Sw[i,k,j]<Sw[i+1,k,j]:
                    Fax=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                    #Fbx=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
                if Sw[i-1,k,j]>Sw[i,k,j]:
                    Faxn=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                    #Fbxn=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                elif Sw[i-1,k,j]<Sw[i,k,j]:
                    #Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                    Fbxn=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
            # --- y direction -------------------------------------------------
            # Mirror of the x-direction ladder with the k index; the same
            # boundary degeneracies apply at k = 0, 1, N-2, N-1.
            if k == 0:
                if Sw[i,k,j]>Sw[i,k+1,j]:
                    #Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                    Fby=Sw[i,k,j]#0=+.5*(Sw[i,k+1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k,j],Sw[i,k+1,j]-Sw[i,k,j])))
                elif Sw[i,k,j]<Sw[i,k+1,j]:
                    Fay=Sw[i,k+1,j]+.5*(Sw[i,k,j]-Sw[i,k+1,j])*max(0,min(1,Safe(Sw[i,k+1,j]-Sw[i,k+2,j],Sw[i,k,j]-Sw[i,k+1,j])))
                    #Fbx=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
            elif k == 1:
                if Sw[i,k,j]>Sw[i,k+1,j]:
                    #Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                    Fby=Sw[i,k,j]+.5*(Sw[i,k+1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k-1,j],Sw[i,k+1,j]-Sw[i,k,j])))
                elif Sw[i,k,j]<Sw[i,k+1,j]:
                    Fay=Sw[i,k+1,j]+.5*(Sw[i,k,j]-Sw[i,k+1,j])*max(0,min(1,Safe(Sw[i,k+1,j]-Sw[i,k+2,j],Sw[i,k,j]-Sw[i,k+1,j])))
                    #Fbx=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
                if Sw[i,k-1,j]>Sw[i,k,j]:
                    Fayn=Sw[i,k-1,j]#0=+.5*(Sw[i,k,j]-Sw[i,k-1,j])*max(0,min(1,Safe(Sw[i,k-1,j]-Sw[i,k-1,j],Sw[i,k,j]-Sw[i,k-1,j])))
                    #Fbxn=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                elif Sw[i,k-1,j]<Sw[i,k,j]:
                    #Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                    Fbyn=Sw[i,k,j]+.5*(Sw[i,k-1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k+1,j],Sw[i,k-1,j]-Sw[i,k,j])))
            elif k == N-1:
                if Sw[i,k-1,j]>Sw[i,k,j]:
                    Fayn=Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*max(0,min(1,Safe(Sw[i,k-1,j]-Sw[i,k-2,j],Sw[i,k,j]-Sw[i,k-1,j])))
                    #Fbxn=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                elif Sw[i,k-1,j]<Sw[i,k,j]:
                    #Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                    Fbyn=Sw[i,k,j]#0=+.5*(Sw[i,k-1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k,j],Sw[i,k-1,j]-Sw[i,k,j])))
            elif k == N-2:
                if Sw[i,k,j]>Sw[i,k+1,j]:
                    #Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                    Fby=Sw[i,k,j]+.5*(Sw[i,k+1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k-1,j],Sw[i,k+1,j]-Sw[i,k,j])))
                elif Sw[i,k,j]<Sw[i,k+1,j]:
                    Fay=Sw[i,k+1,j]#0=+.5*(Sw[i,k,j]-Sw[i,k+1,j])*max(0,min(1,Safe(Sw[i,k+1,j]-Sw[i,k+1,j],Sw[i,k,j]-Sw[i,k+1,j])))
                    #Fbx=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
                if Sw[i,k-1,j]>Sw[i,k,j]:
                    Fayn=Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*max(0,min(1,Safe(Sw[i,k-1,j]-Sw[i,k-2,j],Sw[i,k,j]-Sw[i,k-1,j])))
                    #Fbxn=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                elif Sw[i,k-1,j]<Sw[i,k,j]:
                    #Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                    Fbyn=Sw[i,k,j]+.5*(Sw[i,k-1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k+1,j],Sw[i,k-1,j]-Sw[i,k,j])))
            else:
                if Sw[i,k,j]>Sw[i,k+1,j]:
                    #Fax=Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*max(0,min(1,Safe(Sw[i-1,k,j]-Sw[i-2,k,j],Sw[i,k,j]-Sw[i-1,k,j])))
                    Fby=Sw[i,k,j]+.5*(Sw[i,k+1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k-1,j],Sw[i,k+1,j]-Sw[i,k,j])))
                elif Sw[i,k,j]<Sw[i,k+1,j]:
                    Fay=Sw[i,k+1,j]+.5*(Sw[i,k,j]-Sw[i,k+1,j])*max(0,min(1,Safe(Sw[i,k+1,j]-Sw[i,k+2,j],Sw[i,k,j]-Sw[i,k+1,j])))
                    #Fbx=Sw[i,k,j]+.5*(Sw[i-1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i+1,k,j],Sw[i-1,k,j]-Sw[i,k,j])))
                if Sw[i,k-1,j]>Sw[i,k,j]:
                    Fayn=Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*max(0,min(1,Safe(Sw[i,k-1,j]-Sw[i,k-2,j],Sw[i,k,j]-Sw[i,k-1,j])))
                    #Fbxn=Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i-1,k,j],Sw[i+1,k,j]-Sw[i,k,j])))
                elif Sw[i,k-1,j]<Sw[i,k,j]:
                    #Faxn=Sw[i+1,k,j]+.5*(Sw[i,k,j]-Sw[i+1,k,j])*max(0,min(1,Safe(Sw[i+1,k,j]-Sw[i+2,k,j],Sw[i,k,j]-Sw[i+1,k,j])))
                    Fbyn=Sw[i,k,j]+.5*(Sw[i,k-1,j]-Sw[i,k,j])*max(0,min(1,Safe(Sw[i,k,j]-Sw[i,k+1,j],Sw[i,k-1,j]-Sw[i,k,j])))
            # Net flux differences per direction: limited saturations are
            # mapped through the fractional-flow function Conta before being
            # differenced (in - out, for both front directions).
            Fx = Conta(Fax,miw,mio) - Conta(Fbx,miw,mio) + Conta(Faxn,miw,mio) - Conta(Fbxn,miw,mio)
            Fy = Conta(Fay,miw,mio) - Conta(Fby,miw,mio) + Conta(Fayn,miw,mio) - Conta(Fbyn,miw,mio)
            # Explicit update of the next time slice, scaled by cell porosity.
            Sw[i,k,j+1] = Sw[i,k,j] + ((Fx/deltx) + (Fy/delty))*deltt/porosity[i,k]
    return Sw
#-----------------------------------------------------------------------------#
def TVD_NonStandard_Not_Both_Directions(Sw,N, mio, miw, j, porosity, deltt, deltx, delty):
    '''Advance the water-saturation field one time step (non-standard TVD).

    Face saturations are corrected with half the neighbour's fractional-flow
    value instead of a slope limiter, and only one front direction is
    considered per axis. Boundary cells (index 0 and N-1) fall back to
    first-order face values.

    Parameters:
        Sw       -- 3-D saturation array indexed Sw[i, k, j] (x, y, time)
        N        -- number of cells in each spatial direction
        mio, miw -- oil and water viscosities (only the ratio miw/mio is used)
        j        -- current time index; the result is written to slice j+1
        porosity -- 2-D array of cell porosities indexed [i, k]
        deltt    -- time step
        deltx, delty -- grid spacings in x and y

    Returns:
        Sw, with time slice j+1 filled in (modified in place and returned).
    '''
    ratio = miw/mio  # viscosity ratio, hoisted out of the loops

    def _frac_flow(s):
        # Fractional-flow value s^2 / (s^2 + (miw/mio)*(1-s)^2) used as the
        # face-correction weight (denominator can only vanish for s==0 with
        # ratio==0, which the inputs here do not produce).
        return (s**2)/((s**2)+ratio*((1-s)**2))

    for i in range(N):
        for k in range(N):
            # --- x-direction faces ---
            if i == 0:
                # Inlet boundary: no upstream neighbour, first-order faces.
                Fax = Sw[i,k,j]
                Fbx = Sw[i,k,j]
            elif i == N-1:
                # Outlet boundary: corrected inflow face, first-order outflow.
                # BUGFIX: the correction factor was assigned to a misspelled
                # name ('Nxa'), leaving the multiplier zero; now it matches
                # the k == N-1 branch below.
                Nax = _frac_flow(Sw[i-1,k,j])
                Fax = Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*Nax
                Fbx = Sw[i,k,j]
            else:
                Nax = _frac_flow(Sw[i-1,k,j])
                Fax = Sw[i-1,k,j]+.5*(Sw[i,k,j]-Sw[i-1,k,j])*Nax
                Nbx = _frac_flow(Sw[i+1,k,j])
                Fbx = Sw[i,k,j]+.5*(Sw[i+1,k,j]-Sw[i,k,j])*Nbx
            # --- y-direction faces (mirror of the x ladder with index k) ---
            if k == 0:
                Fay = Sw[i,k,j]
                Fby = Sw[i,k,j]
            elif k == N-1:
                Nay = _frac_flow(Sw[i,k-1,j])
                Fay = Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*Nay
                Fby = Sw[i,k,j]
            else:
                Nay = _frac_flow(Sw[i,k-1,j])
                Fay = Sw[i,k-1,j]+.5*(Sw[i,k,j]-Sw[i,k-1,j])*Nay
                Nby = _frac_flow(Sw[i,k+1,j])
                Fby = Sw[i,k,j]+.5*(Sw[i,k+1,j]-Sw[i,k,j])*Nby
            # Net flux difference per direction: face saturations mapped
            # through the fractional-flow function (Safe guards the division).
            Fx = Safe((Fax**2),(Fax**2 + ratio*((1-Fax)**2))) - Safe((Fbx**2),(Fbx**2 + ratio*((1-Fbx)**2)))
            Fy = Safe((Fay**2),(Fay**2 + ratio*((1-Fay)**2))) - Safe((Fby**2),(Fby**2 + ratio*((1-Fby)**2)))
            # Explicit update of the next time slice, scaled by cell porosity.
            Sw[i,k,j+1] = Sw[i,k,j] + ((Fx/deltx) + (Fy/delty))*deltt/porosity[i,k]
    return Sw
#-----------------------------------------------------------------------------#
def Conta(F,miw,mio):
    """Fractional-flow value for water saturation F.

    Computes F^2 / (F^2 + (miw/mio)*(1-F)^2) via Safe, so a zero
    denominator yields 0 instead of raising ZeroDivisionError.
    """
    numerator = F**2
    denominator = F**2 + (miw/mio)*((1-F)**2)
    return Safe(numerator, denominator)
#-----------------------------------------------------------------------------#
#Safe division, prevents division by zero
def Safe(a,b):
    """Safe division: return a/b, or 0 when b is zero."""
    return 0 if b == 0 else a/b
#-----------------------------------------------------------------------------#
| 60.428016
| 215
| 0.398551
| 7,460
| 31,060
| 1.658177
| 0.020509
| 0.274535
| 0.244462
| 0.205335
| 0.928052
| 0.923363
| 0.921665
| 0.921342
| 0.920534
| 0.915198
| 0
| 0.058976
| 0.266838
| 31,060
| 513
| 216
| 60.545809
| 0.484235
| 0.201062
| 0
| 0.880637
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023873
| false
| 0
| 0.007958
| 0.002653
| 0.058355
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
c5e115af3c072382a00c110d88f4fb99ea9de793
| 146
|
py
|
Python
|
pystellibs/future/__init__.py
|
lcjohnso/pystellibs
|
3df5d029df61e5488c5dae3422ec8b616298e85f
|
[
"MIT"
] | 5
|
2016-10-25T13:37:55.000Z
|
2020-11-03T09:46:17.000Z
|
pystellibs/future/__init__.py
|
lcjohnso/pystellibs
|
3df5d029df61e5488c5dae3422ec8b616298e85f
|
[
"MIT"
] | 2
|
2018-06-01T21:50:29.000Z
|
2019-01-05T21:12:14.000Z
|
pystellibs/future/__init__.py
|
lcjohnso/pystellibs
|
3df5d029df61e5488c5dae3422ec8b616298e85f
|
[
"MIT"
] | 3
|
2016-10-18T16:52:39.000Z
|
2019-11-13T20:09:56.000Z
|
import matplotlib.path
if hasattr(matplotlib.path.Path, 'contains_points'):
from matplotlib.path import Path
else:
from .path import Path
| 24.333333
| 52
| 0.767123
| 20
| 146
| 5.55
| 0.45
| 0.378378
| 0.252252
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150685
| 146
| 5
| 53
| 29.2
| 0.895161
| 0
| 0
| 0
| 0
| 0
| 0.10274
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c5ef9b34d2c806413107877160ef807fd5daeb11
| 5,967
|
py
|
Python
|
construct/lib/binary.py
|
tmr232/construct
|
208df16e51f8fc35f5c924a5cabc623eb425bc4e
|
[
"MIT"
] | 2
|
2016-06-04T16:49:11.000Z
|
2017-07-21T10:21:12.000Z
|
construct/lib/binary.py
|
tmr232/construct
|
208df16e51f8fc35f5c924a5cabc623eb425bc4e
|
[
"MIT"
] | 1
|
2019-10-28T16:26:19.000Z
|
2019-10-28T21:38:18.000Z
|
construct/lib/binary.py
|
tmr232/construct
|
208df16e51f8fc35f5c924a5cabc623eb425bc4e
|
[
"MIT"
] | 2
|
2016-03-18T04:30:35.000Z
|
2018-03-28T16:57:43.000Z
|
import six
from construct.lib.py3compat import int2byte
if six.PY3:
def int_to_bin(number, width = 32):
r"""
Convert an integer into its binary representation in a bytes object.
Width is the amount of bits to generate. If width is larger than the actual
amount of bits required to represent number in binary, sign-extension is
used. If it's smaller, the representation is trimmed to width bits.
Each "bit" is either '\x00' or '\x01'. The MSBit is first.
Examples:
>>> int_to_bin(19, 5)
b'\x01\x00\x00\x01\x01'
>>> int_to_bin(19, 8)
b'\x00\x00\x00\x01\x00\x00\x01\x01'
"""
number = int(number)
if number < 0:
number += 1 << width
i = width - 1
bits = bytearray(width)
while number and i >= 0:
bits[i] = number & 1
number >>= 1
i -= 1
return bytes(bits)
# heavily optimized for performance
def bin_to_int(bits, signed = False):
r"""
Logical opposite of int_to_bin. Both '0' and '\x00' are considered zero,
and both '1' and '\x01' are considered one. Set sign to True to interpret
the number as a 2-s complement signed integer.
"""
bits = "".join("01"[b & 1] for b in bits)
if signed and bits[0] == "1":
bits = bits[1:]
bias = 1 << len(bits)
else:
bias = 0
return int(bits, 2) - bias
_char_to_bin = [0] * 256
_bin_to_char = {}
for i in range(256):
ch = int2byte(i)
bin = int_to_bin(i, 8)
# Populate with for both keys i and ch, to support Python 2 & 3
_char_to_bin[i] = bin
_bin_to_char[bin] = ord(ch)
def encode_bin(data):
"""
Create a binary representation of the given b'' object. Assume 8-bit
ASCII. Example:
>>> encode_bin('ab')
b"\x00\x01\x01\x00\x00\x00\x00\x01\x00\x01\x01\x00\x00\x00\x01\x00"
"""
return six.b("").join(_char_to_bin[int(ch)] for ch in data)
def decode_bin(data):
if len(data) & 7:
raise ValueError("Data length must be a multiple of 8")
i = 0
j = 0
l = len(data) // 8
arr = bytearray(l)
while j < l:
arr[j] = _bin_to_char[data[i:i+8]]
i += 8
j += 1
return arr
def swap_bytes(bits, bytesize=8):
r"""
Bits is a b'' object containing a binary representation. Assuming each
bytesize bits constitute a bytes, perform a endianness byte swap. Example:
>>> swap_bytes(b'00011011', 2)
b'11100100'
"""
i = 0
l = len(bits)
output = [six.b("")] * ((l // bytesize) + 1)
j = len(output) - 1
while i < l:
output[j] = bits[i : i + bytesize]
i += bytesize
j -= 1
return six.b("").join(output)
else:
def int_to_bin(number, width = 32):
r"""
Convert an integer into its binary representation in a bytes object.
Width is the amount of bits to generate. If width is larger than the actual
amount of bits required to represent number in binary, sign-extension is
used. If it's smaller, the representation is trimmed to width bits.
Each "bit" is either '\x00' or '\x01'. The MSBit is first.
Examples:
>>> int_to_bin(19, 5)
'\x01\x00\x00\x01\x01'
>>> int_to_bin(19, 8)
'\x00\x00\x00\x01\x00\x00\x01\x01'
"""
if number < 0:
number += 1 << width
i = width - 1
bits = ["\x00"] * width
while number and i >= 0:
bits[i] = "\x00\x01"[number & 1]
number >>= 1
i -= 1
return "".join(bits)
# heavily optimized for performance
def bin_to_int(bits, signed = False):
r"""
Logical opposite of int_to_bin. Both '0' and '\x00' are considered zero,
and both '1' and '\x01' are considered one. Set sign to True to interpret
the number as a 2-s complement signed integer.
"""
bits = "".join("01"[ord(b) & 1] for b in bits)
if signed and bits[0] == "1":
bits = bits[1:]
bias = 1 << len(bits)
else:
bias = 0
return int(bits, 2) - bias
_char_to_bin = [0] * 256
_bin_to_char = {}
for i in range(256):
ch = int2byte(i)
bin = int_to_bin(i, 8)
# Populate with for both keys i and ch, to support Python 2 & 3
_char_to_bin[i] = bin
_bin_to_char[bin] = ch
def encode_bin(data):
"""
Create a binary representation of the given b'' object. Assume 8-bit
ASCII. Example:
>>> encode_bin('ab')
b"\x00\x01\x01\x00\x00\x00\x00\x01\x00\x01\x01\x00\x00\x00\x01\x00"
"""
return "".join(_char_to_bin[ord(ch)] for ch in data)
def decode_bin(data):
if len(data) & 7:
raise ValueError("Data length must be a multiple of 8")
i = 0
j = 0
l = len(data) // 8
chars = [""] * l
while j < l:
chars[j] = _bin_to_char[data[i:i+8]]
i += 8
j += 1
return "".join(chars)
def swap_bytes(bits, bytesize=8):
r"""
Bits is a b'' object containing a binary representation. Assuming each
bytesize bits constitute a bytes, perform a endianness byte swap. Example:
>>> swap_bytes(b'00011011', 2)
b'11100100'
"""
i = 0
l = len(bits)
output = [""] * ((l // bytesize) + 1)
j = len(output) - 1
while i < l:
output[j] = bits[i : i + bytesize]
i += bytesize
j -= 1
return "".join(output)
| 31.739362
| 83
| 0.516005
| 837
| 5,967
| 3.594982
| 0.157706
| 0.035892
| 0.026587
| 0.023928
| 0.92456
| 0.92456
| 0.92456
| 0.909937
| 0.876703
| 0.876703
| 0
| 0.074143
| 0.369365
| 5,967
| 187
| 84
| 31.909091
| 0.725485
| 0.368359
| 0
| 0.706422
| 0
| 0
| 0.027682
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091743
| false
| 0
| 0.018349
| 0
| 0.201835
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
681a6a8f948814dc0845d6f964af7ca6a09490d3
| 160
|
py
|
Python
|
domain/services/connect.py
|
Srinjay-hack/Buddy
|
155b9ba58a20bf043493213dd8349f61012fc480
|
[
"Apache-2.0"
] | null | null | null |
domain/services/connect.py
|
Srinjay-hack/Buddy
|
155b9ba58a20bf043493213dd8349f61012fc480
|
[
"Apache-2.0"
] | null | null | null |
domain/services/connect.py
|
Srinjay-hack/Buddy
|
155b9ba58a20bf043493213dd8349f61012fc480
|
[
"Apache-2.0"
] | null | null | null |
from domain.models import Caller,Assistant
from accounts.models import Caller as ConnectCaller
from accounts.models import Assistant as ConnectAssistant
| 14.545455
| 57
| 0.83125
| 20
| 160
| 6.65
| 0.5
| 0.270677
| 0.270677
| 0.360902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14375
| 160
| 10
| 58
| 16
| 0.970803
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a88124bdd0a8d6d705d0b3e8ff19123ccf577665
| 550
|
py
|
Python
|
eval_covid19china_timm-regnetx_002_MedianBlur.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid19china_timm-regnetx_002_MedianBlur.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid19china_timm-regnetx_002_MedianBlur.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os
ls=["python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_0_MedianBlur.yml",
"python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_1_MedianBlur.yml",
"python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_2_MedianBlur.yml",
"python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_3_MedianBlur.yml",
"python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_4_MedianBlur.yml",
]
for l in ls:
os.system(l)
| 50
| 104
| 0.849091
| 80
| 550
| 5.4625
| 0.3
| 0.114416
| 0.1373
| 0.217391
| 0.897025
| 0.897025
| 0.897025
| 0.897025
| 0.897025
| 0.897025
| 0
| 0.057803
| 0.056364
| 550
| 11
| 105
| 50
| 0.7842
| 0
| 0
| 0
| 0
| 0
| 0.880218
| 0.653358
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a8b3ea68d6b426525fb29ef103c406483a9e5e90
| 18,847
|
py
|
Python
|
tests/test_json.py
|
atilaneves/reggae-python
|
6d98e487733dfb2f870296e96ccd12b7f7a4d09a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_json.py
|
atilaneves/reggae-python
|
6d98e487733dfb2f870296e96ccd12b7f7a4d09a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_json.py
|
atilaneves/reggae-python
|
6d98e487733dfb2f870296e96ccd12b7f7a4d09a
|
[
"BSD-3-Clause"
] | null | null | null |
from reggae.build import Target, Build, optional
from reggae.rules import * # noqa
from json import dumps, loads
import pytest
def test_target():
tgt = Target("foo.d")
assert tgt.jsonify() == {"type": "fixed",
"command": {},
"outputs": ["foo.d"],
"dependencies": {"type": "fixed", "targets": []},
"implicits": {"type": "fixed", "targets": []}}
json = dumps(tgt.jsonify())
assert loads(json) == tgt.jsonify()
def test_optional_target():
tgt = optional(Target("foo.d"))
assert tgt.jsonify() == {"type": "fixed",
"command": {},
"outputs": ["foo.d"],
"dependencies": {"type": "fixed", "targets": []},
"implicits": {"type": "fixed", "targets": []},
"optional": True}
json = dumps(tgt.jsonify())
assert loads(json) == tgt.jsonify()
def test_build():
build = Build(Target("foo", "dmd -offoo foo.d", [Target("foo.d")]))
assert build.jsonify() == [{"type": "fixed",
"command": {"type": "shell",
"cmd": "dmd -offoo foo.d"},
"outputs": ["foo"],
"dependencies": {"type": "fixed",
"targets":
[{"type": "fixed",
"command": {},
"outputs": ["foo.d"],
"dependencies": {
"type": "fixed",
"targets": []},
"implicits": {
"type": "fixed",
"targets": []}}]},
"implicits": {"type": "fixed", "targets": []}}]
json = dumps(build.jsonify())
assert(loads(json) == build.jsonify())
def test_link_foo():
mainObj = Target("main.o",
"dmd -I$project/src -c $in -of$out",
Target("src/main.d"))
assert mainObj.jsonify() == \
{"type": "fixed",
"command": {"type": "shell",
"cmd": "dmd -I$project/src -c $in -of$out"},
"outputs": ["main.o"],
"dependencies": {"type": "fixed",
"targets": [
{"type": "fixed",
"command": {}, "outputs": ["src/main.d"],
"dependencies": {
"type": "fixed",
"targets": []},
"implicits": {
"type": "fixed",
"targets": []}}]},
"implicits": {
"type": "fixed",
"targets": []}}
def test_link_fixed():
mainObj = Target("main.o",
"dmd -I$project/src -c $in -of$out",
Target("src/main.d"))
mathsObj = Target("maths.o",
"dmd -c $in -of$out",
Target("src/maths.d"))
app = link(exe_name="myapp",
dependencies=[mainObj, mathsObj],
flags="-L-M")
bld = Build(app)
assert bld.jsonify() == \
[{"type": "fixed",
"command": {"type": "link", "flags": "-L-M"},
"outputs": ["myapp"],
"dependencies": {
"type": "fixed",
"targets":
[{"type": "fixed",
"command": {"type": "shell",
"cmd": "dmd -I$project/src -c $in -of$out"},
"outputs": ["main.o"],
"dependencies": {"type": "fixed",
"targets": [
{"type": "fixed",
"command": {}, "outputs": ["src/main.d"],
"dependencies": {
"type": "fixed",
"targets": []},
"implicits": {
"type": "fixed",
"targets": []}}]},
"implicits": {
"type": "fixed",
"targets": []}},
{"type": "fixed",
"command": {"type": "shell", "cmd":
"dmd -c $in -of$out"},
"outputs": ["maths.o"],
"dependencies": {
"type": "fixed",
"targets": [
{"type": "fixed",
"command": {}, "outputs": ["src/maths.d"],
"dependencies": {
"type": "fixed",
"targets": []},
"implicits": {
"type": "fixed",
"targets": []}}]},
"implicits": {
"type": "fixed",
"targets": []}}]},
"implicits": {
"type": "fixed",
"targets": []}}]
json = dumps(bld.jsonify())
assert(loads(json) == bld.jsonify())
def test_link_dynamic():
objs = object_files(flags='-I$project/src', src_dirs=['src'])
app = link(exe_name="myapp",
dependencies=objs,
flags="-L-M")
bld = Build(app)
assert bld.jsonify() == \
[{"type": "fixed",
"command": {"type": "link", "flags": "-L-M"},
"outputs": ["myapp"],
"dependencies": {
"type": "dynamic",
"func": "objectFiles",
"src_dirs": ["src"],
"exclude_dirs": [],
"src_files": [],
"exclude_files": [],
"flags": "-I$project/src",
"includes": [],
"string_imports": []},
"implicits": {
"type": "fixed",
"targets": []}}]
json = dumps(bld.jsonify())
assert(loads(json) == bld.jsonify())
def test_static_lib():
lib = static_library('libstuff.a',
flags='-I$project/src',
src_dirs=['src'])
app = link(exe_name="myapp",
dependencies=lib,
flags="-L-M")
bld = Build(app)
assert bld.jsonify() == \
[{"type": "fixed",
"command": {"type": "link", "flags": "-L-M"},
"outputs": ["myapp"],
"dependencies": {
"type": "dynamic",
"func": "staticLibrary",
"name": "libstuff.a",
"src_dirs": ["src"],
"exclude_dirs": [],
"src_files": [],
"exclude_files": [],
"flags": "-I$project/src",
"includes": [],
"string_imports": []},
"implicits": {
"type": "fixed",
"targets": []}}]
json = dumps(bld.jsonify())
assert(loads(json) == bld.jsonify())
def test_scriptlike():
app = scriptlike(src_name='src/main.d',
exe_name='leapp',
flags='-g',
includes=['src'])
bld = Build(app)
assert bld.jsonify() == \
[{"type": "dynamic",
"func": "scriptlike",
"src_name": "src/main.d",
"exe_name": "leapp",
"link_with": {"type": "fixed", "targets": []},
"flags": "-g",
"includes": ["src"],
"string_imports": []}]
json = dumps(bld.jsonify())
assert(loads(json) == bld.jsonify())
def test_build_two_targets():
objs1 = object_files(flags='-I$project/src',
src_dirs=['src'])
app1 = link(exe_name="app1",
dependencies=objs1,
flags="-L-M")
objs2 = object_files(flags='-I$project/other',
src_dirs=['other', 'yetanother'])
app2 = link(exe_name="app2",
dependencies=objs2)
bld = Build(app1, app2)
assert bld.jsonify() == \
[{"type": "fixed",
"command": {"type": "link", "flags": "-L-M"},
"outputs": ["app1"],
"dependencies": {
"type": "dynamic",
"func": "objectFiles",
"src_dirs": ["src"],
"exclude_dirs": [],
"src_files": [],
"exclude_files": [],
"flags": "-I$project/src",
"includes": [],
"string_imports": []},
"implicits": {
"type": "fixed",
"targets": []}},
{"type": "fixed",
"command": {"type": "link", "flags": ""},
"outputs": ["app2"],
"dependencies": {
"type": "dynamic",
"func": "objectFiles",
"src_dirs": ["other", "yetanother"],
"exclude_dirs": [],
"src_files": [],
"exclude_files": [],
"flags": "-I$project/other",
"includes": [],
"string_imports": []},
"implicits": {
"type": "fixed",
"targets": []}}]
json = dumps(bld.jsonify())
assert(loads(json) == bld.jsonify())
def test_object_files_error():
with pytest.raises(TypeError):
object_files('')
with pytest.raises(TypeError):
object_files([], '')
with pytest.raises(TypeError):
object_files([], [], '')
with pytest.raises(TypeError):
object_files([], [], [], '')
with pytest.raises(TypeError):
object_files([], [], [], [], [])
with pytest.raises(TypeError):
object_files([], [], [], [], '', '')
with pytest.raises(TypeError):
object_files([], [], [], [], '', [], '')
def test_target_concat():
mainObj = Target("main.o",
"dmd -I$project/src -c $in -of$out",
Target("src/main.d"))
mathsObj = Target("maths.o",
"dmd -c $in -of$out",
Target("src/maths.d"))
app = link(exe_name="myapp",
dependencies=target_concat(mainObj, mathsObj),
flags="-L-M")
bld = Build(app)
assert bld.jsonify() == \
[{"type": "fixed",
"command": {"type": "link", "flags": "-L-M"},
"outputs": ["myapp"],
"dependencies": {
"type": "dynamic",
"func": "targetConcat",
"dependencies": [
{"type": "fixed",
"command": {"type": "shell",
"cmd": "dmd -I$project/src -c $in -of$out"},
"outputs": ["main.o"],
"dependencies": {"type": "fixed",
"targets": [
{"type": "fixed",
"command": {},
"outputs": ["src/main.d"],
"dependencies": {
"type": "fixed",
"targets": []},
"implicits": {
"type": "fixed",
"targets": []}}]},
"implicits": {
"type": "fixed",
"targets": []}},
{"type": "fixed",
"command": {"type": "shell", "cmd":
"dmd -c $in -of$out"},
"outputs": ["maths.o"],
"dependencies": {
"type": "fixed",
"targets": [
{"type": "fixed",
"command": {}, "outputs": ["src/maths.d"],
"dependencies": {
"type": "fixed",
"targets": []},
"implicits": {
"type": "fixed",
"targets": []}}]},
"implicits": {
"type": "fixed",
"targets": []}}]},
"implicits": {
"type": "fixed",
"targets": []}}]
json = dumps(bld.jsonify())
assert(loads(json) == bld.jsonify())
def test_link_dynamic_concat():
main_obj = Target("main.o",
"dmd -I$project/src -c $in -of$out",
Target("src/main.d"))
objs = object_files(flags='-I$project/src', src_dirs=['src'])
app = link(exe_name="myapp",
dependencies=[objs, main_obj],
flags="-L-M")
bld = Build(app)
assert bld.jsonify() == \
[{"type": "fixed",
"command": {"type": "link", "flags": "-L-M"},
"outputs": ["myapp"],
"implicits": {"type": "fixed", "targets": []},
"dependencies": {
"type": "dynamic",
"func": "targetConcat",
"dependencies": [
{"type": "dynamic",
"func": "objectFiles",
"src_dirs": ["src"],
"exclude_dirs": [],
"src_files": [],
"exclude_files": [],
"flags": "-I$project/src",
"includes": [],
"string_imports": []},
{"type": "fixed",
"command": {"type": "shell",
"cmd": "dmd -I$project/src -c $in -of$out"},
"outputs": ["main.o"],
"dependencies": {"type": "fixed",
"targets": [
{"type": "fixed",
"command": {},
"outputs": ["src/main.d"],
"dependencies": {
"type": "fixed",
"targets": []},
"implicits": {
"type": "fixed",
"targets": []}}]},
"implicits": {
"type": "fixed",
"targets": []}},
]
}}]
json = dumps(bld.jsonify())
assert(loads(json) == bld.jsonify())
def test_src_files():
objs = object_files(flags='-g -pg',
src_dirs=['src'],
src_files=['main.cpp'])
app = link(exe_name='myapp', dependencies=objs)
bld = Build(app)
assert bld.jsonify() == \
[{"type": "fixed",
"command": {"type": "link", "flags": ""},
"outputs": ["myapp"],
"dependencies": {
"type": "dynamic",
"func": "objectFiles",
"src_dirs": ["src"],
"exclude_dirs": [],
"src_files": ['main.cpp'],
"exclude_files": [],
"flags": "-g -pg",
"includes": [],
"string_imports": []},
"implicits": {
"type": "fixed",
"targets": []}}]
json = dumps(bld.jsonify())
assert(loads(json) == bld.jsonify())
def test_list_with_one_item():
objs = object_files(src_dirs=['src'])
app = link(exe_name='myapp', dependencies=[objs])
bld = Build(app)
assert bld.jsonify() == \
[{"type": "fixed",
"command": {"type": "link", "flags": ""},
"outputs": ["myapp"],
"dependencies": {
"type": "dynamic",
"func": "objectFiles",
"src_dirs": ["src"],
"exclude_dirs": [],
"src_files": [],
"exclude_files": [],
"flags": "",
"includes": [],
"string_imports": []},
"implicits": {
"type": "fixed",
"targets": []}}]
json = dumps(bld.jsonify())
assert(loads(json) == bld.jsonify())
def test_mix_dynamic_and_static():
objs = object_files(flags='-I$project/src', src_dirs=['src'])
app = Target('app',
'cmd',
[objs, Target('libfoo.a')])
bld = Build(app)
assert bld.jsonify() == \
[{"type": "fixed",
"command": {"type": "shell", "cmd": "cmd"},
"outputs": ["app"],
"implicits": {"type": "fixed", "targets": []},
"dependencies": {
"type": "dynamic",
"func": "targetConcat",
"dependencies": [
{"type": "dynamic",
"func": "objectFiles",
"src_dirs": ["src"],
"exclude_dirs": [],
"src_files": [],
"exclude_files": [],
"flags": "-I$project/src",
"includes": [],
"string_imports": []},
{"type": "fixed",
"command": {},
"outputs": ["libfoo.a"],
"dependencies": {"type": "fixed",
"targets": []},
"implicits": {
"type": "fixed",
"targets": []}},
]
}}]
json = dumps(bld.jsonify())
assert(loads(json) == bld.jsonify())
def test_executable():
bld = Build(executable(name="myapp",
compiler_flags='-I$project/src',
src_dirs=['src'],
linker_flags='-L-M'))
assert bld.jsonify() == \
[{"type": "fixed",
"command": {"type": "link", "flags": "-L-M"},
"outputs": ["myapp"],
"dependencies": {
"type": "dynamic",
"func": "objectFiles",
"src_dirs": ["src"],
"exclude_dirs": [],
"src_files": [],
"exclude_files": [],
"flags": "-I$project/src",
"includes": [],
"string_imports": []},
"implicits": {
"type": "fixed",
"targets": []}}]
json = dumps(bld.jsonify())
assert(loads(json) == bld.jsonify())
| 35.695076
| 79
| 0.356555
| 1,342
| 18,847
| 4.914307
| 0.07228
| 0.10235
| 0.114026
| 0.106141
| 0.856255
| 0.851099
| 0.851099
| 0.828355
| 0.814253
| 0.790751
| 0
| 0.00119
| 0.464955
| 18,847
| 527
| 80
| 35.762808
| 0.652816
| 0.000212
| 0
| 0.802548
| 0
| 0
| 0.234754
| 0
| 0
| 0
| 0
| 0
| 0.061571
| 1
| 0.03397
| false
| 0
| 0.029724
| 0
| 0.063694
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a8b5519df60f2ca13484dd498dc214ede3da8cfb
| 997
|
py
|
Python
|
run-framework-scidb.py
|
danutrusu/framework_thesis
|
be2d6f2484a66d83994baded369bc832b9583563
|
[
"MIT"
] | null | null | null |
run-framework-scidb.py
|
danutrusu/framework_thesis
|
be2d6f2484a66d83994baded369bc832b9583563
|
[
"MIT"
] | null | null | null |
run-framework-scidb.py
|
danutrusu/framework_thesis
|
be2d6f2484a66d83994baded369bc832b9583563
|
[
"MIT"
] | null | null | null |
import os

# Benchmark driver: run the SciDB "operations" workload (load + generate)
# at the 1GB size for dimensions 1-5, once per datatype.
# os.system is used deliberately: the command relies on shell substitution
# of $(pwd) for the data directory. The command itself is a fixed constant,
# so no untrusted input reaches the shell.
_COMMAND_TEMPLATE = (
    "./run.sh operations -s scidb --system-configs conf/scidb.properties "
    "--datadir $(pwd)/data --load --generate --sizes 1GB -d 1,2,3,4,5 "
    "--datatype {datatype}"
)

# Previously three near-identical command strings (differing only in
# --datatype) were spelled out by hand; loop over the datatypes instead.
for datatype in ("char", "double", "int32"):
    os.system(_COMMAND_TEMPLATE.format(datatype=datatype))
| 124.625
| 166
| 0.7001
| 164
| 997
| 4.256098
| 0.176829
| 0.068768
| 0.094556
| 0.111748
| 0.988539
| 0.988539
| 0.988539
| 0.988539
| 0.974212
| 0.974212
| 0
| 0.051168
| 0.098295
| 997
| 7
| 167
| 142.428571
| 0.72525
| 0.49348
| 0
| 0
| 1
| 0.75
| 0.890438
| 0.125498
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
a8bf1424fc402737312ac37ee2bc23dcc09c6adb
| 92
|
py
|
Python
|
base.py
|
augustinharter/sba
|
43c08c02fe98a007b9f9dcad53db0c0410bc4f0f
|
[
"MIT"
] | null | null | null |
base.py
|
augustinharter/sba
|
43c08c02fe98a007b9f9dcad53db0c0410bc4f0f
|
[
"MIT"
] | null | null | null |
base.py
|
augustinharter/sba
|
43c08c02fe98a007b9f9dcad53db0c0410bc4f0f
|
[
"MIT"
] | null | null | null |
import numpy as np
import torch as T
import torch.nn as nn
import torch.nn.functional as F
| 15.333333
| 31
| 0.782609
| 19
| 92
| 3.789474
| 0.473684
| 0.458333
| 0.361111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184783
| 92
| 5
| 32
| 18.4
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7660919a6a38ad95b8f534eabc8831a9915b5aef
| 17,575
|
py
|
Python
|
tensorpack/models/l1_norm.py
|
ToWeRT1An/tensorpack
|
f343e65b3c92fdf92cda7a90e8d7fd9df622b1b1
|
[
"Apache-2.0"
] | 1
|
2019-08-26T08:55:43.000Z
|
2019-08-26T08:55:43.000Z
|
tensorpack/models/l1_norm.py
|
ToWeRT1An/tensorpack
|
f343e65b3c92fdf92cda7a90e8d7fd9df622b1b1
|
[
"Apache-2.0"
] | 7
|
2019-12-16T21:58:30.000Z
|
2022-02-10T00:17:01.000Z
|
tensorpack/models/l1_norm.py
|
T1anZhenYu/tensorpack
|
f343e65b3c92fdf92cda7a90e8d7fd9df622b1b1
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# File: batch_norm.py
import re
import six
import os
from ..compat import tfv1 as tf # this should be avoided first in model code
from tensorflow.python.training import moving_averages
from tensorpack.tfutils.summary import add_moving_summary, add_param_summary,add_tensor_summary
from ..tfutils.collection import backup_collection, restore_collection
from ..tfutils.common import get_tf_version_tuple
from ..tfutils.tower import get_current_tower_context
from ..utils import logger
from ..utils.argtools import get_data_format
from ..utils.develop import log_deprecated
from .common import VariableHolder, layer_register
from .tflayer import convert_to_tflayer_args, rename_get_variable
import numpy as np
from tensorflow.python.training.moving_averages import assign_moving_average
# Public API of this module (note: BNN is intentionally not exported here).
__all__ = ['L2norm','L1norm','L2norm_quan_train','Myrangenorm','Otherrangenorm']
# decay: being too close to 1 leads to slow start-up. torch use 0.9.
# eps: torch: 1e-5. Lasagne: 1e-4
# Bit width used by the quantization helpers quan()/quan_() below.
bitG = 8
def nonlin(x):
    """Hard-clip the input tensor into the fixed range [-2, 2]."""
    lower, upper = -2, 2
    return tf.clip_by_value(x, lower, upper)
def quantize(x, k):
    """Quantize `x` (expected in [0, 1]) onto 2**k - 1 uniform levels.

    The rounding op uses a straight-through estimator: the forward pass
    rounds, the backward pass passes the gradient through unchanged.
    """
    levels = float(2 ** k - 1)

    @tf.custom_gradient
    def _round_ste(v):
        # Forward: snap to the nearest level; backward: identity gradient.
        return tf.round(v * levels) / levels, lambda dy: dy

    return _round_ste(x)
def quan_(x, max_value):
    """Quantize `x` to bitG bits using a caller-supplied scale `max_value`.

    Adds uniform noise of half a quantization step before rounding
    (stochastic-rounding style), then maps back to the original range.
    """
    assert x.get_shape().ndims is not None
    scale = max_value
    steps = float(2 ** bitG - 1)
    # Map to [0, 1] and dither by half a step.
    shifted = (x / scale) * 0.5 + 0.5 + tf.random_uniform(
        tf.shape(x), minval=-0.5 / steps, maxval=0.5 / steps)
    clipped = tf.clip_by_value(shifted, 0.0, 1.0)
    # Undo the shift and scale after quantizing.
    return (quantize(clipped, bitG) - 0.5) * scale * 2
def quan(x):
    """Quantize `x` to bitG bits, scaled by its per-example max magnitude."""
    rank = x.get_shape().ndims
    assert rank is not None
    # Max of |x| over every axis except the first, kept broadcastable.
    scale = tf.reduce_max(tf.abs(x), list(range(1, rank)), keep_dims=True)
    steps = float(2 ** bitG - 1)
    # Map to [0, 1] and dither by half a quantization step.
    shifted = (x / scale) * 0.5 + 0.5 + tf.random_uniform(
        tf.shape(x), minval=-0.5 / steps, maxval=0.5 / steps)
    clipped = tf.clip_by_value(shifted, 0.0, 1.0)
    # Undo the shift and scale after quantizing.
    return (quantize(clipped, bitG) - 0.5) * scale * 2
def near_2(x):
    """Round each element of `x` to the nearest signed power of two.

    Computed as sign(x) * 2**round(log2(|x|)).
    """
    def log2(v):
        # TF1 has no tf.log2; compute ln(v) / ln(2) in v's dtype.
        numerator = tf.log(v)
        denominator = tf.log(tf.constant(2, dtype=numerator.dtype))
        return numerator / denominator
    # BUG FIX: tf.pow(2, float_tensor) builds an int32 base tensor, which
    # fails the dtype check against the float exponent. Create the base
    # constant in x's dtype instead.
    base = tf.constant(2, dtype=x.dtype)
    return tf.sign(x) * tf.pow(base, tf.round(log2(tf.abs(x))))
@layer_register()
@convert_to_tflayer_args(
    args_names=[],
    name_mapping={
        'use_bias': 'center',
        'use_scale': 'scale',
        'gamma_init': 'gamma_initializer',
        'decay': 'momentum',
        'use_local_stat': 'training'
    })
def BNN(x, train, eps=1e-05, decay=0.9, affine=True, name=None):
    """Batch norm variant whose scale uses a power-of-two approximation.

    Args:
        x: input tensor, normalized over axes [0, 1, 2] (NHWC assumed —
            TODO confirm with callers).
        train: Python bool; True uses batch statistics, False the moving
            averages.
        eps: numerical-stability constant added to the std-dev denominator.
        decay: momentum of the moving-average updates.
        affine: when True, scale by gamma rounded to the nearest power of
            two. NOTE(review): beta is created but never applied on this
            path — presumably intentional for this experiment; confirm.
        name: variable-scope name.

    Returns:
        (x, gamma, beta, moving_mean, moving_variance, mean, variance);
        gamma and beta are None when affine is False.
    """
    with tf.variable_scope(name, default_name='BatchNorm2d'):
        params_shape = x.get_shape().as_list()
        params_shape = params_shape[-1:]
        moving_mean = tf.get_variable('mean', shape=params_shape,
                                      initializer=tf.zeros_initializer,
                                      trainable=False)
        moving_variance = tf.get_variable('variance', shape=params_shape,
                                          initializer=tf.ones_initializer,
                                          trainable=False)

        def mean_var_with_update():
            # "Variance" is rebuilt with near_2() so the multiplier is a
            # power of two; the true variance from moments() is discarded.
            mean, variance_ = tf.nn.moments(x, [0,1,2], name='moments')
            variance = tf.reduce_sum((x - mean)*near_2((x-mean)),[0,1,2])
            # Update the moving averages before returning the batch stats.
            with tf.control_dependencies([assign_moving_average(moving_mean, mean, decay),
                                          assign_moving_average(moving_variance, variance, decay)]):
                return tf.identity(mean), tf.identity(variance)

        # tf.cond requires a tensor predicate; encode the Python bool
        # `train` as a comparison of two constant tensors (author's
        # workaround, translated from the original Chinese comment).
        if train:
            xx = tf.constant(3)
            yy = tf.constant(4)
        else:
            xx = tf.constant(4)
            yy = tf.constant(3)
        mean, variance = tf.cond(xx<yy, mean_var_with_update, lambda: (moving_mean, moving_variance))
        if affine:
            beta = tf.get_variable('beta', params_shape,
                                   initializer=tf.zeros_initializer)
            gamma = tf.get_variable('gamma', params_shape,
                                    initializer=tf.ones_initializer)
            x_ = (x-mean)*(1/(tf.sqrt(variance)+eps))
            x = x_ * near_2(gamma)
            #x = tf.nn.batch_normalization(x, mean, variance, beta, gamma, eps)
        else:
            # BUG FIX: the return statement below referenced gamma/beta,
            # which were undefined on this branch (NameError when
            # affine=False). Return None placeholders instead.
            gamma = None
            beta = None
            x = tf.nn.batch_normalization(x, mean, variance, None, None, eps)
        return x,gamma,beta,moving_mean,moving_variance,mean,variance
@layer_register()
@convert_to_tflayer_args(
    args_names=[],
    name_mapping={
        'use_bias': 'center',
        'use_scale': 'scale',
        'gamma_init': 'gamma_initializer',
        'decay': 'momentum',
        'use_local_stat': 'training'
    })
def Myrangenorm(x, train, eps=1e-05, decay=0.9, affine=True, name=None):
    """Range-based batch norm: mean = midpoint of the per-channel range,
    variance = range scaled by a learned |1 + lambda_| factor.

    Also emits TensorBoard summaries comparing the range-based statistics
    (my_bm/my_bv) against the true moments (real_bm/real_bv).

    Returns (x, gamma, beta, moving_mean, moving_variance, mean, variance);
    gamma/beta are None when affine is False.
    """
    with tf.variable_scope(name, default_name='Myrangenorm'):
        b = tf.shape(x)[0]  # batch size; only used by the commented-out variance formula below
        params_shape = x.get_shape().as_list()
        params_shape = params_shape[-1:]
        moving_mean = tf.get_variable('mean', shape=params_shape,
                                      initializer=tf.zeros_initializer,
                                      trainable=False)
        moving_variance = tf.get_variable('variance', shape=params_shape,
                                          initializer=tf.ones_initializer,
                                          trainable=False)
        #c_max = tf.tile(tf.expand_dims(tf.reduce_max(x),0),params_shape)
        #c_min = tf.tile(tf.expand_dims(tf.reduce_min(x),0),params_shape)
        # Per-channel extrema over batch and spatial axes.
        c_max = tf.reduce_max(x,[0,1,2])
        c_min = tf.reduce_min(x,[0,1,2])
        mean_, variance_ = tf.nn.moments(x, [0,1,2], name='moments')
        # Range-based estimates vs. the true moments, named for summaries.
        my_bm = tf.identity((c_max+c_min)/2,name='my_bm')
        my_bv = tf.identity((c_max-c_min),name='my_bv')
        real_bm = tf.identity(mean_,name='real_bm')
        real_bv = tf.identity(variance_,name='real_bv')
        add_tensor_summary(my_bm,name='my_bm',types=['mean','histogram','rms'])
        add_tensor_summary(my_bv,name='my_bv',types=['mean','histogram','rms'])
        add_tensor_summary(real_bm,name='real_bm',types=['mean','histogram','rms'])
        add_tensor_summary(real_bv,name='real_bv',types=['mean','histogram','rms'])
        # diff_bm = tf.identity(((c_max+c_min)/2)-mean_,name='diff_bm')
        # diff_bv = tf.identity(tf.sqrt(c_max-c_min)-tf.sqrt(variance_),name='diff_bv')
        # ratio_bm = tf.identity((((c_max+c_min)/2))/mean_,name='ratio_bm')
        # ratio_bv = tf.identity((c_max-c_min)/tf.sqrt(variance_),name='ratio_bv')
        # ratio_bv2 = tf.identity(tf.sqrt(c_max-c_min)/tf.sqrt(variance_),name='ratio_bv2')
        # Learned correction factor applied to the range-based variance.
        lambda_ = tf.get_variable('lambda_', params_shape,
                                  initializer=tf.zeros_initializer)
        add_tensor_summary(lambda_,name='lambda_',types=['mean','histogram','rms'])
        def mean_var_with_update():
            mean = (c_max+c_min)/2
            variance = (c_max-c_min)*(tf.abs(1+lambda_))
            #variance = tf.square(c_max-c_min)/(2*tf.math.log(tf.cast(b,dtype=tf.float32)))
            with tf.control_dependencies([assign_moving_average(moving_mean, mean, decay),# update the moving averages
                                          assign_moving_average(moving_variance, variance, decay)]):
                return tf.identity(mean), tf.identity(variance)
        if train:# tf.cond's predicate must be a tensor, not a Python bool, hence this clumsy two-constant encoding (translated from the original Chinese comment)
            xx = tf.constant(3)
            yy = tf.constant(4)
        else:
            xx = tf.constant(4)
            yy = tf.constant(3)
        mean, variance = tf.cond(xx<yy, mean_var_with_update,
                                 lambda: (moving_mean, moving_variance))
        if affine:
            beta = tf.get_variable('beta', params_shape,
                                   initializer=tf.zeros_initializer)
            gamma = tf.get_variable('gamma', params_shape,
                                    initializer=tf.ones_initializer)
            add_tensor_summary(gamma,name='gamma',types=['mean','histogram','rms'])
            add_tensor_summary(beta,name='beta',types=['mean','histogram','rms'])
            x = tf.nn.batch_normalization(x, mean, variance, beta, gamma, eps)
            return x,gamma,beta,moving_mean,moving_variance,mean,variance
        else:
            x = tf.nn.batch_normalization(x, mean, variance, None, None, eps)
            return x,None,None,moving_mean,moving_variance,mean,variance
        #return x
@layer_register()
@convert_to_tflayer_args(
    args_names=[],
    name_mapping={
        'use_bias': 'center',
        'use_scale': 'scale',
        'gamma_init': 'gamma_initializer',
        'decay': 'momentum',
        'use_local_stat': 'training'
    })
def Otherrangenorm(x, train, eps=1e-05, decay=0.9, affine=True, name=None):
    """Batch norm variant: true mean from moments, but variance replaced by
    the squared per-channel range (c_max - c_min)**2.

    Returns only the normalized tensor (unlike the sibling norms here,
    which also return their statistics).
    """
    with tf.variable_scope(name, default_name='BatchNorm2d'):
        params_shape = x.get_shape().as_list()
        params_shape = params_shape[-1:]
        moving_mean = tf.get_variable('mean', shape=params_shape,
                                      initializer=tf.zeros_initializer,
                                      trainable=False)
        moving_variance = tf.get_variable('variance', shape=params_shape,
                                          initializer=tf.ones_initializer,
                                          trainable=False)
        # Per-channel extrema over batch and spatial axes.
        c_max = tf.reduce_max(x,[0,1,2])
        c_min = tf.reduce_min(x,[0,1,2])
        def mean_var_with_update():
            mean_, variance_ = tf.nn.moments(x, [0,1,2], name='moments')
            mean = mean_
            # The moment-based variance is discarded; use the squared range.
            variance = tf.square(c_max-c_min)
            with tf.control_dependencies([assign_moving_average(moving_mean, mean_, decay),# update the moving averages
                                          assign_moving_average(moving_variance, variance, decay)]):
                return tf.identity(mean_), tf.identity(variance)
        if train:# tf.cond's predicate must be a tensor, not a Python bool, hence this clumsy two-constant encoding (translated from the original Chinese comment)
            xx = tf.constant(3)
            yy = tf.constant(4)
        else:
            xx = tf.constant(4)
            yy = tf.constant(3)
        mean, variance = tf.cond(xx<yy, mean_var_with_update, lambda: (moving_mean, moving_variance))
        if affine:
            beta = tf.get_variable('beta', params_shape,
                                   initializer=tf.zeros_initializer)
            gamma = tf.get_variable('gamma', params_shape,
                                    initializer=tf.ones_initializer)
            x = tf.nn.batch_normalization(x, mean, variance, beta, gamma, eps)
        else:
            x = tf.nn.batch_normalization(x, mean, variance, None, None, eps)
        return x
@layer_register()
@convert_to_tflayer_args(
    args_names=[],
    name_mapping={
        'use_bias': 'center',
        'use_scale': 'scale',
        'gamma_init': 'gamma_initializer',
        'decay': 'momentum',
        'use_local_stat': 'training'
    })
def L2norm_quan_train(x, train, layer_num,eps=1e-05, decay=0.9, affine=True, name=None):
    """Standard batch norm whose batch statistics are quantized (via quan_)
    during training, using per-layer empirical clipping maxima.

    Args:
        layer_num: which layer's tuned limits to use; must be in 1..6.

    Raises:
        ValueError: if layer_num has no tuned limits.
    """
    # Per-layer empirically tuned maxima:
    # (beta_max, gamma_max, variance_max, mean_max).
    # Only mean_max is consumed below; the rest are retained for reference.
    limits = {
        1: (0.4, 1.3, 12, 8),
        2: (0.6, 1.2, 18, 6),
        3: (0.5, 1.1, 15, 12),
        4: (0.6, 1.2, 13, 6),
        5: (0.5, 1.1, 12, 3),
        6: (0.2, 1.5, 55, 8),
    }
    try:
        beta_max, gamma_max, variance_max, mean_max = limits[layer_num]
    except KeyError:
        # BUG FIX: an unsupported layer_num previously fell through the
        # if/elif chain, leaving the *_max names undefined and raising a
        # confusing NameError later inside the graph-building closure.
        raise ValueError('L2norm_quan_train: layer_num must be in 1..6, got %r'
                         % (layer_num,))
    with tf.variable_scope(name, default_name='BatchNorm2d'):
        params_shape = x.get_shape().as_list()
        params_shape = params_shape[-1:]
        moving_mean = tf.get_variable('mean', shape=params_shape,
                                      initializer=tf.zeros_initializer,
                                      trainable=False)
        moving_variance = tf.get_variable('variance', shape=params_shape,
                                          initializer=tf.ones_initializer,
                                          trainable=False)
        def mean_var_with_update():
            mean, variance = tf.nn.moments(x, [0,1,2], name='moments')
            # Quantize the batch statistics before use; the variance scale is
            # its own dynamic range, and eps keeps it strictly positive.
            mean = quan_(mean,mean_max)
            variance = quan_(variance,tf.reduce_max(variance)-tf.reduce_min(variance))+eps
            with tf.control_dependencies([assign_moving_average(moving_mean, mean, decay),# update the moving averages
                                          assign_moving_average(moving_variance, variance, decay)]):
                return tf.identity(mean), tf.identity(variance)
        # tf.cond's predicate must be a tensor, not a Python bool, hence this
        # two-constant encoding (translated from the original Chinese comment).
        if train:
            xx = tf.constant(3)
            yy = tf.constant(4)
        else:
            xx = tf.constant(4)
            yy = tf.constant(3)
        mean, variance = tf.cond(xx<yy, mean_var_with_update, lambda: (moving_mean, moving_variance))
        if affine:
            beta = tf.get_variable('beta', params_shape,
                                   initializer=tf.zeros_initializer)
            gamma = tf.get_variable('gamma', params_shape,
                                    initializer=tf.ones_initializer)
            x = tf.nn.batch_normalization(x, mean,variance, beta,gamma, eps)
        else:
            x = tf.nn.batch_normalization(x, mean, variance,None, None, eps)
        return x
@layer_register()
@convert_to_tflayer_args(
    args_names=[],
    name_mapping={
        'use_bias': 'center',
        'use_scale': 'scale',
        'gamma_init': 'gamma_initializer',
        'decay': 'momentum',
        'use_local_stat': 'training'
    })
def L2norm(x, train, eps=1e-05, decay=0.9, affine=True, name=None):
    """Standard batch normalization with moment statistics.

    Returns (x, gamma, beta, moving_mean, moving_variance); gamma and beta
    are None when affine is False.
    """
    with tf.variable_scope(name, default_name='BatchNorm2d'):
        params_shape = x.get_shape().as_list()
        params_shape = params_shape[-1:]
        moving_mean = tf.get_variable('mean', shape=params_shape,
                                      initializer=tf.zeros_initializer,
                                      trainable=False)
        moving_variance = tf.get_variable('variance', shape=params_shape,
                                          initializer=tf.ones_initializer,
                                          trainable=False)
        def mean_var_with_update():
            mean, variance = tf.nn.moments(x, [0,1,2], name='moments')
            with tf.control_dependencies([assign_moving_average(moving_mean, mean, decay),# update the moving averages
                                          assign_moving_average(moving_variance, variance, decay)]):
                return tf.identity(mean), tf.identity(variance)
        # tf.cond's predicate must be a tensor, not a Python bool, hence this
        # two-constant encoding (translated from the original Chinese comment).
        if train:
            xx = tf.constant(3)
            yy = tf.constant(4)
        else:
            xx = tf.constant(4)
            yy = tf.constant(3)
        mean, variance = tf.cond(xx<yy, mean_var_with_update, lambda: (moving_mean, moving_variance))
        if affine:
            beta = tf.get_variable('beta', params_shape,
                                   initializer=tf.zeros_initializer)
            gamma = tf.get_variable('gamma', params_shape,
                                    initializer=tf.ones_initializer)
            x = tf.nn.batch_normalization(x, mean, variance, beta, gamma, eps)
        else:
            # BUG FIX: the return statement below referenced gamma/beta,
            # which were undefined on this branch (NameError when
            # affine=False). Return None placeholders instead.
            gamma = None
            beta = None
            x = tf.nn.batch_normalization(x, mean, variance, None, None, eps)
        return x,gamma,beta,moving_mean,moving_variance
@layer_register()
@convert_to_tflayer_args(
    args_names=[],
    name_mapping={
        'use_bias': 'center',
        'use_scale': 'scale',
        'gamma_init': 'gamma_initializer',
        'decay': 'momentum',
        'use_local_stat': 'training'
    })
def L1norm(x, train, eps=1e-05, decay=0.9, affine=True, name=None):
    """Batch norm whose variance term is an L1-based spread estimate.

    Returns (x, gamma, beta, mean, variance); gamma and beta are None when
    affine is False.
    """
    def get_l1norm(x,ave):
        # 0.8 * mean(|x - ave|): L1 proxy for the spread.
        # BUG FIX: written as 0.8 instead of 4/5 — under Python 2 (this
        # module imports six, so py2 is supported) 4/5 is integer division
        # and evaluates to 0, zeroing the variance.
        return 0.8*tf.reduce_mean(tf.abs(x-ave),axis=[0,1,2])
    with tf.variable_scope(name, default_name='BatchNorm2d'):
        params_shape = x.get_shape().as_list()
        params_shape = params_shape[-1:]
        moving_mean = tf.get_variable('mean', shape=params_shape,
                                      initializer=tf.zeros_initializer,
                                      trainable=False)
        moving_variance = tf.get_variable('variance', shape=params_shape,
                                          initializer=tf.ones_initializer,
                                          trainable=False)
        def mean_var_with_update():
            # The moment-based variance v_ is discarded in favor of the
            # L1-based estimate.
            mean, v_ = tf.nn.moments(x, [0,1,2], name='moments')
            variance = get_l1norm(x,mean)
            with tf.control_dependencies([assign_moving_average(moving_mean, mean, decay),# update the moving averages
                                          assign_moving_average(moving_variance, variance, decay)]):
                return tf.identity(mean), tf.identity(variance)
        # tf.cond's predicate must be a tensor, not a Python bool, hence this
        # two-constant encoding (translated from the original Chinese comment).
        if train:
            xx = tf.constant(3)
            yy = tf.constant(4)
        else:
            xx = tf.constant(4)
            yy = tf.constant(3)
        mean, variance = tf.cond(xx<yy, mean_var_with_update, lambda: (moving_mean, moving_variance))
        if affine:
            beta = tf.get_variable('beta', params_shape,
                                   initializer=tf.zeros_initializer)
            gamma = tf.get_variable('gamma', params_shape,
                                    initializer=tf.ones_initializer)
            x = tf.nn.batch_normalization(x, mean, variance, beta, gamma, eps)
        else:
            # BUG FIX: the return statement below referenced gamma/beta,
            # which were undefined on this branch (NameError when
            # affine=False). Return None placeholders instead.
            gamma = None
            beta = None
            x = tf.nn.batch_normalization(x, mean, variance, None, None, eps)
        return x,gamma,beta,mean, variance
| 43.610422
| 101
| 0.585092
| 2,213
| 17,575
| 4.416629
| 0.097153
| 0.050645
| 0.033251
| 0.061387
| 0.806834
| 0.796808
| 0.786577
| 0.76949
| 0.754758
| 0.744117
| 0
| 0.01853
| 0.29377
| 17,575
| 403
| 102
| 43.610422
| 0.768933
| 0.065661
| 0
| 0.740525
| 0
| 0
| 0.061669
| 0
| 0
| 0
| 0
| 0
| 0.005831
| 1
| 0.058309
| false
| 0
| 0.046647
| 0.008746
| 0.166181
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
76711aa5d70435c0a6f5c16daa696c4962327352
| 552
|
py
|
Python
|
tests/mangle.py
|
ZYAZP/python2
|
7dc3b62eff51e1decb4a408122e77630fdc1687d
|
[
"MIT"
] | 1,062
|
2015-11-18T01:04:33.000Z
|
2022-03-29T07:13:30.000Z
|
tests/mangle.py
|
ArrowSides/onelinerizer
|
7dc3b62eff51e1decb4a408122e77630fdc1687d
|
[
"MIT"
] | 26
|
2015-11-17T06:58:07.000Z
|
2022-01-15T18:11:16.000Z
|
tests/mangle.py
|
ArrowSides/onelinerizer
|
7dc3b62eff51e1decb4a408122e77630fdc1687d
|
[
"MIT"
] | 100
|
2015-11-17T09:01:22.000Z
|
2021-09-12T13:58:28.000Z
|
# Exercise CPython's private-name mangling (Python 2 syntax).  Inside a
# class body, a bare __x identifier is rewritten to _ClassName__x; the nine
# globals below cover every mangled spelling so each print statement can
# resolve all names regardless of which scope performs the lookup.
__a, _A__a, _B__a, __b, _A__b, _B__b, __c, _A__c, _B__c = 0, 1, 2, 3, 4, 5, 6, 7, 8
print __a, _A__a, _B__a, __b, _A__b, _B__b, __c, _A__c, _B__c
class A(object):
    # In A's body (and its methods), __x mangles to _A__x.
    __a = 9
    print __a, _A__a, _B__a, __b, _A__b, _B__b, __c, _A__c, _B__c
    def f(self):
        print __a, _A__a, _B__a, __b, _A__b, _B__b, __c, _A__c, _B__c
    class B(object):
        # Inside the nested class the active mangling prefix switches to _B.
        __b = 10
        print __a, _A__a, _B__a, __b, _A__b, _B__b, __c, _A__c, _B__c
        def f(self):
            print __a, _A__a, _B__a, __b, _A__b, _B__b, __c, _A__c, _B__c
A().f()
A().B().f()
| 34.5
| 83
| 0.583333
| 125
| 552
| 1.392
| 0.16
| 0.218391
| 0.206897
| 0.275862
| 0.810345
| 0.810345
| 0.810345
| 0.810345
| 0.810345
| 0.810345
| 0
| 0.029557
| 0.264493
| 552
| 15
| 84
| 36.8
| 0.399015
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.357143
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
76901622cb873db301e08d962facfe2ed9177ee7
| 36,382
|
py
|
Python
|
tool/SoDark.py
|
MrWhiteSoul/WannaTool
|
78df7eee605367d04e3e77bfa5865afda680eb4f
|
[
"MIT"
] | 70
|
2020-04-10T09:50:57.000Z
|
2022-03-31T23:26:46.000Z
|
tool/SoDark.py
|
Acc3ssGr4nt3d/WannaTool
|
5aba765597f35e221e619c231890fb3af9fc156e
|
[
"MIT"
] | 1
|
2021-12-12T12:20:17.000Z
|
2021-12-12T12:22:31.000Z
|
tool/SoDark.py
|
Acc3ssGr4nt3d/WannaTool
|
5aba765597f35e221e619c231890fb3af9fc156e
|
[
"MIT"
] | 19
|
2020-04-10T06:26:56.000Z
|
2022-03-20T03:08:54.000Z
|
import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b16decode("789CEDBD09741C497618185985AB0AF70DF04CA249102089FB6613CD068F2639CD6B0AEC069B1C363A51990012A80B955924D0035A96DBB646B22C6BA5952FDDA3DD91666C4BD6D81E49F6589675F8D23596656B77EDF7B49E1DAF65EBD9FB56AB95BDE395E57F446466556501209BAD1E8D1B644566C69D9111FF8A1FFF2785FCAB86DFABF073BE3B2A8429C4030E350A23144629ACA2B09AC21A616AE241ADD8F98A30214F54A4EAC4833A751F130F62EA3E2E1EC4D57DBD7850AFEE1BC4830675DF281E34AAFB26F1A049DD378B07CDEABE453C6851F7ADE241ABBA6F130FDAC2F2B48B07EDEABE433CE850F79DE241A7BAEF120FBAE8BE4AA4BA45BA473CE8111A3E578B54AF481F120F0E0B6D29F3B2A8B28E88CDB8C87F5CD31E1C159A754C58B5C2AC11DD9610DD662D5CE167D6C1F5B8B08E8AF734D10D3F2DA389FB962EAC18C66870D938214CF9D02636FAC47B30942F09AB556C9CA4FB53C27A496CF40BEB3467828701815907C5C619CC61C258D6439AA69933624DC3ECCB67850983764EAC410543140E533842E1288563148E533841E1248553144E533843E1AC3061D8E7E8FE3C852F537881C2790A5F11668BF89370735198AD74F3AA30DBE8664198ED747349981D747359989D747345985D74735598DD74F39A307BE8E69A307BE9E6BA300FD1CD0D611EA69B8F09F308DDBC2ECCA374735398C7E8E696308FD3CD6D61EA7473479827E8E6AE30FBE8E6E3C27C896E12C23C49378BC23C4537F784D94F376F08F334DDBC29CC01BA5912E620DDDC17E619BA794B9867E9E68130CFD1CD43610ED1CD2784394C378F843942376F0B73946E9685F58E30C7C49F84C56208739C22578439413749614ED28D29CC29BAB184B52ACC69B11911F9BF10B52EE227D7320227EBE2C02C2ED25A080E3F1C7B796E34ED54D1FD68DAFE03F8BB3DA0C1A31B87E0DE7ADE32CCBBD96C8AE31A21B8641905D75E2DA416B3859C8BD10F293CE1B442F848BF95350B294BBF9ACF67F3FA79DDC5BA0BEEEAAC5B073769637BD9B5D3968D251C6CE20DC7CA0F2DAC5919D749C0E39D9C953746E6866747F581858C99CFDAE6CB3A45EAB7EC8C3D32313E3C3A3C3E3E3539323B35ACBFF1B26E9B83FADDBCE5B8D991F1E1B1F1E1C9F109FD4D2BEFD8D9CC083C8E4DBBF8A2B7EC45CBDD316C0453490CEAE15703BFCBD88F7180573B87842B1016C1AA7D2F229EC2DB6A34684B99D668951B116E54AC4668ED0E4561D5C07B6D54E370E2F282657FEDBA5B83CBBDDBC5050C0B1A1E6941D3B5AE243EC657285427B09C4A8FCBF47AB8C668E93FD544853C0D7E9E88D869C52FB6016FD0483D5ACA8C8B2AB75E6C345097AB34E
C72A3574F53A02F4D3E88719BA99616B1CCD5B5E1128615FA9ED0B45D8DA2DA71A15ED88DA887367101EF3A68ACF0AE13EF9E4671F5EE46C5668DC8FF4264674883350C3574C30AEE86C2DDB080BB9F5609BB1E5730ACDD6958B6B060A7210FD43F0D4B1616EB34AC5358A174390E171D17E534AC475889D3500B949986B508AB701A16202CBDE9A7D5C2ED121BDDB8FC70606AC46E8DD8E811BB55342AF8D04BFDAB15BBD598001371B716D7E8347DC733E2290CF621B17158ECD6E142855C002321BB7B446C1CC5BC1BC7A8E6989A2EE768BAC4C52EBCFB71B11BC3C5DC4B8D4184CEC32247FD44E0EB0DD3D7E74934429D53F36DEB0CCCB74308D4375EA28FF7F723C51FEF64F9C78332F7B786A3002CE498FF4144D63DC675FBA54F054A8FCBD2C3D14AB9836D4DF8130530F7E2C024AC9DDB0E2EEB5476CD1976B75D02047917098164CA32F28E15114287BF65F8D3837FF4B8ECFDC5E16904233F41393972441FC1806F2176308E99F81F3CBE4349F03042396505FA6E1C332C8FF821A69CBB40C95E6C7C849B80F6CE2D630C46EFF29D2ED3008C0CEB63C373F1878FE60FFCF7F091CAAE5FB192D974CE4E59A6BEB203A05002212FFD9E65A421FAA6BDB6EEEA977756ACBC7E236366339663FB99160BB95C36EF42BE8554CA58D71797EEE943FAAD1D2EED65F3FF201B26BF96B7AD8C0959018AEA3BD9C2F3BEC4EDAC6B61E305771DE0B96B9BC6A60E3D758DCCDA5A21B3A66F184F8C15DD700DA7B42F8E952E187ADA700CECF70E14D05D2BBF6198B6FF72576F2EDC5AB8A7DFBA7AFBF29D4B0B274EE8576F7FECCE5B6F9D38F1ACDD7560A289F8C34414E1791721A29B77AEDDB8AD2FBCFEC66DFDB585CB572FDDB9F3BA9EC06C344B1F5178D6E9A0BC88853246DA1AB99D1DD6AFDF8517761A29E1AEE1384FB2791363B0DA75D7CD39E74746D2C3AB46D25AC9663787E1233BED94F96346DECEAC19193D55D0570A797BCBC5E63279423BB42AACB461A70825E6A06207B190633CB6864CEBB19DB49C6BF06CE4ECE54D6B677E7676DC989D9C1B9D981E338DB9D999D1F195D5B91963747CCC3493639366326F9980356D23E52CBB3B396B3E27BB4A6DCC3BEF405DABD97CDA70E73FB678E736605840A2AEB59C3692EB76C65AB6CDF9312FD2B11CC498CB497821DB72E6C752D9A491B2E6ADCCF21B8B690BBEBD390F687F7D1856BA9D512DCD3B2FE37AB7DC423EB3EC38A965C0C3D9421E5E647EF4F1FCD8F0E8F4F8EA6CD29A5B9D995C1983DBC9E4D8F84432393E313931634C1A13E3AE0EE5F77B5142E07254888850CDBB2D08674A868106175FD8ADF106803EF5988B1F296418DC9E607CC948F057C351A0FA78588870F18783527890A8A551B7A3C2B038383D615C28DB6
3179FD2E6145D1D7BCD391D9860F0C245536C04691C98A58F0128E5D673D466CEC81B69872A7BE236609F92D8C8B29BDDB432724EF22A587CE3F5C5AB8BFAA5C49D13CE00C46F9B6B43D99C95D155734F9E3C190628E116562C6A4DC22AA20293EB567233978517759AF1C5D2FA507E555730DF3944ED5C4FA78787F515A330043F3D50E468F102F316D48D4C329BCF5B4977A01F57077E36EC11DDA4AD4C813EF5EBD60E119234076EDCE17B5C54598746C0D9715C2B4D1FC985CEE7E82E07F4E93A9046D055E3C9324CD7824BB5AED8F9028FB5B54DC9692BB96E64EC772D6AEB8DC44DAE1FF35ADBB64B95ADDB1BF0A531EA5EBEC01997E1BBB8D9FC0E75CA7696D7DD74CAA5A56CA5E07D9671D25109BAA16E1656D2505F0D4D349816292ABA6E38EB297B85BA94B19E50722167C22CA4DEAD5BDBA6BD065F9D1ACD5B5B059C01941B2AA106369C6C867A99CA1AA643512EBC1B453DC9DB5011A3E2ACC3EB020076813EA9B59DB4722ECC73C7C54F7A399BC940CFE19947A09A06276FAC539F360B19006B03F50A88D1F4A27E149C3C5D734F4C4A31D22B762A714C7D4F78158366E73685069781B7C7A715FA8EE91D9C8E820282E1E69A7C215858E730F24D4CD2A25AB356AFB56B8D2DD5DA947612FE45B556AD45EB8B3440D808FFE25A03FC8B6A5DDA22E46D84FB0EAD46EBD4DAE0A915527B210E6B3805251AB51391468D18017CC16AC508FC276404EE48CA0E689D0D22C9901788888D28B206BBC2A3CA8962027A75A346300DBE4BB2805EA07D774912D00B04EE52E629D0E1B544C77D4E201D57C75C424C125675CC25002D08E4623D125D4481350895B1319031C657A2C24E09E44A369AC44633D5FE2B547B03326948647A243D575BCF7C4A03F029D755CD2D5063A3AC19E848B71548F3283FCB5CCD32578BCCD52A59162FA14D26B4972674C884CED2842E99D05D9AD023137A4B130EC984C3A5094764C2D1D2846332E17869822E134E9426F4C984974A134ECA8453A509FD32E1340F569B088EEA804C1CE451C5E9518DAC04F017C8907F4BE400DFD7D6B0C03451F33F1001E4451CFC22C5CE50EC17222EB35D99798A9DA5D85F8AB89D1C7B9A62E728F6CB11E08B28B69562CF53EC7F8EB8DD1CFB5FA87B2F536C53145022C5FE1B8ABD40B127A36E2FC7FE2AC5CE53ECF928B02A14FB5314FB0AC5DE8DBA8739F68728F622C57E4B1427E311B9B68273BC35300693EAFDA9E4ABFCFE51F7A87C7F8A5DA0D87F1A3DC030F689FD33113773A98C9B71CE06F0F15ADEC8AD1763E4B475318870E709E6219223C8669B446C3245DA4048F0F2DE88B486325D05CC43B8FBE18900457909C9C91309EC9E637EC4547D904CD5473C95EC2EA1E9934ECC23E0509C87710334C147C
79C5EB80CEA7686C86CC7D6178054D05F936B84338D3B3D94E98A9133DC4D98C937AE8C5C430E0558AD1CAF94D109E2C206F57BD96C4ABF6E24374BEA9874AA29FD52F61E474C394D1471D35E375CDD012AA3E070CA34AD2C140966576192E86B050318054A9AA15538A85FCB670B393D65036145F1B3CE118ABF9CC7966F017D6AA4C2DE64CE39C1F970157386B7F003DCB3F29B86EDEA26BC7F2EC7FD181B25FE7110DE265770E0C59062A28431D98B1B3068F4C2B2E3A3B4FC0779F923CC78E98FBD42D747AFE82E122A091C231AFE710A27289CA4708AC2690A67289CA590BBFC48BF64E581D8B4533A10D66BEBD425865C3A41AE0492F00311D548E2BEF76861F096F7B88AC103EF710D8387DEE33A069FF01E91EB4D3CF21E373078DB7BDCC460D97B4C61F00E3E62BF138677B7E265C14E2690661B400233F192A23251449D98C7A85730B888C1AB182C60700A83410CAE63802C5602A9CDC42806C87027AE62F01A0667549D293B6D2510FC1325EE4D70A66A0BE91CDDACC37C21927625CBB4BD9C88385979DE1125BB962F707E984D9B940F6F72CCC2E5AD74F6B1E5B5E2E28418C084C40D6C1D6B87CC893BAA63805E8CC41BEA296D648C32FA39711253F0F931C60249DC06646F1B90C04D40F822197C8A9E9194D6E05E83B8F07F1A90D90D90A38188EBFD7E4054CBBB531122AD6BE58F48EBCBF08A3B5D21A4F552A6012957406AD7AE23C17A9D65ED2652A25580A7E1078FDD84BFAB14F18D62E21AA2A5FE8C97B95ACADBA352DE1E25928D2B6339390A62E53E48B085B82C544F056AC506500B0D1EA51FC139B011479A7E83A868A0EFB79D3AA0948178EEBDF268AE0EC87CE8CF6E15EE9EF50261E27C553D37D373FED785ACB545C51229AD5AA8562D54AB166AC4F59DC1D05168E3E7DD1A6A0CFABB9469054680A5FCEB28E5CFB8B5F72B146EF70B779415FE9FA9F0DFACA954B8D32FDC5556F8CB54F87CC5C2DD7EE19EB2C2C723D47275A5C2BD7EE1E6B2C29FA0C2E72B163EE4173E5C56F82F51E19FA9AA54F8885FF86859E17F418545C5C2C7FCC2C7CB0AB7E31E52261BBDBF331D5A58F70B9F10BD1B8DB80B0933031894D29A1E514DE7B5FB3B43A1359DF46B3A257A4366CDF751055F14952AE8F72B381D5AC16F5205AF884A0331E057305856F8086E4DE1BAAF50F88C5FF86C59E1B7F6297CCE2F3C5456F8CF7B857F430B2B4C3B34DB3F819B46E608AEF4BFA2C14A9783342A338FC9CCE3B2A52ADC920D1BA42A04B65B3F5D75BF4205937E0553A1155CC50A32FF0774F711419169C8354320F069755885B3F2790E5F23732AB4D159BFD1E9B206BF50ADC6871B3CEF35F81F431B7CF9000DBEEC3778BEACC1D335AAC1F021BAE0170ECC6418530FFED5C8318
6DF52B0E66FF46A7E35749ECCFBF3E415A8F9A2D0BB81F3F49E178479499C28E9ED3FF7EACC86D67999A6CF1DDC8034AFE0F4B92C70FA0C86BEDA55FFD5D46BFD7E8D6AE9622DBED6EFD7DC875FD16BDDAFDD7BFABFE6BFD6B5B2D1FEAE7D0A5FF70BDF282BFC6B5EE1A9D0C21FF30BBF0EE0AB499837697331224A2A3A5CB7772F6EF915DD2EEBC5FDBABD3FC19DC027B8BBDF27F878F927F8E375AAA59FC596E0F93EFC8A3EC1BF545D086B3F11A040168B2890AD5F17F07F298C74B957B110892990F62B115320C7431C867E8F58C905D360D1034A7AE566D8A265E493EBFA6D236DE9C073DEB802BC5CC2144A3E11A74CC0FB1436814723962F7E09D9A1E1E161E224CE502BAA1E1BD81E487126C43E0292915562999D624149E216367C0F037C1DE7F0DED510B3575C83644AE975CEEBB26FAFD979C7D5331C4762EE558C59C618A785DFD0364DE00A551E1469A7298A333551A69B46A01A6C2865A85A6A29038D9EDC20541B1B9817BF4A413ECB21BD8AAC2E8E35727D9E86CC0AB2A6B9F56C868A61529AE296298EB605A11BB4D784659183A6AFB04C3B89B2654C4791BD6A39259F65F2F56CDA72B34FBCE475F92C6BBF069F85397ADE95C027593261A5ECB540C579F92C87E76E3665BB36F44D0D4F4E45C8AFB294CD6F2ACD9F27704FDC3B892FE825868803866A809F0AB49293CF9C7A359D4B6577B883986AC967FAACC067E5DD65DC28A10F320A7F43C04063F7F4454CD3310DBF114EEEA1A121AE226372196E20637AB92822309CF23BBD417B31A68E0A4BEA3BF1FE8CC94A4C71F55D741B07D56E0F7C1C23032CE09AE5E8E7E51492CF941E4CD66931038F6E67B111E227EDACFC4A1F2F645DCA445F698B9E640B97ECBCBB6E1A3B6A8456E4337568447E8A9B76C6FB1429B897B37231B98EA216EE9A6516E4C489D300CA345DCA23AFE08C448E3B7E35E3C207E17D4D941CE8EEBA85128502C102EEF0EDACABBF962D64CC015C93C4A427901725669AF9F08C508C3732AA7B73EEC4413B39985FB46396B77229800C03B58A4126F891A855DF618B429BD8E5C4C7155F5CC41C67E1F2B7F1B95F63E6B80B7E31C9EA1E051619778A903D6ED574ED385C35481F04E6B62AF2FCD769791D2AB956CADF0BAD97E76ED14E460643EE31BD21C2F7AF16D5301848296D632AB44DBF442F8C451385C4CCE3F0451533BF1D7906669E305C54A2C50873F2D78A126A64426D69429D4C889526C465427D6942834C682C4D689209CDA5092D32A1B534A14D26B4972674C884CE604295DCDB8AA8BDADEB307731B127C0C54624431A0DCA310E134DFD18B1BCCA5F2579CF08EF6EE1953633FE2B1518A0DD8F6FD560499034E3B7287690623F8D1B8C14FB6B147B86627F5283954EB15FA4D
8B314FB2BA8AF47B19FA1D87314FB150DD022C57E3BC50E51EC5771BB8662DFA5D8618A6D89B88D1C6B52EC08C5F647DC268EBD4DB1A3147B21E23673EC2CC58E516C22E2B6706C9FD87F0088021A3F1805B424A424534A84AF59AE6E9B3A132424D324BAA324012ED9B4CA84924EA73398296DA57147610D45C824F974DABC64D2BB5145510C4A50B33CADA809948F4A09B99FAFA81514994A413BE6C814288D890759C97271632559FCC6DEC17CC7C3F315B5892B5ECAA3910A246AB254281DA01EFD8D2B961F93D49864C5242126B930498349064C925F92F77E2D899D4B44CCADC5D8AB04711132430132615E1BC801F8701922106C243250F50223621CC1632BE998405E7828CE0B1181BC996C202F3C14E78508CE4B6894FB4F3263D26C3132C6320AAACB11600E2E39C07DCE5D518C00DFBF04381E2957A9F83234B5F3D25E2A15ACAE8C2C5515B154D342B154D5A85B01D035A07441725454BA8849A50B00618458EAA588382EF1499C8033B45A4B3A151112DB7E92343362C80A6691158491DD6824E1465CEAEC4651A34246207621F62B2E08AF049B68E46754986EC16ADEA397EBBE8615B4B2D26F1BE94737D1FB6D7DA758C2244F4D9AF4088215B6507F3B43DFA7959FA5AC3824431B15E606DB79731D46B78B5EBE03E5CADD30005A58C94EF90C386DB7DA1BB6908C5D81FE75CB71E9F6D9D29E32A04C13EDB939449F7F3D42C0E535CB65B61380B091F24029129EB85471AA8FA87548ADEB5801F199C46222B1C74AA28D0F25117C2FEB120FB33674F9C86F7F79FAD045A9432A5B5C2C5087560BA9D48EBE16C41104AF2183F118C02B72724F6C775D318C9277B89CB790B9A064E8A44D10B45AF5D463582115F53F4D9F9F5CB4529663D8FADB436F1F80EAA6710FC029DCDE62EDAD4DD366ADB93B8BA4D35599CEF6609B0466FC92BC5195CB21DE40D508569C7359DF2B65B1DAD96AAAE0ACB3DA19F0434CACA72C2B97B8ACA0A4DCE4C2C16138E5ED6DF954FBBB54038E033DEEF02E1B20A572E0F53D70F96E045EE31278219D0ED42A51EC4749D3AB55EBD4CE01386A8D20257F9236BC3A80AEF528D958103CFD83C8F381A7BF4BE0C9AD61E2AF5652B73572C1D42A78B5F3268E2A002B804974FCA1D6DFF6E12314F5B4B1D420379654858D92F85567456A49350B811289C066A1374DD49BDF26AD2F59AA992962753AE4DA75D9C5C6803A18535BC59D6AF23B55A53A55A53A15FE96CDC59D928759046E7101B842705B23B6BF819475DA0092B64B905B2BDC563C56B24B80A9579DC9685711088FA4C48BA8E96093DDFCEC76080443AD3EC8AD4150476FD58D473A80802490BBA62D79E73DA8C2DE920A0F85BE43F8FB1EF6BF6A5D68862354983B70D403C18768408EE
1611206C121258FCBE763D015F9CDF930516946BDFC93CACF4920F84438080E8052D675BF810AB128C392EA2C007A9E43008760E0CBDFF39D52AE86E424D59845E51B16ED11C028910A1C449080F0E8E2AA6DA54C675E02DCE1949DB6DD81A9D1D1D1C1FEE27E1CDB0B3D10F14B0ACA4508C223A43C4C41905236F69C58A36F1FAC213BF387843B060E972386C46730F861059A03446D25DCC06AD3A982C56A149ED04652BAF84692B5F82C26219E487C0E83BF82C15FC5E0AF61F0A318104EF83184E83185030899D8A6CBFA0D38DAAB8416127F1D831FC7E0F318FC0D1126CAF95EB8FC2422832BC5C82082CAC187E16990F41DDAB45AF9D40CA821881E06C3D003BE322E229ABE27A361E8E19BA3FBA1877F1FD91F3D7C3354FDC0470F9F9207803F258503424162A120B1B6277AD082506C2973D14311BFF70C284231E3B8A732170AAD9A4400D8F3E1C088F8E688F8264D7C53044F4802F8FFD33044F02864E43745C56A15E201886735696CE353118F52AE11A49A106CA6630F58DCE9C3E2686886AE0039DCEDC1E20EEA6F0F9E90AB088B158AE891AAD01561F1A18A63B794F90F30F49D780E11D0017C83559E187F15D505F67E69D57AF3F3B59C71A1E13AC44080F448B122FA8137A9CE021EDE17F9B417231FD6C203D0756A6FDC430289918BB6394F58A83F1C0B314CA4ED20557100093158E5846742452F95211702E2415909E56B0B60184EF4304B5293E426FE6883E0D711D6090425B0B4E5BA2668826C6D55E02C81466709B69FD0430429A84D22D0882D461E35CA14D4A7A05CAD8A30ABE4070A3F67003005D83AB7DAA39E980506A65703D8B0D1A20E206C4D33C3AA493303F091376322FF250453C178A22BC9CEC3E2007EE6DBF260D21A8C570544CAD3A102366525376BDB1D68DB13315513E280B10E601E1F9F793888B9961D7958C835B305B7042D5183B791D9A8564F857CEA5D4246053E01849A93F2C04ECA00AA81B013095CF84C9891D9806A7FBC143DFD1C5C7E5793E23D01C80755F0983F192004D4A975F928A74A4D8F1FF4A6074C00420A9122A4C05345A8A9C232643C761211DB2C59ADC2A9728E80234F95284E059E2A119A2A1C81F382BEF525F8D6116AB136F0ADBF57C0C40AC607BF751DADF7FDE846343430C20BC2B988F40C9074B6D94FD4DC3C5273FD4E219D36F23BF36EBE6015AFEDC44F89720E3EF1B73DFAA1223FFB7730F067C54F63091C5DA62CFE3E063FAB680C4A9054087F31A63A6E947ECC9BF8892252DE2B889EE08FD94B9F511181A7456522B00470FCA1518224D47CBF9460E26F61504C053245E7137848DBB9D8F1D7AD9D95AC91376F207045EDD7041DF1C33973F5CE6B2C8EF0946EFD6F436BCEA1A10E507C94FCF318147F1EF
565BE0F2EBF89CBEC1E7D99722A7088A8C026AD3E12451141A4ADA5A7B1562B170ED44318D74ED17D5C6BF7EE698962DF3D7B11C3A154E1FE4283DF8DDCDFA98DBC78B1E66FF862CD5FF4310490838813EA51DBB4A148DB945BAB56ADD5889D3FAE44A184E183A2508E5076060292CB52D1A79262CA12ACE15A43EC3F518DD798F3DFE0834CC0C56F742A61E5D2D69F8791EAA2917A073549E1F93EFC0803757B22D38E72D9258C454FE85875F922D3DAD00C7CFE8C7BD1E3D188BD34B0BD288EA828323D249F8174DAADDD4B647A38D0BF2372FC0E491AF1DFC10B1FA6171E4422EDEBF5453DFAF0A8F8A0E4C3C745118DC63B689544C46AFBA55844BC1F1E6304542E0461A51EA66D1087343E648D13FD91BCEE32155A262B381E8E2682BB845F4B42E6237B629003089611BA5B691AF95049C11731F8BBA25C6670CA433367158661D4F30B881AEA142E60D47EC7C30FDF8DC16DA10409E1E283EF87CB994808E28806A4C9A80112259AED02A4617A5D885CB99ECE1F874A9BE341C471ACEAF910C757A21FA6B4D91725FCCE738A12CE462A0A9CCBFB55F59C02E76A4284246860445813180D627D7B8B46A456B55CAB5AAE2344C802EA3A5F405D4D02EABA4A02EA5281B41256CB122C4CA8E36E1328670B451B7CC6D6ED252971AF4484AB9A1AEB519417C0F37DF82D611F0E7B4CFCA170E1F48104D947FC09130BCDC0A7A9B957C73C7C7184069AEC1B55149EE8F2F93874C537701592F1C41EC293FF0F26DB511A805ED4CFFFEFE5C53D44D9178E283F60293E0B67508ACF0AAB2F44908F9AC7E1E85B1E4BAC8CB45956DE1940DBE5327B9F373C001E2E43C1A7F647C17FC8127BEF54E38BE6D3A4AEC93ED2FA8A38D893DB33D28D7B48F72714E625C62DF7ACF8F707E0F216E2DFD512FC5B2CBE67C64D09F02B61E452513E63E450017F91E9BFDF7B4E56EE8B07C4C80F5E38467E7102FEE6C88105FC8894F93D5AF93DDA8A184A7E8F6AF51E35848919E51226AE0D94AE9558B1D3AFA14ED550A76A880530718C0E1C1226AE224CCC119E164A0DAB0C56C2C4BD0494E9CC22E20FBC274CDC5D01131F9698F86D0F130F10267E1B30F1DB8089AB8208E948280E099F18C7FC89110FCD703C8080F420026AC64380BBF1CA08A84F3E037ED98DEF85805EDA03F3FE3B0FF3FEDFC4927E9DBEA887694FEE8B69FF286E59F48761D67D762D3CB5430FB7EE4B4B4C8922C92E6DD6234E3CE78977E7D45F7FC8D99FE767A8CB10F999BD11F9872575FDE0B039DB112CDEFA7876345EEFA1F19F57683CF10F3D04FE4F3C2C7EE720A81CF72CBE632F545E2283A5B817C05C7F2495FD482AFB9154F66B512A5B82868A8E1A5412CE2A7DF717249CFD13FBE29270E16C7F3842093978F17525A3E
51307CFC3203E9390D6C7310762123F0D975FFC4848FB9190F62321EDD78CACF22321ED8723A4E53A51487B9B71D1072DA51D3818122F11D6064FAA85096BF7C7CB6528F9DC8151F2D795CC16D1F10725B36516EF1945B63F0497AF7E0822DB22FCFCBC225BED8047763E12D93E97C8B62680976B4A45B635CF2CB2AD09886C6BBE0644B6E1E8681F49E61EE82828C9DC131DFD618B6CBF065FF44316D926D004FD0725AF1DDA03C7EE23B6F54E807FE862DBE740E9630742E95F9FD2DB4CF6FD4A6F7D841E26BDFDBC87C67DE96D3842FF9FE032112D45E8A24A2B13DC6A1504B7A25160893AC55A8B50865B9420F4226B32DFA555B426D3447832E2410197F03BDB0709989501B81148A8169E5999E2845AE19995294E8809CFAC8C9F502DD17254A165845A2E997DE5524D32B1599458A66D218B2E5F4591AECA8F95B50ADFB04C541EC9FD0B54A087ADA978B65BBE91627B297648D96E51C65AD8D4CA214ABCEC196BB9AD6CC940EC5B9EB1163E10708462B3DE40F61DA06795E483BE5915B6637D295F70ADD7B2F9A4C53655D81ECAAD42CAB58369644A85ED9C2C1672B09ECB723C544076507FCB58CF66D9CEB6950F354642CBE8723E9B759FDD1CC98BB13AE29B0A692C8615278B814389BD103A358FEFCBA7F47110E848029F9DC79121CA7F074740326561A63DD0F474F96ADE824B321A381D8FA63DEA2B9AF3A80D33DE51446A7FA7773A9E8FA348BCCF2436D0DA6A998690DA3B863808B55D214F8D9F2782743DEEA4D42255BB412B04C8EEA5CC04341AA3467F99A084BFD75C5F428AD704CEBF132D59DC58BD2445C977CA6ED43B1A753DD83090D3DB7F0229AC2B8FDED58080DE9924B147139A06419F8B1175523E2E3BD0E4CBC476E52B438D7CC6AA8529F2564EC265C714B4473E6FD0160C10E1261D8D270F1E34F21DE803151BAB23D2BBC3A3F642CE40CA63A145A7E7A92B3572CB8080C13A9D89A46D073256FDB108B67348B51343E2EE45B403F364AB1508F466DCF3D8E06DA56F8904D8A0E0573B1C384D7F840FA4B50231DFAA2DF970897D1BA1D1FE6208E513836CAF10C8C07B461E890B0FD52F65F3263A00401AAED6831B1DC5691920E356898C9307C09974E0236BB2B43C9544B0E729D6C1B6FCEEE5D12C9FF33F089F1A5B192AF3E8C52EC3467C4762C554D5F8C4CCCCD4DCDCE8DCD4DCD8F4D4D4A9F1A9F1A999CBA3AB6393A386B16299AB2BD35346727CC6989998B3CC31637C7C7A6265AC5FFA7C43D750FD8EB9B9FC989DA1CE8FF74BC770DDD0ABFEA07FB77EDF9F1BDAC8C752F376D6E9AFEC1DAEDFB1D7E62756A7A6A656E7E6A01F63AB4973C63046939393AB53B3AB53E3E3D6EA74E28482E8F491465610D6FBA42A1F00243A59F995D8256B02E88
A2C384E6C19090F0D2DA79D35A2F003152E27AD4DCF06D823FD75A01B371D5BA7FCE46576006D670528381F5287D072EC35CC326006642C878E2D051C05F874DA5F127BD17D44ACF95E053C2A2E40B67926FCC90F051D40736C460246C6B4C92744CEF1093B4A32D18746E25D1146C97D092EDF80B0FF5509FB1B01FA232517256F592852E9076A0E3D632195778268B806788E6B53F06B91774D5A3BE4F568340F13FC270850BCE0121E60C36C482469921063036D465454C853E5E7A9123B2EC17D3A850830C6A811DB97912431C993D695475324CD61F1036F9FD691072B1161026B83691EB939CE68696B5D2C6DF7A0DCECCAA31649886DB019BAAD1F114B8C2E9A031B242D5E3F5B259AD0980274033EB709DA2042BE1D002BECCCE7264280A025D880AB48FA6E367E1F1747D025F0C33E20F26B0E431B369C418C880769F8B3B685CC4BACC626B8A4BC473EF13D6020AF4313246F64D6D899844B9E9B81A3A492ECC799790F6BDB4DF244434BA9743483333BC491B05FB9AC9D2999FC4C852053414BD52D9F81FF142E3F8833F010CD1E4D7A696B8619D54387251BD1E904CF2D1C42CF064F6DF4F9A88C5F8EDCDF2920B672D91173447D762AB2414693F154ECAD08BC65FE7311D6B0C0A4982422605AE231FFA02E078ACDC86D32BB0C46A44EC7A3C9113039148619D94C460C3422F37F46DCCD90E607A06DC49F9BD4F73A3A1FEDE1E916C1B2406E46DA15C384225B610DDC9B461679D58A5E259069440C8C78B85348AD3A9282B93D82C88C5EC984F408D6F5884A925E9A06DB8DF3BE1A4DEA3F4B16C16A10BD13BAFFD7D4DD2ED5DD7AB15B5FA9BBF55E770F055B3F2CB17E48EB1847AD7693FFE46E495D69486EF4881914321E4587CAD020D686C6088E4B1507599BABE3809364B057EE25F550ACF70D28AD47322FBD2A0DBD9D05D28E703F65DAA1B2B4135EDA91601ACD44EE611F8392B71194DC8389C8AE997F89089813828C54F690E1FB194884CF79526C9CA22C11F2162EC7EA8494B5F57822B5FE6209974EB0E69F2B52C2236A70997E444F48FE92D13F314FCBD94D7F8FE95F084F0CF5ECDB6CCC576270E7750269BEF08AAFF32CEF0BA55112BF21A4F031D837499A50A597EFB2CF7B69030189C5C47761F0ED188B40373E3C3CCCA8A19894A1F3D20344C054A462FE770CFE22065F16529743513112D6E7ED5CE25F2B4C52C8A7D0F1682DDFA2DB5522619820C91A261339249B225AC6A77490BA600B0C64735AFA95625996CFF4E2FE28F95B625F48C69A910A88B6482CF639855858D245C8C62799902526BCCB7C2F99212A6C1A89FF4D3DA199F5045AB064C1A4EEE5429F4C38C6D9CDC06E5638C9F47FC2E5F388B0B0A71EC944BA2568AC2106BF2EB8C3FB2A229
5F07C38924D17885CC2A7C9088AB83EAD75C2B5A59874F2C45B482B5EDF9E45FC76E5D128A2B25D2160E557A6A4885A220E74AB0EA89A592495A828A1B7BD8B567B452F8B25181295B75A72D57247CB13F1902187007113908F2E2AF968F89AAB34DD4B85306CF2EF7742664809718106BB89B848967F2B9C10FF2A2A571F62FF46AD5A1B8C7048638E93D923297E18B25EDF692DA62998828810543E4514C464E43EE688A2310D2637F27F4FC8986A190323BB1915F9CF9279E25A56B3ABF3C5189E7195887CAE15CAAF247F692005904CF822F2166644E2D90D668AB7BE28B6FE1E25C48B133242F81D8B53C7EE6B7EC7E2818E5DD2F6E958BCBC632CA6E48EBDA185B4BFF586B6753F2C21232A8D43BD5F7DA4529E063F4F54ECBC876F8E0216129F3051BF41824B14B61882842D6F95926068F69A7069B0E2667E96DAAF8DBE759B10EDD616E6177E41934C018B22EE1326EFF6BADD23A9985A4959D4066448BD48420049E50942825DE9F0DE51D28B35B8290774854BAF89F4047EB92AE1FC58C43D26EFF37F10091BAFE2D73AEA912698D6C9C4924C3B164C032AE498A8F02E5D1E55235BFE7834ACE5EEE2590D350AE1F7F64F552C83E45B4FB04C782F7A0310E890F0489E560FA20011939570867C5E042094C4CD080874E910E0E2C0CE486610F8AECF2B641080595495921294F16EED124EB979A38C83DBC7F25F653E4E1221DC18335FE8C481717D1BC792A172C334E01D1C009FA180B7643C92B9123FB1442E96EC4E49FED065DFDDD6B68D4EC3F16D803374EC77ADD2DDAB973C2C8FE404FBD740870B240171A5A7FB2C1049F0F68CE70FB05D552238118A146067EC386E69831D20C287C95B9BD441F9CDFDFB648E880872F7C0487D9D2E10FF6515957BC2973CA3FB7F128AE97F0D2EBF85D8E3A6C41E51A9B7D206ACE924E0F6DE489716BC3F2937C0D0B1F839F8217EEF8F0C038EEFD1EE468E41AE01FA11E6C1AFE4094AD60EEC1A614D20208615C5226ECF6B50B5C4E4357C05744F9EC691C31401AB776CC769A34EA9638A00D3592DB6FF9126AD585F79F4450DC15004452833685A8C0E3430F344CECD1A44EFD35A75A600353BB8BD26524327F9B508A80660BB31D56E5CEC7C4C43FEB2054AD64B9D1093B4399E0237DB408C34F934DC2047014F1B51F373B79138E529F413B0213C93D5CD682D0F58C1E556BA69236937F2F21AF29E100530EF02A610389703DBA41A2586EF69B3D8E915BBCDD46E53A0DD1618FF11B9994823DD2101568401D66EBD8C23F7E34FBE4D10477648ECB650571B2328ECEFC477411E5449284A3F99AA282AA441D8ADA9C8811A24E9F8B779C8E806A9AF7C9BB80FBF2504A111E230A959D67C21C889BBD4FB7B4228859F47859456B936F
AE3DD946C46CAC8248DFCFB71EB44F0EE16F0846F1652259A7FC4FBB0FA1FF150CFC19ED1D130841CAF0E9F21EF36B44786D99DE5406DC4F80E7B6923C96C66D55EE3E88BC34E3E39BF9A4BBADBFDC376C64DA131B26174CD03374337AEF40FA3D5E97955956DFAF524E8BCD437621F7083AF2F0D3D43FF3DC35713893B89E51BB7DF5CB879E30A30BF5713B7176E5DED7B65F80CA307F6C3738BBD40D113E9873C86212293B1FBD72491093951C2913DC869805E0FB297B82FA828B3FEAC02A85025C25BE9659B60B4434EBF989723FF10A8B492C04DC6044E95C42406A85A9298F1403E318FA4FB40F8866C7F9353058FA14BE79C7542051BE914D5CDBE0DF06ED34E1B9BACCBA07B13E7931878D661A5FF5B9B8BD277A2CAB2EE134ADA4CD9EC9C7723CBD79C15B263FACFE0D25D15385EA09C21D4C0BFA374C4001518E3A4C91027ED87285CD1902C3ABC6D008410A73CE83F27AA9DD26A21AE551B8820D2A80B9A94F558933F10A2982D91A24F25F094BE28DF63639001D167358186CB9E2E638D60D507803EAC50551320F46B5861A14EDA73C49D56F231CB561B015E32220004813CC0F708A530B04C7BA0D43140179B301D7ED24BBB4C6E6D2304107F97C4F7DED66754C88369FC4AF8C0CE5E940657ADAA8500172EEFDBE4CC9E3E8D91B38BA118CE2FBDF8EFA17EC5C86F0EBD76497FF4786C78B6249576BC6F64608DB3BE50CAD6A1D2A14D6B475FB11D435F2FAC142055778C1D0372A48C54C156AEB79616867D2F7267A7C7F5D9F1F1D9D9F189B1A98929126E3D7CC5778DA72FDCBDA1BF7EF52DCC7F22007672004EAD15803C0873F2C6939185DBAB2B897B7727686F903C85917236F46A047AE541475E13BE57EA30E18EBF7ACF162F6192C3E002358D55D7C8974A6D1685329F77432DEE6C26E11153CEE3F2F5D005B0E62AAE8773B41E1A03429118B0DA9D30F71A68DF284E06101B683F29AE74078AC41E3B9509A10F53ABA7AA54AB2798D0283C6751C509CDC27316559C10D09C294E68179EB3A86295A24E99D015542992FE918B9C4515A9141DA275FA44938A3BDDB2B2C33EB1E1A914B193A8D3B44CFFACA752F45B01D7513FE4B983FAB580EBA89FF2348CBE18701DF5AB9E86D16702AEA3FE8DA761F4ED01D751FF4553EEA0DE0DB88E6AF5DC419901D751A73D7750B703AEA3E63D7750B301D7518B9132CDA53D0680200CAA1756D45C429E6F504F5846D2D5796F4ECF651D37A8C0C489AC744969A4C0D4416997B380678B4B065C41C954F4FACE69E40A8A5D4D2D98C5D6E0651537AEB01FA8239C09C89E9C1B3C6091B7B60AC08B39EC0B8A3B71C502E41C3C86E1B01BA8D6D254EAC33B98D4C8D50367055C6A2A957DF2919FA7523F4F7B2B6F21099AC789B12
C7774E35E047E6E4AE794E54D9A05988E29F291709D69F2359964D04D5F8A72F22D7E2F4AE16F14D0FEC2D94CDA5F2BD990EDD73C5CDE41D07D9B4077B9F2D7FBF4EA843DF488982978853582E621205E3B3888A76B08842F8A0F00F8A2F8007C2F8A0F80F7A2F80074F7E2AB4B81BB82C8ADB290548924785EA5C49BC84C4F4B9D4D00E840FE647E90226728F29406F00923BF95226729F2BC06501D23D99EF71C45DED1008261E40A459EA7485303660F236F51E4CB14B9A399C7297296222F50E49F2B57E96C952F7542F6BBCF078CA5AFF69228F210EF1DEB0CCB73CACF532D7634E540A61F85B4DD806C58916D8015C1A4321B1F10A95107446AC5F67792A2D9A0E805E4D27BE5D17B727B1C3DBBA3A299207D8E1853C175A45CD74F670BD0913BED2B0F790D10993C2CF8F4CC88EC25FA746E412D3AB6AC4EF250EC9FF4F18C7DBCC67BED52A36D6B21424E98C74B5E7782AFD7D4904D7A3CFF7B74F062824CFCD30950DADE0D1FFBA90056561BBCF37B21A65A02C537ED4D2BCEC84846641F43C4031F582F659FC419E77086EBC6BA116744C319160D33CE38A58E817E662DBF133F18BC47184D2CD4CD1BAF5F2510CD4F77DEBC4A109AC0D7D29D2582CF94747DE1FA0281674A5A5CB842C09918B385DBD7126F31BC2D452161E7247D453D96B9DEC433090739368928DD731A6359A6F418C30AC5C54719586506F204CE34A01077842039FAC1BDE8EEE4AC50E33D81DD5B2987A64D5C75A8E1D93CE67AEA89BED3DC0F1849FA78AFEB19F0DE3E420392372391E1EFEB8609875116400A702EBB0433361DDB170658E9AC2B1D89E52CD68EC3E9435F91997E6F37579A6B7FB71C05F602F7F2671005B26BC31A0F097E56A2BC58855F3CD2A7297DA59780B3E9248EFF84362B5AC92770BDD25BAA0F22C12FF948B0BD0809FE778201195B35166340C6567DC51890B1D54C31069CA7C89BC518F0158A348A31E0458A7C5C8C015F555E703F140CB81E42F594E3C43D7061581303FC2C71241D194544D34888E67F8C5446346702886656BBEFE3E7B3DC9773123F0FEDD5A73AC2CF7588557B01A3327E26F56FC0ABBD4FE3849F9BD03C9BC2CFCDE8C517EDB021FE24FC7CBA183F4F103EEE46F4E921C4560118B8043F4F07F1332167D47443FCFCAF183FCF948CD46C097E9E0BC1CF3C6CBFB8077E0ECC0C0F3F2FFC91C0CF89973485194E6A0A299CD2143E40DFE68C0A4E6B0A0B0C680A018422E24A07274BF0F0FB327D57E9086585D393FADE6D3D9E181E7D26C49F18C421F091FE7E34452851F02229803F6AE8FE94C2F9893338927BE27B3630775693CC6BE21CDE0D6972EC13C37857ABB0396A33FBDA5A235AD81EEE2188FD0222F7B73DE4DEFECCC8BDCDB352D01F2941F55A6
BA408D517F1BB6D911784EAF98473487A7510BB56C853E3E78956CA531B44652887AF29DA17DE69D74210172946A365BA08130F84341A8A9005ED1E20B2A82F66E6D80C5D90996BF698B9068F99AB45CE0E1B883195B12B4F1D069105D2194DC59253566E29411652808A7D2CD6EFD93AA611B2E82A1992EE92A1EF0941161D842C1A09C7767A03DB2545B158CBA1106411E2F5CCDF676537C2F7006AE8A72FBC725A2F64DCC2A6BE62E46D07C34265DE87CF13BC4E3223231F0285B1910BAF1C50D5F57D404894688E4025D811E7A2DCA9FCC019234650524D8607C5E790BAF7846404C4F63E9C7340F8C5C0CED75024CE85F626F1B3040EF144398E201AEDAB6CA69961F94905D028D60EF1667C188AFC7695775ED3076947095C9DA4DDC6B6BDC154D1498A3F17797162B90F1D52BD190297F68047614D34F804756380A06EA0C5BE88362D814A54059B44F191EE12829AFBD2C27D699530B2CDEF539DEA539DEA538C60648C0E5FA001158291712498F1F0C55376EDD92CF563A88116045EA81942A6C9AA8B5E5A93A08B8E2B0581529B0092B904461E0EC2C80E79E29260E477338C3C523252474B3EFAB11018D91038B3B9FFB0291879FC7DC1C84A64E9B381C80F846ADDCBF087BE77831F38E95A0170BF50E2F57D43E262087B60C2927659620A12E3F68A0F8D13E39A222E2714859918C53BCFDC1569985B246B5ACB5BB972A87C0432D40148752E7850B9AB0C2A1F988C2C524B47D7F5CF0A9FDD2A7FEF382A576D153F3308DE19936A79009977A5E2A0024A51299440F01A979A82BC98EB6931FF04B5D7E035D2E8C365123655F98B79FB7364B73106B08CB40AFF3202692912A82683C6EFB16927140B34A8383ECDC6BD811E00E4369BD449B9592DECDD9AF919CBB7A0ED2C5204D8FA87A15D6CAD907729ACE2362133C8416D2F027104AB3AF682554D4182849587F75B8603A2922BF3B1324FE67B513D89BF298A967FFFDEEDFA0A7A619A742C66CD172C09442FC9433C1E37CE90F41A9EDAC1977C36A8D01202150E6A2FA7124018F01945331B58ECB4BAFF3D069F5719F2D656F98A3E0A759CC5153D50B2A259C917CF49F7936BF03E6017CF458648E64BABB726B87A3FFD4C766AF659B75A60D16A72D1FA56E6EAA48AAE5CB4513A075D4FA732EB68A1E1FAF939AF8D06290BAE92B2E0E034AF2F9AE6F001B63FA3EAA1454C024FE4CEE8C8ABE2E78AF47ECB3B440BCEAD974732A4259A90CE309D4495B61101B6F52BA103D31E922F741177ECF976B4883BF75AC42554C241355D9506C64536925589037AF88817292D65265D6E674BB5380EE8C4E160EB9B37B3F2C07F54B0CDC5EE02F00861D1BA568719D960E83D2B6FA70D880C2C7652D27BDE95EC9FEEA3F54B27C06
8DD7A721F5EB79E8B81F2457B0C725EF217ADBF640FD3B9BCA3E4A5B595906F9B361019ACB068DF7A81C6A5AA45891ADACEA7D42AAE0DFA0DE0C5A2A9C51211DB9FA02D0B5A7277A5B6246252667C109FB2F118D463A74515F40DA08535DF2889FBEB5BDF8A27922F7AE4F9EF927EE88827FFEE232EA729943D6A0A9C690A69A3D9E3099AA9784BF95A6BADB0D6680174D35CBA71FBB53BB0D42EDF4BDC3C7B1967FAE2BD3B77139E42E423785E48DC7B3F7AE7BCE6EE79D8F1CC81AB295420F9F9E00E2B5199B82C78E92CBAD95C0EE8F417801207BD05B2F7423AAEC933393732A6B5CDAE937FC14393B81C038BAB645D7D528D48F9E2C26A1F54CBB12A561662C3EAADA4EFDCAB9D8AB00E685FC45B5E311138EEF2198D16964FD1460E207108A2432951E0451615FE415529228045F69B6A9191338E8A2872FB6FA86575E5D10F934CA12AB8B46A8B96962F48E07A6A543DB562672428D844BE3EA4CBD2AAD3B5EB4B5BDFE151D3B3B4D642DEA345E5C6F3A0AD6206860B8ADD87DFD212AD555EBBD98044A2B4C1B6E2B55ADA42BBB75679A97754C08B7418E4852DD8D1FD571AF0B561ABF5405C2C4A072EB2558379568E2B21992F68D2B9AC3A445142BA362AECB66EE40A78028FCFE0BFAFD5DCFCFCAB99B856147684AE6332DBC227F0B3B9C0822639E2E7D5A0F1A177AAFC42E83E89AEB1A03220540C5BDC7C94A10958D646C09C5D91A225DE125CE2EDD11780417D11604D311295C6D9A429B8903CB57E9E0876824F2EE38161EA07ACEBF7C8964B58D918B6BD7D2CB28BE7E7BB220C1358A4C854ACE452F17C5CB53A08A72A8A152B4CECD63071BD5BAB96DACEBFC5CD0E001076BDF470300DCB1496E2349EA5EB80A74E94E74D9BDD28B8A34B2F5CC860F1B479048F744D23A46983A76368BE7EDAD4D19AEEF4D39830FBC4AE348A4BA7F04E927B826E3CC1714AD980213335663F3DB2DD9646B53B8F27F41AF1D415C692F6B5795A4CCB537628661C401503DA9DD19E3628BB730DCAEE5C231ED1431DBDA74DF095CF78FB23645639EC239F2552E4E7C4FD9DCF08B6F0669E636104AA1C309DF1B419550BDC2E3A9ED7AEF886DD48587D236A8C3309D43E282DF4E391B0AF3DE6153A1A4AEE8CABF425E8291EB083FEC26F29AC2E5F01B03A440190E0E9D45EB4CF912028BD6BA56DC758D74FA3D58A5DB48E729AE1290B0FE934320036E0F48929E17DA1B7B285BC176BFB67924B6030193D21A8CE67EC48B6C766725F230562B691AB2A9AD75D1D12A7C7576793D6DCEACCE4CA18DC4E26C7C62792C9F189C9891963D298184F10F14FA08EA8175293267B1C649C0A591BDEB746A492A0DD651207F628F0EA1BA149D05934DA8A46784F6A0C63C3A3091C210761A691B39737A
D9DF9D9D9716376726E74627ACC34E6666746C75756E7668CD1F131D34C8E4D9AC9BC655A19D7365200FF712F5E9999917668DE11645A138DD57C6CF1CE6DCFDA4CDA40D3CAD632D07A63954DD0040DD84894E3BF836FD0065F8131C3B2E3A496F396039F0950D2FCE8E379782B3AA557705787661378CC3281F8C4391D407365467BF2C8125AF9C7567E38B79E4BA0B49C0DDDB0B8E92636AFAFC20B5AE6BEF8120779C429AC38C9BC4D169ECBB12529A4480C9998D53C64A88440329D10D48B9207FBEA05841DFF7F0CC856CD1521B501AD4C326B5A89398C3C8FC1CB84DFF6C2A52C2C227131329B4C6413D2C4A3E1ECE700ADC5F826CBB289FF179FAB35898871DEC1C85B2E532464F2F9B6C2C1857CCA618324242D8301A4D5578E6E4F4075DF51ED1D2DF791ED496255D1EE5E33D1D283105FA32D02C38A67A60E699D80809B10F992AB844E4DC75FE404E4AAF3113136EE9DA8EA7DB1F22744A4D574AC689774DE647E653A44E91D2A11F05A688D7CE88A95CD622CC98AEFE32F21A4967AF695D040A29FB00C8D25629FA67270DC5C011C074CF65D3672A8CA83A034EC508BCAC506C491BE9DD97BC129FA56C28BBB7716EFF5EFB549CE4B7AD1355C20464982CC74F0B3ADB3307BC32572195F865A69F904CE05E25D1246C657B32D9FE50E5C7EBA5AE2B5A030A60138C45E22264F68E1B3B6E77D0B60DC0A2AB2A8AACAD839B86511F5C58FFE3CE763AE3FAA743B6B856FB326A0274BF37C950AB4129DF1259C69CC6CC29446930911C8718572B4518EDF41CA8FB75930479473F41DA0219AB4EDE593D6D736C4E932A82F6CBAF62A9E3C853F563A8C51FC2D83CEEFF324FF552131EBDE07B66695D620DBA1064C6AEDA38316AB04D4FD4D3E6FB291007ECD78B7E4CCE9BA41AD19F81E942593CD94CF30981FE24B38C3F4A219361638AC54A3B570184962A12239C4AA1638551D003FE41206C14F95043FBB34C750F206536C97B80EB6F4B81A115DC011D864AE01A6D934FA7F11A20BBEE1F453524D40DE80EC4A22D11E238E815410DED3224FC940296EFD57D1B4F84DB27A23CD1D11F4AA95DBE46C063A24B1D94B8C5161256D936732DF234F2C5E03AF6A6175B4BD8F06FA90A150E51AA4484119340A5A0C63F9FAD9FD406309F14180EE5BE02B3D36F2B6B1920292E9937DA37DE73FD9673BCBCE3AEE915966DF79FD9473AE4F11673644F4CDADCCCC8ECD8D2587662757CDA14963657A6865D43086C6E65626E65647278DE99564DFB93E23E966F35CE29403CFC9940DC4E272BA00D0D6AB6B6574626CDA34A787265621985CB156860CCB9C1C5A59199F4B4E99C9E9C995D1BEA74FFB2540C7530DFD663649A4E3E4CCCCE8E4C4F8DCF8C4F4CCD8EC44FF56C1CAEF2CA3EF8
9F91BCEA27C8145CBBD255BEC279B7BCBA6B56AA0799F79DC71937199422A25238AEC26CA3E13A50E75234D340FD1AB2BCB48ABE4ADAD65165CA6F66C5766877A53567E399902DA353C271D603772B9949DA4E791EDA1274F9E0C210D3D04F40F13652661C5CB594049197708851704AEEE2C00710C1FCB456C84F7D9BCFD2ED7DAB7F7CCA0A8AD14115D24BB59B70C138855DAFA2C9E0C384201242EF746AD14B052A66D48B8F88C876EDACB5A213028593155795A025782A201929891EFDEDB1E449396C0C893458092A12A0148A67E713DB0362C12AA6B2CE821AAF4B6A6443E1DA1D29E1B10FB15049B27096CB6D3B1FC2EAD07C0E46120348FCA7D12EFCEB79557A540E7B1CA08BADDB35814C0D2D27091CBC6E9A241684B1E8E11DAD62A99EC2708E056C98D0F96C956934C16EF6A58F4A291E7E31A56838A31D68C4B741E6374EE495FAA39EEDAF5AD2470EE2199EBF919815983B447416A513109FACA0A3478898D648AAFB19CB66C2A43D361867648E2B637B9C85E6CF6DCBE28B774869310AD90B2A5313643E629423D1BED18B6A7B7B7A6A43F917D851C1257D67A7398C6C5765C52C5A159CCCA35F07417A7EF1EFB1005B87CD59FBE8CF5AB01EFA305167F1FA2511B024AC0A32F8B4E617DC3C1ED4CF0C45D23ABD06B4542C3E7105856A1A52994475629D7142C89AC960A36752431FB79EA422CB4A1BA00D5890A363DE851E2CAA3164D5A0D24C4BC41F398CDB6C0FF4F44900CC14D093289880488A42F513C887399350F59CC8762C056292E34493888B483A5E18A6B938695B5B0F76C9262C718D70BABED9750D6292D7D61259FD3709D9755D21EA8A435A41254442A2B141C94B69042A112BA767E46E5A58EE03A57AAE3ED5E5AA42CADCA4B8B06D3FC9DDE60439D25A2C0AE72F8D05D061F5E8C2890257E611606D9D6F14706A19581AD9BF6630B376A2B1B676639173915C28CECA8E38A6DF9DBBBCAC0239B697C36C0DA190258C3845FFF0B06BE75C4FF28942CEC73FB83E2EB0A00F3D91A8F794FC1BB938963E2AE4CDB6211D8FF2A243BEFCBBD7CF359E5A0F8315C9AE1A59DF301505C4F86B06A489C156527129146CF7944A336002C1984103718B17D005D17A42FFEE58BD0C07851FB47515FC81510167872AB885AEB369BC7AA17D3A8B8D1002CA052A504388A2A8C0838B40D363DD82C352B51274A390E0018E9C4358081C1183CAEDA28A4698336C13A9100C90028E1464CB5D87E977C723693F71D7485714B99906D51160E6BD1F80D565AAB2AB5B4AD59FC1F7CFB8021A16BE83B3B0E341C6459CAF486A2A2439580DFE112E077A41CF8853809F381DFB920F06361B66965D68C8C6E6C1632FA8E9159D36DB2DB65DA7AD2DA6460C8FBB66F008854BEFDCA5
D6CD0464777605197EC731084E43D0B2C5C64AD2BBD623876B21858F26605D262BE109F365BD04DE090693DB6936C0D2F4DE08A1C18EE559F63B92EBC9933021C9633E21A2B00514788565AB50C002250473D55914E0DE78C3CBC2B4B4B26E42A7E9CF8692119823F9688F9A3C2834836D59F114655B4755302A3AE285062E40CE66608065D52B2764A7380BA4348C56F6467CC65E037591B0D1EC8AC388BE37DE04374AE677E6CC5E1D37EE102C7277019AF09515041B1FAA0D60130E8A4768CECB3B7693591D3D15E1FFAE09F277EFCCF42847916E1D91D55B31BA00C0B9E8BAD8C558912EB62A400ED9FB997273438411DB66F54118153111CD1E213461C11385ACF111D3ECDC1115DC23BC306BDEC9196FE7ABD7E1F1224ECC1A5A9EC92D6F94B13490FB6B347284EDFDD4529228405DC4AD20F3F9C49EBCCBC9FF7958D0F3F1C4D930C915C83EBA57F9749288073115661E0C84348E6C5422E97CDBB2AF3F0C8ADFCF04D942484D6BCC83B6951CE4C9B591E33054BBBB042CBEA96BD68B93B46590D743EE35E369B72F855CF2B7B81616DBDC9240BAD1FC839363C1792EB26C0A6022A957472AEAB9935E072D6476E64D04AA8631B9992425770DB2061A52C0300443D171A9F1A1E9B181E1D0BEB050A558CA44BA41F641D9D1D9F189A9D866072767CB6A880DC32B809202803755773813BAFD360C5E371673A90F97236B793B7D7D65DFD673FAB8F8F8ECDE972CC86F585544A4F609203FDA45D47935D4F601D07802024B7F08D155EF320C8750F789CF160C9C9A2855DB4BA77E0720B57771FADEE46AD1398BE56F8D74BA1FA574D56FE9B348231CBCB405D66969749424DDF18F1C3D0C21A1E71410BD37772404C8ECC0DCF8EEA030B19339FB5CD97758A8401C8D82313E3C3A3C3E3E3539323B353C3FA1B2FEBB639A8DFC5FDD7ECC8F8F0D8F8F0E4F8849A1923F038363DB0E6BD65C67BCB730A44F2319533DE18D0B8D05915B483CD564E0C982B69DED7AC4510F815A168BBFF4B287A6F58C80D49A67E95638E34E084758341EABB188915029EB1A6275D14D793CF855C3E8B043DE099E11CCC7D36B41D25E83A99207D80876ABBF3EA369A95837763CF9359B390621AF4735E6F2DF4FDE1A2980D9097145C921410FD48619F2EE5B34FD0F106DAE7A4884526DC130FB095562EB8BC0E6F9DB296F3D995ACEB506BAFA1884D55ECA55BAB30F86C567619573ABDD6F57BF7EE2638E52EBF1C4010D228304D292924F461B8891FC1DE7F0183BF83C13FC2E01F63F08B18FC1206BF8CC1AF6040FE27C8EBC4FF83C1EF61F05B18FC3E061A7E9C8826D119191FCF1732644193F1641603F235FD3D187C2F06DF87C1F763F00318FC20069FC6E0873040D7BAE491935CB391772CF2384
2AE2CC82239599F25939B64BC8DCCD7D031773A184AE790E8E802A94293CA262977D19633EDC8D1A609DBE14539204953888E27F449AB4C2E1D24A360E984AE46CA72812684F50A2D2E7CB11AADF85F54FB19A0F53B00DF9E8BC4C9F42DD2FF476843E600FF22B185D8ADD8176267634763A763AFC7466273F00FC3D1D891585FAC357626D600CFC3B1C390E76C6C0AF2BD14EB8EF5C40EC58EC5CEC5DA621D0D5DB14E883B0C77DA7F037B951EB5"))))
| 12,127.333333
| 36,354
| 0.999505
| 12
| 36,382
| 3,030.333333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.635944
| 0.000082
| 36,382
| 2
| 36,355
| 18,191
| 0.363644
| 0
| 0
| 0
| 0
| 0
| 0.997636
| 0.997636
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 12
|
76cf14cb16153e9545d11e8565f9c2fb268aeaa9
| 74,534
|
py
|
Python
|
sdk/python/pulumi_alicloud/slb/rule.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/slb/rule.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/slb/rule.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['RuleArgs', 'Rule']
@pulumi.input_type
class RuleArgs:
    """Constructor arguments for the SLB forwarding-rule (`Rule`) resource.

    Generated by the Pulumi Terraform Bridge (tfgen); the string keys passed to
    `pulumi.set`/`pulumi.get` mirror the underlying provider schema and must not
    be renamed by hand.
    """

    def __init__(__self__, *,
                 frontend_port: pulumi.Input[int],
                 load_balancer_id: pulumi.Input[str],
                 server_group_id: pulumi.Input[str],
                 cookie: Optional[pulumi.Input[str]] = None,
                 cookie_timeout: Optional[pulumi.Input[int]] = None,
                 delete_protection_validation: Optional[pulumi.Input[bool]] = None,
                 domain: Optional[pulumi.Input[str]] = None,
                 health_check: Optional[pulumi.Input[str]] = None,
                 health_check_connect_port: Optional[pulumi.Input[int]] = None,
                 health_check_domain: Optional[pulumi.Input[str]] = None,
                 health_check_http_code: Optional[pulumi.Input[str]] = None,
                 health_check_interval: Optional[pulumi.Input[int]] = None,
                 health_check_timeout: Optional[pulumi.Input[int]] = None,
                 health_check_uri: Optional[pulumi.Input[str]] = None,
                 healthy_threshold: Optional[pulumi.Input[int]] = None,
                 listener_sync: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 scheduler: Optional[pulumi.Input[str]] = None,
                 sticky_session: Optional[pulumi.Input[str]] = None,
                 sticky_session_type: Optional[pulumi.Input[str]] = None,
                 unhealthy_threshold: Optional[pulumi.Input[int]] = None,
                 url: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Rule resource.
        :param pulumi.Input[int] frontend_port: The listener frontend port which is used to launch the new forwarding rule. Valid range: [1-65535].
        :param pulumi.Input[str] load_balancer_id: The Load Balancer ID which is used to launch the new forwarding rule.
        :param pulumi.Input[str] server_group_id: ID of a virtual server group that will be forwarded.
        :param pulumi.Input[str] cookie: The cookie configured on the server. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "server". Otherwise, it will be ignored. Valid value:String in line with RFC 2965, with length being 1- 200. It only contains characters such as ASCII codes, English letters and digits instead of the comma, semicolon or spacing, and it cannot start with $.
        :param pulumi.Input[int] cookie_timeout: Cookie timeout. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "insert". Otherwise, it will be ignored. Valid value range: [1-86400] in seconds.
        :param pulumi.Input[bool] delete_protection_validation: Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
        :param pulumi.Input[str] domain: Domain name of the forwarding rule. It can contain letters a-z, numbers 0-9, hyphens (-), and periods (.),
               and wildcard characters. The following two domain name formats are supported:
               - Standard domain name: www.test.com
               - Wildcard domain name: *.test.com. wildcard (*) must be the first character in the format of (*.)
        :param pulumi.Input[str] health_check: Whether to enable health check. Valid values are `on` and `off`. TCP and UDP listener's HealthCheck is always on, so it will be ignore when launching TCP or UDP listener. This parameter is required and takes effect only when ListenerSync is set to off.
        :param pulumi.Input[int] health_check_connect_port: Port used for health check. Valid value range: [1-65535]. Default to "None" means the backend server port is used.
        :param pulumi.Input[str] health_check_domain: Domain name used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and only characters such as letters, digits, ‘-‘ and ‘.’ are allowed. When it is not set or empty, Server Load Balancer uses the private network IP address of each backend server as Domain used for health check.
        :param pulumi.Input[str] health_check_http_code: Regular health check HTTP status code. Multiple codes are segmented by “,”. It is required when `health_check` is on. Default to `http_2xx`. Valid values are: `http_2xx`, `http_3xx`, `http_4xx` and `http_5xx`.
        :param pulumi.Input[int] health_check_interval: Time interval of health checks. It is required when `health_check` is on. Valid value range: [1-50] in seconds. Default to 2.
        :param pulumi.Input[int] health_check_timeout: Maximum timeout of each health check response. It is required when `health_check` is on. Valid value range: [1-300] in seconds. Default to 5. Note: If `health_check_timeout` < `health_check_interval`, its will be replaced by `health_check_interval`.
        :param pulumi.Input[str] health_check_uri: URI used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and it must start with /. Only characters such as letters, digits, ‘-’, ‘/’, ‘.’, ‘%’, ‘?’, ‘#’ and ‘&’ are allowed.
        :param pulumi.Input[int] healthy_threshold: Threshold determining the result of the health check is success. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
        :param pulumi.Input[str] listener_sync: Indicates whether a forwarding rule inherits the settings of a health check , session persistence, and scheduling algorithm from a listener. Default to on.
        :param pulumi.Input[str] name: Name of the forwarding rule. Our plugin provides a default name: "tf-slb-rule".
        :param pulumi.Input[str] scheduler: Scheduling algorithm, Valid values are `wrr`, `rr` and `wlc`. Default to "wrr". This parameter is required and takes effect only when ListenerSync is set to off.
        :param pulumi.Input[str] sticky_session: Whether to enable session persistence, Valid values are `on` and `off`. Default to `off`. This parameter is required and takes effect only when ListenerSync is set to off.
        :param pulumi.Input[str] sticky_session_type: Mode for handling the cookie. If `sticky_session` is "on", it is mandatory. Otherwise, it will be ignored. Valid values are `insert` and `server`. `insert` means it is inserted from Server Load Balancer; `server` means the Server Load Balancer learns from the backend server.
        :param pulumi.Input[int] unhealthy_threshold: Threshold determining the result of the health check is fail. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
        :param pulumi.Input[str] url: URL of the forwarding rule. It must be 2-80 characters in length. Only letters a-z, numbers 0-9,
               and characters '-' '/' '?' '%' '#' and '&' are allowed. URLs must be started with the character '/', but cannot be '/' alone.
        """
        # The three required identifiers are always recorded.
        pulumi.set(__self__, "frontend_port", frontend_port)
        pulumi.set(__self__, "load_balancer_id", load_balancer_id)
        pulumi.set(__self__, "server_group_id", server_group_id)
        # Optional arguments are stored only when explicitly supplied, so the
        # engine can distinguish "unset" from an explicit value.
        if cookie is not None:
            pulumi.set(__self__, "cookie", cookie)
        if cookie_timeout is not None:
            pulumi.set(__self__, "cookie_timeout", cookie_timeout)
        if delete_protection_validation is not None:
            pulumi.set(__self__, "delete_protection_validation", delete_protection_validation)
        if domain is not None:
            pulumi.set(__self__, "domain", domain)
        if health_check is not None:
            pulumi.set(__self__, "health_check", health_check)
        if health_check_connect_port is not None:
            pulumi.set(__self__, "health_check_connect_port", health_check_connect_port)
        if health_check_domain is not None:
            pulumi.set(__self__, "health_check_domain", health_check_domain)
        if health_check_http_code is not None:
            pulumi.set(__self__, "health_check_http_code", health_check_http_code)
        if health_check_interval is not None:
            pulumi.set(__self__, "health_check_interval", health_check_interval)
        if health_check_timeout is not None:
            pulumi.set(__self__, "health_check_timeout", health_check_timeout)
        if health_check_uri is not None:
            pulumi.set(__self__, "health_check_uri", health_check_uri)
        if healthy_threshold is not None:
            pulumi.set(__self__, "healthy_threshold", healthy_threshold)
        if listener_sync is not None:
            pulumi.set(__self__, "listener_sync", listener_sync)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if scheduler is not None:
            pulumi.set(__self__, "scheduler", scheduler)
        if sticky_session is not None:
            pulumi.set(__self__, "sticky_session", sticky_session)
        if sticky_session_type is not None:
            pulumi.set(__self__, "sticky_session_type", sticky_session_type)
        if unhealthy_threshold is not None:
            pulumi.set(__self__, "unhealthy_threshold", unhealthy_threshold)
        if url is not None:
            pulumi.set(__self__, "url", url)

    @property
    @pulumi.getter(name="frontendPort")
    def frontend_port(self) -> pulumi.Input[int]:
        """
        The listener frontend port which is used to launch the new forwarding rule. Valid range: [1-65535].
        """
        return pulumi.get(self, "frontend_port")

    @frontend_port.setter
    def frontend_port(self, value: pulumi.Input[int]):
        pulumi.set(self, "frontend_port", value)

    @property
    @pulumi.getter(name="loadBalancerId")
    def load_balancer_id(self) -> pulumi.Input[str]:
        """
        The Load Balancer ID which is used to launch the new forwarding rule.
        """
        return pulumi.get(self, "load_balancer_id")

    @load_balancer_id.setter
    def load_balancer_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "load_balancer_id", value)

    @property
    @pulumi.getter(name="serverGroupId")
    def server_group_id(self) -> pulumi.Input[str]:
        """
        ID of a virtual server group that will be forwarded.
        """
        return pulumi.get(self, "server_group_id")

    @server_group_id.setter
    def server_group_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "server_group_id", value)

    @property
    @pulumi.getter
    def cookie(self) -> Optional[pulumi.Input[str]]:
        """
        The cookie configured on the server. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "server". Otherwise, it will be ignored. Valid value:String in line with RFC 2965, with length being 1- 200. It only contains characters such as ASCII codes, English letters and digits instead of the comma, semicolon or spacing, and it cannot start with $.
        """
        return pulumi.get(self, "cookie")

    @cookie.setter
    def cookie(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cookie", value)

    @property
    @pulumi.getter(name="cookieTimeout")
    def cookie_timeout(self) -> Optional[pulumi.Input[int]]:
        """
        Cookie timeout. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "insert". Otherwise, it will be ignored. Valid value range: [1-86400] in seconds.
        """
        return pulumi.get(self, "cookie_timeout")

    @cookie_timeout.setter
    def cookie_timeout(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "cookie_timeout", value)

    @property
    @pulumi.getter(name="deleteProtectionValidation")
    def delete_protection_validation(self) -> Optional[pulumi.Input[bool]]:
        """
        Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
        """
        return pulumi.get(self, "delete_protection_validation")

    @delete_protection_validation.setter
    def delete_protection_validation(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "delete_protection_validation", value)

    @property
    @pulumi.getter
    def domain(self) -> Optional[pulumi.Input[str]]:
        """
        Domain name of the forwarding rule. It can contain letters a-z, numbers 0-9, hyphens (-), and periods (.),
        and wildcard characters. The following two domain name formats are supported:
        - Standard domain name: www.test.com
        - Wildcard domain name: *.test.com. wildcard (*) must be the first character in the format of (*.)
        """
        return pulumi.get(self, "domain")

    @domain.setter
    def domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "domain", value)

    @property
    @pulumi.getter(name="healthCheck")
    def health_check(self) -> Optional[pulumi.Input[str]]:
        """
        Whether to enable health check. Valid values are `on` and `off`. TCP and UDP listener's HealthCheck is always on, so it will be ignore when launching TCP or UDP listener. This parameter is required and takes effect only when ListenerSync is set to off.
        """
        return pulumi.get(self, "health_check")

    @health_check.setter
    def health_check(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "health_check", value)

    @property
    @pulumi.getter(name="healthCheckConnectPort")
    def health_check_connect_port(self) -> Optional[pulumi.Input[int]]:
        """
        Port used for health check. Valid value range: [1-65535]. Default to "None" means the backend server port is used.
        """
        return pulumi.get(self, "health_check_connect_port")

    @health_check_connect_port.setter
    def health_check_connect_port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "health_check_connect_port", value)

    @property
    @pulumi.getter(name="healthCheckDomain")
    def health_check_domain(self) -> Optional[pulumi.Input[str]]:
        """
        Domain name used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and only characters such as letters, digits, ‘-‘ and ‘.’ are allowed. When it is not set or empty, Server Load Balancer uses the private network IP address of each backend server as Domain used for health check.
        """
        return pulumi.get(self, "health_check_domain")

    @health_check_domain.setter
    def health_check_domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "health_check_domain", value)

    @property
    @pulumi.getter(name="healthCheckHttpCode")
    def health_check_http_code(self) -> Optional[pulumi.Input[str]]:
        """
        Regular health check HTTP status code. Multiple codes are segmented by “,”. It is required when `health_check` is on. Default to `http_2xx`. Valid values are: `http_2xx`, `http_3xx`, `http_4xx` and `http_5xx`.
        """
        return pulumi.get(self, "health_check_http_code")

    @health_check_http_code.setter
    def health_check_http_code(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "health_check_http_code", value)

    @property
    @pulumi.getter(name="healthCheckInterval")
    def health_check_interval(self) -> Optional[pulumi.Input[int]]:
        """
        Time interval of health checks. It is required when `health_check` is on. Valid value range: [1-50] in seconds. Default to 2.
        """
        return pulumi.get(self, "health_check_interval")

    @health_check_interval.setter
    def health_check_interval(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "health_check_interval", value)

    @property
    @pulumi.getter(name="healthCheckTimeout")
    def health_check_timeout(self) -> Optional[pulumi.Input[int]]:
        """
        Maximum timeout of each health check response. It is required when `health_check` is on. Valid value range: [1-300] in seconds. Default to 5. Note: If `health_check_timeout` < `health_check_interval`, its will be replaced by `health_check_interval`.
        """
        return pulumi.get(self, "health_check_timeout")

    @health_check_timeout.setter
    def health_check_timeout(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "health_check_timeout", value)

    @property
    @pulumi.getter(name="healthCheckUri")
    def health_check_uri(self) -> Optional[pulumi.Input[str]]:
        """
        URI used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and it must start with /. Only characters such as letters, digits, ‘-’, ‘/’, ‘.’, ‘%’, ‘?’, ‘#’ and ‘&’ are allowed.
        """
        return pulumi.get(self, "health_check_uri")

    @health_check_uri.setter
    def health_check_uri(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "health_check_uri", value)

    @property
    @pulumi.getter(name="healthyThreshold")
    def healthy_threshold(self) -> Optional[pulumi.Input[int]]:
        """
        Threshold determining the result of the health check is success. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
        """
        return pulumi.get(self, "healthy_threshold")

    @healthy_threshold.setter
    def healthy_threshold(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "healthy_threshold", value)

    @property
    @pulumi.getter(name="listenerSync")
    def listener_sync(self) -> Optional[pulumi.Input[str]]:
        """
        Indicates whether a forwarding rule inherits the settings of a health check , session persistence, and scheduling algorithm from a listener. Default to on.
        """
        return pulumi.get(self, "listener_sync")

    @listener_sync.setter
    def listener_sync(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "listener_sync", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the forwarding rule. Our plugin provides a default name: "tf-slb-rule".
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def scheduler(self) -> Optional[pulumi.Input[str]]:
        """
        Scheduling algorithm, Valid values are `wrr`, `rr` and `wlc`. Default to "wrr". This parameter is required and takes effect only when ListenerSync is set to off.
        """
        return pulumi.get(self, "scheduler")

    @scheduler.setter
    def scheduler(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "scheduler", value)

    @property
    @pulumi.getter(name="stickySession")
    def sticky_session(self) -> Optional[pulumi.Input[str]]:
        """
        Whether to enable session persistence, Valid values are `on` and `off`. Default to `off`. This parameter is required and takes effect only when ListenerSync is set to off.
        """
        return pulumi.get(self, "sticky_session")

    @sticky_session.setter
    def sticky_session(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sticky_session", value)

    @property
    @pulumi.getter(name="stickySessionType")
    def sticky_session_type(self) -> Optional[pulumi.Input[str]]:
        """
        Mode for handling the cookie. If `sticky_session` is "on", it is mandatory. Otherwise, it will be ignored. Valid values are `insert` and `server`. `insert` means it is inserted from Server Load Balancer; `server` means the Server Load Balancer learns from the backend server.
        """
        return pulumi.get(self, "sticky_session_type")

    @sticky_session_type.setter
    def sticky_session_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sticky_session_type", value)

    @property
    @pulumi.getter(name="unhealthyThreshold")
    def unhealthy_threshold(self) -> Optional[pulumi.Input[int]]:
        """
        Threshold determining the result of the health check is fail. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
        """
        return pulumi.get(self, "unhealthy_threshold")

    @unhealthy_threshold.setter
    def unhealthy_threshold(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "unhealthy_threshold", value)

    @property
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[str]]:
        """
        URL of the forwarding rule. It must be 2-80 characters in length. Only letters a-z, numbers 0-9,
        and characters '-' '/' '?' '%' '#' and '&' are allowed. URLs must be started with the character '/', but cannot be '/' alone.
        """
        return pulumi.get(self, "url")

    @url.setter
    def url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "url", value)
@pulumi.input_type
class _RuleState:
def __init__(__self__, *,
             cookie: Optional[pulumi.Input[str]] = None,
             cookie_timeout: Optional[pulumi.Input[int]] = None,
             delete_protection_validation: Optional[pulumi.Input[bool]] = None,
             domain: Optional[pulumi.Input[str]] = None,
             frontend_port: Optional[pulumi.Input[int]] = None,
             health_check: Optional[pulumi.Input[str]] = None,
             health_check_connect_port: Optional[pulumi.Input[int]] = None,
             health_check_domain: Optional[pulumi.Input[str]] = None,
             health_check_http_code: Optional[pulumi.Input[str]] = None,
             health_check_interval: Optional[pulumi.Input[int]] = None,
             health_check_timeout: Optional[pulumi.Input[int]] = None,
             health_check_uri: Optional[pulumi.Input[str]] = None,
             healthy_threshold: Optional[pulumi.Input[int]] = None,
             listener_sync: Optional[pulumi.Input[str]] = None,
             load_balancer_id: Optional[pulumi.Input[str]] = None,
             name: Optional[pulumi.Input[str]] = None,
             scheduler: Optional[pulumi.Input[str]] = None,
             server_group_id: Optional[pulumi.Input[str]] = None,
             sticky_session: Optional[pulumi.Input[str]] = None,
             sticky_session_type: Optional[pulumi.Input[str]] = None,
             unhealthy_threshold: Optional[pulumi.Input[int]] = None,
             url: Optional[pulumi.Input[str]] = None):
    """
    Input properties used for looking up and filtering Rule resources.
    :param pulumi.Input[str] cookie: The cookie configured on the server. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "server". Otherwise, it will be ignored. Valid value:String in line with RFC 2965, with length being 1- 200. It only contains characters such as ASCII codes, English letters and digits instead of the comma, semicolon or spacing, and it cannot start with $.
    :param pulumi.Input[int] cookie_timeout: Cookie timeout. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "insert". Otherwise, it will be ignored. Valid value range: [1-86400] in seconds.
    :param pulumi.Input[bool] delete_protection_validation: Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
    :param pulumi.Input[str] domain: Domain name of the forwarding rule. It can contain letters a-z, numbers 0-9, hyphens (-), and periods (.),
           and wildcard characters. The following two domain name formats are supported:
           - Standard domain name: www.test.com
           - Wildcard domain name: *.test.com. wildcard (*) must be the first character in the format of (*.)
    :param pulumi.Input[int] frontend_port: The listener frontend port which is used to launch the new forwarding rule. Valid range: [1-65535].
    :param pulumi.Input[str] health_check: Whether to enable health check. Valid values are`on` and `off`. TCP and UDP listener's HealthCheck is always on, so it will be ignore when launching TCP or UDP listener. This parameter is required and takes effect only when ListenerSync is set to off.
    :param pulumi.Input[int] health_check_connect_port: Port used for health check. Valid value range: [1-65535]. Default to "None" means the backend server port is used.
    :param pulumi.Input[str] health_check_domain: Domain name used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and only characters such as letters, digits, ‘-‘ and ‘.’ are allowed. When it is not set or empty, Server Load Balancer uses the private network IP address of each backend server as Domain used for health check.
    :param pulumi.Input[str] health_check_http_code: Regular health check HTTP status code. Multiple codes are segmented by “,”. It is required when `health_check` is on. Default to `http_2xx`. Valid values are: `http_2xx`, `http_3xx`, `http_4xx` and `http_5xx`.
    :param pulumi.Input[int] health_check_interval: Time interval of health checks. It is required when `health_check` is on. Valid value range: [1-50] in seconds. Default to 2.
    :param pulumi.Input[int] health_check_timeout: Maximum timeout of each health check response. It is required when `health_check` is on. Valid value range: [1-300] in seconds. Default to 5. Note: If `health_check_timeout` < `health_check_interval`, its will be replaced by `health_check_interval`.
    :param pulumi.Input[str] health_check_uri: URI used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and it must start with /. Only characters such as letters, digits, ‘-’, ‘/’, ‘.’, ‘%’, ‘?’, #’ and ‘&’ are allowed.
    :param pulumi.Input[int] healthy_threshold: Threshold determining the result of the health check is success. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
    :param pulumi.Input[str] listener_sync: Indicates whether a forwarding rule inherits the settings of a health check , session persistence, and scheduling algorithm from a listener. Default to on.
    :param pulumi.Input[str] load_balancer_id: The Load Balancer ID which is used to launch the new forwarding rule.
    :param pulumi.Input[str] name: Name of the forwarding rule. Our plugin provides a default name: "tf-slb-rule".
    :param pulumi.Input[str] scheduler: Scheduling algorithm, Valid values are `wrr`, `rr` and `wlc`. Default to "wrr". This parameter is required and takes effect only when ListenerSync is set to off.
    :param pulumi.Input[str] server_group_id: ID of a virtual server group that will be forwarded.
    :param pulumi.Input[str] sticky_session: Whether to enable session persistence, Valid values are `on` and `off`. Default to `off`. This parameter is required and takes effect only when ListenerSync is set to off.
    :param pulumi.Input[str] sticky_session_type: Mode for handling the cookie. If `sticky_session` is "on", it is mandatory. Otherwise, it will be ignored. Valid values are `insert` and `server`. `insert` means it is inserted from Server Load Balancer; `server` means the Server Load Balancer learns from the backend server.
    :param pulumi.Input[int] unhealthy_threshold: Threshold determining the result of the health check is fail. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
    :param pulumi.Input[str] url: Domain of the forwarding rule. It must be 2-80 characters in length. Only letters a-z, numbers 0-9,
           and characters '-' '/' '?' '%' '#' and '&' are allowed. URLs must be started with the character '/', but cannot be '/' alone.
    """
    # Every state field is optional; record only the ones the caller supplied,
    # preserving the provider-schema attribute names as the storage keys.
    candidate_fields = {
        "cookie": cookie,
        "cookie_timeout": cookie_timeout,
        "delete_protection_validation": delete_protection_validation,
        "domain": domain,
        "frontend_port": frontend_port,
        "health_check": health_check,
        "health_check_connect_port": health_check_connect_port,
        "health_check_domain": health_check_domain,
        "health_check_http_code": health_check_http_code,
        "health_check_interval": health_check_interval,
        "health_check_timeout": health_check_timeout,
        "health_check_uri": health_check_uri,
        "healthy_threshold": healthy_threshold,
        "listener_sync": listener_sync,
        "load_balancer_id": load_balancer_id,
        "name": name,
        "scheduler": scheduler,
        "server_group_id": server_group_id,
        "sticky_session": sticky_session,
        "sticky_session_type": sticky_session_type,
        "unhealthy_threshold": unhealthy_threshold,
        "url": url,
    }
    for field_name, field_value in candidate_fields.items():
        if field_value is not None:
            pulumi.set(__self__, field_name, field_value)
@property
@pulumi.getter
def cookie(self) -> Optional[pulumi.Input[str]]:
"""
The cookie configured on the server. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "server". Otherwise, it will be ignored. Valid value:String in line with RFC 2965, with length being 1- 200. It only contains characters such as ASCII codes, English letters and digits instead of the comma, semicolon or spacing, and it cannot start with $.
"""
return pulumi.get(self, "cookie")
@cookie.setter
def cookie(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cookie", value)
@property
@pulumi.getter(name="cookieTimeout")
def cookie_timeout(self) -> Optional[pulumi.Input[int]]:
"""
Cookie timeout. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "insert". Otherwise, it will be ignored. Valid value range: [1-86400] in seconds.
"""
return pulumi.get(self, "cookie_timeout")
@cookie_timeout.setter
def cookie_timeout(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "cookie_timeout", value)
@property
@pulumi.getter(name="deleteProtectionValidation")
def delete_protection_validation(self) -> Optional[pulumi.Input[bool]]:
"""
Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
"""
return pulumi.get(self, "delete_protection_validation")
@delete_protection_validation.setter
def delete_protection_validation(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "delete_protection_validation", value)
@property
@pulumi.getter
def domain(self) -> Optional[pulumi.Input[str]]:
"""
Domain name of the forwarding rule. It can contain letters a-z, numbers 0-9, hyphens (-), and periods (.),
and wildcard characters. The following two domain name formats are supported:
- Standard domain name: www.test.com
- Wildcard domain name: *.test.com. wildcard (*) must be the first character in the format of (*.)
"""
return pulumi.get(self, "domain")
@domain.setter
def domain(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "domain", value)
@property
@pulumi.getter(name="frontendPort")
def frontend_port(self) -> Optional[pulumi.Input[int]]:
"""
The listener frontend port which is used to launch the new forwarding rule. Valid range: [1-65535].
"""
return pulumi.get(self, "frontend_port")
@frontend_port.setter
def frontend_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "frontend_port", value)
@property
@pulumi.getter(name="healthCheck")
def health_check(self) -> Optional[pulumi.Input[str]]:
"""
Whether to enable health check. Valid values are`on` and `off`. TCP and UDP listener's HealthCheck is always on, so it will be ignore when launching TCP or UDP listener. This parameter is required and takes effect only when ListenerSync is set to off.
"""
return pulumi.get(self, "health_check")
@health_check.setter
def health_check(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "health_check", value)
@property
@pulumi.getter(name="healthCheckConnectPort")
def health_check_connect_port(self) -> Optional[pulumi.Input[int]]:
"""
Port used for health check. Valid value range: [1-65535]. Default to "None" means the backend server port is used.
"""
return pulumi.get(self, "health_check_connect_port")
@health_check_connect_port.setter
def health_check_connect_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "health_check_connect_port", value)
@property
@pulumi.getter(name="healthCheckDomain")
def health_check_domain(self) -> Optional[pulumi.Input[str]]:
"""
Domain name used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and only characters such as letters, digits, ‘-‘ and ‘.’ are allowed. When it is not set or empty, Server Load Balancer uses the private network IP address of each backend server as Domain used for health check.
"""
return pulumi.get(self, "health_check_domain")
@health_check_domain.setter
def health_check_domain(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "health_check_domain", value)
@property
@pulumi.getter(name="healthCheckHttpCode")
def health_check_http_code(self) -> Optional[pulumi.Input[str]]:
"""
Regular health check HTTP status code. Multiple codes are segmented by “,”. It is required when `health_check` is on. Default to `http_2xx`. Valid values are: `http_2xx`, `http_3xx`, `http_4xx` and `http_5xx`.
"""
return pulumi.get(self, "health_check_http_code")
@health_check_http_code.setter
def health_check_http_code(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "health_check_http_code", value)
@property
@pulumi.getter(name="healthCheckInterval")
def health_check_interval(self) -> Optional[pulumi.Input[int]]:
"""
Time interval of health checks. It is required when `health_check` is on. Valid value range: [1-50] in seconds. Default to 2.
"""
return pulumi.get(self, "health_check_interval")
@health_check_interval.setter
def health_check_interval(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "health_check_interval", value)
@property
@pulumi.getter(name="healthCheckTimeout")
def health_check_timeout(self) -> Optional[pulumi.Input[int]]:
"""
Maximum timeout of each health check response. It is required when `health_check` is on. Valid value range: [1-300] in seconds. Default to 5. Note: If `health_check_timeout` < `health_check_interval`, its will be replaced by `health_check_interval`.
"""
return pulumi.get(self, "health_check_timeout")
@health_check_timeout.setter
def health_check_timeout(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "health_check_timeout", value)
@property
@pulumi.getter(name="healthCheckUri")
def health_check_uri(self) -> Optional[pulumi.Input[str]]:
"""
URI used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and it must start with /. Only characters such as letters, digits, ‘-’, ‘/’, ‘.’, ‘%’, ‘?’, #’ and ‘&’ are allowed.
"""
return pulumi.get(self, "health_check_uri")
@health_check_uri.setter
def health_check_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "health_check_uri", value)
@property
@pulumi.getter(name="healthyThreshold")
def healthy_threshold(self) -> Optional[pulumi.Input[int]]:
"""
Threshold determining the result of the health check is success. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
"""
return pulumi.get(self, "healthy_threshold")
@healthy_threshold.setter
def healthy_threshold(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "healthy_threshold", value)
@property
@pulumi.getter(name="listenerSync")
def listener_sync(self) -> Optional[pulumi.Input[str]]:
"""
Indicates whether a forwarding rule inherits the settings of a health check , session persistence, and scheduling algorithm from a listener. Default to on.
"""
return pulumi.get(self, "listener_sync")
@listener_sync.setter
def listener_sync(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "listener_sync", value)
@property
@pulumi.getter(name="loadBalancerId")
def load_balancer_id(self) -> Optional[pulumi.Input[str]]:
"""
The Load Balancer ID which is used to launch the new forwarding rule.
"""
return pulumi.get(self, "load_balancer_id")
@load_balancer_id.setter
def load_balancer_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "load_balancer_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the forwarding rule. Our plugin provides a default name: "tf-slb-rule".
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def scheduler(self) -> Optional[pulumi.Input[str]]:
"""
Scheduling algorithm, Valid values are `wrr`, `rr` and `wlc`. Default to "wrr". This parameter is required and takes effect only when ListenerSync is set to off.
"""
return pulumi.get(self, "scheduler")
@scheduler.setter
def scheduler(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "scheduler", value)
@property
@pulumi.getter(name="serverGroupId")
def server_group_id(self) -> Optional[pulumi.Input[str]]:
"""
ID of a virtual server group that will be forwarded.
"""
return pulumi.get(self, "server_group_id")
@server_group_id.setter
def server_group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "server_group_id", value)
@property
@pulumi.getter(name="stickySession")
def sticky_session(self) -> Optional[pulumi.Input[str]]:
"""
Whether to enable session persistence, Valid values are `on` and `off`. Default to `off`. This parameter is required and takes effect only when ListenerSync is set to off.
"""
return pulumi.get(self, "sticky_session")
@sticky_session.setter
def sticky_session(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sticky_session", value)
@property
@pulumi.getter(name="stickySessionType")
def sticky_session_type(self) -> Optional[pulumi.Input[str]]:
"""
Mode for handling the cookie. If `sticky_session` is "on", it is mandatory. Otherwise, it will be ignored. Valid values are `insert` and `server`. `insert` means it is inserted from Server Load Balancer; `server` means the Server Load Balancer learns from the backend server.
"""
return pulumi.get(self, "sticky_session_type")
@sticky_session_type.setter
def sticky_session_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sticky_session_type", value)
@property
@pulumi.getter(name="unhealthyThreshold")
def unhealthy_threshold(self) -> Optional[pulumi.Input[int]]:
"""
Threshold determining the result of the health check is fail. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
"""
return pulumi.get(self, "unhealthy_threshold")
@unhealthy_threshold.setter
def unhealthy_threshold(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "unhealthy_threshold", value)
@property
@pulumi.getter
def url(self) -> Optional[pulumi.Input[str]]:
"""
Domain of the forwarding rule. It must be 2-80 characters in length. Only letters a-z, numbers 0-9,
and characters '-' '/' '?' '%' '#' and '&' are allowed. URLs must be started with the character '/', but cannot be '/' alone.
"""
return pulumi.get(self, "url")
@url.setter
def url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "url", value)
class Rule(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
cookie: Optional[pulumi.Input[str]] = None,
cookie_timeout: Optional[pulumi.Input[int]] = None,
delete_protection_validation: Optional[pulumi.Input[bool]] = None,
domain: Optional[pulumi.Input[str]] = None,
frontend_port: Optional[pulumi.Input[int]] = None,
health_check: Optional[pulumi.Input[str]] = None,
health_check_connect_port: Optional[pulumi.Input[int]] = None,
health_check_domain: Optional[pulumi.Input[str]] = None,
health_check_http_code: Optional[pulumi.Input[str]] = None,
health_check_interval: Optional[pulumi.Input[int]] = None,
health_check_timeout: Optional[pulumi.Input[int]] = None,
health_check_uri: Optional[pulumi.Input[str]] = None,
healthy_threshold: Optional[pulumi.Input[int]] = None,
listener_sync: Optional[pulumi.Input[str]] = None,
load_balancer_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
scheduler: Optional[pulumi.Input[str]] = None,
server_group_id: Optional[pulumi.Input[str]] = None,
sticky_session: Optional[pulumi.Input[str]] = None,
sticky_session_type: Optional[pulumi.Input[str]] = None,
unhealthy_threshold: Optional[pulumi.Input[int]] = None,
url: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
A forwarding rule is configured in `HTTP`/`HTTPS` listener and it used to listen a list of backend servers which in one specified virtual backend server group.
You can add forwarding rules to a listener to forward requests based on the domain names or the URL in the request.
> **NOTE:** One virtual backend server group can be attached in multiple forwarding rules.
> **NOTE:** At least one "Domain" or "Url" must be specified when creating a new rule.
> **NOTE:** Having the same 'Domain' and 'Url' rule can not be created repeatedly in the one listener.
> **NOTE:** Rule only be created in the `HTTP` or `HTTPS` listener.
> **NOTE:** Only rule's virtual server group can be modified.
## Import
Load balancer forwarding rule can be imported using the id, e.g.
```sh
$ pulumi import alicloud:slb/rule:Rule example rule-abc123456
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] cookie: The cookie configured on the server. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "server". Otherwise, it will be ignored. Valid value:String in line with RFC 2965, with length being 1- 200. It only contains characters such as ASCII codes, English letters and digits instead of the comma, semicolon or spacing, and it cannot start with $.
:param pulumi.Input[int] cookie_timeout: Cookie timeout. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "insert". Otherwise, it will be ignored. Valid value range: [1-86400] in seconds.
:param pulumi.Input[bool] delete_protection_validation: Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
:param pulumi.Input[str] domain: Domain name of the forwarding rule. It can contain letters a-z, numbers 0-9, hyphens (-), and periods (.),
and wildcard characters. The following two domain name formats are supported:
- Standard domain name: www.test.com
- Wildcard domain name: *.test.com. wildcard (*) must be the first character in the format of (*.)
:param pulumi.Input[int] frontend_port: The listener frontend port which is used to launch the new forwarding rule. Valid range: [1-65535].
:param pulumi.Input[str] health_check: Whether to enable health check. Valid values are`on` and `off`. TCP and UDP listener's HealthCheck is always on, so it will be ignore when launching TCP or UDP listener. This parameter is required and takes effect only when ListenerSync is set to off.
:param pulumi.Input[int] health_check_connect_port: Port used for health check. Valid value range: [1-65535]. Default to "None" means the backend server port is used.
:param pulumi.Input[str] health_check_domain: Domain name used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and only characters such as letters, digits, ‘-‘ and ‘.’ are allowed. When it is not set or empty, Server Load Balancer uses the private network IP address of each backend server as Domain used for health check.
:param pulumi.Input[str] health_check_http_code: Regular health check HTTP status code. Multiple codes are segmented by “,”. It is required when `health_check` is on. Default to `http_2xx`. Valid values are: `http_2xx`, `http_3xx`, `http_4xx` and `http_5xx`.
:param pulumi.Input[int] health_check_interval: Time interval of health checks. It is required when `health_check` is on. Valid value range: [1-50] in seconds. Default to 2.
:param pulumi.Input[int] health_check_timeout: Maximum timeout of each health check response. It is required when `health_check` is on. Valid value range: [1-300] in seconds. Default to 5. Note: If `health_check_timeout` < `health_check_interval`, its will be replaced by `health_check_interval`.
:param pulumi.Input[str] health_check_uri: URI used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and it must start with /. Only characters such as letters, digits, ‘-’, ‘/’, ‘.’, ‘%’, ‘?’, #’ and ‘&’ are allowed.
:param pulumi.Input[int] healthy_threshold: Threshold determining the result of the health check is success. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
:param pulumi.Input[str] listener_sync: Indicates whether a forwarding rule inherits the settings of a health check , session persistence, and scheduling algorithm from a listener. Default to on.
:param pulumi.Input[str] load_balancer_id: The Load Balancer ID which is used to launch the new forwarding rule.
:param pulumi.Input[str] name: Name of the forwarding rule. Our plugin provides a default name: "tf-slb-rule".
:param pulumi.Input[str] scheduler: Scheduling algorithm, Valid values are `wrr`, `rr` and `wlc`. Default to "wrr". This parameter is required and takes effect only when ListenerSync is set to off.
:param pulumi.Input[str] server_group_id: ID of a virtual server group that will be forwarded.
:param pulumi.Input[str] sticky_session: Whether to enable session persistence, Valid values are `on` and `off`. Default to `off`. This parameter is required and takes effect only when ListenerSync is set to off.
:param pulumi.Input[str] sticky_session_type: Mode for handling the cookie. If `sticky_session` is "on", it is mandatory. Otherwise, it will be ignored. Valid values are `insert` and `server`. `insert` means it is inserted from Server Load Balancer; `server` means the Server Load Balancer learns from the backend server.
:param pulumi.Input[int] unhealthy_threshold: Threshold determining the result of the health check is fail. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
:param pulumi.Input[str] url: Domain of the forwarding rule. It must be 2-80 characters in length. Only letters a-z, numbers 0-9,
and characters '-' '/' '?' '%' '#' and '&' are allowed. URLs must be started with the character '/', but cannot be '/' alone.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: RuleArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
A forwarding rule is configured in `HTTP`/`HTTPS` listener and it used to listen a list of backend servers which in one specified virtual backend server group.
You can add forwarding rules to a listener to forward requests based on the domain names or the URL in the request.
> **NOTE:** One virtual backend server group can be attached in multiple forwarding rules.
> **NOTE:** At least one "Domain" or "Url" must be specified when creating a new rule.
> **NOTE:** Having the same 'Domain' and 'Url' rule can not be created repeatedly in the one listener.
> **NOTE:** Rule only be created in the `HTTP` or `HTTPS` listener.
> **NOTE:** Only rule's virtual server group can be modified.
## Import
Load balancer forwarding rule can be imported using the id, e.g.
```sh
$ pulumi import alicloud:slb/rule:Rule example rule-abc123456
```
:param str resource_name: The name of the resource.
:param RuleArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(RuleArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
cookie: Optional[pulumi.Input[str]] = None,
cookie_timeout: Optional[pulumi.Input[int]] = None,
delete_protection_validation: Optional[pulumi.Input[bool]] = None,
domain: Optional[pulumi.Input[str]] = None,
frontend_port: Optional[pulumi.Input[int]] = None,
health_check: Optional[pulumi.Input[str]] = None,
health_check_connect_port: Optional[pulumi.Input[int]] = None,
health_check_domain: Optional[pulumi.Input[str]] = None,
health_check_http_code: Optional[pulumi.Input[str]] = None,
health_check_interval: Optional[pulumi.Input[int]] = None,
health_check_timeout: Optional[pulumi.Input[int]] = None,
health_check_uri: Optional[pulumi.Input[str]] = None,
healthy_threshold: Optional[pulumi.Input[int]] = None,
listener_sync: Optional[pulumi.Input[str]] = None,
load_balancer_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
scheduler: Optional[pulumi.Input[str]] = None,
server_group_id: Optional[pulumi.Input[str]] = None,
sticky_session: Optional[pulumi.Input[str]] = None,
sticky_session_type: Optional[pulumi.Input[str]] = None,
unhealthy_threshold: Optional[pulumi.Input[int]] = None,
url: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RuleArgs.__new__(RuleArgs)
__props__.__dict__["cookie"] = cookie
__props__.__dict__["cookie_timeout"] = cookie_timeout
__props__.__dict__["delete_protection_validation"] = delete_protection_validation
__props__.__dict__["domain"] = domain
if frontend_port is None and not opts.urn:
raise TypeError("Missing required property 'frontend_port'")
__props__.__dict__["frontend_port"] = frontend_port
__props__.__dict__["health_check"] = health_check
__props__.__dict__["health_check_connect_port"] = health_check_connect_port
__props__.__dict__["health_check_domain"] = health_check_domain
__props__.__dict__["health_check_http_code"] = health_check_http_code
__props__.__dict__["health_check_interval"] = health_check_interval
__props__.__dict__["health_check_timeout"] = health_check_timeout
__props__.__dict__["health_check_uri"] = health_check_uri
__props__.__dict__["healthy_threshold"] = healthy_threshold
__props__.__dict__["listener_sync"] = listener_sync
if load_balancer_id is None and not opts.urn:
raise TypeError("Missing required property 'load_balancer_id'")
__props__.__dict__["load_balancer_id"] = load_balancer_id
__props__.__dict__["name"] = name
__props__.__dict__["scheduler"] = scheduler
if server_group_id is None and not opts.urn:
raise TypeError("Missing required property 'server_group_id'")
__props__.__dict__["server_group_id"] = server_group_id
__props__.__dict__["sticky_session"] = sticky_session
__props__.__dict__["sticky_session_type"] = sticky_session_type
__props__.__dict__["unhealthy_threshold"] = unhealthy_threshold
__props__.__dict__["url"] = url
super(Rule, __self__).__init__(
'alicloud:slb/rule:Rule',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
cookie: Optional[pulumi.Input[str]] = None,
cookie_timeout: Optional[pulumi.Input[int]] = None,
delete_protection_validation: Optional[pulumi.Input[bool]] = None,
domain: Optional[pulumi.Input[str]] = None,
frontend_port: Optional[pulumi.Input[int]] = None,
health_check: Optional[pulumi.Input[str]] = None,
health_check_connect_port: Optional[pulumi.Input[int]] = None,
health_check_domain: Optional[pulumi.Input[str]] = None,
health_check_http_code: Optional[pulumi.Input[str]] = None,
health_check_interval: Optional[pulumi.Input[int]] = None,
health_check_timeout: Optional[pulumi.Input[int]] = None,
health_check_uri: Optional[pulumi.Input[str]] = None,
healthy_threshold: Optional[pulumi.Input[int]] = None,
listener_sync: Optional[pulumi.Input[str]] = None,
load_balancer_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
scheduler: Optional[pulumi.Input[str]] = None,
server_group_id: Optional[pulumi.Input[str]] = None,
sticky_session: Optional[pulumi.Input[str]] = None,
sticky_session_type: Optional[pulumi.Input[str]] = None,
unhealthy_threshold: Optional[pulumi.Input[int]] = None,
url: Optional[pulumi.Input[str]] = None) -> 'Rule':
"""
Get an existing Rule resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] cookie: The cookie configured on the server. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "server". Otherwise, it will be ignored. Valid value:String in line with RFC 2965, with length being 1- 200. It only contains characters such as ASCII codes, English letters and digits instead of the comma, semicolon or spacing, and it cannot start with $.
:param pulumi.Input[int] cookie_timeout: Cookie timeout. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "insert". Otherwise, it will be ignored. Valid value range: [1-86400] in seconds.
:param pulumi.Input[bool] delete_protection_validation: Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
:param pulumi.Input[str] domain: Domain name of the forwarding rule. It can contain letters a-z, numbers 0-9, hyphens (-), and periods (.),
and wildcard characters. The following two domain name formats are supported:
- Standard domain name: www.test.com
- Wildcard domain name: *.test.com. wildcard (*) must be the first character in the format of (*.)
:param pulumi.Input[int] frontend_port: The listener frontend port which is used to launch the new forwarding rule. Valid range: [1-65535].
:param pulumi.Input[str] health_check: Whether to enable health check. Valid values are`on` and `off`. TCP and UDP listener's HealthCheck is always on, so it will be ignore when launching TCP or UDP listener. This parameter is required and takes effect only when ListenerSync is set to off.
:param pulumi.Input[int] health_check_connect_port: Port used for health check. Valid value range: [1-65535]. Default to "None" means the backend server port is used.
:param pulumi.Input[str] health_check_domain: Domain name used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and only characters such as letters, digits, ‘-‘ and ‘.’ are allowed. When it is not set or empty, Server Load Balancer uses the private network IP address of each backend server as Domain used for health check.
:param pulumi.Input[str] health_check_http_code: Regular health check HTTP status code. Multiple codes are segmented by “,”. It is required when `health_check` is on. Default to `http_2xx`. Valid values are: `http_2xx`, `http_3xx`, `http_4xx` and `http_5xx`.
:param pulumi.Input[int] health_check_interval: Time interval of health checks. It is required when `health_check` is on. Valid value range: [1-50] in seconds. Default to 2.
:param pulumi.Input[int] health_check_timeout: Maximum timeout of each health check response. It is required when `health_check` is on. Valid value range: [1-300] in seconds. Default to 5. Note: If `health_check_timeout` < `health_check_interval`, its will be replaced by `health_check_interval`.
:param pulumi.Input[str] health_check_uri: URI used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and it must start with /. Only characters such as letters, digits, ‘-’, ‘/’, ‘.’, ‘%’, ‘?’, #’ and ‘&’ are allowed.
:param pulumi.Input[int] healthy_threshold: Threshold determining the result of the health check is success. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
:param pulumi.Input[str] listener_sync: Indicates whether a forwarding rule inherits the settings of a health check , session persistence, and scheduling algorithm from a listener. Default to on.
:param pulumi.Input[str] load_balancer_id: The Load Balancer ID which is used to launch the new forwarding rule.
:param pulumi.Input[str] name: Name of the forwarding rule. Our plugin provides a default name: "tf-slb-rule".
:param pulumi.Input[str] scheduler: Scheduling algorithm, Valid values are `wrr`, `rr` and `wlc`. Default to "wrr". This parameter is required and takes effect only when ListenerSync is set to off.
:param pulumi.Input[str] server_group_id: ID of a virtual server group that will be forwarded.
:param pulumi.Input[str] sticky_session: Whether to enable session persistence, Valid values are `on` and `off`. Default to `off`. This parameter is required and takes effect only when ListenerSync is set to off.
:param pulumi.Input[str] sticky_session_type: Mode for handling the cookie. If `sticky_session` is "on", it is mandatory. Otherwise, it will be ignored. Valid values are `insert` and `server`. `insert` means it is inserted from Server Load Balancer; `server` means the Server Load Balancer learns from the backend server.
:param pulumi.Input[int] unhealthy_threshold: Threshold determining the result of the health check is fail. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
:param pulumi.Input[str] url: Domain of the forwarding rule. It must be 2-80 characters in length. Only letters a-z, numbers 0-9,
and characters '-' '/' '?' '%' '#' and '&' are allowed. URLs must be started with the character '/', but cannot be '/' alone.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RuleState.__new__(_RuleState)
__props__.__dict__["cookie"] = cookie
__props__.__dict__["cookie_timeout"] = cookie_timeout
__props__.__dict__["delete_protection_validation"] = delete_protection_validation
__props__.__dict__["domain"] = domain
__props__.__dict__["frontend_port"] = frontend_port
__props__.__dict__["health_check"] = health_check
__props__.__dict__["health_check_connect_port"] = health_check_connect_port
__props__.__dict__["health_check_domain"] = health_check_domain
__props__.__dict__["health_check_http_code"] = health_check_http_code
__props__.__dict__["health_check_interval"] = health_check_interval
__props__.__dict__["health_check_timeout"] = health_check_timeout
__props__.__dict__["health_check_uri"] = health_check_uri
__props__.__dict__["healthy_threshold"] = healthy_threshold
__props__.__dict__["listener_sync"] = listener_sync
__props__.__dict__["load_balancer_id"] = load_balancer_id
__props__.__dict__["name"] = name
__props__.__dict__["scheduler"] = scheduler
__props__.__dict__["server_group_id"] = server_group_id
__props__.__dict__["sticky_session"] = sticky_session
__props__.__dict__["sticky_session_type"] = sticky_session_type
__props__.__dict__["unhealthy_threshold"] = unhealthy_threshold
__props__.__dict__["url"] = url
return Rule(resource_name, opts=opts, __props__=__props__)
    # --- Generated output-property accessors for the SLB forwarding Rule resource.
    # Each property simply resolves the corresponding resource output via pulumi.get.

    @property
    @pulumi.getter
    def cookie(self) -> pulumi.Output[Optional[str]]:
        """
        The cookie configured on the server. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "server". Otherwise, it will be ignored. Valid value: String in line with RFC 2965, with length being 1-200. It only contains characters such as ASCII codes, English letters and digits instead of the comma, semicolon or spacing, and it cannot start with $.
        """
        return pulumi.get(self, "cookie")

    @property
    @pulumi.getter(name="cookieTimeout")
    def cookie_timeout(self) -> pulumi.Output[Optional[int]]:
        """
        Cookie timeout. It is mandatory when `sticky_session` is "on" and `sticky_session_type` is "insert". Otherwise, it will be ignored. Valid value range: [1-86400] in seconds.
        """
        return pulumi.get(self, "cookie_timeout")

    @property
    @pulumi.getter(name="deleteProtectionValidation")
    def delete_protection_validation(self) -> pulumi.Output[Optional[bool]]:
        """
        Checking DeleteProtection of SLB instance before deleting. If true, this resource will not be deleted when its SLB instance enabled DeleteProtection. Default to false.
        """
        return pulumi.get(self, "delete_protection_validation")

    @property
    @pulumi.getter
    def domain(self) -> pulumi.Output[Optional[str]]:
        """
        Domain name of the forwarding rule. It can contain letters a-z, numbers 0-9, hyphens (-), and periods (.),
        and wildcard characters. The following two domain name formats are supported:
        - Standard domain name: www.test.com
        - Wildcard domain name: *.test.com. wildcard (*) must be the first character in the format of (*.)
        """
        return pulumi.get(self, "domain")

    @property
    @pulumi.getter(name="frontendPort")
    def frontend_port(self) -> pulumi.Output[int]:
        """
        The listener frontend port which is used to launch the new forwarding rule. Valid range: [1-65535].
        """
        return pulumi.get(self, "frontend_port")

    @property
    @pulumi.getter(name="healthCheck")
    def health_check(self) -> pulumi.Output[Optional[str]]:
        """
        Whether to enable health check. Valid values are `on` and `off`. TCP and UDP listener's HealthCheck is always on, so it will be ignored when launching TCP or UDP listener. This parameter is required and takes effect only when ListenerSync is set to off.
        """
        return pulumi.get(self, "health_check")

    @property
    @pulumi.getter(name="healthCheckConnectPort")
    def health_check_connect_port(self) -> pulumi.Output[int]:
        """
        Port used for health check. Valid value range: [1-65535]. Default to "None" means the backend server port is used.
        """
        return pulumi.get(self, "health_check_connect_port")

    @property
    @pulumi.getter(name="healthCheckDomain")
    def health_check_domain(self) -> pulumi.Output[Optional[str]]:
        """
        Domain name used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and only characters such as letters, digits, '-' and '.' are allowed. When it is not set or empty, Server Load Balancer uses the private network IP address of each backend server as Domain used for health check.
        """
        return pulumi.get(self, "health_check_domain")

    @property
    @pulumi.getter(name="healthCheckHttpCode")
    def health_check_http_code(self) -> pulumi.Output[Optional[str]]:
        """
        Regular health check HTTP status code. Multiple codes are segmented by ",". It is required when `health_check` is on. Default to `http_2xx`. Valid values are: `http_2xx`, `http_3xx`, `http_4xx` and `http_5xx`.
        """
        return pulumi.get(self, "health_check_http_code")

    @property
    @pulumi.getter(name="healthCheckInterval")
    def health_check_interval(self) -> pulumi.Output[Optional[int]]:
        """
        Time interval of health checks. It is required when `health_check` is on. Valid value range: [1-50] in seconds. Default to 2.
        """
        return pulumi.get(self, "health_check_interval")

    @property
    @pulumi.getter(name="healthCheckTimeout")
    def health_check_timeout(self) -> pulumi.Output[Optional[int]]:
        """
        Maximum timeout of each health check response. It is required when `health_check` is on. Valid value range: [1-300] in seconds. Default to 5. Note: If `health_check_timeout` < `health_check_interval`, it will be replaced by `health_check_interval`.
        """
        return pulumi.get(self, "health_check_timeout")

    @property
    @pulumi.getter(name="healthCheckUri")
    def health_check_uri(self) -> pulumi.Output[Optional[str]]:
        """
        URI used for health check. When it used to launch TCP listener, `health_check_type` must be "http". Its length is limited to 1-80 and it must start with /. Only characters such as letters, digits, '-', '/', '.', '%', '?', '#' and '&' are allowed.
        """
        return pulumi.get(self, "health_check_uri")

    @property
    @pulumi.getter(name="healthyThreshold")
    def healthy_threshold(self) -> pulumi.Output[Optional[int]]:
        """
        Threshold determining the result of the health check is success. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
        """
        return pulumi.get(self, "healthy_threshold")

    @property
    @pulumi.getter(name="listenerSync")
    def listener_sync(self) -> pulumi.Output[Optional[str]]:
        """
        Indicates whether a forwarding rule inherits the settings of a health check, session persistence, and scheduling algorithm from a listener. Default to on.
        """
        return pulumi.get(self, "listener_sync")

    @property
    @pulumi.getter(name="loadBalancerId")
    def load_balancer_id(self) -> pulumi.Output[str]:
        """
        The Load Balancer ID which is used to launch the new forwarding rule.
        """
        return pulumi.get(self, "load_balancer_id")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Name of the forwarding rule. Our plugin provides a default name: "tf-slb-rule".
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def scheduler(self) -> pulumi.Output[Optional[str]]:
        """
        Scheduling algorithm. Valid values are `wrr`, `rr` and `wlc`. Default to "wrr". This parameter is required and takes effect only when ListenerSync is set to off.
        """
        return pulumi.get(self, "scheduler")

    @property
    @pulumi.getter(name="serverGroupId")
    def server_group_id(self) -> pulumi.Output[str]:
        """
        ID of a virtual server group that will be forwarded.
        """
        return pulumi.get(self, "server_group_id")

    @property
    @pulumi.getter(name="stickySession")
    def sticky_session(self) -> pulumi.Output[Optional[str]]:
        """
        Whether to enable session persistence. Valid values are `on` and `off`. Default to `off`. This parameter is required and takes effect only when ListenerSync is set to off.
        """
        return pulumi.get(self, "sticky_session")

    @property
    @pulumi.getter(name="stickySessionType")
    def sticky_session_type(self) -> pulumi.Output[Optional[str]]:
        """
        Mode for handling the cookie. If `sticky_session` is "on", it is mandatory. Otherwise, it will be ignored. Valid values are `insert` and `server`. `insert` means it is inserted from Server Load Balancer; `server` means the Server Load Balancer learns from the backend server.
        """
        return pulumi.get(self, "sticky_session_type")

    @property
    @pulumi.getter(name="unhealthyThreshold")
    def unhealthy_threshold(self) -> pulumi.Output[Optional[int]]:
        """
        Threshold determining the result of the health check is fail. It is required when `health_check` is on. Valid value range: [1-10] in seconds. Default to 3.
        """
        return pulumi.get(self, "unhealthy_threshold")

    @property
    @pulumi.getter
    def url(self) -> pulumi.Output[Optional[str]]:
        """
        Domain of the forwarding rule. It must be 2-80 characters in length. Only letters a-z, numbers 0-9,
        and characters '-' '/' '?' '%' '#' and '&' are allowed. URLs must be started with the character '/', but cannot be '/' alone.
        """
        return pulumi.get(self, "url")
| 61.905316
| 409
| 0.681139
| 9,924
| 74,534
| 4.937727
| 0.034563
| 0.08014
| 0.073283
| 0.053875
| 0.968838
| 0.961981
| 0.956145
| 0.9519
| 0.949022
| 0.939451
| 0
| 0.007572
| 0.220356
| 74,534
| 1,203
| 410
| 61.956775
| 0.835691
| 0.480667
| 0
| 0.876791
| 1
| 0
| 0.114891
| 0.028695
| 0
| 0
| 0
| 0
| 0
| 1
| 0.167622
| false
| 0.001433
| 0.007163
| 0
| 0.275072
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4f497964a17ba87394cba31ac5cd687e45701a08
| 301
|
py
|
Python
|
python/list_to_string.py
|
codevscolor/codevscolor
|
35ef9042bdc86f45ef87795c35963b75fb64d5d7
|
[
"Apache-2.0"
] | 6
|
2019-04-26T03:11:54.000Z
|
2021-05-07T21:48:29.000Z
|
python/list_to_string.py
|
akojif/codevscolor
|
56db3dffeac8f8d76ff8fcf5656770f33765941f
|
[
"Apache-2.0"
] | null | null | null |
python/list_to_string.py
|
akojif/codevscolor
|
56db3dffeac8f8d76ff8fcf5656770f33765941f
|
[
"Apache-2.0"
] | 26
|
2019-02-23T14:50:46.000Z
|
2022-02-04T23:44:24.000Z
|
def list_to_string(items, separator):
    """Join *items* into a single string using *separator*.

    Parameters:
        items: iterable of strings to join.
        separator: string placed between consecutive items.

    Returns:
        The joined string; an empty iterable yields "".
    """
    return separator.join(items)


# Both examples operate on the same data, so define the list once.
dummy_list = ["one", "two", "three", "four", "five", "six"]

# example 1: join with a single space
separator = ' '
result_string = list_to_string(dummy_list, separator)
print(result_string)

# example 2: join with a comma
separator = ','
result_string = list_to_string(dummy_list, separator)
print(result_string)
| 15.05
| 54
| 0.684385
| 40
| 301
| 4.95
| 0.4
| 0.181818
| 0.121212
| 0.151515
| 0.919192
| 0.919192
| 0.919192
| 0.919192
| 0.919192
| 0.919192
| 0
| 0.007491
| 0.112957
| 301
| 19
| 55
| 15.842105
| 0.734082
| 0.059801
| 0
| 0.75
| 0
| 0
| 0.163701
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4f84d9cf7a96955190402529480de26edd297756
| 630
|
py
|
Python
|
evaluate.py
|
zlxy9892/chatbot-retrieval-based-smn
|
65ae5391f0a01b84f998cbbec1e04eda30ddd569
|
[
"Apache-2.0"
] | 5
|
2018-09-05T02:52:17.000Z
|
2021-04-29T01:12:27.000Z
|
evaluate.py
|
zlxy9892/chatbot-retrieval-based-smn
|
65ae5391f0a01b84f998cbbec1e04eda30ddd569
|
[
"Apache-2.0"
] | 1
|
2020-05-30T13:19:46.000Z
|
2020-05-30T13:19:46.000Z
|
evaluate.py
|
zlxy9892/chatbot-retrieval-based-smn
|
65ae5391f0a01b84f998cbbec1e04eda30ddd569
|
[
"Apache-2.0"
] | null | null | null |
def ComputeR10_1(scores, labels, count=10):
    """Compute, print, and return R10@1 recall for ranked response candidates.

    For every positive label (``labels[i] == 1``) the prediction counts as
    correct when ``scores[i]`` is the maximum of its candidate window
    ``scores[i:i + count]``.

    Args:
        scores: per-candidate ranking scores, aligned with ``labels``.
        labels: 0/1 relevance labels; a 1 marks the true response of a window.
        count: number of candidates per window (10 for R10@1).

    Returns:
        The recall value that is also printed. Returns 0.0 when there is no
        positive label (previously this raised ZeroDivisionError).
    """
    total = 0
    correct = 0
    for i, label in enumerate(labels):
        if label == 1:
            total += 1
            # The true response wins its window only if it holds the top score.
            if max(scores[i:i + count]) == scores[i]:
                correct += 1
    recall = float(correct) / total if total else 0.0
    print(recall)
    return recall
def ComputeR2_1(scores, labels, count=2):
    """Compute, print, and return R2@1 recall for ranked response candidates.

    For every positive label (``labels[i] == 1``) the prediction counts as
    correct when ``scores[i]`` is the maximum of its candidate window
    ``scores[i:i + count]``.

    Args:
        scores: per-candidate ranking scores, aligned with ``labels``.
        labels: 0/1 relevance labels; a 1 marks the true response of a window.
        count: number of candidates per window (2 for R2@1).

    Returns:
        The recall value that is also printed. Returns 0.0 when there is no
        positive label (previously this raised ZeroDivisionError).
    """
    total = 0
    correct = 0
    for i, label in enumerate(labels):
        if label == 1:
            total += 1
            # The true response wins its window only if it holds the top score.
            if max(scores[i:i + count]) == scores[i]:
                correct += 1
    recall = float(correct) / total if total else 0.0
    print(recall)
    return recall
| 30
| 43
| 0.520635
| 82
| 630
| 3.97561
| 0.268293
| 0.159509
| 0.171779
| 0.110429
| 0.803681
| 0.803681
| 0.803681
| 0.803681
| 0.803681
| 0.803681
| 0
| 0.044335
| 0.355556
| 630
| 21
| 44
| 30
| 0.758621
| 0
| 0
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0
| 0
| 0.1
| 0.1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4f8ecb5d638d88e72dab5dee3cbc82eb0b76e10e
| 4,875
|
py
|
Python
|
backend/api/schedule/tests/test_schedule_invitation.py
|
patrick91/pycon
|
9d7e15f540adcf0eaceb61fdbf67206d6aef73ec
|
[
"MIT"
] | 2
|
2017-07-18T21:51:25.000Z
|
2017-12-23T11:08:39.000Z
|
backend/api/schedule/tests/test_schedule_invitation.py
|
patrick91/pycon
|
9d7e15f540adcf0eaceb61fdbf67206d6aef73ec
|
[
"MIT"
] | 23
|
2017-07-18T20:22:38.000Z
|
2018-01-05T05:45:15.000Z
|
backend/api/schedule/tests/test_schedule_invitation.py
|
patrick91/pycon
|
9d7e15f540adcf0eaceb61fdbf67206d6aef73ec
|
[
"MIT"
] | 2
|
2017-07-18T21:27:33.000Z
|
2017-07-18T22:07:03.000Z
|
import datetime
from pytest import mark
from schedule.models import ScheduleItem
pytestmark = mark.django_db
def test_fetch_an_invitation(
    submission_factory,
    graphql_client,
    user,
    schedule_item_factory,
    slot_factory,
    day_factory,
):
    """A speaker can fetch the schedule invitation of their own submission."""
    # Arrange: authenticate as the user who owns the submission.
    graphql_client.force_login(user)
    submission = submission_factory(
        speaker_id=user.id,
    )
    # A confirmed schedule item on 2020-10-10, starting 10:10, lasting 30 min.
    schedule_item_factory(
        status=ScheduleItem.STATUS.confirmed,
        speaker_invitation_notes="notes",
        submission=submission,
        type=ScheduleItem.TYPES.submission,
        conference=submission.conference,
        slot=slot_factory(
            day=day_factory(
                day=datetime.date(2020, 10, 10), conference=submission.conference
            ),
            hour=datetime.time(10, 10, 0),
            duration=30,
        ),
    )
    # Act: query the invitation by the submission's hashid.
    response = graphql_client.query(
        """query($submissionId: ID!) {
            scheduleInvitation(submissionId: $submissionId) {
                option
                notes
                dates {
                    start
                    end
                }
            }
        }""",
        variables={"submissionId": submission.hashid},
    )
    # Assert: no errors; option/notes returned and the dates span the slot.
    assert not response.get("errors")
    assert response["data"]["scheduleInvitation"] == {
        "option": "CONFIRM",
        "notes": "notes",
        "dates": [{"start": "2020-10-10T10:10:00", "end": "2020-10-10T10:40:00"}],
    }
def test_random_user_cannot_fetch_the_invite(
    submission_factory,
    graphql_client,
    user,
    schedule_item_factory,
    slot_factory,
    day_factory,
):
    """A logged-in user who is not the speaker gets None for the invitation."""
    # Arrange: log in as an unrelated user; the submission belongs to
    # speaker_id=50000, which is not this user.
    graphql_client.force_login(user)
    submission = submission_factory(
        speaker_id=50000,
    )
    schedule_item_factory(
        status=ScheduleItem.STATUS.confirmed,
        speaker_invitation_notes="notes",
        submission=submission,
        type=ScheduleItem.TYPES.submission,
        conference=submission.conference,
        slot=slot_factory(
            day=day_factory(
                day=datetime.date(2020, 10, 10), conference=submission.conference
            ),
            hour=datetime.time(10, 10, 0),
            duration=30,
        ),
    )
    # Act: query the invitation by the submission's hashid.
    response = graphql_client.query(
        """query($submissionId: ID!) {
            scheduleInvitation(submissionId: $submissionId) {
                option
                notes
                dates {
                    start
                    end
                }
            }
        }""",
        variables={"submissionId": submission.hashid},
    )
    # Assert: the query succeeds but hides the invitation from non-owners.
    assert not response.get("errors")
    assert response["data"]["scheduleInvitation"] is None
def test_staff_can_fetch_the_invite(
    submission_factory,
    graphql_client,
    admin_user,
    schedule_item_factory,
    slot_factory,
    day_factory,
):
    """An admin can fetch any invitation, even for someone else's submission."""
    # Arrange: log in as staff; the submission belongs to another speaker.
    graphql_client.force_login(admin_user)
    submission = submission_factory(
        speaker_id=50000,
    )
    schedule_item_factory(
        status=ScheduleItem.STATUS.confirmed,
        speaker_invitation_notes="notes",
        submission=submission,
        type=ScheduleItem.TYPES.submission,
        conference=submission.conference,
        slot=slot_factory(
            day=day_factory(
                day=datetime.date(2020, 10, 10), conference=submission.conference
            ),
            hour=datetime.time(10, 10, 0),
            duration=30,
        ),
    )
    # Act: query only option/notes (dates not needed for this check).
    response = graphql_client.query(
        """query($submissionId: ID!) {
            scheduleInvitation(submissionId: $submissionId) {
                option
                notes
            }
        }""",
        variables={"submissionId": submission.hashid},
    )
    # Assert: staff sees the invitation data.
    assert not response.get("errors")
    assert response["data"]["scheduleInvitation"] == {
        "option": "CONFIRM",
        "notes": "notes",
    }
def test_requires_authentication(
    submission_factory,
    graphql_client,
    schedule_item_factory,
    slot_factory,
    day_factory,
):
    """An anonymous request is rejected with a 'User not logged in' error."""
    # Arrange: build the same fixture data, but do NOT log in.
    submission = submission_factory(
        speaker_id=50000,
    )
    schedule_item_factory(
        status=ScheduleItem.STATUS.confirmed,
        speaker_invitation_notes="notes",
        submission=submission,
        type=ScheduleItem.TYPES.submission,
        conference=submission.conference,
        slot=slot_factory(
            day=day_factory(
                day=datetime.date(2020, 10, 10), conference=submission.conference
            ),
            hour=datetime.time(10, 10, 0),
            duration=30,
        ),
    )
    # Act: query the invitation without authentication.
    response = graphql_client.query(
        """query($submissionId: ID!) {
            scheduleInvitation(submissionId: $submissionId) {
                option
                notes
                dates {
                    start
                    end
                }
            }
        }""",
        variables={"submissionId": submission.hashid},
    )
    # Assert: an auth error is returned and the data field is null.
    assert response["errors"][0]["message"] == "User not logged in"
    assert response["data"]["scheduleInvitation"] is None
| 25.259067
| 82
| 0.590154
| 438
| 4,875
| 6.374429
| 0.173516
| 0.04298
| 0.054441
| 0.04298
| 0.889327
| 0.889327
| 0.872135
| 0.837034
| 0.837034
| 0.837034
| 0
| 0.030778
| 0.306872
| 4,875
| 192
| 83
| 25.390625
| 0.795502
| 0
| 0
| 0.773723
| 0
| 0
| 0.074183
| 0
| 0
| 0
| 0
| 0
| 0.058394
| 1
| 0.029197
| false
| 0
| 0.021898
| 0
| 0.051095
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4ffb0183f1a4d73a0046957acf4f4563eecd144c
| 21,935
|
py
|
Python
|
SCN.py
|
Tnorm/SCN
|
a89b829f86c16fc9359330427bdb71bd8b16349e
|
[
"MIT"
] | null | null | null |
SCN.py
|
Tnorm/SCN
|
a89b829f86c16fc9359330427bdb71bd8b16349e
|
[
"MIT"
] | null | null | null |
SCN.py
|
Tnorm/SCN
|
a89b829f86c16fc9359330427bdb71bd8b16349e
|
[
"MIT"
] | 1
|
2019-06-24T07:04:00.000Z
|
2019-06-24T07:04:00.000Z
|
#### SIMPLICIAL COMPLEX NEURAL NETWORK LEARN FUNCTIONS!
import torch
from torch.autograd import Variable
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm
class SCN(torch.nn.Module):
    """Simplicial Complex Network with a scalar function value per unit.

    Keeps `visible_num` visible units (rows of `visible_units`) and grows
    `depth` hidden units, each created as a convex combination of existing
    units using the per-layer weights in `self.L`.
    """

    def __init__(self, visible_num, input_dim, visible_units, depth, model=1):
        super(SCN, self).__init__()
        # depth = number of hidden units
        # BUGFIX: self.L was a plain Python list, so the per-layer weight
        # Parameters were never registered with the Module — they did not
        # appear in .parameters() / state_dict() and could not be optimized.
        # A ParameterList registers them (consistent with SCN_multi below).
        self.L = torch.nn.ParameterList()
        for _ in range(depth):
            self.L.append(torch.nn.Parameter(torch.ones(1, visible_num) / visible_num, requires_grad=True))
        ## visible units are defined as columns of a matrix
        ### UNCOMMENT THESE TWO LINES FOR SCN_fractal_test
        #self.visible_fs = torch.nn.Parameter(torch.randn(visible_num, 1)/5, requires_grad=True)
        #self.biases = torch.nn.Parameter(torch.randn(depth, 1)/5, requires_grad=True)
        ###
        self.visible_fs = torch.nn.Parameter(torch.zeros(visible_num, 1), requires_grad=True)
        self.biases = torch.nn.Parameter(torch.zeros(depth, 1), requires_grad=True)
        self.visible_units = visible_units
        self.depth = depth
        self.visible_num = visible_num
        self.input_dim = input_dim
        ## for one dimensional data
        self.model = model

    def forward(self, inp):
        """Run the network on a batch `inp` of shape (batch, input_dim).

        Returns (sigmoid output, list of [previous h, new hidden unit] per
        layer).
        """
        hidden_collect = []
        f = self.visible_fs.repeat(inp.size()[0], 1, 1)
        h = self.visible_units.repeat(inp.size()[0], 1, 1)
        input_weights = self.get_first_input_weights_mdl1(inp, self.visible_units)
        for i in range(self.depth):
            input_weights, indices = self.update_weights(input_weights, self.L[i])
            h_old = h.clone()
            if self.model == 1:
                f, h = self.update_h_mdl1(f, h, indices, self.L[i], i)
            elif self.model == 2:
                f, h = self.update_h_mdl2(f, h, indices, self.L[i], i)
            hidden_collect.append([h_old, h[range(h.size()[0]), indices.long(), :]])
        #out = torch.bmm(input_weights.view(inp.size()[0], 1, -1), f)
        out = torch.nn.Sigmoid()(torch.bmm(input_weights.view(inp.size()[0], 1, -1), f))
        return out, hidden_collect

    def update_weights(self, inp_weights, h_w):
        # Pick, per sample, the unit whose barycentric weight is exhausted
        # first relative to h_w (epsilon guards division by zero).
        weights_div = inp_weights / (h_w + 1e-20)
        values, indices = torch.min(weights_div, 1)
        input_weights = self.update_inp_weights_mdl1(values, indices, inp_weights, h_w)
        return input_weights, indices

    def update_inp_weights_mdl1(self, values, indices, inp_weights, h_w):
        # Move `values` worth of mass from all units onto the selected unit.
        new_weights = inp_weights - values.view(-1, 1).repeat(1, self.visible_num) * h_w
        new_weights[range(values.size()[0]), indices.long()] = values
        return new_weights

    def update_h_mdl1(self, f, h, indices, weights, i):
        # Replace the selected unit with the mixture of all units; the new
        # function value gets an additive per-layer bias.
        new_h = torch.bmm(weights.repeat(h.size()[0], 1, 1), h).view(-1, self.input_dim)
        f[range(h.size()[0]), indices.long(), :] = (torch.bmm(weights.repeat(h.size()[0], 1, 1), f.clone())
                                                    + self.biases[i]).squeeze(-1)
        h[range(h.size()[0]), indices.long(), :] = new_h
        return f, h

    def update_h_mdl2(self, f, h, indices, weights, i):
        # Variant of mdl1 with a sigmoid-squashed bias.
        new_h = torch.bmm(weights.repeat(h.size()[0], 1, 1), h).view(-1, self.input_dim)
        f[range(h.size()[0]), indices.long(), :] = \
            torch.bmm(weights.repeat(h.size()[0], 1, 1), f.clone()) + torch.nn.Sigmoid()(self.biases[i])
        h[range(h.size()[0]), indices.long(), :] = new_h
        return f, h

    def get_first_input_weights_mdl1(self, inp, visible_units):
        # Initial barycentric coordinates: prepend 1 - sum(inp) so the
        # weights add up to one over visible_num = input_dim + 1 units.
        return torch.cat((1 - torch.sum(inp, 1).view(-1, 1), inp), 1)

    def features_num(self, inp):
        size = inp.size()[1:]  # all dimensions except the batch dimension
        num_features = 1
        for s in size:
            num_features = num_features * s
        return num_features

    def initial_weights(self):
        return 1

    def project_simplex(self, v, z=1):  # sparsemax
        # Euclidean projection of v onto the simplex {w >= 0, sum(w) = z}.
        v_sorted, _ = torch.sort(v, dim=0, descending=True)
        cssv = torch.cumsum(v_sorted, dim=0) - z
        ind = torch.arange(1, 1 + len(v)).to(dtype=v.dtype)
        cond = v_sorted - cssv / ind > 0
        rho = ind.masked_select(cond)[-1]
        tau = cssv.masked_select(cond)[-1] / rho
        w = torch.clamp(v - tau, min=0)
        return w
class SCN_multi(torch.nn.Module):
    """Simplicial Complex Network with a multi-dimensional output.

    Same construction as SCN, but each unit carries an `output_dim`-sized
    function value and the per-layer mixture weights go through a softmax.
    """

    def __init__(self, visible_num, input_dim, output_dim, visible_units, depth, model=1):
        super(SCN_multi, self).__init__()
        # depth = number of hidden units
        self.L = torch.nn.ParameterList()
        for _ in range(depth):
            #self.L.append(torch.nn.Parameter(torch.ones(1,visible_num)/visible_num, requires_grad=True))
            self.L.append(torch.nn.Parameter(torch.zeros(1, visible_num), requires_grad=True))
        ## visible units are defined as columns of a matrix
        ### UNCOMMENT THESE TWO LINES FOR SCN_fractal_test
        #self.visible_fs = torch.nn.Parameter(torch.randn(visible_num, 1)/5, requires_grad=True)
        #self.biases = torch.nn.Parameter(torch.randn(depth, 1)/5, requires_grad=True)
        ###
        self.visible_fs = torch.nn.Parameter(torch.zeros(visible_num, output_dim), requires_grad=True)
        self.biases = torch.nn.Parameter(torch.zeros(depth, output_dim), requires_grad=True)
        # Linear bias producers are built but unused in forward's model==1
        # path (see update_h_mdl1); weights zeroed so they start neutral.
        self.bias_funcs = torch.nn.ModuleList([])
        for _ in range(depth):
            self.bias_funcs.append(torch.nn.Linear(input_dim, output_dim))
            self.bias_funcs[-1].weight.data.fill_(0.0)
        self.visible_units = visible_units
        self.depth = depth
        self.visible_num = visible_num
        self.input_dim = input_dim
        self.output_dim = output_dim
        ## for one dimensional data
        self.model = model
        self.sftmax = torch.nn.Softmax(dim=-1)

    def forward(self, inp):
        """Run the network; returns (output, per-layer hidden states, last
        hidden unit).

        BUGFIX: `last_h` is pre-initialized to None. Previously it was only
        assigned inside the `model == 1` branch, so any other `model` value
        raised NameError at the return statement.
        """
        hidden_collect = []
        last_h = None
        f = self.visible_fs.repeat(inp.size()[0], 1, 1)
        h = self.visible_units.repeat(inp.size()[0], 1, 1)
        input_weights = self.get_first_input_weights_mdl1(inp, self.visible_units)
        for i in range(self.depth):
            # Vertex selection is detached so it is treated as
            # non-differentiable; the mixture itself keeps gradients.
            input_weights, indices = self.update_weights(input_weights, self.sftmax(self.L[i]).detach())
            h_old = h.clone()
            if self.model == 1:
                f, h, last_h = self.update_h_mdl1(f, h, indices, self.sftmax(self.L[i]), i)
            elif self.model == 2:
                f, h = self.update_h_mdl2(f, h, indices, self.sftmax(self.L[i]), i)
            hidden_collect.append([h_old, h[range(h.size()[0]), indices.long(), :]])
        out = torch.bmm(input_weights.view(inp.size()[0], 1, -1), f)
        #out = torch.nn.Softmax(dim=-1)(torch.bmm(input_weights.view(inp.size()[0], 1, -1), f))
        return out, hidden_collect, last_h

    def update_weights(self, inp_weights, h_w):
        # Pick, per sample, the unit whose barycentric weight is exhausted
        # first relative to h_w (epsilon guards division by zero).
        weights_div = inp_weights / (h_w + 1e-20)
        values, indices = torch.min(weights_div, 1)
        input_weights = self.update_inp_weights_mdl1(values, indices, inp_weights, h_w)
        return input_weights, indices

    def update_inp_weights_mdl1(self, values, indices, inp_weights, h_w):
        # Move `values` worth of mass from all units onto the selected unit.
        #new_weights = inp_weights - values.view(-1,1).repeat(1,self.visible_num) * h_w
        new_weights = inp_weights - values.view(-1, 1) * h_w
        new_weights[range(values.size()[0]), indices.long()] = values
        return new_weights

    def update_h_mdl0(self, f, h, indices, weights, i):
        # Legacy variant kept for reference; adds the raw per-layer bias.
        new_h = torch.bmm(weights.repeat(h.size()[0], 1, 1), h).view(-1, self.input_dim)
        f[range(h.size()[0]), indices.long(), :] = (torch.bmm(weights.repeat(h.size()[0], 1, 1), f.clone()))\
            .squeeze(-2) + self.biases[i]
        h[range(h.size()[0]), indices.long(), :] = new_h
        return f, h

    def update_h_mdl1(self, f, h, indices, weights, i):
        # Replace the selected unit with the softmax mixture of all units;
        # the new function value gets the additive per-layer bias.
        new_h = torch.matmul(weights, h.clone()).squeeze(-2)
        f[range(h.size()[0]), indices.long(), :] = torch.matmul(weights, f.clone()).squeeze(-2) + \
                                                   self.biases[i]
        h[range(h.size()[0]), indices.long(), :] = new_h
        return f, h, new_h

    def update_h_mdl2(self, f, h, indices, weights, i):
        # Variant of mdl1 with a sigmoid-squashed bias.
        new_h = torch.bmm(weights.repeat(h.size()[0], 1, 1), h).view(-1, self.input_dim)
        f[range(h.size()[0]), indices.long(), :] = \
            torch.bmm(weights.repeat(h.size()[0], 1, 1), f.clone()) + torch.nn.Sigmoid()(self.biases[i])
        h[range(h.size()[0]), indices.long(), :] = new_h
        return f, h

    def get_first_input_weights_mdl1(self, inp, visible_units):
        # Initial barycentric coordinates: prepend 1 - sum(inp) so the
        # weights add up to one over visible_num = input_dim + 1 units.
        return torch.cat((1 - torch.sum(inp, 1).view(-1, 1), inp), 1)

    def features_num(self, inp):
        size = inp.size()[1:]  # all dimensions except the batch dimension
        num_features = 1
        for s in size:
            num_features = num_features * s
        return num_features

    def initial_weights(self):
        return 1

    def project_simplex(self, v, z=1):  # sparsemax
        # Euclidean projection of v onto the simplex {w >= 0, sum(w) = z}.
        v_sorted, _ = torch.sort(v, dim=0, descending=True)
        cssv = torch.cumsum(v_sorted, dim=0) - z
        ind = torch.arange(1, 1 + len(v)).to(dtype=v.dtype)
        cond = v_sorted - cssv / ind > 0
        rho = ind.masked_select(cond)[-1]
        tau = cssv.masked_select(cond)[-1] / rho
        w = torch.clamp(v - tau, min=0)
        return w
class SCN_multi_justified(torch.nn.Module):
    """SCN variant where the mixture weight and bias of each new hidden unit
    are produced by learned linear maps of that unit's coordinates, instead
    of free per-layer parameters as in SCN_multi.
    """

    def __init__(self, visible_num, input_dim, output_dim, visible_units, depth, model=1):
        super(SCN_multi_justified, self).__init__()
        # L[i]: scores a scalar mixture weight for the new vertex from its
        # coordinates; bias_funcs[i]: produces its output-space bias.
        # bias_funcs weights start at zero so the bias path starts neutral.
        self.L = torch.nn.ModuleList([])
        self.bias_funcs = torch.nn.ModuleList([])
        for _ in range(depth):
            self.L.append(torch.nn.Linear(input_dim, 1))
            self.bias_funcs.append(torch.nn.Linear(input_dim, output_dim))
            self.bias_funcs[-1].weight.data.fill_(0.0)
        self.visible_fs = torch.nn.Parameter(torch.zeros(visible_num, output_dim), requires_grad=True)
        self.visible_ws = torch.nn.Parameter(torch.zeros(visible_num), requires_grad=True)
        self.visible_units = visible_units
        self.visible_num = visible_num
        self.depth = depth
        self.input_dim = input_dim
        self.output_dim = output_dim
        # model is stored but not branched on in this class's forward.
        self.model = model
        self.sftmax = torch.nn.Softmax(dim=-1)

    def forward(self, inp):
        # NOTE(review): hidden_collect is never filled and the third return
        # value is always None — presumably kept to mirror SCN_multi's
        # return signature; confirm with callers.
        hidden_collect = []
        f = self.visible_fs.repeat(inp.size()[0], 1, 1)
        w = self.visible_ws.repeat(inp.size()[0], 1)
        h = self.visible_units.repeat(inp.size()[0], 1, 1)
        input_weights = self.get_first_input_weights_mdl1(inp, self.visible_units)
        for i in range(self.depth):
            # Per-sample vertex selection; w.data (detached) is used here so
            # the discrete selection itself receives no gradients.
            weights_div = input_weights / (self.sftmax(w.data) + 1e-20)
            values, indices = torch.min(weights_div, 1)
            input_weights = input_weights - values.view(-1, 1) * self.sftmax(w.data)
            input_weights[range(values.size()[0]), indices.long()] = values
            # The mixture itself uses w.clone() (not detached), so gradients
            # flow through the softmax weights.
            stfmax_w = self.sftmax(w.clone()).unsqueeze(-2)
            new_h = torch.matmul(stfmax_w, h.clone()).squeeze(-2)
            f[range(h.size()[0]), indices.long(), :] = torch.matmul(stfmax_w, f.clone()).squeeze(-2) + \
                self.bias_funcs[i](new_h)
            h[range(h.size()[0]), indices.long(), :] = new_h
            # The replaced vertex gets its new weight from the learned map.
            w[range(values.size()[0]), indices.long()] = self.L[i](new_h).squeeze()
        out = torch.bmm(input_weights.view(inp.size()[0], 1, -1), f)
        return out, hidden_collect, None

    def get_first_input_weights_mdl1(self, inp, visible_units):
        # Unlike SCN/SCN_multi, the raw input is used directly as the
        # initial barycentric weights (no prepended 1 - sum coordinate).
        return inp
        #return torch.cat((1 - torch.sum(inp, 1).view(-1, 1), inp), 1)
# #### SIMPLICIAL COMPLEX NEURAL NETWORK LEARN FUNCTIONS!
#
# import torch
# from torch.autograd import Variable
# import numpy as np
# import matplotlib.pyplot as plt
# from scipy.stats import norm
#
# class SCN(torch.nn.Module):
#
# def __init__(self, visible_num, input_dim, visible_units, depth, model=1):
# super(SCN, self).__init__()
# # depth = number of hidden units
# self.L = []
# for _ in range(depth):
# self.L.append(torch.nn.Parameter(torch.ones(1,visible_num)/visible_num, requires_grad=True))
# ## visible units are defined as columns of a matrix
# ### UNCOMMENT THESE TWO LINES FOR SCN_fractal_test
# #self.visible_fs = torch.nn.Parameter(torch.randn(visible_num, 1)/5, requires_grad=True)
# #self.biases = torch.nn.Parameter(torch.randn(depth, 1)/5, requires_grad=True)
# ###
# self.visible_fs = torch.nn.Parameter(torch.zeros(visible_num, 1), requires_grad = True)
# self.biases = torch.nn.Parameter(torch.zeros(depth,1), requires_grad = True)
# self.visible_units = visible_units
#
# self.depth = depth
# self.visible_num = visible_num
# self.input_dim = input_dim
# ## for one dimensional data
# self.model = model
#
#
# def forward(self, inp):
# hidden_collect = []
# f = self.visible_fs.repeat(inp.size()[0], 1, 1)
# h = self.visible_units.repeat(inp.size()[0], 1, 1)
# input_weights = self.get_first_input_weights_mdl1(inp, self.visible_units)
# for i in range(self.depth):
# input_weights, indices = self.update_weights(input_weights, self.L[i])
# h_old = h.clone()
# if self.model == 1:
# f, h = self.update_h_mdl1(f, h, indices, self.L[i], i)
# elif self.model == 2:
# f, h = self.update_h_mdl2(f, h, indices, self.L[i], i)
# hidden_collect.append([h_old, h[range(h.size()[0]), indices.long(), :]])
# #out = torch.bmm(input_weights.view(inp.size()[0], 1, -1), f)
# out = torch.nn.Sigmoid()(torch.bmm(input_weights.view(inp.size()[0], 1, -1), f))
# return out, hidden_collect
#
#
# def update_weights(self, inp_weights, h_w):
# weights_div = inp_weights / (h_w + 1e-20)
# values, indices = torch.min(weights_div, 1)
# input_weights = self.update_inp_weights_mdl1(values, indices, inp_weights, h_w)
# return input_weights, indices
#
# def update_inp_weights_mdl1(self, values, indices, inp_weights, h_w):
# new_weights = inp_weights - values.view(-1,1).repeat(1,self.visible_num) * h_w
# new_weights[range(values.size()[0]),indices.long()] = values
# return new_weights
#
# def update_h_mdl1(self, f, h, indices, weights, i):
# new_h = torch.bmm(weights.repeat(h.size()[0], 1, 1), h).view(-1, self.input_dim)
# f[range(h.size()[0]), indices.long(), :] = (torch.bmm(weights.repeat(h.size()[0], 1, 1), f.clone())
# + self.biases[i]).squeeze(-1)
# h[range(h.size()[0]), indices.long(), :] = new_h
# return f, h
#
# def update_h_mdl2(self, f, h, indices, weights, i):
# new_h = torch.bmm(weights.repeat(h.size()[0], 1, 1), h).view(-1, self.input_dim)
# f[range(h.size()[0]), indices.long(), :] = \
# torch.bmm(weights.repeat(h.size()[0], 1, 1), f.clone()) + torch.nn.Sigmoid()(self.biases[i])
# h[range(h.size()[0]), indices.long(), :] = new_h
# return f, h
#
# def get_first_input_weights_mdl1(self, inp, visible_units):
# return torch.cat((1 - torch.sum(inp, 1).view(-1, 1), inp), 1)
#
# def features_num(self, inp):
# size = inp.size()[1:] # all dimensions except the batch dimension
# num_features = 1
# for s in size:
# num_features = num_features * s
# return num_features
#
# def initial_weights(self):
# return 1
#
#
# def project_simplex(self, v, z=1): #sparsemax
# v_sorted, _ = torch.sort(v, dim=0, descending=True)
# cssv = torch.cumsum(v_sorted, dim=0) - z
# ind = torch.arange(1, 1 + len(v)).to(dtype=v.dtype)
# cond = v_sorted - cssv / ind > 0
# rho = ind.masked_select(cond)[-1]
# tau = cssv.masked_select(cond)[-1] / rho
# w = torch.clamp(v - tau, min=0)
# return w
#
#
# class SCN_multi(torch.nn.Module):
#
# def __init__(self, visible_num, input_dim, output_dim, visible_units, depth, model=1):
# super(SCN_multi, self).__init__()
# # depth = number of hidden units
# self.L = torch.nn.ParameterList()
# for _ in range(depth):
# self.L.append(torch.nn.Parameter(torch.ones(1,visible_num)/visible_num, requires_grad=True))
# ## visible units are defined as columns of a matrix
# ### UNCOMMENT THESE TWO LINES FOR SCN_fractal_test
# #self.visible_fs = torch.nn.Parameter(torch.randn(visible_num, 1)/5, requires_grad=True)
# #self.biases = torch.nn.Parameter(torch.randn(depth, 1)/5, requires_grad=True)
# ###
# self.visible_fs = torch.nn.Parameter(torch.zeros(visible_num, output_dim), requires_grad = True)
# self.biases = torch.nn.Parameter(torch.zeros(depth,output_dim), requires_grad = True)
# self.bias_funcs = torch.nn.ModuleList([])
# for _ in range(depth):
# self.bias_funcs.append(torch.nn.Linear(input_dim, output_dim))
# self.bias_funcs[-1].weight.data.fill_(0.0)
# self.visible_units = visible_units
#
# self.depth = depth
# self.visible_num = visible_num
# self.input_dim = input_dim
# self.output_dim = output_dim
# ## for one dimensional data
# self.model = model
#
#
# def forward(self, inp):
# hidden_collect = []
# f = self.visible_fs.repeat(inp.size()[0], 1, 1)
# h = self.visible_units.repeat(inp.size()[0], 1, 1)
# input_weights = self.get_first_input_weights_mdl1(inp, self.visible_units)
# for i in range(self.depth):
# input_weights, indices = self.update_weights(input_weights, self.L[i])
# h_old = h.clone()
# if self.model == 1:
# f, h = self.update_h_mdl1(f, h, indices, self.L[i], i)
# elif self.model == 2:
# f, h = self.update_h_mdl2(f, h, indices, self.L[i], i)
# hidden_collect.append([h_old, h[range(h.size()[0]), indices.long(), :]])
# out = torch.bmm(input_weights.view(inp.size()[0], 1, -1), f)
# #out = torch.nn.Softmax(dim=-1)(torch.bmm(input_weights.view(inp.size()[0], 1, -1), f))
# return out, hidden_collect
#
# def update_weights(self, inp_weights, h_w):
# weights_div = inp_weights / (h_w + 1e-20)
# values, indices = torch.min(weights_div, 1)
# #print(indices)
# input_weights = self.update_inp_weights_mdl1(values, indices, inp_weights, h_w)
# return input_weights, indices
#
# def update_inp_weights_mdl1(self, values, indices, inp_weights, h_w):
# #new_weights = inp_weights - values.view(-1,1).repeat(1,self.visible_num) * h_w
# new_weights = inp_weights - values.view(-1, 1) * h_w
# new_weights[range(values.size()[0]),indices.long()] = values
# return new_weights
#
# def update_h_mdl0(self, f, h, indices, weights, i):
# new_h = torch.bmm(weights.repeat(h.size()[0], 1, 1), h).view(-1, self.input_dim)
# f[range(h.size()[0]), indices.long(), :] = (torch.bmm(weights.repeat(h.size()[0], 1, 1), f.clone()))\
# .squeeze(-2) + self.biases[i]
# h[range(h.size()[0]), indices.long(), :] = new_h
# return f, h
#
# def update_h_mdl1(self, f, h, indices, weights, i):
# new_h = torch.matmul(weights, h).squeeze()
# #print(new_h[20])
# # if i == 1:
# # print(new_h, indices)
# plt.imshow((new_h[3]).reshape(28, 28).data.numpy(), cmap='gray', vmin=0.0,
# vmax=new_h[3].max())
# plt.savefig('hidden' + str(i) + '.png')
# plt.clf()
# f[range(h.size()[0]), indices.long(), :] = torch.matmul(weights.detach(), f.clone()).squeeze() + \
# self.bias_funcs[i](new_h.detach())
# h[range(h.size()[0]), indices.long(), :] = new_h
# return f, h
#
# def update_h_mdl2(self, f, h, indices, weights, i):
# new_h = torch.bmm(weights.repeat(h.size()[0], 1, 1), h).view(-1, self.input_dim)
# f[range(h.size()[0]), indices.long(), :] = \
# torch.bmm(weights.repeat(h.size()[0], 1, 1), f.clone()) + torch.nn.Sigmoid()(self.biases[i])
# h[range(h.size()[0]), indices.long(), :] = new_h
# return f, h
#
# def get_first_input_weights_mdl1(self, inp, visible_units):
# return torch.cat((1 - torch.sum(inp, 1).view(-1, 1), inp), 1)
#
# def features_num(self, inp):
# size = inp.size()[1:] # all dimensions except the batch dimension
# num_features = 1
# for s in size:
# num_features = num_features * s
# return num_features
#
# def initial_weights(self):
# return 1
#
#
# def project_simplex(self, v, z=1): #sparsemax
# v_sorted, _ = torch.sort(v, dim=0, descending=True)
# cssv = torch.cumsum(v_sorted, dim=0) - z
# ind = torch.arange(1, 1 + len(v)).to(dtype=v.dtype)
# cond = v_sorted - cssv / ind > 0
# rho = ind.masked_select(cond)[-1]
# tau = cssv.masked_select(cond)[-1] / rho
# w = torch.clamp(v - tau, min=0)
# return w
| 45.985325
| 111
| 0.587782
| 3,152
| 21,935
| 3.914657
| 0.053617
| 0.027555
| 0.020423
| 0.019856
| 0.971067
| 0.965313
| 0.958587
| 0.95105
| 0.947808
| 0.92933
| 0
| 0.023194
| 0.257032
| 21,935
| 476
| 112
| 46.081933
| 0.733939
| 0.494051
| 0
| 0.723958
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.026042
| 0.026042
| 0.276042
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8b4762a27a32d1804b51e8cc84e795ae6aad3ae3
| 44,226
|
py
|
Python
|
mapel/voting/metrics/lp.py
|
szufix/mapel
|
9eaacf7963021ea3eab701a14efd82cf1bfc5c5b
|
[
"MIT"
] | 3
|
2020-02-15T11:37:30.000Z
|
2021-02-21T11:46:10.000Z
|
mapel/voting/metrics/lp.py
|
szufix/mapel
|
9eaacf7963021ea3eab701a14efd82cf1bfc5c5b
|
[
"MIT"
] | 9
|
2020-03-24T11:08:57.000Z
|
2020-03-24T11:48:04.000Z
|
mapel/voting/metrics/lp.py
|
szufix/mapel
|
9eaacf7963021ea3eab701a14efd82cf1bfc5c5b
|
[
"MIT"
] | 4
|
2020-04-21T17:47:50.000Z
|
2022-03-25T16:41:04.000Z
|
#!/usr/bin/env python
import os
try:
import cplex
except ImportError:
cplex = None
import numpy as np
from mapel.voting.metrics.inner_distances import hamming
# FOR SUBELECTIONS
def solve_lp_voter_subelection(election_1, election_2, metric_name='0'):
    """ILP solver for the voter-subelection problem.

    Builds a CPLEX model that matches voters of ``election_1`` to voters of
    ``election_2`` (binary variables ``N``) and candidates to candidates
    (binary variables ``M``), maximizing the number of matched voter pairs
    whose positional votes agree under the candidate matching within a
    tolerance of ``int(metric_name)``.

    Returns the objective value (number of matched voters), or ``None`` if
    the solver raises.
    """
    cp = cplex.Cplex()
    cp.parameters.threads.set(1)

    # OBJECTIVE FUNCTION: maximize the number of matched voter pairs.
    cp.objective.set_sense(cp.objective.sense.maximize)
    objective = []
    names = []
    for v1 in range(election_1.num_voters):
        for v2 in range(election_2.num_voters):
            names.append('N' + str(v1) + '_' + str(v2))
            objective.append(1.)
    cp.variables.add(obj=objective,
                     names=names,
                     types=[
                         cp.variables.type.binary] * election_1.num_voters * election_2.num_voters)

    # FIRST CONSTRAINT FOR VOTERS: each voter of election_1 matched at most once.
    lin_expr = []
    for v1 in range(election_1.num_voters):
        ind = []
        for v2 in range(election_2.num_voters):
            ind.append('N' + str(v1) + '_' + str(v2))
        lin_expr.append(cplex.SparsePair(ind=ind, val=[1.0] * election_2.num_voters))
    cp.linear_constraints.add(lin_expr=lin_expr,
                              senses=['L'] * election_1.num_voters,
                              rhs=[1.0] * election_1.num_voters,
                              names=['C1_' + str(i) for i in range(election_1.num_voters)])

    # SECOND CONSTRAINT FOR VOTERS: each voter of election_2 matched at most once.
    lin_expr = []
    for v2 in range(election_2.num_voters):
        ind = []
        for v1 in range(election_1.num_voters):
            ind.append('N' + str(v1) + '_' + str(v2))
        lin_expr.append(cplex.SparsePair(ind=ind, val=[1.0] * election_1.num_voters))
    cp.linear_constraints.add(lin_expr=lin_expr,
                              senses=['L'] * election_2.num_voters,
                              rhs=[1.0] * election_2.num_voters,
                              names=['C2_' + str(i) for i in range(election_2.num_voters)])

    # ADD VARIABLES FOR CANDIDATES: M_c1_c2 == 1 iff c1 is mapped to c2.
    names = []
    for c1 in range(election_1.num_candidates):
        for c2 in range(election_2.num_candidates):
            names.append('M' + str(c1) + '_' + str(c2))
    cp.variables.add(names=list(names),
                     types=[
                         cp.variables.type.binary] * election_1.num_candidates * election_2.num_candidates)

    # FIRST CONSTRAINT FOR CANDIDATES: each candidate of election_1 mapped exactly once.
    lin_expr = []
    for c1 in range(election_1.num_candidates):
        ind = []
        for c2 in range(election_2.num_candidates):
            ind.append('M' + str(c1) + '_' + str(c2))
        lin_expr.append(cplex.SparsePair(ind=ind, val=[1.0] * election_2.num_candidates))
    cp.linear_constraints.add(lin_expr=lin_expr,
                              senses=['E'] * election_1.num_candidates,
                              rhs=[1.0] * election_1.num_candidates,
                              names=['C3_' + str(i) for i in range(election_1.num_candidates)])

    # SECOND CONSTRAINT FOR CANDIDATES: each candidate of election_2 mapped exactly once.
    lin_expr = []
    for c2 in range(election_2.num_candidates):
        ind = []
        for c1 in range(election_1.num_candidates):
            ind.append('M' + str(c1) + '_' + str(c2))
        lin_expr.append(cplex.SparsePair(ind=ind, val=[1.0] * election_1.num_candidates))
    cp.linear_constraints.add(lin_expr=lin_expr,
                              senses=['E'] * election_2.num_candidates,
                              rhs=[1.0] * election_2.num_candidates,
                              names=['C4_' + str(i) for i in range(election_2.num_candidates)])

    # MAIN CONSTRAINT FOR VOTES: N_v1_v2 may be 1 only if, under the candidate
    # matching M, enough positions of the two votes agree within tolerance;
    # the -num_candidates coefficient deactivates the constraint when N is 0.
    lin_expr = []
    for v1 in range(election_1.num_voters):
        for v2 in range(election_2.num_voters):
            ind = []
            val = []
            for c1 in range(election_1.num_candidates):
                for c2 in range(election_2.num_candidates):
                    ind.append('M' + str(c1) + '_' + str(c2))
                    if abs(election_1.potes[v1][c1] - election_2.potes[v2][c2]) <= int(metric_name):
                        val.append(1.)
                    else:
                        val.append(0.)
            ind.append('N' + str(v1) + '_' + str(v2))
            val.append(-election_1.num_candidates)
            lin_expr.append(cplex.SparsePair(ind=ind, val=val))
    cp.linear_constraints.add(lin_expr=lin_expr,
                              senses=['G'] * election_1.num_voters * election_2.num_voters,
                              rhs=[0.0] * election_1.num_voters * election_2.num_voters,
                              names=['C5_' + str(i) for i in
                                     range(election_1.num_voters * election_2.num_voters)])

    # SOLVE THE ILP
    cp.set_results_stream(None)
    try:
        cp.solve()
    except Exception:  # narrowed from bare except; keeps KeyboardInterrupt alive
        print("Exception raised while solving")
        return
    objective_value = cp.solution.get_objective_value()
    return objective_value
def solve_lp_candidate_subelections(lp_file_name, election_1, election_2):
    """ILP solver for the candidate-subelection problem.

    Precomputes an agreement tensor ``P`` (``P[v][u][c1][d1][c2][d2] == 1``
    when voter ``v`` of ``election_1`` ranks ``c1``/``c2`` in the same
    relative order as voter ``u`` of ``election_2`` ranks ``d1``/``d2``),
    writes an LP-format model to ``lp_file_name``, solves it with CPLEX,
    and returns the size of the optimal candidate matching (the sum of the
    binary ``M`` matrix), or ``None`` on solver failure.
    """
    # PRECOMPUTING: pairwise-order agreement tensor.
    P = np.zeros([election_1.num_voters, election_2.num_voters, election_1.num_candidates,
                  election_2.num_candidates,
                  election_1.num_candidates, election_2.num_candidates])
    for v in range(election_1.num_voters):
        for u in range(election_2.num_voters):
            for c1 in range(election_1.num_candidates):
                for d1 in range(election_2.num_candidates):
                    for c2 in range(election_1.num_candidates):
                        for d2 in range(election_2.num_candidates):
                            if (election_1.potes[v][c1] > election_1.potes[v][c2] and
                                    election_2.potes[u][d1] >
                                    election_2.potes[u][d2]) or \
                                    (election_1.potes[v][c1] < election_1.potes[v][c2] and
                                     election_2.potes[u][d1] <
                                     election_2.potes[u][d2]):
                                P[v][u][c1][d1][c2][d2] = 1

    # CREATE LP FILE: maximize the number of matched candidate pairs M_c_d.
    lp_file = open(lp_file_name, 'w')
    lp_file.write("Maximize\nobj: ")
    first = True
    for c in range(election_1.num_candidates):
        for d in range(election_2.num_candidates):
            if not first:
                lp_file.write(" + ")
            first = False
            lp_file.write(" M_" + str(c) + "_" + str(d))
    lp_file.write("\n")
    lp_file.write("Subject To\n")
    ctr_c = 0
    # FIRST CONSTRAINT FOR VOTERS: each voter of election_1 matched exactly once.
    for v in range(election_1.num_voters):
        lp_file.write("c" + str(ctr_c) + ":")
        first = True
        for u in range(election_2.num_voters):
            if not first:
                lp_file.write(" +")
            first = False
            lp_file.write(" N_" + str(v) + "_" + str(u))
        lp_file.write(" = 1" + "\n")
        ctr_c += 1
    # SECOND CONSTRAINT FOR VOTERS: each voter of election_2 matched exactly once.
    for u in range(election_2.num_voters):
        lp_file.write("c" + str(ctr_c) + ":")
        first = True
        for v in range(election_1.num_voters):
            if not first:
                lp_file.write(" +")
            first = False
            lp_file.write(" N_" + str(v) + "_" + str(u))
        lp_file.write(" = 1" + "\n")
        ctr_c += 1
    # FIRST CONSTRAINT FOR CANDIDATES: at most one match per candidate of election_1.
    for c in range(election_1.num_candidates):
        lp_file.write("c" + str(ctr_c) + ":")
        first = True
        for d in range(election_2.num_candidates):
            if not first:
                lp_file.write(" +")
            first = False
            lp_file.write(" M_" + str(c) + "_" + str(d))
        lp_file.write(" <= 1" + "\n")
        ctr_c += 1
    # SECOND CONSTRAINT FOR CANDIDATES: at most one match per candidate of election_2.
    for d in range(election_2.num_candidates):
        lp_file.write("c" + str(ctr_c) + ":")
        first = True
        for c in range(election_1.num_candidates):
            if not first:
                lp_file.write(" +")
            first = False
            lp_file.write(" M_" + str(c) + "_" + str(d))
        lp_file.write(" <= 1" + "\n")
        ctr_c += 1
    # FIRST CONSTRAINT FOR P: P may be 1 only when N and both M variables are 1
    # (0.34 * 3 > 1, so all three must be set for P to fit under 0).
    for v in range(election_1.num_voters):
        for u in range(election_2.num_voters):
            for c1 in range(election_1.num_candidates):
                for d1 in range(election_2.num_candidates):
                    for c2 in range(election_1.num_candidates):
                        if c1 == c2:
                            continue
                        for d2 in range(election_2.num_candidates):
                            if d1 == d2:
                                continue
                            lp_file.write("c" + str(ctr_c) + ":")
                            lp_file.write(" P_" + str(v) + "_" + str(u) + "_" +
                                          str(c1) + "_" + str(d1) + "_" + str(c2) + "_" + str(d2))
                            lp_file.write(" - 0.34 N_" + str(v) + "_" + str(u))
                            lp_file.write(" - 0.34 M_" + str(c1) + "_" + str(d1))
                            lp_file.write(" - 0.34 M_" + str(c2) + "_" + str(d2))
                            lp_file.write(" <= 0" + "\n")
                            ctr_c += 1
    # SECOND CONSTRAINT FOR P: force P to 1 when N and both M variables are 1.
    for v in range(election_1.num_voters):
        for u in range(election_2.num_voters):
            for c1 in range(election_1.num_candidates):
                for d1 in range(election_2.num_candidates):
                    for c2 in range(election_1.num_candidates):
                        if c1 == c2:
                            continue
                        for d2 in range(election_2.num_candidates):
                            if d1 == d2:
                                continue
                            lp_file.write("c" + str(ctr_c) + ":")
                            lp_file.write(" P_" + str(v) + "_" + str(u) + "_" +
                                          str(c1) + "_" + str(d1) + "_" + str(c2) + "_" + str(d2))
                            lp_file.write(" - 0.34 N_" + str(v) + "_" + str(u))
                            lp_file.write(" - 0.34 M_" + str(c1) + "_" + str(d1))
                            lp_file.write(" - 0.34 M_" + str(c2) + "_" + str(d2))
                            lp_file.write(" > -1" + "\n")
                            ctr_c += 1
    # THIRD CONSTRAINT FOR P: P bounded above by the precomputed agreement value.
    for v in range(election_1.num_voters):
        for u in range(election_2.num_voters):
            for c1 in range(election_1.num_candidates):
                for d1 in range(election_2.num_candidates):
                    for c2 in range(election_1.num_candidates):
                        if c1 == c2:
                            continue
                        for d2 in range(election_2.num_candidates):
                            if d1 == d2:
                                continue
                            lp_file.write("c" + str(ctr_c) + ":")
                            lp_file.write(" P_" + str(v) + "_" + str(u) + "_" +
                                          str(c1) + "_" + str(d1) + "_" + str(c2) + "_" + str(d2))
                            lp_file.write(" <= " + str(P[v][u][c1][d1][c2][d2]) + "\n")
                            ctr_c += 1
    # Declare all P, N and M variables binary.
    lp_file.write("Binary\n")
    for v in range(election_1.num_voters):
        for u in range(election_2.num_voters):
            for c1 in range(election_1.num_candidates):
                for d1 in range(election_2.num_candidates):
                    for c2 in range(election_1.num_candidates):
                        if c1 == c2:
                            continue
                        for d2 in range(election_2.num_candidates):
                            if d1 == d2:
                                continue
                            lp_file.write("P_" + str(v) + "_" + str(u) + "_" +
                                          str(c1) + "_" + str(d1) + "_" + str(c2) + "_" + str(
                                              d2) + "\n")
    for v in range(election_1.num_voters):
        for u in range(election_2.num_voters):
            lp_file.write("N_" + str(v) + "_" + str(u) + "\n")
    for c in range(election_1.num_candidates):
        for d in range(election_2.num_candidates):
            lp_file.write("M_" + str(c) + "_" + str(d) + "\n")
    lp_file.write("End\n")
    lp_file.close()

    # SECOND PART: solve the generated model.
    cp_lp = cplex.Cplex(lp_file_name)
    cp_lp.parameters.threads.set(1)
    cp_lp.set_results_stream(None)
    try:
        cp_lp.solve()
    except Exception:  # narrowed from bare except
        print("Exception raised during solve")
        return
    # Extract the candidate-matching matrix and return its total size.
    result = np.zeros([election_1.num_candidates, election_1.num_candidates])
    for i in range(election_1.num_candidates):
        for j in range(election_1.num_candidates):
            name = 'M_' + str(i) + '_' + str(j)
            result[i][j] = cp_lp.solution.get_values(name)
    return sum(sum(result))
# FOR METRICS
def solve_lp_matching_vector_with_lp(cost_table, length):
    """ILP solver for minimum-cost perfect matching between two vectors.

    Builds a binary assignment model over a ``length x length`` cost table
    and returns the optimal total cost, or ``None`` if the solver raises.
    """
    cp = cplex.Cplex()
    cp.parameters.threads.set(1)

    # OBJECTIVE FUNCTION: minimize the total matching cost.
    cp.objective.set_sense(cp.objective.sense.minimize)
    objective = []
    names = []
    pos = 0
    for i in range(length):
        for j in range(length):
            names.append('x' + str(pos))
            objective.append(cost_table[i][j])
            pos += 1
    cp.variables.add(obj=objective,
                     names=names,
                     types=[cp.variables.type.binary] * length ** 2)

    # FIRST GROUP OF CONSTRAINTS: each row matched exactly once.
    lin_expr = []
    for i in range(length):
        ind = []
        for j in range(length):
            pos = i * length + j
            ind.append('x' + str(pos))
        lin_expr.append(cplex.SparsePair(ind=ind, val=[1.0] * length))
    cp.linear_constraints.add(lin_expr=lin_expr,
                              senses=['E'] * length,
                              rhs=[1.0] * length)

    # SECOND GROUP OF CONSTRAINTS: each column matched exactly once.
    lin_expr = []
    for j in range(length):
        ind = []
        for i in range(length):
            pos = i * length + j
            ind.append('x' + str(pos))
        lin_expr.append(cplex.SparsePair(ind=ind, val=[1.0] * length))
    cp.linear_constraints.add(lin_expr=lin_expr,
                              senses=['E'] * length,
                              rhs=[1.0] * length)

    # SOLVE THE ILP
    cp.set_results_stream(None)
    try:
        cp.solve()
    except Exception:  # narrowed from bare except
        print("Exception raised while solving")
        return
    objective_value = cp.solution.get_objective_value()
    return objective_value
def solve_lp_matching_interval(cost_table, length_1, length_2):
    """ILP solver for a balanced fractional matching between two intervals.

    Each row must be covered ``length_2`` times and each column ``length_1``
    times; the result is the optimal cost normalized by
    ``precision = length_1 * length_2``. Returns ``None`` on solver failure.
    """
    precision = length_1 * length_2
    c = cplex.Cplex()
    c.parameters.threads.set(1)

    # OBJECTIVE FUNCTION
    c.objective.set_sense(c.objective.sense.minimize)
    c.objective.set_name("Obj")
    objective = []
    names = []
    pos = 0
    for i in range(length_1):
        for j in range(length_2):
            names.append('x' + str(pos))
            objective.append(cost_table[i][j])
            pos += 1
    c.variables.add(obj=objective,
                    names=names,
                    types=[c.variables.type.integer] * precision)

    # FIRST GROUP OF CONSTRAINTS: each row covered exactly length_2 times.
    lin_expr = []
    c_names = []
    for i in range(length_1):
        ind = []
        for j in range(length_2):
            pos = i * length_2 + j
            ind.append('x' + str(pos))
        lin_expr.append(cplex.SparsePair(ind=ind, val=[1.0] * length_2))
        c_names.append('c1_' + str(i))
    c.linear_constraints.add(lin_expr=lin_expr,
                             senses=['E'] * length_1,
                             rhs=[length_2] * length_1,
                             names=c_names)

    # SECOND GROUP OF CONSTRAINTS: each column covered exactly length_1 times.
    lin_expr = []
    c_names = []
    for j in range(length_2):
        ind = []
        for i in range(length_1):
            pos = i * length_2 + j
            ind.append('x' + str(pos))
        lin_expr.append(cplex.SparsePair(ind=ind, val=[1.0] * length_1))
        c_names.append('c2_' + str(j))
    c.linear_constraints.add(lin_expr=lin_expr,
                             senses=['E'] * length_2,
                             rhs=[length_1] * length_2,
                             names=c_names)

    # NOTE(review): unconditional model dumps into the current working
    # directory — looks like debug leftovers, kept for backward compatibility.
    c.write('interval.lp')
    c.write('interval.mps')

    # SOLVE THE ILP
    c.set_results_stream(None)
    try:
        c.solve()
    except Exception:  # narrowed from bare except
        print("Exception raised while solving")
        return
    result = c.solution.get_objective_value() / precision
    return result
# DODGSON SCORE
def generate_lp_file_dodgson_score(lp_file_name, N=None, e=None, D=None):
    """Write the Dodgson-score ILP to ``lp_file_name`` in CPLEX LP format.

    Variables ``yI_J`` count votes of type ``I`` after ``J`` swaps; the
    objective minimizes the total number of swaps. ``N[i]`` is the number of
    votes of type ``i``, ``e[i][j][k]`` a cumulative support table, and
    ``D[k]`` the required deficits.

    Fix: the original opened the file without ever closing it; a ``with``
    block now guarantees the buffer is flushed and the handle released.
    """
    with open(lp_file_name, 'w') as lp_file:
        lp_file.write("Minimize\nobj: ")
        first = True
        for i in range(len(N)):
            for j in range(1, len(D)):
                if not first:
                    lp_file.write(" + ")
                first = False
                lp_file.write(str(j) + " y" + str(i) + "_" + str(j))
        lp_file.write("\n")
        lp_file.write("Subject To\n")
        ctr_c = 0
        # Fix the zero-swap counts to the vote multiplicities.
        for i in range(len(N)):
            lp_file.write("c" + str(ctr_c) + ":")
            lp_file.write(" y" + str(i) + "_" + str(0) + " = " + str(N[i]) + "\n")
            ctr_c += 1
        # Deficit constraints: marginal support gains must cover D[k].
        for k in range(len(D)):
            lp_file.write("c" + str(ctr_c) + ":")
            first = True
            for i in range(len(N)):
                for j in range(1, len(D)):
                    if not first:
                        lp_file.write(" +")
                    first = False
                    lp_file.write(" " + str(e[i][j][k] - e[i][j - 1][k]) + " y" + str(i) + "_" + str(j))
            lp_file.write(" >= " + str(D[k]) + "\n")
            ctr_c += 1
        # Monotonicity: counts cannot grow with the number of swaps.
        for i in range(len(N)):
            for j in range(1, len(D)):
                lp_file.write("c" + str(ctr_c) + ":")
                lp_file.write(
                    " y" + str(i) + "_" + str(j - 1) + " - y" + str(i) + "_" + str(j) + " >= 0" + "\n")
                ctr_c += 1
        # Non-negativity (probably redundant: the General section already
        # makes these integers with default lower bound 0).
        for i in range(len(N)):
            for j in range(len(D)):
                lp_file.write("c" + str(ctr_c) + ":")
                lp_file.write(" y" + str(i) + "_" + str(j) + " >= 0" + "\n")
                ctr_c += 1
        # Declare all y variables as general integers.
        lp_file.write("General\n")
        for i in range(len(N)):
            for j in range(len(D)):
                lp_file.write("y" + str(i) + "_" + str(j) + "\n")
                ctr_c += 1
        lp_file.write("End\n")
def solve_lp_dodgson_score(lp_file_name):
    """Solve a Dodgson-score LP file with CPLEX.

    Returns the optimal objective value (the Dodgson score), or ``None``
    if the solver raises.
    """
    cp_lp = cplex.Cplex(lp_file_name)
    cp_lp.parameters.threads.set(1)
    cp_lp.set_results_stream(None)
    try:
        cp_lp.solve()
    except Exception:  # narrowed from bare except
        print("Exception raised during solve")
        return
    return cp_lp.solution.get_objective_value()
# FOR WINNERS - needs update
def generate_lp_file_borda_owa(owa, lp_file_name, params, votes):
    """Write the OWA-Borda winner ILP to ``lp_file_name`` in LP format.

    ``x`` variables select (voter, order, candidate) satisfaction terms
    weighted by ``owa``; ``y`` variables select exactly ``params['orders']``
    winning candidates.

    Fix: the original opened the file without ever closing it; a ``with``
    block now guarantees the buffer is flushed and the handle released.
    """
    with open(lp_file_name, 'w') as lp_file:
        lp_file.write("Maximize\nobj: ")
        pos = 0
        first = True
        for i in range(params['voters']):
            for j in range(params['orders']):
                for k in range(params['candidates']):
                    # NOTE(review): the separator is skipped for negative OWA
                    # weights, but the term is still written — presumably the
                    # sign of the weight then acts as the operator. Verify if
                    # negative weights are ever used.
                    if not first and owa[j] >= 0.:
                        lp_file.write(" + ")
                    first = False
                    lp_file.write(str(owa[j]) + " x" + str(pos))
                    pos += 1
        lp_file.write("\n")
        lp_file.write("Subject To\n")
        # Exactly params['orders'] winners.
        lp_file.write("c0:")
        first = True
        for i in range(params['candidates']):
            if not first:
                lp_file.write(" +")
            first = False
            lp_file.write(" y" + str(i))
        lp_file.write(' = ' + str(params['orders']) + '\n')
        # Link x selections to the chosen winners via vote prefixes.
        for i in range(params['voters']):
            for j in range(params['candidates']):
                lp_file.write("c" + str(i * params['candidates'] + j + 1) + ": ")
                pos = i * params['orders'] * params['candidates'] + j
                first = True
                for k in range(params['orders']):
                    if not first:
                        lp_file.write(" +")
                    first = False
                    lp_file.write(" x" + str(pos + params['candidates'] * k))
                for k in range(0, j + 1):
                    lp_file.write(" - y" + str(int(votes[i][k])))
                lp_file.write(" <= 0 \n")
        # All decision variables are binary.
        lp_file.write("Binary\n")
        for i in range(params['voters'] * params['orders'] * params['candidates']):
            lp_file.write("x" + str(i) + "\n")
        for i in range(params['candidates']):
            lp_file.write("y" + str(i) + "\n")
        lp_file.write("End\n")
def generate_lp_file_bloc_owa(owa, lp_file_name, params, votes, t_bloc):
    """Write the OWA-Bloc winner ILP to ``lp_file_name`` in LP format.

    Same structure as :func:`generate_lp_file_borda_owa`, but only the
    satisfaction terms at position ``t_bloc - 1`` contribute to the
    objective (Bloc scoring).

    Fix: the original opened the file without ever closing it; a ``with``
    block now guarantees the buffer is flushed and the handle released.
    """
    with open(lp_file_name, 'w') as lp_file:
        lp_file.write("Maximize\nobj: ")
        pos = 0
        first = True
        for i in range(params['voters']):
            for j in range(params['orders']):
                for k in range(params['candidates']):
                    # NOTE(review): when the first written term is not the very
                    # first (i, j, k) triple, the objective starts with " + ";
                    # CPLEX tolerates this, so the quirk is kept as-is.
                    if not first:
                        if k == t_bloc - 1:
                            lp_file.write(" + ")
                    first = False
                    if k == t_bloc - 1:
                        lp_file.write(str(owa[j]) + " x" + str(pos))
                    pos += 1
        lp_file.write("\n")
        lp_file.write("Subject To\n")
        # Exactly params['orders'] winners.
        lp_file.write("c0:")
        first = True
        for i in range(params['candidates']):
            if not first:
                lp_file.write(" +")
            first = False
            lp_file.write(" y" + str(i))
        lp_file.write(' = ' + str(params['orders']) + '\n')
        # Link x selections to the chosen winners via vote prefixes.
        for i in range(params['voters']):
            for j in range(params['candidates']):
                lp_file.write("c" + str(i * params['candidates'] + j + 1) + ": ")
                pos = i * params['orders'] * params['candidates'] + j
                first = True
                for k in range(params['orders']):
                    if not first:
                        lp_file.write(" +")
                    first = False
                    lp_file.write(" x" + str(pos + params['candidates'] * k))
                for k in range(0, j + 1):
                    lp_file.write(" - y" + str(int(votes[i][k])))
                lp_file.write(" <= 0 \n")
        # All decision variables are binary.
        lp_file.write("Binary\n")
        for i in range(params['voters'] * params['orders'] * params['candidates']):
            lp_file.write("x" + str(i) + "\n")
        for i in range(params['candidates']):
            lp_file.write("y" + str(i) + "\n")
        lp_file.write("End\n")
def get_winners_from_lp(tmp_file, params, candidates):
    """Solve a winner-selection LP file and return the sorted winner list.

    Reads the binary ``y`` variables from the solved model: ``y_i == 1``
    marks candidate ``i`` as one of the ``params['orders']`` winners.
    Returns ``None`` if the solver raises.
    """
    cp_lp = cplex.Cplex(tmp_file)
    cp_lp.parameters.threads.set(1)
    cp_lp.set_results_stream(None)
    try:
        cp_lp.solve()
    except cplex.CplexSolverError:
        print("Exception raised during solve")
        return
    # Extract the y variables (one per candidate).
    result = [0.] * params['candidates']
    for i in range(params['candidates']):
        result[i] = cp_lp.solution.get_values('y' + str(i))
    # print(result)
    # NOTE(review): 'pure' is forced to True here, so the else-branch below
    # never runs and the `candidates` argument is effectively unused —
    # confirm whether the non-pure mapping is still needed.
    params['pure'] = True
    winner_id = 0
    winners = [0.] * params['orders']
    for i in range(params['candidates']):
        if result[i] == 1.:
            if params['pure']:
                winners[winner_id] = i
            else:
                winners[winner_id] = candidates[i]
            winner_id += 1
    winners = sorted(winners)
    return winners
"""
def generate_lp_file_matching_matrix_half(lp_file_name, matrix_1, matrix_2, length):
# [1, 4, 6, 9, 11]
# [1, 5, 6, 9, 11]
print(matrix_1)
print(matrix_2)
lp_file = open(lp_file_name, 'w')
lp_file.write("Minimize\n") # obj: ")
first = True
for k in range(length):
for l in range(length):
for i in range(k+1, length):
for j in range(l+1, length):
if not first:
lp_file.write(" + ")
first = False
weight = abs(matrix_1[k][i] - matrix_2[l][j])#**2
print(weight)
lp_file.write(str(weight) + " P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
lp_file.write("\n")
lp_file.write("Subject To\n")
for k in range(length):
for l in range(length):
for i in range(k+1, length):
for j in range(l+1, length):
lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
lp_file.write(" - " + "M" + "i" + str(i) + "j" + str(j) + " <= 0" + "\n")
lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
lp_file.write(" - " + "M" + "i" + str(k) + "j" + str(l) + " <= 0" + "\n")
for i in range(length):
first = True
for j in range(length):
if not first:
lp_file.write(" + ")
first = False
lp_file.write("M" + "i" + str(i) + "j" + str(j))
lp_file.write(" = 1" + "\n")
for j in range(length):
first = True
for i in range(length):
if not first:
lp_file.write(" + ")
first = False
lp_file.write("M" + "i" + str(i) + "j" + str(j))
lp_file.write(" = 1" + "\n")
# Not sure about this part #
for k in range(length):
for i in range(k+1, length):
if k == i:
continue
first = True
for l in range(length):
for j in range(l+1, length):
if l == j:
continue
if not first:
lp_file.write(" + ")
first = False
lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
lp_file.write(" = 1" + "\n")
# Not sure about this part #
for l in range(length):
for j in range(l+1, length):
if l == j:
continue
first = True
for k in range(length):
for i in range(k+1, length):
if k == i:
continue
if not first:
lp_file.write(" + ")
first = False
lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
lp_file.write(" = 1" + "\n")
lp_file.write("Binary\n")
for k in range(length):
for l in range(length):
for i in range(k+1, length):
for j in range(l+1, length):
lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j) + "\n")
for i in range(length):
for j in range(length):
lp_file.write("M" + "i" + str(i) + "j" + str(j) + "\n")
lp_file.write("End\n")
"""
def generate_lp_file_matching_matrix(lp_file_name, matrix_1, matrix_2, length, inner_distance):
    """Write the matrix-matching ILP to ``lp_file_name`` in LP format.

    ``M`` variables encode a permutation matching rows of ``matrix_1`` to
    rows of ``matrix_2``; ``P`` variables select the matched entry pairs,
    weighted by ``inner_distance(matrix_1[k][i], matrix_2[l][j])``.

    Fix: the original opened the file without ever closing it; a ``with``
    block now guarantees the buffer is flushed and the handle released.
    """
    with open(lp_file_name, 'w') as lp_file:
        # Objective: total inner distance over the selected entry pairs.
        lp_file.write("Minimize\n")
        first = True
        for k in range(length):
            for l in range(length):
                for i in range(length):
                    if i == k:
                        continue
                    for j in range(length):
                        if j == l:
                            continue
                        if not first:
                            lp_file.write(" + ")
                        first = False
                        weight = inner_distance(matrix_1[k][i], matrix_2[l][j])
                        lp_file.write(
                            str(weight) + " P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(
                                j))
        lp_file.write("\n")
        lp_file.write("Subject To\n")
        # P may be 1 only if both endpoint matchings (i->j and k->l) are chosen.
        for k in range(length):
            for l in range(length):
                for i in range(length):
                    if i == k:
                        continue
                    for j in range(length):
                        if j == l:
                            continue
                        lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
                        lp_file.write(" - " + "M" + "i" + str(i) + "j" + str(j) + " <= 0" + "\n")
                        lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
                        lp_file.write(" - " + "M" + "i" + str(k) + "j" + str(l) + " <= 0" + "\n")
        # M is a permutation: one match per row ...
        for i in range(length):
            first = True
            for j in range(length):
                if not first:
                    lp_file.write(" + ")
                first = False
                lp_file.write("M" + "i" + str(i) + "j" + str(j))
            lp_file.write(" = 1" + "\n")
        # ... and one match per column.
        for j in range(length):
            first = True
            for i in range(length):
                if not first:
                    lp_file.write(" + ")
                first = False
                lp_file.write("M" + "i" + str(i) + "j" + str(j))
            lp_file.write(" = 1" + "\n")
        # Each ordered row pair (k, i) is covered by exactly one P.
        # (Original author marked this section "Not sure about this part".)
        for k in range(length):
            for i in range(length):
                if k == i:
                    continue
                first = True
                for l in range(length):
                    for j in range(length):
                        if l == j:
                            continue
                        if not first:
                            lp_file.write(" + ")
                        first = False
                        lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
                lp_file.write(" = 1" + "\n")
        # Symmetric coverage for each ordered pair (l, j).
        for l in range(length):
            for j in range(length):
                if l == j:
                    continue
                first = True
                for k in range(length):
                    for i in range(length):
                        if k == i:
                            continue
                        if not first:
                            lp_file.write(" + ")
                        first = False
                        lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
                lp_file.write(" = 1" + "\n")
        # All decision variables are binary.
        lp_file.write("Binary\n")
        for k in range(length):
            for l in range(length):
                for i in range(length):
                    if i == k:
                        continue
                    for j in range(length):
                        if j == l:
                            continue
                        lp_file.write(
                            "P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j) + "\n")
        for i in range(length):
            for j in range(length):
                lp_file.write("M" + "i" + str(i) + "j" + str(j) + "\n")
        lp_file.write("End\n")
def solve_lp_matrix(lp_file_name, matrix_1, matrix_2, length):
    """Solve a matrix-matching LP file with CPLEX.

    ``matrix_1``, ``matrix_2`` and ``length`` are unused here but kept for
    backward compatibility with existing callers. Returns the optimal
    objective value, or ``None`` if the solver raises.
    """
    cp_lp = cplex.Cplex(lp_file_name)
    cp_lp.set_results_stream(None)
    cp_lp.parameters.threads.set(1)
    # cp_lp.parameters.mip.tolerances.mipgap = 0.0001
    # cp_lp.parameters.mip.strategy.probe.set(3)
    try:
        cp_lp.solve()
    except Exception:  # narrowed from bare except
        print("Exception raised during solve")
        return
    return cp_lp.solution.get_objective_value()
# SPEARMAN - old
def generate_ilp_distance(lp_file_name, votes_1, votes_2, params, metric_name):
    """Write the election-distance ILP to ``lp_file_name`` in LP format.

    ``N`` variables match voters, ``M`` variables match candidates, and
    ``P`` variables select (voter pair, candidate pair) combinations whose
    weights depend on ``metric_name`` ('spearman', 'alt', 'hamming';
    anything else gets weight 0).

    Fixes: the original never closed the file (now a ``with`` block), and
    the 'alt' metric crashed with a NameError because the positional
    vectors were only computed for 'spearman' — they are now computed for
    'alt' as well.
    """
    with open(lp_file_name, 'w') as lp_file:
        # Objective: weighted sum over all P variables.
        lp_file.write("Minimize\n")
        first = True
        for k in range(params['voters']):
            for l in range(params['voters']):
                vote_1 = votes_1[k]
                vote_2 = votes_2[l]
                # Positional vectors are needed by both 'spearman' and 'alt'.
                if metric_name in ('spearman', 'alt'):
                    pote_1 = [0] * params['candidates']
                    pote_2 = [0] * params['candidates']
                    for i in range(params['candidates']):
                        pote_1[vote_1[i]] = i
                        pote_2[vote_2[i]] = i
                for i in range(params['candidates']):
                    for j in range(params['candidates']):
                        if not first:
                            lp_file.write(" + ")
                        first = False
                        if metric_name == "spearman":
                            weight = abs(pote_1[i] - pote_2[j])
                        elif metric_name == "alt":
                            weight = float(abs(pote_1[i] - pote_2[j]) ** (2)) / float(
                                1. + min(pote_1[i], pote_2[j]))
                        elif metric_name == 'hamming':
                            weight = hamming(vote_1, vote_2)
                        else:
                            weight = 0
                        lp_file.write(
                            str(weight) + " P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(
                                j))
        lp_file.write("\n")
        lp_file.write("Subject To\n")
        # P may be 1 only if both the candidate match M and voter match N are chosen.
        for k in range(params['voters']):
            for l in range(params['voters']):
                for i in range(params['candidates']):
                    for j in range(params['candidates']):
                        lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
                        lp_file.write(" - " + "M" + "i" + str(i) + "j" + str(j) + " <= 0" + "\n")
                        lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
                        lp_file.write(" - " + "N" + "k" + str(k) + "l" + str(l) + " <= 0" + "\n")
        # N is a permutation over voters (rows and columns each sum to 1).
        for k in range(params['voters']):
            first = True
            for l in range(params['voters']):
                if not first:
                    lp_file.write(" + ")
                first = False
                lp_file.write("N" + "k" + str(k) + "l" + str(l))
            lp_file.write(" = 1" + "\n")
        for l in range(params['voters']):
            first = True
            for k in range(params['voters']):
                if not first:
                    lp_file.write(" + ")
                first = False
                lp_file.write("N" + "k" + str(k) + "l" + str(l))
            lp_file.write(" = 1" + "\n")
        # M is a permutation over candidates.
        for i in range(params['candidates']):
            first = True
            for j in range(params['candidates']):
                if not first:
                    lp_file.write(" + ")
                first = False
                lp_file.write("M" + "i" + str(i) + "j" + str(j))
            lp_file.write(" = 1" + "\n")
        for j in range(params['candidates']):
            first = True
            for i in range(params['candidates']):
                if not first:
                    lp_file.write(" + ")
                first = False
                lp_file.write("M" + "i" + str(i) + "j" + str(j))
            lp_file.write(" = 1" + "\n")
        # Coupling: every (voter k, candidate i) is covered by exactly one P.
        for k in range(params['voters']):
            for i in range(params['candidates']):
                first = True
                for l in range(params['voters']):
                    for j in range(params['candidates']):
                        if not first:
                            lp_file.write(" + ")
                        first = False
                        lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
                lp_file.write(" = 1" + "\n")
        # Symmetric coupling for every (voter l, candidate j).
        for l in range(params['voters']):
            for j in range(params['candidates']):
                first = True
                for k in range(params['voters']):
                    for i in range(params['candidates']):
                        if not first:
                            lp_file.write(" + ")
                        first = False
                        lp_file.write("P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j))
                lp_file.write(" = 1" + "\n")
        # All decision variables are binary.
        lp_file.write("Binary\n")
        for k in range(params['voters']):
            for l in range(params['voters']):
                for i in range(params['candidates']):
                    for j in range(params['candidates']):
                        lp_file.write(
                            "P" + "k" + str(k) + "l" + str(l) + "i" + str(i) + "j" + str(j) + "\n")
        for k in range(params['voters']):
            for l in range(params['voters']):
                lp_file.write("N" + "k" + str(k) + "l" + str(l) + "\n")
        for i in range(params['candidates']):
            for j in range(params['candidates']):
                lp_file.write("M" + "i" + str(i) + "j" + str(j) + "\n")
        lp_file.write("End\n")
def solve_ilp_distance(lp_file_name, votes_1, votes_2, params, metric_name):
    """Solve the ILP stored in *lp_file_name* with CPLEX and return its objective value.

    Returns None when CPLEX raises CplexSolverError (the failure is printed,
    not re-raised).
    NOTE(review): votes_1, votes_2, params and metric_name are unused here —
    presumably kept so the signature matches sibling distance solvers; confirm.
    """
    cp_lp = cplex.Cplex(lp_file_name)
    # Silence the solver's log output.
    cp_lp.set_results_stream(None)
    # cp_lp.parameters.threads.set(1)
    # cp_lp.parameters.timelimit.set(60)
    try:
        cp_lp.solve()
    except cplex.CplexSolverError:
        print("Exception raised during solve")
        return
    total = cp_lp.solution.get_objective_value()
    return total
def spearman_cost(single_votes_1, single_votes_2, params, perm):
    """Spearman (footrule) distance between two votes under candidate permutations.

    perm[0] / perm[1] relabel the candidates of vote 1 / vote 2; the cost is
    the sum over candidates of the absolute difference of their positions in
    the two (relabelled) votes, returned as a float.
    """
    num_cands = params['candidates']
    pos_1 = [0] * num_cands
    pos_2 = [0] * num_cands
    # Invert each vote: pos_x[candidate] = rank of that candidate in the vote.
    for rank in range(num_cands):
        pos_1[int(perm[0][single_votes_1[rank]])] = rank
        pos_2[int(perm[1][single_votes_2[rank]])] = rank
    return float(sum(abs(p - q) for p, q in zip(pos_1, pos_2)))
def spearman_cost_per_cand(single_votes_1, single_votes_2, params, perm):
    """Per-candidate Spearman position differences between two votes.

    Same construction as spearman_cost, but instead of the total this returns
    a list (indexed by candidate id after permutation) of the individual
    |position difference| values as floats.
    """
    num_cands = params['candidates']
    pos_1 = [0] * num_cands
    pos_2 = [0] * num_cands
    # Invert each vote: pos_x[candidate] = rank of that candidate in the vote.
    for rank in range(num_cands):
        pos_1[int(perm[0][single_votes_1[rank]])] = rank
        pos_2[int(perm[1][single_votes_2[rank]])] = rank
    return [float(abs(p - q)) for p, q in zip(pos_1, pos_2)]
def remove_lp_file(path):
    """Best-effort removal of the temporary LP file at *path*.

    OS-level failures (missing file, permissions, …) are swallowed on purpose
    — the file is scratch output.  The original bare ``except:`` also hid
    unrelated bugs (e.g. a TypeError from a bad *path*); only OSError is
    suppressed now.
    """
    try:
        os.remove(path)
    except OSError:
        pass
| 34.632733
| 134
| 0.467666
| 5,592
| 44,226
| 3.526645
| 0.041488
| 0.084478
| 0.105421
| 0.036256
| 0.877643
| 0.863749
| 0.821865
| 0.796511
| 0.752599
| 0.710258
| 0
| 0.023567
| 0.386922
| 44,226
| 1,276
| 135
| 34.659875
| 0.703769
| 0.043006
| 0
| 0.7979
| 0
| 0
| 0.056566
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020997
| false
| 0.001312
| 0.006562
| 0
| 0.048556
| 0.010499
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8c670bd80a26f44623ab287ff7aadedb2184afce
| 14,028
|
py
|
Python
|
finley/test/python/run_trilinosComplexSolversOnFinley.py
|
markendr/esys-escript.github.io
|
0023eab09cd71f830ab098cb3a468e6139191e8d
|
[
"Apache-2.0"
] | null | null | null |
finley/test/python/run_trilinosComplexSolversOnFinley.py
|
markendr/esys-escript.github.io
|
0023eab09cd71f830ab098cb3a468e6139191e8d
|
[
"Apache-2.0"
] | null | null | null |
finley/test/python/run_trilinosComplexSolversOnFinley.py
|
markendr/esys-escript.github.io
|
0023eab09cd71f830ab098cb3a468e6139191e8d
|
[
"Apache-2.0"
] | null | null | null |
##############################################################################
#
# Copyright (c) 2003-2018 by The University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Apache License, version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development 2012-2013 by School of Earth Sciences
# Development from 2014 by Centre for Geoscience Computing (GeoComp)
#
##############################################################################
from __future__ import print_function, division
__copyright__="""Copyright (c) 2003-2018 by The University of Queensland
http://www.uq.edu.au
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Open Software License version 3.0
http://www.opensource.org/licenses/osl-3.0.php"""
__url__="https://launchpad.net/escript-finley"
"""
Test suite for PDE solvers on finley
"""
from test_simplesolve import ComplexSolveTestCase, ComplexSolveTestCaseOrder2
import esys.escriptcore.utestselect as unittest
from esys.escriptcore.testing import *
from esys.escript import Data, Solution, Vector, hasFeature
from esys.finley import Rectangle, Brick
from esys.escript.linearPDEs import SolverOptions
# Trilinos availability gates the whole suite (see skipIf decorators below).
HAVE_TRILINOS = hasFeature('trilinos')
skip_muelu_long = False #hasFeature("longindex")
# number of elements in the spatial directions
NE0=12
NE1=12
NE2=8
# Ask finley to optimize the mesh when building domains.
OPTIMIZE=True
@unittest.skipIf(not HAVE_TRILINOS, "Trilinos not available")
class ComplexSolveOnTrilinos(ComplexSolveTestCase):
    """Base for order-1 Trilinos complex-solve tests; skipped without Trilinos."""
    pass
@unittest.skipIf(not HAVE_TRILINOS, "Trilinos not available")
class ComplexSolveOnTrilinosOrder2(ComplexSolveTestCaseOrder2):
    """Base for order-2 Trilinos complex-solve tests; skipped without Trilinos."""
    pass
## direct
class Test_ComplexSolveFinleyRect_Order1_Trilinos_Direct(ComplexSolveOnTrilinos):
    """Direct solver, 2-D rectangle, order-1 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.DIRECT
    def tearDown(self):
        del self.domain
## direct
class Test_ComplexSolveFinleyRect_Order2_Trilinos_Direct(ComplexSolveOnTrilinosOrder2):
    """Direct solver, 2-D rectangle, order-2 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.DIRECT
    def tearDown(self):
        del self.domain
## direct
class Test_ComplexSolveFinleyBrick_Order1_Trilinos_Direct(ComplexSolveOnTrilinos):
    """Direct solver, 3-D brick, order-1 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.DIRECT
    def tearDown(self):
        del self.domain
## direct
class Test_ComplexSolveFinleyBrick_Order2_Trilinos_Direct(ComplexSolveOnTrilinos):
    """Direct solver, 3-D brick, order-2 mesh.

    NOTE(review): inherits the order-1 base (ComplexSolveOnTrilinos) despite
    the order-2 mesh — possibly intentional, verify against the test design.
    """
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.DIRECT
    def tearDown(self):
        del self.domain
### BiCGStab + Jacobi
@unittest.skip("fails with Nan during iteration.")
class Test_ComplexSolveFinleyRect_Order1_Trilinos_BICGSTAB_Jacobi(ComplexSolveOnTrilinos):
    """BiCGStab + Jacobi, 2-D rectangle, order-1 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.BICGSTAB
        self.preconditioner = SolverOptions.JACOBI
    def tearDown(self):
        del self.domain
@unittest.skip("fails with Nan during iteration.")
class Test_ComplexSolveFinleyRect_Order2_Trilinos_BICGSTAB_Jacobi(ComplexSolveOnTrilinosOrder2):
    """BiCGStab + Jacobi, 2-D rectangle, order-2 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.BICGSTAB
        self.preconditioner = SolverOptions.JACOBI
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyBrick_Order1_Trilinos_BICGSTAB_Jacobi(ComplexSolveOnTrilinos):
    """BiCGStab + Jacobi, 3-D brick, order-1 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.BICGSTAB
        self.preconditioner = SolverOptions.JACOBI
    def tearDown(self):
        del self.domain
@unittest.skip("convergence problems")
class Test_ComplexSolveFinleyBrick_Order2_Trilinos_BICGSTAB_Jacobi(ComplexSolveOnTrilinosOrder2):
    """BiCGStab + Jacobi, 3-D brick, order-2 elements; loosened tolerance."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.BICGSTAB
        self.preconditioner = SolverOptions.JACOBI
        self.REL_TOL=5.e-6
    def tearDown(self):
        del self.domain
### PCG + Jacobi
class Test_ComplexSolveFinleyRect_Order1_Trilinos_PCG_Jacobi(ComplexSolveOnTrilinos):
    """PCG + Jacobi, 2-D rectangle, order-1 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.JACOBI
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyRect_Order2_Trilinos_PCG_Jacobi(ComplexSolveOnTrilinosOrder2):
    """PCG + Jacobi, 2-D rectangle, order-2 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.JACOBI
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyBrick_Order1_Trilinos_PCG_Jacobi(ComplexSolveOnTrilinos):
    """PCG + Jacobi, 3-D brick, order-1 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.JACOBI
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyBrick_Order2_Trilinos_PCG_Jacobi(ComplexSolveOnTrilinosOrder2):
    """PCG + Jacobi, 3-D brick, order-2 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.JACOBI
    def tearDown(self):
        del self.domain
### BiCGStab + Gauss-Seidel
class Test_ComplexSolveFinleyRect_Order1_Trilinos_BICGSTAB_GaussSeidel(ComplexSolveOnTrilinos):
    """BiCGStab + Gauss-Seidel, 2-D rectangle, order-1 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.BICGSTAB
        self.preconditioner = SolverOptions.GAUSS_SEIDEL
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyRect_Order2_Trilinos_BICGSTAB_GaussSeidel(ComplexSolveOnTrilinosOrder2):
    """BiCGStab + Gauss-Seidel, 2-D rectangle, order-2 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.BICGSTAB
        self.preconditioner = SolverOptions.GAUSS_SEIDEL
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyBrick_Order1_Trilinos_BICGSTAB_GaussSeidel(ComplexSolveOnTrilinos):
    """BiCGStab + Gauss-Seidel, 3-D brick, order-1 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.BICGSTAB
        self.preconditioner = SolverOptions.GAUSS_SEIDEL
    def tearDown(self):
        del self.domain
@unittest.skip("convergence problems")
class Test_ComplexSolveFinleyBrick_Order2_Trilinos_BICGSTAB_GaussSeidel(ComplexSolveOnTrilinosOrder2):
    """BiCGStab + Gauss-Seidel, 3-D brick, order-2 elements; loosened tolerance."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.BICGSTAB
        self.preconditioner = SolverOptions.GAUSS_SEIDEL
        self.REL_TOL=5.e-6
    def tearDown(self):
        del self.domain
### PCG + AMG
@unittest.skipIf(skip_muelu_long, "MueLu AMG incompatible with index type long")
class Test_ComplexSolveFinleyRect_Order1_Trilinos_PCG_AMG(ComplexSolveOnTrilinos):
    """PCG + AMG (MueLu), 2-D rectangle, order-1 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.AMG
    def tearDown(self):
        del self.domain
@unittest.skipIf(skip_muelu_long, "MueLu AMG incompatible with index type long")
class Test_ComplexSolveFinleyRect_Order2_Trilinos_PCG_AMG(ComplexSolveOnTrilinosOrder2):
    """PCG + AMG (MueLu), 2-D rectangle, order-2 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.AMG
    def tearDown(self):
        del self.domain
@unittest.skipIf(skip_muelu_long, "MueLu AMG incompatible with index type long")
class Test_ComplexSolveFinleyBrick_Order1_Trilinos_PCG_AMG(ComplexSolveOnTrilinos):
    """PCG + AMG (MueLu), 3-D brick, order-1 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.AMG
    def tearDown(self):
        del self.domain
@unittest.skipIf(skip_muelu_long, "MueLu AMG incompatible with index type long")
class Test_ComplexSolveFinleyBrick_Order2_Trilinos_PCG_AMG(ComplexSolveOnTrilinosOrder2):
    """PCG + AMG (MueLu), 3-D brick, order-2 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.AMG
    def tearDown(self):
        del self.domain
### BiCGStab + RILU
class Test_ComplexSolveFinleyRect_Order1_Trilinos_BICGSTAB_RILU(ComplexSolveOnTrilinos):
    """BiCGStab + RILU, 2-D rectangle, order-1 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.BICGSTAB
        self.preconditioner = SolverOptions.RILU
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyRect_Order2_Trilinos_BICGSTAB_RILU(ComplexSolveOnTrilinosOrder2):
    """BiCGStab + RILU, 2-D rectangle, order-2 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.BICGSTAB
        self.preconditioner = SolverOptions.RILU
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyBrick_Order1_Trilinos_BICGSTAB_RILU(ComplexSolveOnTrilinos):
    """BiCGStab + RILU, 3-D brick, order-1 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.BICGSTAB
        self.preconditioner = SolverOptions.RILU
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyBrick_Order2_Trilinos_BICGSTAB_RILU(ComplexSolveOnTrilinosOrder2):
    """BiCGStab + RILU, 3-D brick, order-2 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.BICGSTAB
        self.preconditioner = SolverOptions.RILU
    def tearDown(self):
        del self.domain
### PCG + RILU
class Test_ComplexSolveFinleyRect_Order1_Trilinos_PCG_RILU(ComplexSolveOnTrilinos):
    """PCG + RILU, 2-D rectangle, order-1 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.RILU
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyRect_Order2_Trilinos_PCG_RILU(ComplexSolveOnTrilinosOrder2):
    """PCG + RILU, 2-D rectangle, order-2 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.RILU
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyBrick_Order1_Trilinos_PCG_RILU(ComplexSolveOnTrilinos):
    """PCG + RILU, 3-D brick, order-1 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.RILU
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyBrick_Order2_Trilinos_PCG_RILU(ComplexSolveOnTrilinosOrder2):
    """PCG + RILU, 3-D brick, order-2 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.RILU
    def tearDown(self):
        del self.domain
### PCG + ILUT
class Test_ComplexSolveFinleyRect_Order1_Trilinos_PCG_ILUT(ComplexSolveOnTrilinos):
    """PCG + ILUT, 2-D rectangle, order-1 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.ILUT
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyRect_Order2_Trilinos_PCG_ILUT(ComplexSolveOnTrilinosOrder2):
    """PCG + ILUT, 2-D rectangle, order-2 elements."""
    def setUp(self):
        self.domain = Rectangle(NE0, NE1, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.ILUT
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyBrick_Order1_Trilinos_PCG_ILUT(ComplexSolveOnTrilinos):
    """PCG + ILUT, 3-D brick, order-1 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 1, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.ILUT
    def tearDown(self):
        del self.domain
class Test_ComplexSolveFinleyBrick_Order2_Trilinos_PCG_ILUT(ComplexSolveOnTrilinosOrder2):
    """PCG + ILUT, 3-D brick, order-2 elements."""
    def setUp(self):
        self.domain = Brick(NE0, NE1, NE2, 2, optimize=OPTIMIZE)
        self.package = SolverOptions.TRILINOS
        self.method = SolverOptions.PCG
        self.preconditioner = SolverOptions.ILUT
    def tearDown(self):
        del self.domain
if __name__ == '__main__':
    # Run the whole suite; exit_on_failure makes CI fail fast.
    run_tests(__name__, exit_on_failure=True)
| 34.895522
| 102
| 0.721699
| 1,499
| 14,028
| 6.615077
| 0.104069
| 0.064542
| 0.038725
| 0.051634
| 0.897338
| 0.889976
| 0.873538
| 0.871117
| 0.871117
| 0.857604
| 0
| 0.018777
| 0.187553
| 14,028
| 401
| 103
| 34.982544
| 0.851277
| 0.0427
| 0
| 0.809689
| 0
| 0
| 0.044527
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.221453
| false
| 0.00692
| 0.024221
| 0
| 0.363322
| 0.00346
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8c792c195a520a7c0db7d35e44bfe815ead9cb45
| 1,067
|
py
|
Python
|
data-engineering/labs-concurrent-distributed-programming/config.py
|
xR86/ml-stuff
|
2a1b79408897171b78032ff2531ab6f8b18be6c4
|
[
"MIT"
] | 3
|
2018-12-11T03:03:15.000Z
|
2020-02-11T19:38:07.000Z
|
data-engineering/labs-concurrent-distributed-programming/config.py
|
xR86/ml-stuff
|
2a1b79408897171b78032ff2531ab6f8b18be6c4
|
[
"MIT"
] | 6
|
2017-05-31T20:58:32.000Z
|
2021-02-16T23:13:15.000Z
|
data-engineering/labs-concurrent-distributed-programming/config.py
|
xR86/ml-stuff
|
2a1b79408897171b78032ff2531ab6f8b18be6c4
|
[
"MIT"
] | null | null | null |
# Test-battery definitions for the concurrent/distributed programming lab.
#
# - demo_battery: one quick UDP smoke test (single run).
# - full_battery: the full cross product of protocol x setup x size,
#   3 repetitions each, in the same order as the original hand-written list
#   (protocol outermost, then setup, then size).
#
# The redundant dict({...}) wrapper was dropped (a dict literal already is a
# dict), and the eight duplicated full_battery entries are generated instead.
CONFIG = {
    'demo_battery': [
        {
            'protocol': 'UDP',
            'setup': 'single',
            'mode': 'simple',
            'size': '1MB',
            'test_count': 1
        }
    ],
    'full_battery': [
        {
            'protocol': protocol,
            'setup': setup,
            'mode': 'simple',
            'size': size,
            'test_count': 3
        }
        for protocol in ('UDP', 'TCP')
        for setup in ('single', 'multi')
        for size in ('1MB', '10MB')
    ]
}
| 14.226667
| 21
| 0.453608
| 105
| 1,067
| 4.504762
| 0.190476
| 0.190275
| 0.266385
| 0.266385
| 0.959831
| 0.959831
| 0.959831
| 0.959831
| 0.959831
| 0.930233
| 0
| 0.028609
| 0.279288
| 1,067
| 74
| 22
| 14.418919
| 0.586476
| 0
| 0
| 0.637681
| 0
| 0
| 0.435801
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8c7dbf9e8e3f22fa948818b20f9b851b189bc115
| 80
|
py
|
Python
|
src/hello/helloWorld.py
|
codeWriter9/python-starter
|
1786406cffad3253bbeb3362c800d296641f953b
|
[
"MIT"
] | null | null | null |
src/hello/helloWorld.py
|
codeWriter9/python-starter
|
1786406cffad3253bbeb3362c800d296641f953b
|
[
"MIT"
] | null | null | null |
src/hello/helloWorld.py
|
codeWriter9/python-starter
|
1786406cffad3253bbeb3362c800d296641f953b
|
[
"MIT"
] | null | null | null |
from hello import helloWorldUtil
def hello_world():
    """Return the canonical greeting string."""
    greeting = "Hello World!!"
    return greeting
| 16
| 32
| 0.7375
| 10
| 80
| 5.8
| 0.7
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 80
| 4
| 33
| 20
| 0.878788
| 0
| 0
| 0
| 0
| 0
| 0.1625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
5069cddc609260e3cfc5a9be138b957bc7229fcd
| 5,693
|
py
|
Python
|
imcsdk/mometa/aaa/AaaUserPasswordExpiration.py
|
vadimkuznetsov/imcsdk
|
ed038ce1dbc8031f99d2dfb3ccee3bf0b48309d8
|
[
"Apache-2.0"
] | null | null | null |
imcsdk/mometa/aaa/AaaUserPasswordExpiration.py
|
vadimkuznetsov/imcsdk
|
ed038ce1dbc8031f99d2dfb3ccee3bf0b48309d8
|
[
"Apache-2.0"
] | null | null | null |
imcsdk/mometa/aaa/AaaUserPasswordExpiration.py
|
vadimkuznetsov/imcsdk
|
ed038ce1dbc8031f99d2dfb3ccee3bf0b48309d8
|
[
"Apache-2.0"
] | 1
|
2019-11-10T18:42:04.000Z
|
2019-11-10T18:42:04.000Z
|
"""This module contains the general information for AaaUserPasswordExpiration ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class AaaUserPasswordExpirationConsts:
    """String constants accepted by AaaUserPasswordExpiration.admin_action."""
    ADMIN_ACTION_RESTORE_DEFAULT = "restore-default"
class AaaUserPasswordExpiration(ManagedObject):
    """This is AaaUserPasswordExpiration class.

    Auto-generated managed-object wrapper for the IMC
    aaaUserPasswordExpiration endpoint (password-expiration policy under
    aaaUserEp).  Metadata is keyed per platform: "classic" vs "modular".
    """
    consts = AaaUserPasswordExpirationConsts()
    # This MO has no naming properties; its rn is the fixed "password-expiration".
    naming_props = set([])
    # Managed-object metadata per platform flavour.
    mo_meta = {
        "classic": MoMeta("AaaUserPasswordExpiration", "aaaUserPasswordExpiration", "password-expiration", VersionMeta.Version301c, "InputOutput", 0x1ff, [], ["admin", "user"], [u'aaaUserEp'], [], ["Get", "Set"]),
        "modular": MoMeta("AaaUserPasswordExpiration", "aaaUserPasswordExpiration", "password-expiration", VersionMeta.Version301c, "InputOutput", 0x1ff, [], ["admin", "user"], [u'aaaUserEp'], [], ["Get", "Set"])
    }
    # Property metadata (type, version, access, masks, ranges) per platform.
    prop_meta = {
        "classic": {
            "admin_action": MoPropertyMeta("admin_action", "adminAction", "string", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x2, 0, 510, None, ["restore-default"], []),
            "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version301c, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
            "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
            "password_expiry_duration": MoPropertyMeta("password_expiry_duration", "passwordExpiryDuration", "uint", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, [], ["0-3650"]),
            "password_grace_period": MoPropertyMeta("password_grace_period", "passwordGracePeriod", "uint", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, [], ["0-5"]),
            "password_history": MoPropertyMeta("password_history", "passwordHistory", "uint", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, [], ["0-5"]),
            "password_notification_period": MoPropertyMeta("password_notification_period", "passwordNotificationPeriod", "uint", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x40, None, None, None, [], ["0-15"]),
            "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x80, 0, 255, None, [], []),
            "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x100, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
        },
        "modular": {
            "admin_action": MoPropertyMeta("admin_action", "adminAction", "string", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x2, 0, 510, None, ["restore-default"], []),
            "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version301c, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
            "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
            "password_expiry_duration": MoPropertyMeta("password_expiry_duration", "passwordExpiryDuration", "uint", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, [], ["0-3650"]),
            "password_grace_period": MoPropertyMeta("password_grace_period", "passwordGracePeriod", "uint", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, [], ["0-5"]),
            "password_history": MoPropertyMeta("password_history", "passwordHistory", "uint", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, [], ["0-5"]),
            "password_notification_period": MoPropertyMeta("password_notification_period", "passwordNotificationPeriod", "uint", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x40, None, None, None, [], ["0-15"]),
            "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x80, 0, 255, None, [], []),
            "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version301c, MoPropertyMeta.READ_WRITE, 0x100, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
        },
    }
    # XML attribute name -> Python attribute name, per platform.
    prop_map = {
        "classic": {
            "adminAction": "admin_action",
            "childAction": "child_action",
            "dn": "dn",
            "passwordExpiryDuration": "password_expiry_duration",
            "passwordGracePeriod": "password_grace_period",
            "passwordHistory": "password_history",
            "passwordNotificationPeriod": "password_notification_period",
            "rn": "rn",
            "status": "status",
        },
        "modular": {
            "adminAction": "admin_action",
            "childAction": "child_action",
            "dn": "dn",
            "passwordExpiryDuration": "password_expiry_duration",
            "passwordGracePeriod": "password_grace_period",
            "passwordHistory": "password_history",
            "passwordNotificationPeriod": "password_notification_period",
            "rn": "rn",
            "status": "status",
        },
    }
    def __init__(self, parent_mo_or_dn, **kwargs):
        """Initialize with every writable property unset (None)."""
        self._dirty_mask = 0
        self.admin_action = None
        self.child_action = None
        self.password_expiry_duration = None
        self.password_grace_period = None
        self.password_history = None
        self.password_notification_period = None
        self.status = None
        ManagedObject.__init__(self, "AaaUserPasswordExpiration", parent_mo_or_dn, **kwargs)
| 61.880435
| 220
| 0.659758
| 503
| 5,693
| 7.264414
| 0.180915
| 0.056924
| 0.17734
| 0.175151
| 0.813355
| 0.803503
| 0.803503
| 0.803503
| 0.803503
| 0.803503
| 0
| 0.033931
| 0.187248
| 5,693
| 91
| 221
| 62.56044
| 0.755781
| 0.022835
| 0
| 0.571429
| 0
| 0
| 0.305531
| 0.136012
| 0
| 0
| 0.01261
| 0
| 0
| 1
| 0.014286
| false
| 0.371429
| 0.042857
| 0
| 0.171429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
5085ede90f07436e9c56aac40f444c1ff54931e8
| 221
|
py
|
Python
|
tests/micropython/viper_const.py
|
sebi5361/micropython
|
6c054cd124bc6229bee127128264dc0829dea53c
|
[
"MIT"
] | 198
|
2017-03-24T23:23:54.000Z
|
2022-01-07T07:14:00.000Z
|
tests/micropython/viper_const.py
|
sebi5361/micropython
|
6c054cd124bc6229bee127128264dc0829dea53c
|
[
"MIT"
] | 509
|
2017-03-28T19:37:18.000Z
|
2022-03-31T20:31:43.000Z
|
tests/micropython/viper_const.py
|
sebi5361/micropython
|
6c054cd124bc6229bee127128264dc0829dea53c
|
[
"MIT"
] | 187
|
2017-03-24T23:23:58.000Z
|
2022-02-25T01:48:45.000Z
|
# test loading constants in viper functions
@micropython.viper
def f():
    return b'bytes'
print(f())
# Intentionally redefines f: the second test exercises a nested viper
# function (with an annotated int return) rather than a bytes constant.
@micropython.viper
def f():
    @micropython.viper
    def g() -> int:
        return 123
    return g
print(f()())
| 14.733333
| 43
| 0.628959
| 30
| 221
| 4.633333
| 0.533333
| 0.345324
| 0.410072
| 0.28777
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017751
| 0.235294
| 221
| 14
| 44
| 15.785714
| 0.804734
| 0.18552
| 0
| 0.454545
| 0
| 0
| 0.02809
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| true
| 0
| 0
| 0.181818
| 0.545455
| 0.181818
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
508ad3506135a4f73824667aeafcdbeaf42b8c78
| 1,499
|
py
|
Python
|
tests/test_common.py
|
pythonerheaven/helloworld
|
b00746d5f0eff7aa65719b0d22525c387860e991
|
[
"Apache-2.0"
] | 2
|
2018-02-02T04:42:12.000Z
|
2021-01-21T07:03:36.000Z
|
tests/test_common.py
|
htq310542/helloworld
|
90605529ef67c0c3397bbd8f00336962e80aa863
|
[
"Apache-2.0"
] | null | null | null |
tests/test_common.py
|
htq310542/helloworld
|
90605529ef67c0c3397bbd8f00336962e80aa863
|
[
"Apache-2.0"
] | 1
|
2018-02-02T04:42:13.000Z
|
2018-02-02T04:42:13.000Z
|
# Exhaustive truth table for the conditional expression
#   trading = hk_trading if is_hk_trade else us_trading
# Each tuple is (hk_trading, is_hk_trade, us_trading), in the same order as
# the original hand-unrolled cases; for each case the chosen trading value
# and hk_trading are printed.
for hk_trading, is_hk_trade, us_trading in [
    (1, 1, 1),
    (1, 1, 0),
    (1, 0, 0),
    (0, 0, 0),
    (0, 1, 1),
    (0, 0, 1),
    (0, 1, 0),
    (1, 0, 1),
]:
    trading = hk_trading if is_hk_trade else us_trading
    print(trading)
    print(hk_trading)
def get_adder(summand1):
    """Return a function that adds its argument to *summand1*."""
    return lambda summand2: summand1 + summand2
# Demonstrate calling the returned adder immediately.
# Renamed the result from "sum" to "total": binding it as "sum" shadowed the
# builtin for the rest of the module.  Nothing later reads the old name.
total = get_adder(1)(2)
print(total)
i = 4
def foo(x):
    """Demonstrate that a closure sees the enclosing function's local *i*.

    Because the loop below assigns ``i``, ``i`` is local to foo, so ``bar``
    closes over that local — not over the module-level ``i = 4``.  Returns
    the last element of *x* (raises UnboundLocalError for empty *x*, as the
    original did).
    """
    def bar():
        print(i)
    # ...
    # A bunch of code here
    # ...
    for i in x: # Ah, i *is* local to Foo, so this is what Bar sees
        print(i)  # fixed: Python-2 "print i" statements were a SyntaxError on py3
    bar()
    return i
print( foo([1,2,3]) )
| 16.11828
| 67
| 0.721147
| 269
| 1,499
| 3.743494
| 0.167286
| 0.214499
| 0.142999
| 0.142999
| 0.762661
| 0.762661
| 0.762661
| 0.762661
| 0.762661
| 0.752731
| 0
| 0.028381
| 0.200801
| 1,499
| 93
| 68
| 16.11828
| 0.812187
| 0.052035
| 0
| 0.793651
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.31746
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
50981f545f36d31d30d83fafc02796279a8dfaf9
| 4,327
|
py
|
Python
|
test/pyaz/keyvault/storage/sas_definition/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
test/pyaz/keyvault/storage/sas_definition/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | 9
|
2021-09-24T16:37:24.000Z
|
2021-12-24T00:39:19.000Z
|
test/pyaz/keyvault/storage/sas_definition/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
import json, subprocess
from .... pyaz_utils import get_cli_name, get_params
def create(vault_name, account_name, name, template_uri, sas_type, validity_period, disabled=None, __SAS_DEFINITION_ATTRIBUTES=None, tags=None):
    """Create a keyvault storage sas-definition via the az CLI.

    Returns the parsed JSON output on success; raises Exception carrying the
    CLI's stderr text on failure.
    Fixes: the original signature put ``disabled=None`` before the required
    parameters, which is a SyntaxError (so no caller could depend on the old
    order); unreachable ``print`` statements after return/raise were removed.
    """
    params = get_params(locals())
    command = "az keyvault storage sas-definition create " + params
    print(command)
    # NOTE(review): shell=True with a concatenated string — assumes get_params
    # emits shell-safe text; confirm its quoting before passing untrusted input.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def list(vault_name, account_name, maxresults=None):
    """List sas-definitions for a keyvault storage account via the az CLI.

    Returns parsed JSON on success; raises Exception with stderr on failure.
    NOTE(review): the function name shadows the builtin ``list`` — kept, since
    it is this module's public CLI-mirroring interface.
    Fixes: removed unreachable ``print`` statements after return/raise.
    """
    params = get_params(locals())
    command = "az keyvault storage sas-definition list " + params
    print(command)
    # NOTE(review): shell=True with a string command; see create() for caveat.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def show(id=None, vault_name=None, account_name=None, name=None):
    """Show one sas-definition (by id, or by vault/account/name) via the az CLI.

    Returns parsed JSON on success; raises Exception with stderr on failure.
    Fixes: removed unreachable ``print`` statements after return/raise.
    """
    params = get_params(locals())
    command = "az keyvault storage sas-definition show " + params
    print(command)
    # NOTE(review): shell=True with a string command; see create() for caveat.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def update(disabled=None, id=None, vault_name=None, account_name=None, name=None, template_uri=None, sas_type=None, validity_period=None, __SAS_DEFINITION_ATTRIBUTES=None, tags=None):
    """Update a sas-definition via the az CLI.

    Returns parsed JSON on success; raises Exception with stderr on failure.
    Fixes: removed unreachable ``print`` statements after return/raise.
    """
    params = get_params(locals())
    command = "az keyvault storage sas-definition update " + params
    print(command)
    # NOTE(review): shell=True with a string command; see create() for caveat.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def delete(id=None, vault_name=None, account_name=None, name=None):
    """Delete a sas-definition via the az CLI.

    Returns parsed JSON on success; raises Exception with stderr on failure.
    Fixes: removed unreachable ``print`` statements after return/raise.
    """
    params = get_params(locals())
    command = "az keyvault storage sas-definition delete " + params
    print(command)
    # NOTE(review): shell=True with a string command; see create() for caveat.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def list_deleted(vault_name, account_name, maxresults=None):
    """List soft-deleted sas-definitions via the az CLI.

    Returns parsed JSON on success; raises Exception with stderr on failure.
    Fixes: removed unreachable ``print`` statements after return/raise.
    """
    params = get_params(locals())
    command = "az keyvault storage sas-definition list-deleted " + params
    print(command)
    # NOTE(review): shell=True with a string command; see create() for caveat.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    else:
        raise Exception(stderr)
def show_deleted(vault_name, account_name, name):
    """Run ``az keyvault storage sas-definition show-deleted`` and return its parsed JSON.

    All three arguments are required and are translated into CLI flags by
    ``get_params``.

    :return: The command's stdout parsed with ``json.loads``.
    :raises Exception: when the command produced no stdout; the exception
        message is the CLI's stderr.
    """
    # Must be the first statement so locals() holds only the declared arguments.
    params = get_params(locals())
    command = "az keyvault storage sas-definition show-deleted " + params
    print(command)
    # NOTE(review): shell=True with a concatenated string is injection-prone if
    # any argument is attacker-controlled; prefer subprocess.run([...], shell=False).
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # Fixed: the original placed this print *after* `return`, so it never ran.
        print(stdout)
        return json.loads(stdout)
    else:
        # Fixed: the original placed this print *after* `raise`, so it never ran.
        print(stderr)
        raise Exception(stderr)
def recover(vault_name, account_name, name):
    """Run ``az keyvault storage sas-definition recover`` and return its parsed JSON.

    All three arguments are required and are translated into CLI flags by
    ``get_params``.

    :return: The command's stdout parsed with ``json.loads``.
    :raises Exception: when the command produced no stdout; the exception
        message is the CLI's stderr.
    """
    # Must be the first statement so locals() holds only the declared arguments.
    params = get_params(locals())
    command = "az keyvault storage sas-definition recover " + params
    print(command)
    # NOTE(review): shell=True with a concatenated string is injection-prone if
    # any argument is attacker-controlled; prefer subprocess.run([...], shell=False).
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # Fixed: the original placed this print *after* `return`, so it never ran.
        print(stdout)
        return json.loads(stdout)
    else:
        # Fixed: the original placed this print *after* `raise`, so it never ran.
        print(stderr)
        raise Exception(stderr)
| 37.301724
| 183
| 0.676681
| 536
| 4,327
| 5.380597
| 0.102612
| 0.07767
| 0.055479
| 0.058252
| 0.926838
| 0.913662
| 0.913662
| 0.913662
| 0.913662
| 0.913662
| 0
| 0.004676
| 0.209152
| 4,327
| 115
| 184
| 37.626087
| 0.838106
| 0
| 0
| 0.830189
| 0
| 0
| 0.09822
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.018868
| null | null | 0.226415
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
50a5c4f656459f8146fc8237fc681a772ae68c63
| 26,628
|
py
|
Python
|
influxdb_client/service/invocable_scripts_service.py
|
wasted925/influxdb-client-python
|
afee531fd1dc244b3d9d270e262b0a1865a7c89d
|
[
"MIT"
] | 380
|
2019-09-19T20:20:10.000Z
|
2022-03-31T12:59:33.000Z
|
influxdb_client/service/invocable_scripts_service.py
|
wasted925/influxdb-client-python
|
afee531fd1dc244b3d9d270e262b0a1865a7c89d
|
[
"MIT"
] | 362
|
2019-09-16T11:53:29.000Z
|
2022-03-29T03:11:59.000Z
|
influxdb_client/service/invocable_scripts_service.py
|
wasted925/influxdb-client-python
|
afee531fd1dc244b3d9d270e262b0a1865a7c89d
|
[
"MIT"
] | 130
|
2019-09-20T08:02:35.000Z
|
2022-03-30T16:44:45.000Z
|
# coding: utf-8
"""
Influx OSS API Service.
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
class InvocableScriptsService(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
    def __init__(self, api_client=None):  # noqa: E501,D401,D403
        """InvocableScriptsService - an operation group defined in OpenAPI."""
        # Despite the None default, api_client is mandatory; the default only
        # exists to match the generated OpenAPI signature template.
        if api_client is None:
            raise ValueError("Invalid value for `api_client`, must be defined.")
        self.api_client = api_client
def delete_scripts_id(self, script_id, **kwargs): # noqa: E501,D401,D403
"""Delete a script.
Deletes a script and all associated records.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_scripts_id(script_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str script_id: The ID of the script to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
""" # noqa: E501
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_scripts_id_with_http_info(script_id, **kwargs) # noqa: E501
else:
(data) = self.delete_scripts_id_with_http_info(script_id, **kwargs) # noqa: E501
return data
    def delete_scripts_id_with_http_info(self, script_id, **kwargs):  # noqa: E501,D401,D403
        """Delete a script.

        Deletes a script and all associated records.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_scripts_id_with_http_info(script_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str script_id: The ID of the script to delete. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """  # noqa: E501
        # Capture locals() first so it holds exactly the declared parameters
        # plus the raw `kwargs` dict (validated and flattened below).
        local_var_params = locals()

        # Whitelist of keyword arguments this endpoint accepts.
        all_params = ['script_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        all_params.append('urlopen_kw')

        # Reject unknown kwargs; accepted ones are copied into
        # local_var_params so everything is read uniformly below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_scripts_id" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'script_id' is set
        if ('script_id' not in local_var_params or
                local_var_params['script_id'] is None):
            raise ValueError("Missing the required parameter `script_id` when calling `delete_scripts_id`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'script_id' in local_var_params:
            path_params['scriptID'] = local_var_params['script_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # urlopen optional setting
        # `kwargs` itself still holds the caller's options; only the 'kwargs'
        # entry of local_var_params was deleted above.
        urlopen_kw = None
        if 'urlopen_kw' in kwargs:
            urlopen_kw = kwargs['urlopen_kw']

        return self.api_client.call_api(
            '/api/v2/scripts/{scriptID}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            urlopen_kw=urlopen_kw)
def get_scripts(self, **kwargs): # noqa: E501,D401,D403
"""List scripts.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_scripts(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int limit: The number of scripts to return.
:param int offset: The offset for pagination.
:return: Scripts
If the method is called asynchronously,
returns the request thread.
""" # noqa: E501
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_scripts_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_scripts_with_http_info(**kwargs) # noqa: E501
return data
    def get_scripts_with_http_info(self, **kwargs):  # noqa: E501,D401,D403
        """List scripts.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_scripts_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param int limit: The number of scripts to return.
        :param int offset: The offset for pagination.
        :return: Scripts
                 If the method is called asynchronously,
                 returns the request thread.
        """  # noqa: E501
        # Capture locals() first so it holds exactly the declared parameters
        # plus the raw `kwargs` dict (validated and flattened below).
        local_var_params = locals()

        # Whitelist of keyword arguments this endpoint accepts.
        all_params = ['limit', 'offset']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        all_params.append('urlopen_kw')

        # Reject unknown kwargs; accepted ones are copied into
        # local_var_params so everything is read uniformly below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_scripts" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']

        collection_formats = {}

        path_params = {}

        # Both pagination parameters are optional query-string values.
        query_params = []
        if 'limit' in local_var_params:
            query_params.append(('limit', local_var_params['limit']))  # noqa: E501
        if 'offset' in local_var_params:
            query_params.append(('offset', local_var_params['offset']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # urlopen optional setting
        urlopen_kw = None
        if 'urlopen_kw' in kwargs:
            urlopen_kw = kwargs['urlopen_kw']

        return self.api_client.call_api(
            '/api/v2/scripts', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Scripts',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            urlopen_kw=urlopen_kw)
def get_scripts_id(self, script_id, **kwargs): # noqa: E501,D401,D403
"""Retrieve a script.
Uses script ID to retrieve details of an invocable script.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_scripts_id(script_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str script_id: The script ID. (required)
:return: Script
If the method is called asynchronously,
returns the request thread.
""" # noqa: E501
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_scripts_id_with_http_info(script_id, **kwargs) # noqa: E501
else:
(data) = self.get_scripts_id_with_http_info(script_id, **kwargs) # noqa: E501
return data
    def get_scripts_id_with_http_info(self, script_id, **kwargs):  # noqa: E501,D401,D403
        """Retrieve a script.

        Uses script ID to retrieve details of an invocable script.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_scripts_id_with_http_info(script_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str script_id: The script ID. (required)
        :return: Script
                 If the method is called asynchronously,
                 returns the request thread.
        """  # noqa: E501
        # Capture locals() first so it holds exactly the declared parameters
        # plus the raw `kwargs` dict (validated and flattened below).
        local_var_params = locals()

        # Whitelist of keyword arguments this endpoint accepts.
        all_params = ['script_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        all_params.append('urlopen_kw')

        # Reject unknown kwargs; accepted ones are copied into
        # local_var_params so everything is read uniformly below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_scripts_id" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'script_id' is set
        if ('script_id' not in local_var_params or
                local_var_params['script_id'] is None):
            raise ValueError("Missing the required parameter `script_id` when calling `get_scripts_id`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'script_id' in local_var_params:
            path_params['scriptID'] = local_var_params['script_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # urlopen optional setting
        urlopen_kw = None
        if 'urlopen_kw' in kwargs:
            urlopen_kw = kwargs['urlopen_kw']

        return self.api_client.call_api(
            '/api/v2/scripts/{scriptID}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Script',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            urlopen_kw=urlopen_kw)
def patch_scripts_id(self, script_id, script_update_request, **kwargs): # noqa: E501,D401,D403
"""Update a script.
Updates properties (`name`, `description`, and `script`) of an invocable script.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_scripts_id(script_id, script_update_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str script_id: The script ID. (required)
:param ScriptUpdateRequest script_update_request: Script update to apply (required)
:return: Script
If the method is called asynchronously,
returns the request thread.
""" # noqa: E501
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_scripts_id_with_http_info(script_id, script_update_request, **kwargs) # noqa: E501
else:
(data) = self.patch_scripts_id_with_http_info(script_id, script_update_request, **kwargs) # noqa: E501
return data
    def patch_scripts_id_with_http_info(self, script_id, script_update_request, **kwargs):  # noqa: E501,D401,D403
        """Update a script.

        Updates properties (`name`, `description`, and `script`) of an invocable script.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.patch_scripts_id_with_http_info(script_id, script_update_request, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str script_id: The script ID. (required)
        :param ScriptUpdateRequest script_update_request: Script update to apply (required)
        :return: Script
                 If the method is called asynchronously,
                 returns the request thread.
        """  # noqa: E501
        # Capture locals() first so it holds exactly the declared parameters
        # plus the raw `kwargs` dict (validated and flattened below).
        local_var_params = locals()

        # Whitelist of keyword arguments this endpoint accepts.
        all_params = ['script_id', 'script_update_request']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        all_params.append('urlopen_kw')

        # Reject unknown kwargs; accepted ones are copied into
        # local_var_params so everything is read uniformly below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method patch_scripts_id" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'script_id' is set
        if ('script_id' not in local_var_params or
                local_var_params['script_id'] is None):
            raise ValueError("Missing the required parameter `script_id` when calling `patch_scripts_id`")  # noqa: E501
        # verify the required parameter 'script_update_request' is set
        if ('script_update_request' not in local_var_params or
                local_var_params['script_update_request'] is None):
            raise ValueError("Missing the required parameter `script_update_request` when calling `patch_scripts_id`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'script_id' in local_var_params:
            path_params['scriptID'] = local_var_params['script_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The update payload is sent as the JSON request body.
        body_params = None
        if 'script_update_request' in local_var_params:
            body_params = local_var_params['script_update_request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # urlopen optional setting
        urlopen_kw = None
        if 'urlopen_kw' in kwargs:
            urlopen_kw = kwargs['urlopen_kw']

        return self.api_client.call_api(
            '/api/v2/scripts/{scriptID}', 'PATCH',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Script',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            urlopen_kw=urlopen_kw)
def post_scripts(self, script_create_request, **kwargs): # noqa: E501,D401,D403
"""Create a script.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_scripts(script_create_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ScriptCreateRequest script_create_request: The script to create. (required)
:return: Script
If the method is called asynchronously,
returns the request thread.
""" # noqa: E501
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_scripts_with_http_info(script_create_request, **kwargs) # noqa: E501
else:
(data) = self.post_scripts_with_http_info(script_create_request, **kwargs) # noqa: E501
return data
    def post_scripts_with_http_info(self, script_create_request, **kwargs):  # noqa: E501,D401,D403
        """Create a script.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_scripts_with_http_info(script_create_request, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param ScriptCreateRequest script_create_request: The script to create. (required)
        :return: Script
                 If the method is called asynchronously,
                 returns the request thread.
        """  # noqa: E501
        # Capture locals() first so it holds exactly the declared parameters
        # plus the raw `kwargs` dict (validated and flattened below).
        local_var_params = locals()

        # Whitelist of keyword arguments this endpoint accepts.
        all_params = ['script_create_request']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        all_params.append('urlopen_kw')

        # Reject unknown kwargs; accepted ones are copied into
        # local_var_params so everything is read uniformly below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_scripts" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'script_create_request' is set
        if ('script_create_request' not in local_var_params or
                local_var_params['script_create_request'] is None):
            raise ValueError("Missing the required parameter `script_create_request` when calling `post_scripts`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The new script definition is sent as the JSON request body.
        body_params = None
        if 'script_create_request' in local_var_params:
            body_params = local_var_params['script_create_request']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # urlopen optional setting
        urlopen_kw = None
        if 'urlopen_kw' in kwargs:
            urlopen_kw = kwargs['urlopen_kw']

        return self.api_client.call_api(
            '/api/v2/scripts', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Script',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            urlopen_kw=urlopen_kw)
def post_scripts_id_invoke(self, script_id, **kwargs): # noqa: E501,D401,D403
"""Invoke a script.
Invokes a script and substitutes `params` keys referenced in the script with `params` key-values sent in the request body.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_scripts_id_invoke(script_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str script_id: (required)
:param ScriptInvocationParams script_invocation_params:
:return: str
If the method is called asynchronously,
returns the request thread.
""" # noqa: E501
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_scripts_id_invoke_with_http_info(script_id, **kwargs) # noqa: E501
else:
(data) = self.post_scripts_id_invoke_with_http_info(script_id, **kwargs) # noqa: E501
return data
    def post_scripts_id_invoke_with_http_info(self, script_id, **kwargs):  # noqa: E501,D401,D403
        """Invoke a script.

        Invokes a script and substitutes `params` keys referenced in the script with `params` key-values sent in the request body.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_scripts_id_invoke_with_http_info(script_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str script_id: (required)
        :param ScriptInvocationParams script_invocation_params:
        :return: str
                 If the method is called asynchronously,
                 returns the request thread.
        """  # noqa: E501
        # Capture locals() first so it holds exactly the declared parameters
        # plus the raw `kwargs` dict (validated and flattened below).
        local_var_params = locals()

        # Whitelist of keyword arguments this endpoint accepts.
        all_params = ['script_id', 'script_invocation_params']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        all_params.append('urlopen_kw')

        # Reject unknown kwargs; accepted ones are copied into
        # local_var_params so everything is read uniformly below.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_scripts_id_invoke" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'script_id' is set
        if ('script_id' not in local_var_params or
                local_var_params['script_id'] is None):
            raise ValueError("Missing the required parameter `script_id` when calling `post_scripts_id_invoke`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'script_id' in local_var_params:
            path_params['scriptID'] = local_var_params['script_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # Optional invocation parameters become the JSON request body.
        body_params = None
        if 'script_invocation_params' in local_var_params:
            body_params = local_var_params['script_invocation_params']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # urlopen optional setting
        urlopen_kw = None
        if 'urlopen_kw' in kwargs:
            urlopen_kw = kwargs['urlopen_kw']

        return self.api_client.call_api(
            '/api/v2/scripts/{scriptID}/invoke', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='str',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
            urlopen_kw=urlopen_kw)
| 39.981982
| 132
| 0.62438
| 3,173
| 26,628
| 4.935708
| 0.05925
| 0.045974
| 0.069727
| 0.027584
| 0.944256
| 0.938446
| 0.926569
| 0.918077
| 0.911053
| 0.906839
| 0
| 0.018528
| 0.288568
| 26,628
| 665
| 133
| 40.042105
| 0.808171
| 0.313204
| 0
| 0.787293
| 1
| 0
| 0.179481
| 0.051827
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035912
| false
| 0
| 0.008287
| 0
| 0.096685
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
50e05bdf7e63b090fd2258b624bea8c108fd4a66
| 123,504
|
py
|
Python
|
embyapi/api/library_service_api.py
|
stanionascu/python-embyapi
|
a3f7aa49aea4052277cc43605c0d89bc6ff21913
|
[
"BSD-3-Clause"
] | null | null | null |
embyapi/api/library_service_api.py
|
stanionascu/python-embyapi
|
a3f7aa49aea4052277cc43605c0d89bc6ff21913
|
[
"BSD-3-Clause"
] | null | null | null |
embyapi/api/library_service_api.py
|
stanionascu/python-embyapi
|
a3f7aa49aea4052277cc43605c0d89bc6ff21913
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
"""
Emby Server API
Explore the Emby Server API # noqa: E501
OpenAPI spec version: 4.1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from embyapi.api_client import ApiClient
class LibraryServiceApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_items(self, ids, **kwargs): # noqa: E501
"""Deletes an item from the library and file system # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_items(ids, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str ids: Ids (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_items_with_http_info(ids, **kwargs) # noqa: E501
else:
(data) = self.delete_items_with_http_info(ids, **kwargs) # noqa: E501
return data
    def delete_items_with_http_info(self, ids, **kwargs):  # noqa: E501
        """Deletes an item from the library and file system  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_items_with_http_info(ids, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str ids: Ids (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """

        # Whitelist of keyword arguments this endpoint accepts.
        all_params = ['ids']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() is taken after all_params is built, so `params` also
        # carries an 'all_params' entry; harmless because only named keys
        # are read from it below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_items" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'ids' is set
        if ('ids' not in params or
                params['ids'] is None):
            raise ValueError("Missing the required parameter `ids` when calling `delete_items`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        # The ids are passed as a single query-string value.
        query_params = []
        if 'ids' in params:
            query_params.append(('Ids', params['ids']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

        return self.api_client.call_api(
            '/Items', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_items_by_id(self, id, **kwargs): # noqa: E501
"""Deletes an item from the library and file system # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_items_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Item Id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_items_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_items_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
    def delete_items_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
        """Deletes an item from the library and file system  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_items_by_id_with_http_info(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str id: Item Id (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """

        # Whitelist of keyword arguments this endpoint accepts.
        all_params = ['id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() is taken after all_params is built, so `params` also
        # carries an 'all_params' entry; harmless because only named keys
        # are read from it below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_items_by_id" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_items_by_id`")  # noqa: E501

        collection_formats = {}

        # The item id is interpolated into the URL path.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

        return self.api_client.call_api(
            '/Items/{Id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_albums_by_id_similar(self, id, **kwargs): # noqa: E501
"""Finds albums similar to a given album. # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_albums_by_id_similar(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Item Id (required)
:param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimeted.
:param bool enable_images: Optional, include image information in output
:param bool enable_user_data: Optional, include user data
:param int image_type_limit: Optional, the max number of images to return, per image type
:param str enable_image_types: Optional. The image types to include in the output.
:param str user_id: Optional. Filter by user id, and attach user data
:param int limit: Optional. The maximum number of records to return
:param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimeted. Options: Budget, Chapters, DateCreated, Genres, HomePageUrl, IndexOptions, MediaStreams, Overview, ParentId, Path, People, ProviderIds, PrimaryImageAspectRatio, Revenue, SortName, Studios, Taglines, TrailerUrls
:return: QueryResultBaseItemDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_albums_by_id_similar_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_albums_by_id_similar_with_http_info(id, **kwargs) # noqa: E501
return data
def get_albums_by_id_similar_with_http_info(self, id, **kwargs):  # noqa: E501
    """Finds albums similar to a given album.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_albums_by_id_similar_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimeted.
    :param bool enable_images: Optional, include image information in output
    :param bool enable_user_data: Optional, include user data
    :param int image_type_limit: Optional, the max number of images to return, per image type
    :param str enable_image_types: Optional. The image types to include in the output.
    :param str user_id: Optional. Filter by user id, and attach user data
    :param int limit: Optional. The maximum number of records to return
    :param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimeted.
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments the caller may pass: the endpoint's parameters
    # plus the generic client plumbing options.
    all_params = [
        'id', 'include_item_types', 'enable_images', 'enable_user_data',
        'image_type_limit', 'enable_image_types', 'user_id', 'limit',
        'fields',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_albums_by_id_similar" % key
            )
        params[key] = val

    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `get_albums_by_id_similar`")  # noqa: E501

    path_params = {'Id': params['id']}

    # (wire name, python name) pairs, kept in the original emission order.
    query_names = [
        ('IncludeItemTypes', 'include_item_types'),
        ('EnableImages', 'enable_images'),
        ('EnableUserData', 'enable_user_data'),
        ('ImageTypeLimit', 'image_type_limit'),
        ('EnableImageTypes', 'enable_image_types'),
        ('UserId', 'user_id'),
        ('Limit', 'limit'),
        ('Fields', 'fields'),
    ]
    query_params = [(wire, params[name])
                    for wire, name in query_names
                    if name in params]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/Albums/{Id}/Similar', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='QueryResultBaseItemDto',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_artists_by_id_similar(self, id, **kwargs):  # noqa: E501
    """Finds artists similar to a given artist (``/Artists/{Id}/Similar``).

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_artists_by_id_similar(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimeted.
    :param bool enable_images: Optional, include image information in output
    :param bool enable_user_data: Optional, include user data
    :param int image_type_limit: Optional, the max number of images to return, per image type
    :param str enable_image_types: Optional. The image types to include in the output.
    :param str user_id: Optional. Filter by user id, and attach user data
    :param int limit: Optional. The maximum number of records to return
    :param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimeted.
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Payload-only wrapper; sync and async callers both just get whatever
    # the *_with_http_info variant returns, so no branching is needed.
    kwargs['_return_http_data_only'] = True
    return self.get_artists_by_id_similar_with_http_info(id, **kwargs)  # noqa: E501
def get_artists_by_id_similar_with_http_info(self, id, **kwargs):  # noqa: E501
    """Finds artists similar to a given artist (``/Artists/{Id}/Similar``).

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_artists_by_id_similar_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimeted.
    :param bool enable_images: Optional, include image information in output
    :param bool enable_user_data: Optional, include user data
    :param int image_type_limit: Optional, the max number of images to return, per image type
    :param str enable_image_types: Optional. The image types to include in the output.
    :param str user_id: Optional. Filter by user id, and attach user data
    :param int limit: Optional. The maximum number of records to return
    :param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimeted.
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments the caller may pass: the endpoint's parameters
    # plus the generic client plumbing options.
    all_params = [
        'id', 'include_item_types', 'enable_images', 'enable_user_data',
        'image_type_limit', 'enable_image_types', 'user_id', 'limit',
        'fields',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_artists_by_id_similar" % key
            )
        params[key] = val

    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `get_artists_by_id_similar`")  # noqa: E501

    path_params = {'Id': params['id']}

    # (wire name, python name) pairs, kept in the original emission order.
    query_names = [
        ('IncludeItemTypes', 'include_item_types'),
        ('EnableImages', 'enable_images'),
        ('EnableUserData', 'enable_user_data'),
        ('ImageTypeLimit', 'image_type_limit'),
        ('EnableImageTypes', 'enable_image_types'),
        ('UserId', 'user_id'),
        ('Limit', 'limit'),
        ('Fields', 'fields'),
    ]
    query_params = [(wire, params[name])
                    for wire, name in query_names
                    if name in params]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/Artists/{Id}/Similar', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='QueryResultBaseItemDto',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_games_by_id_similar(self, id, **kwargs):  # noqa: E501
    """Finds games similar to a given game.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_games_by_id_similar(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimeted.
    :param bool enable_images: Optional, include image information in output
    :param bool enable_user_data: Optional, include user data
    :param int image_type_limit: Optional, the max number of images to return, per image type
    :param str enable_image_types: Optional. The image types to include in the output.
    :param str user_id: Optional. Filter by user id, and attach user data
    :param int limit: Optional. The maximum number of records to return
    :param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimeted.
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Payload-only wrapper; sync and async callers both just get whatever
    # the *_with_http_info variant returns, so no branching is needed.
    kwargs['_return_http_data_only'] = True
    return self.get_games_by_id_similar_with_http_info(id, **kwargs)  # noqa: E501
def get_games_by_id_similar_with_http_info(self, id, **kwargs):  # noqa: E501
    """Finds games similar to a given game.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_games_by_id_similar_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimeted.
    :param bool enable_images: Optional, include image information in output
    :param bool enable_user_data: Optional, include user data
    :param int image_type_limit: Optional, the max number of images to return, per image type
    :param str enable_image_types: Optional. The image types to include in the output.
    :param str user_id: Optional. Filter by user id, and attach user data
    :param int limit: Optional. The maximum number of records to return
    :param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimeted.
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments the caller may pass: the endpoint's parameters
    # plus the generic client plumbing options.
    all_params = [
        'id', 'include_item_types', 'enable_images', 'enable_user_data',
        'image_type_limit', 'enable_image_types', 'user_id', 'limit',
        'fields',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_games_by_id_similar" % key
            )
        params[key] = val

    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `get_games_by_id_similar`")  # noqa: E501

    path_params = {'Id': params['id']}

    # (wire name, python name) pairs, kept in the original emission order.
    query_names = [
        ('IncludeItemTypes', 'include_item_types'),
        ('EnableImages', 'enable_images'),
        ('EnableUserData', 'enable_user_data'),
        ('ImageTypeLimit', 'image_type_limit'),
        ('EnableImageTypes', 'enable_image_types'),
        ('UserId', 'user_id'),
        ('Limit', 'limit'),
        ('Fields', 'fields'),
    ]
    query_params = [(wire, params[name])
                    for wire, name in query_names
                    if name in params]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/Games/{Id}/Similar', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='QueryResultBaseItemDto',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_items_by_id_ancestors(self, id, **kwargs):  # noqa: E501
    """Gets all parents of an item.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_items_by_id_ancestors(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str user_id: Optional. Filter by user id, and attach user data
    :return: list[BaseItemDto]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Payload-only wrapper; sync and async callers both just get whatever
    # the *_with_http_info variant returns, so no branching is needed.
    kwargs['_return_http_data_only'] = True
    return self.get_items_by_id_ancestors_with_http_info(id, **kwargs)  # noqa: E501
def get_items_by_id_ancestors_with_http_info(self, id, **kwargs):  # noqa: E501
    """Gets all parents of an item.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_items_by_id_ancestors_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str user_id: Optional. Filter by user id, and attach user data
    :return: list[BaseItemDto]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments the caller may pass: the endpoint's parameters
    # plus the generic client plumbing options.
    all_params = [
        'id', 'user_id',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_ancestors" % key
            )
        params[key] = val

    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `get_items_by_id_ancestors`")  # noqa: E501

    path_params = {'Id': params['id']}

    # Only one optional query-string parameter for this endpoint.
    query_params = []
    if 'user_id' in params:
        query_params.append(('UserId', params['user_id']))  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/Items/{Id}/Ancestors', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[BaseItemDto]',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_items_by_id_criticreviews(self, id, **kwargs):  # noqa: E501
    """Gets critic reviews for an item.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_items_by_id_criticreviews(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param int start_index: Optional. The record index to start at. All items with a lower index will be dropped from the results.
    :param int limit: Optional. The maximum number of records to return
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Payload-only wrapper; sync and async callers both just get whatever
    # the *_with_http_info variant returns, so no branching is needed.
    kwargs['_return_http_data_only'] = True
    return self.get_items_by_id_criticreviews_with_http_info(id, **kwargs)  # noqa: E501
def get_items_by_id_criticreviews_with_http_info(self, id, **kwargs):  # noqa: E501
    """Gets critic reviews for an item.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_items_by_id_criticreviews_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param int start_index: Optional. The record index to start at. All items with a lower index will be dropped from the results.
    :param int limit: Optional. The maximum number of records to return
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments the caller may pass: the endpoint's parameters
    # plus the generic client plumbing options.
    all_params = [
        'id', 'start_index', 'limit',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_criticreviews" % key
            )
        params[key] = val

    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `get_items_by_id_criticreviews`")  # noqa: E501

    path_params = {'Id': params['id']}

    # (wire name, python name) pairs, kept in the original emission order.
    query_names = [
        ('StartIndex', 'start_index'),
        ('Limit', 'limit'),
    ]
    query_params = [(wire, params[name])
                    for wire, name in query_names
                    if name in params]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/Items/{Id}/CriticReviews', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='QueryResultBaseItemDto',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_items_by_id_deleteinfo(self, id, **kwargs):  # noqa: E501
    """Gets delete info for an item.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_items_by_id_deleteinfo(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :return: LibraryDeleteInfo
             If the method is called asynchronously,
             returns the request thread.
    """
    # Payload-only wrapper; sync and async callers both just get whatever
    # the *_with_http_info variant returns, so no branching is needed.
    kwargs['_return_http_data_only'] = True
    return self.get_items_by_id_deleteinfo_with_http_info(id, **kwargs)  # noqa: E501
def get_items_by_id_deleteinfo_with_http_info(self, id, **kwargs):  # noqa: E501
    """Gets delete info for an item.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_items_by_id_deleteinfo_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :return: LibraryDeleteInfo
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments the caller may pass: only the generic client
    # plumbing options besides the required path parameter.
    all_params = [
        'id',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_deleteinfo" % key
            )
        params[key] = val

    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `get_items_by_id_deleteinfo`")  # noqa: E501

    path_params = {'Id': params['id']}
    query_params = []  # this endpoint takes no query-string parameters

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/Items/{Id}/DeleteInfo', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='LibraryDeleteInfo',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_items_by_id_download(self, id, **kwargs):  # noqa: E501
    """Downloads item media.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_items_by_id_download(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Payload-only wrapper; sync and async callers both just get whatever
    # the *_with_http_info variant returns, so no branching is needed.
    kwargs['_return_http_data_only'] = True
    return self.get_items_by_id_download_with_http_info(id, **kwargs)  # noqa: E501
def get_items_by_id_download_with_http_info(self, id, **kwargs):  # noqa: E501
    """Downloads item media.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_items_by_id_download_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments the caller may pass: only the generic client
    # plumbing options besides the required path parameter.
    all_params = [
        'id',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_download" % key
            )
        params[key] = val

    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `get_items_by_id_download`")  # noqa: E501

    path_params = {'Id': params['id']}
    query_params = []  # this endpoint takes no query-string parameters
    # NOTE: no Accept header is negotiated for this raw-media endpoint.
    header_params = {}

    return self.api_client.call_api(
        '/Items/{Id}/Download', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_items_by_id_file(self, id, **kwargs):  # noqa: E501
    """Gets the original file of an item.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_items_by_id_file(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Payload-only wrapper; sync and async callers both just get whatever
    # the *_with_http_info variant returns, so no branching is needed.
    kwargs['_return_http_data_only'] = True
    return self.get_items_by_id_file_with_http_info(id, **kwargs)  # noqa: E501
def get_items_by_id_file_with_http_info(self, id, **kwargs):  # noqa: E501
    """Gets the original file of an item.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_items_by_id_file_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments the caller may pass: only the generic client
    # plumbing options besides the required path parameter.
    all_params = [
        'id',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    params = {'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_file" % key
            )
        params[key] = val

    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `get_items_by_id_file`")  # noqa: E501

    path_params = {'Id': params['id']}
    query_params = []  # this endpoint takes no query-string parameters
    # NOTE: no Accept header is negotiated for this raw-file endpoint.
    header_params = {}

    return self.api_client.call_api(
        '/Items/{Id}/File', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_items_by_id_similar(self, id, **kwargs):  # noqa: E501
    """Gets similar items.

    Requires authentication as user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_items_by_id_similar(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimeted.
    :param bool enable_images: Optional, include image information in output
    :param bool enable_user_data: Optional, include user data
    :param int image_type_limit: Optional, the max number of images to return, per image type
    :param str enable_image_types: Optional. The image types to include in the output.
    :param str user_id: Optional. Filter by user id, and attach user data
    :param int limit: Optional. The maximum number of records to return
    :param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimeted.
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Payload-only wrapper; sync and async callers both just get whatever
    # the *_with_http_info variant returns, so no branching is needed.
    kwargs['_return_http_data_only'] = True
    return self.get_items_by_id_similar_with_http_info(id, **kwargs)  # noqa: E501
def get_items_by_id_similar_with_http_info(self, id, **kwargs):  # noqa: E501
    """Gets similar items  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_items_by_id_similar_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimited.
    :param bool enable_images: Optional, include image information in output
    :param bool enable_user_data: Optional, include user data
    :param int image_type_limit: Optional, the max number of images to return, per image type
    :param str enable_image_types: Optional. The image types to include in the output.
    :param str user_id: Optional. Filter by user id, and attach user data
    :param int limit: Optional. The maximum number of records to return
    :param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimited. Options: Budget, Chapters, DateCreated, Genres, HomePageUrl, IndexOptions, MediaStreams, Overview, ParentId, Path, People, ProviderIds, PrimaryImageAspectRatio, Revenue, SortName, Studios, Taglines, TrailerUrls
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the client-level request options.
    all_params = [
        'id', 'include_item_types', 'enable_images', 'enable_user_data',
        'image_type_limit', 'enable_image_types', 'user_id', 'limit',
        'fields',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    # Capture named arguments, then fold validated kwargs into the map.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_similar" % key
            )
        params[key] = val
    del params['kwargs']
    # 'id' is mandatory and must not be None.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_items_by_id_similar`")  # noqa: E501

    collection_formats = {}
    path_params = {'Id': params['id']}  # noqa: E501
    # Map python argument names onto their wire-format query keys,
    # keeping only the ones the caller supplied.
    query_params = [
        (wire, params[name])
        for name, wire in (
            ('include_item_types', 'IncludeItemTypes'),
            ('enable_images', 'EnableImages'),
            ('enable_user_data', 'EnableUserData'),
            ('image_type_limit', 'ImageTypeLimit'),
            ('enable_image_types', 'EnableImageTypes'),
            ('user_id', 'UserId'),
            ('limit', 'Limit'),
            ('fields', 'Fields'),
        )
        if name in params
    ]
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/Items/{Id}/Similar', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='QueryResultBaseItemDto',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_items_by_id_thememedia(self, id, **kwargs):  # noqa: E501
    """Gets theme videos and songs for an item  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_items_by_id_thememedia(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str user_id: Optional. Filter by user id, and attach user data
    :param bool inherit_from_parent: Determines whether or not parent items should be searched for theme media.
    :return: AllThemeMediaResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the low-level variant, asking for the deserialized
    # payload only; it already returns the thread when async_req is set.
    kwargs['_return_http_data_only'] = True
    return self.get_items_by_id_thememedia_with_http_info(id, **kwargs)  # noqa: E501
def get_items_by_id_thememedia_with_http_info(self, id, **kwargs):  # noqa: E501
    """Gets theme videos and songs for an item  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_items_by_id_thememedia_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str user_id: Optional. Filter by user id, and attach user data
    :param bool inherit_from_parent: Determines whether or not parent items should be searched for theme media.
    :return: AllThemeMediaResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the client-level request options.
    all_params = [
        'id', 'user_id', 'inherit_from_parent',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    # Capture named arguments, then fold validated kwargs into the map.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_thememedia" % key
            )
        params[key] = val
    del params['kwargs']
    # 'id' is mandatory and must not be None.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_items_by_id_thememedia`")  # noqa: E501

    collection_formats = {}
    path_params = {'Id': params['id']}  # noqa: E501
    # Map python argument names onto their wire-format query keys,
    # keeping only the ones the caller supplied.
    query_params = [
        (wire, params[name])
        for name, wire in (
            ('user_id', 'UserId'),
            ('inherit_from_parent', 'InheritFromParent'),
        )
        if name in params
    ]
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/Items/{Id}/ThemeMedia', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AllThemeMediaResult',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_items_by_id_themesongs(self, id, **kwargs):  # noqa: E501
    """Gets theme songs for an item  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_items_by_id_themesongs(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str user_id: Optional. Filter by user id, and attach user data
    :param bool inherit_from_parent: Determines whether or not parent items should be searched for theme media.
    :return: ThemeMediaResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the low-level variant, asking for the deserialized
    # payload only; it already returns the thread when async_req is set.
    kwargs['_return_http_data_only'] = True
    return self.get_items_by_id_themesongs_with_http_info(id, **kwargs)  # noqa: E501
def get_items_by_id_themesongs_with_http_info(self, id, **kwargs):  # noqa: E501
    """Gets theme songs for an item  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_items_by_id_themesongs_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str user_id: Optional. Filter by user id, and attach user data
    :param bool inherit_from_parent: Determines whether or not parent items should be searched for theme media.
    :return: ThemeMediaResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the client-level request options.
    all_params = [
        'id', 'user_id', 'inherit_from_parent',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    # Capture named arguments, then fold validated kwargs into the map.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_themesongs" % key
            )
        params[key] = val
    del params['kwargs']
    # 'id' is mandatory and must not be None.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_items_by_id_themesongs`")  # noqa: E501

    collection_formats = {}
    path_params = {'Id': params['id']}  # noqa: E501
    # Map python argument names onto their wire-format query keys,
    # keeping only the ones the caller supplied.
    query_params = [
        (wire, params[name])
        for name, wire in (
            ('user_id', 'UserId'),
            ('inherit_from_parent', 'InheritFromParent'),
        )
        if name in params
    ]
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/Items/{Id}/ThemeSongs', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ThemeMediaResult',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_items_by_id_themevideos(self, id, **kwargs):  # noqa: E501
    """Gets theme videos for an item  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_items_by_id_themevideos(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str user_id: Optional. Filter by user id, and attach user data
    :param bool inherit_from_parent: Determines whether or not parent items should be searched for theme media.
    :return: ThemeMediaResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the low-level variant, asking for the deserialized
    # payload only; it already returns the thread when async_req is set.
    kwargs['_return_http_data_only'] = True
    return self.get_items_by_id_themevideos_with_http_info(id, **kwargs)  # noqa: E501
def get_items_by_id_themevideos_with_http_info(self, id, **kwargs):  # noqa: E501
    """Gets theme videos for an item  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_items_by_id_themevideos_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str user_id: Optional. Filter by user id, and attach user data
    :param bool inherit_from_parent: Determines whether or not parent items should be searched for theme media.
    :return: ThemeMediaResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the client-level request options.
    all_params = [
        'id', 'user_id', 'inherit_from_parent',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    # Capture named arguments, then fold validated kwargs into the map.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_themevideos" % key
            )
        params[key] = val
    del params['kwargs']
    # 'id' is mandatory and must not be None.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_items_by_id_themevideos`")  # noqa: E501

    collection_formats = {}
    path_params = {'Id': params['id']}  # noqa: E501
    # Map python argument names onto their wire-format query keys,
    # keeping only the ones the caller supplied.
    query_params = [
        (wire, params[name])
        for name, wire in (
            ('user_id', 'UserId'),
            ('inherit_from_parent', 'InheritFromParent'),
        )
        if name in params
    ]
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/Items/{Id}/ThemeVideos', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ThemeMediaResult',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_items_counts(self, **kwargs):  # noqa: E501
    """get_items_counts  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_items_counts(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user_id: Optional. Get counts from a specific user's library.
    :param bool is_favorite: Optional. Get counts of favorite items
    :return: ItemCounts
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the low-level variant, asking for the deserialized
    # payload only; it already returns the thread when async_req is set.
    kwargs['_return_http_data_only'] = True
    return self.get_items_counts_with_http_info(**kwargs)  # noqa: E501
def get_items_counts_with_http_info(self, **kwargs):  # noqa: E501
    """get_items_counts  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_items_counts_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user_id: Optional. Get counts from a specific user's library.
    :param bool is_favorite: Optional. Get counts of favorite items
    :return: ItemCounts
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the client-level request options.
    all_params = [
        'user_id', 'is_favorite',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    # Capture named arguments, then fold validated kwargs into the map.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_counts" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    # Map python argument names onto their wire-format query keys,
    # keeping only the ones the caller supplied.
    query_params = [
        (wire, params[name])
        for name, wire in (
            ('user_id', 'UserId'),
            ('is_favorite', 'IsFavorite'),
        )
        if name in params
    ]
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/Items/Counts', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ItemCounts',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_libraries_availableoptions(self, **kwargs):  # noqa: E501
    """get_libraries_availableoptions  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_libraries_availableoptions(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: LibraryLibraryOptionsResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the low-level variant, asking for the deserialized
    # payload only; it already returns the thread when async_req is set.
    kwargs['_return_http_data_only'] = True
    return self.get_libraries_availableoptions_with_http_info(**kwargs)  # noqa: E501
def get_libraries_availableoptions_with_http_info(self, **kwargs):  # noqa: E501
    """get_libraries_availableoptions  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_libraries_availableoptions_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: LibraryLibraryOptionsResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no parameters of its own, only the
    # client-level request options.
    all_params = [
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    # Capture named arguments, then fold validated kwargs into the map.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_libraries_availableoptions" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    query_params = []
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/Libraries/AvailableOptions', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='LibraryLibraryOptionsResult',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_library_mediafolders(self, **kwargs):  # noqa: E501
    """Gets all user media folders.  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_library_mediafolders(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool is_hidden: Optional. Filter by folders that are marked hidden, or not.
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the low-level variant, asking for the deserialized
    # payload only; it already returns the thread when async_req is set.
    kwargs['_return_http_data_only'] = True
    return self.get_library_mediafolders_with_http_info(**kwargs)  # noqa: E501
def get_library_mediafolders_with_http_info(self, **kwargs):  # noqa: E501
    """Gets all user media folders.  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_library_mediafolders_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool is_hidden: Optional. Filter by folders that are marked hidden, or not.
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the client-level request options.
    all_params = [
        'is_hidden',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    # Capture named arguments, then fold validated kwargs into the map.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_library_mediafolders" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    # Only forward the IsHidden filter when the caller supplied it.
    query_params = []
    if 'is_hidden' in params:
        query_params.append(('IsHidden', params['is_hidden']))  # noqa: E501
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/Library/MediaFolders', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='QueryResultBaseItemDto',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_library_physicalpaths(self, **kwargs):  # noqa: E501
    """Gets a list of physical paths from virtual folders  # noqa: E501

    Requires authentication as administrator.  Synchronous by default;
    pass ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_library_physicalpaths(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[str]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the low-level variant, asking for the deserialized
    # payload only; it already returns the thread when async_req is set.
    kwargs['_return_http_data_only'] = True
    return self.get_library_physicalpaths_with_http_info(**kwargs)  # noqa: E501
def get_library_physicalpaths_with_http_info(self, **kwargs):  # noqa: E501
    """Gets a list of physical paths from virtual folders  # noqa: E501

    Requires authentication as administrator.  Synchronous by default;
    pass ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_library_physicalpaths_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[str]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no parameters of its own, only the
    # client-level request options.
    all_params = [
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    # Capture named arguments, then fold validated kwargs into the map.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_library_physicalpaths" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    query_params = []
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/Library/PhysicalPaths', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[str]',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_library_selectablemediafolders(self, **kwargs):  # noqa: E501
    """Gets all user media folders.  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_library_selectablemediafolders(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[LibraryMediaFolder]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the low-level variant, asking for the deserialized
    # payload only; it already returns the thread when async_req is set.
    kwargs['_return_http_data_only'] = True
    return self.get_library_selectablemediafolders_with_http_info(**kwargs)  # noqa: E501
def get_library_selectablemediafolders_with_http_info(self, **kwargs):  # noqa: E501
    """Gets all user media folders.  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_library_selectablemediafolders_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[LibraryMediaFolder]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no parameters of its own, only the
    # client-level request options.
    all_params = [
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    # Capture named arguments, then fold validated kwargs into the map.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_library_selectablemediafolders" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    query_params = []
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/Library/SelectableMediaFolders', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[LibraryMediaFolder]',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_movies_by_id_similar(self, id, **kwargs):  # noqa: E501
    """Finds movies and trailers similar to a given movie.  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_movies_by_id_similar(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimited.
    :param bool enable_images: Optional, include image information in output
    :param bool enable_user_data: Optional, include user data
    :param int image_type_limit: Optional, the max number of images to return, per image type
    :param str enable_image_types: Optional. The image types to include in the output.
    :param str user_id: Optional. Filter by user id, and attach user data
    :param int limit: Optional. The maximum number of records to return
    :param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimited. Options: Budget, Chapters, DateCreated, Genres, HomePageUrl, IndexOptions, MediaStreams, Overview, ParentId, Path, People, ProviderIds, PrimaryImageAspectRatio, Revenue, SortName, Studios, Taglines, TrailerUrls
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the low-level variant, asking for the deserialized
    # payload only; it already returns the thread when async_req is set.
    kwargs['_return_http_data_only'] = True
    return self.get_movies_by_id_similar_with_http_info(id, **kwargs)  # noqa: E501
def get_movies_by_id_similar_with_http_info(self, id, **kwargs):  # noqa: E501
    """Finds movies and trailers similar to a given movie.  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to make an asynchronous HTTP request instead.

    >>> thread = api.get_movies_by_id_similar_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimited.
    :param bool enable_images: Optional, include image information in output
    :param bool enable_user_data: Optional, include user data
    :param int image_type_limit: Optional, the max number of images to return, per image type
    :param str enable_image_types: Optional. The image types to include in the output.
    :param str user_id: Optional. Filter by user id, and attach user data
    :param int limit: Optional. The maximum number of records to return
    :param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimited. Options: Budget, Chapters, DateCreated, Genres, HomePageUrl, IndexOptions, MediaStreams, Overview, ParentId, Path, People, ProviderIds, PrimaryImageAspectRatio, Revenue, SortName, Studios, Taglines, TrailerUrls
    :return: QueryResultBaseItemDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the client-level request options.
    all_params = [
        'id', 'include_item_types', 'enable_images', 'enable_user_data',
        'image_type_limit', 'enable_image_types', 'user_id', 'limit',
        'fields',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    # Capture named arguments, then fold validated kwargs into the map.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_movies_by_id_similar" % key
            )
        params[key] = val
    del params['kwargs']
    # 'id' is mandatory and must not be None.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_movies_by_id_similar`")  # noqa: E501

    collection_formats = {}
    path_params = {'Id': params['id']}  # noqa: E501
    # Map python argument names onto their wire-format query keys,
    # keeping only the ones the caller supplied.
    query_params = [
        (wire, params[name])
        for name, wire in (
            ('include_item_types', 'IncludeItemTypes'),
            ('enable_images', 'EnableImages'),
            ('enable_user_data', 'EnableUserData'),
            ('image_type_limit', 'ImageTypeLimit'),
            ('enable_image_types', 'EnableImageTypes'),
            ('user_id', 'UserId'),
            ('limit', 'Limit'),
            ('fields', 'Fields'),
        )
        if name in params
    ]
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/Movies/{Id}/Similar', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='QueryResultBaseItemDto',  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_shows_by_id_similar(self, id, **kwargs): # noqa: E501
"""Finds tv shows similar to a given one. # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_shows_by_id_similar(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Item Id (required)
:param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimeted.
:param bool enable_images: Optional, include image information in output
:param bool enable_user_data: Optional, include user data
:param int image_type_limit: Optional, the max number of images to return, per image type
:param str enable_image_types: Optional. The image types to include in the output.
:param str user_id: Optional. Filter by user id, and attach user data
:param int limit: Optional. The maximum number of records to return
:param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimeted. Options: Budget, Chapters, DateCreated, Genres, HomePageUrl, IndexOptions, MediaStreams, Overview, ParentId, Path, People, ProviderIds, PrimaryImageAspectRatio, Revenue, SortName, Studios, Taglines, TrailerUrls
:return: QueryResultBaseItemDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_shows_by_id_similar_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_shows_by_id_similar_with_http_info(id, **kwargs) # noqa: E501
return data
    def get_shows_by_id_similar_with_http_info(self, id, **kwargs):  # noqa: E501
        """Finds tv shows similar to a given one.  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_shows_by_id_similar_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: Item Id (required)
        :param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimeted.
        :param bool enable_images: Optional, include image information in output
        :param bool enable_user_data: Optional, include user data
        :param int image_type_limit: Optional, the max number of images to return, per image type
        :param str enable_image_types: Optional. The image types to include in the output.
        :param str user_id: Optional. Filter by user id, and attach user data
        :param int limit: Optional. The maximum number of records to return
        :param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimeted. Options: Budget, Chapters, DateCreated, Genres, HomePageUrl, IndexOptions, MediaStreams, Overview, ParentId, Path, People, ProviderIds, PrimaryImageAspectRatio, Revenue, SortName, Studios, Taglines, TrailerUrls
        :return: QueryResultBaseItemDto
        If the method is called asynchronously,
        returns the request thread.
        """
        # Full set of keyword arguments this endpoint accepts; anything else
        # is rejected below with a TypeError.
        all_params = ['id', 'include_item_types', 'enable_images', 'enable_user_data', 'image_type_limit', 'enable_image_types', 'user_id', 'limit', 'fields']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots self/id/kwargs at this point; merging validated
        # kwargs in and deleting the 'kwargs' entry turns it into a flat
        # name -> value mapping.  Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_shows_by_id_similar" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `get_shows_by_id_similar`")  # noqa: E501
        collection_formats = {}
        # Substituted into the '{Id}' placeholder of the URL path below.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id']  # noqa: E501
        # Optional filters are forwarded verbatim as query-string pairs.
        query_params = []
        if 'include_item_types' in params:
            query_params.append(('IncludeItemTypes', params['include_item_types']))  # noqa: E501
        if 'enable_images' in params:
            query_params.append(('EnableImages', params['enable_images']))  # noqa: E501
        if 'enable_user_data' in params:
            query_params.append(('EnableUserData', params['enable_user_data']))  # noqa: E501
        if 'image_type_limit' in params:
            query_params.append(('ImageTypeLimit', params['image_type_limit']))  # noqa: E501
        if 'enable_image_types' in params:
            query_params.append(('EnableImageTypes', params['enable_image_types']))  # noqa: E501
        if 'user_id' in params:
            query_params.append(('UserId', params['user_id']))  # noqa: E501
        if 'limit' in params:
            query_params.append(('Limit', params['limit']))  # noqa: E501
        if 'fields' in params:
            query_params.append(('Fields', params['fields']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501
        # Delegate transport, auth and deserialization to the shared client;
        # the response body is decoded into QueryResultBaseItemDto.
        return self.api_client.call_api(
            '/Shows/{Id}/Similar', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='QueryResultBaseItemDto',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_trailers_by_id_similar(self, id, **kwargs): # noqa: E501
"""Finds movies and trailers similar to a given trailer. # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_trailers_by_id_similar(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Item Id (required)
:param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimeted.
:param bool enable_images: Optional, include image information in output
:param bool enable_user_data: Optional, include user data
:param int image_type_limit: Optional, the max number of images to return, per image type
:param str enable_image_types: Optional. The image types to include in the output.
:param str user_id: Optional. Filter by user id, and attach user data
:param int limit: Optional. The maximum number of records to return
:param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimeted. Options: Budget, Chapters, DateCreated, Genres, HomePageUrl, IndexOptions, MediaStreams, Overview, ParentId, Path, People, ProviderIds, PrimaryImageAspectRatio, Revenue, SortName, Studios, Taglines, TrailerUrls
:return: QueryResultBaseItemDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_trailers_by_id_similar_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_trailers_by_id_similar_with_http_info(id, **kwargs) # noqa: E501
return data
    def get_trailers_by_id_similar_with_http_info(self, id, **kwargs):  # noqa: E501
        """Finds movies and trailers similar to a given trailer.  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_trailers_by_id_similar_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: Item Id (required)
        :param str include_item_types: Optional. If specified, results will be filtered based on item type. This allows multiple, comma delimeted.
        :param bool enable_images: Optional, include image information in output
        :param bool enable_user_data: Optional, include user data
        :param int image_type_limit: Optional, the max number of images to return, per image type
        :param str enable_image_types: Optional. The image types to include in the output.
        :param str user_id: Optional. Filter by user id, and attach user data
        :param int limit: Optional. The maximum number of records to return
        :param str fields: Optional. Specify additional fields of information to return in the output. This allows multiple, comma delimeted. Options: Budget, Chapters, DateCreated, Genres, HomePageUrl, IndexOptions, MediaStreams, Overview, ParentId, Path, People, ProviderIds, PrimaryImageAspectRatio, Revenue, SortName, Studios, Taglines, TrailerUrls
        :return: QueryResultBaseItemDto
        If the method is called asynchronously,
        returns the request thread.
        """
        # Full set of keyword arguments this endpoint accepts; anything else
        # is rejected below with a TypeError.
        all_params = ['id', 'include_item_types', 'enable_images', 'enable_user_data', 'image_type_limit', 'enable_image_types', 'user_id', 'limit', 'fields']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots self/id/kwargs at this point; merging validated
        # kwargs in and deleting the 'kwargs' entry turns it into a flat
        # name -> value mapping.  Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_trailers_by_id_similar" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `get_trailers_by_id_similar`")  # noqa: E501
        collection_formats = {}
        # Substituted into the '{Id}' placeholder of the URL path below.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id']  # noqa: E501
        # Optional filters are forwarded verbatim as query-string pairs.
        query_params = []
        if 'include_item_types' in params:
            query_params.append(('IncludeItemTypes', params['include_item_types']))  # noqa: E501
        if 'enable_images' in params:
            query_params.append(('EnableImages', params['enable_images']))  # noqa: E501
        if 'enable_user_data' in params:
            query_params.append(('EnableUserData', params['enable_user_data']))  # noqa: E501
        if 'image_type_limit' in params:
            query_params.append(('ImageTypeLimit', params['image_type_limit']))  # noqa: E501
        if 'enable_image_types' in params:
            query_params.append(('EnableImageTypes', params['enable_image_types']))  # noqa: E501
        if 'user_id' in params:
            query_params.append(('UserId', params['user_id']))  # noqa: E501
        if 'limit' in params:
            query_params.append(('Limit', params['limit']))  # noqa: E501
        if 'fields' in params:
            query_params.append(('Fields', params['fields']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/xml'])  # noqa: E501
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501
        # Delegate transport, auth and deserialization to the shared client;
        # the response body is decoded into QueryResultBaseItemDto.
        return self.api_client.call_api(
            '/Trailers/{Id}/Similar', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='QueryResultBaseItemDto',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_library_media_updated(self, body, **kwargs): # noqa: E501
"""Reports that new movies have been added by an external source # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_library_media_updated(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param LibraryPostUpdatedMedia body: PostUpdatedMedia (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_library_media_updated_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.post_library_media_updated_with_http_info(body, **kwargs) # noqa: E501
return data
    def post_library_media_updated_with_http_info(self, body, **kwargs):  # noqa: E501
        """Reports that new movies have been added by an external source  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_library_media_updated_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param LibraryPostUpdatedMedia body: PostUpdatedMedia (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Only the required request body plus the generic client options are
        # accepted; anything else is rejected below with a TypeError.
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots self/body/kwargs at this point; merging validated
        # kwargs in and deleting the 'kwargs' entry turns it into a flat
        # name -> value mapping.  Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_library_media_updated" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `post_library_media_updated`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The payload object is serialized as the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'application/xml'])  # noqa: E501
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501
        # Delegate transport, auth and serialization to the shared client;
        # the endpoint returns no payload (response_type=None).
        return self.api_client.call_api(
            '/Library/Media/Updated', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_library_movies_added(self, **kwargs): # noqa: E501
"""Deprecated. Use /Library/Media/Updated # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_library_movies_added(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_library_movies_added_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.post_library_movies_added_with_http_info(**kwargs) # noqa: E501
return data
    def post_library_movies_added_with_http_info(self, **kwargs):  # noqa: E501
        """Deprecated. Use /Library/Media/Updated  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_library_movies_added_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # This endpoint takes no parameters of its own: only the generic
        # client options are accepted.
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots self/kwargs at this point; merging validated
        # kwargs in and deleting the 'kwargs' entry turns it into a flat
        # name -> value mapping.  Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_library_movies_added" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501
        # Bare POST notification; the endpoint returns no payload.
        return self.api_client.call_api(
            '/Library/Movies/Added', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_library_movies_updated(self, **kwargs): # noqa: E501
"""Deprecated. Use /Library/Media/Updated # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_library_movies_updated(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_library_movies_updated_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.post_library_movies_updated_with_http_info(**kwargs) # noqa: E501
return data
    def post_library_movies_updated_with_http_info(self, **kwargs):  # noqa: E501
        """Deprecated. Use /Library/Media/Updated  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_library_movies_updated_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # This endpoint takes no parameters of its own: only the generic
        # client options are accepted.
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots self/kwargs at this point; merging validated
        # kwargs in and deleting the 'kwargs' entry turns it into a flat
        # name -> value mapping.  Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_library_movies_updated" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501
        # Bare POST notification; the endpoint returns no payload.
        return self.api_client.call_api(
            '/Library/Movies/Updated', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_library_refresh(self, **kwargs): # noqa: E501
"""Starts a library scan # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_library_refresh(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_library_refresh_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.post_library_refresh_with_http_info(**kwargs) # noqa: E501
return data
    def post_library_refresh_with_http_info(self, **kwargs):  # noqa: E501
        """Starts a library scan  # noqa: E501

        Requires authentication as administrator  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_library_refresh_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # This endpoint takes no parameters of its own: only the generic
        # client options are accepted.
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots self/kwargs at this point; merging validated
        # kwargs in and deleting the 'kwargs' entry turns it into a flat
        # name -> value mapping.  Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_library_refresh" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501
        # Bare POST trigger; the endpoint returns no payload.
        return self.api_client.call_api(
            '/Library/Refresh', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_library_series_added(self, **kwargs): # noqa: E501
"""Deprecated. Use /Library/Media/Updated # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_library_series_added(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_library_series_added_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.post_library_series_added_with_http_info(**kwargs) # noqa: E501
return data
    def post_library_series_added_with_http_info(self, **kwargs):  # noqa: E501
        """Deprecated. Use /Library/Media/Updated  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_library_series_added_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # This endpoint takes no parameters of its own: only the generic
        # client options are accepted.
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots self/kwargs at this point; merging validated
        # kwargs in and deleting the 'kwargs' entry turns it into a flat
        # name -> value mapping.  Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_library_series_added" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501
        # Bare POST notification; the endpoint returns no payload.
        return self.api_client.call_api(
            '/Library/Series/Added', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def post_library_series_updated(self, **kwargs): # noqa: E501
"""Deprecated. Use /Library/Media/Updated # noqa: E501
Requires authentication as user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_library_series_updated(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_library_series_updated_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.post_library_series_updated_with_http_info(**kwargs) # noqa: E501
return data
    def post_library_series_updated_with_http_info(self, **kwargs):  # noqa: E501
        """Deprecated. Use /Library/Media/Updated  # noqa: E501

        Requires authentication as user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.post_library_series_updated_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # This endpoint takes no parameters of its own: only the generic
        # client options are accepted.
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots self/kwargs at this point; merging validated
        # kwargs in and deleting the 'kwargs' entry turns it into a flat
        # name -> value mapping.  Do not introduce new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method post_library_series_updated" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501
        # Bare POST notification; the endpoint returns no payload.
        return self.api_client.call_api(
            '/Library/Series/Updated', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 43.334737
| 352
| 0.625202
| 14,545
| 123,504
| 5.064627
| 0.020832
| 0.046915
| 0.021286
| 0.027367
| 0.983086
| 0.976746
| 0.973909
| 0.972307
| 0.966497
| 0.965547
| 0
| 0.01483
| 0.286946
| 123,504
| 2,849
| 353
| 43.349947
| 0.821654
| 0.380886
| 0
| 0.836269
| 1
| 0
| 0.197884
| 0.050244
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037182
| false
| 0
| 0.002609
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
50f1a935fc342c6cc7f4dadc08f4fbeaca65dfe0
| 98
|
py
|
Python
|
geometric_vector_perceptron/__init__.py
|
AspirinCode/geometric-vector-perceptron
|
0bb57b6f4bf181c00b3855b81bff8cdd0fdd901d
|
[
"MIT"
] | null | null | null |
geometric_vector_perceptron/__init__.py
|
AspirinCode/geometric-vector-perceptron
|
0bb57b6f4bf181c00b3855b81bff8cdd0fdd901d
|
[
"MIT"
] | null | null | null |
geometric_vector_perceptron/__init__.py
|
AspirinCode/geometric-vector-perceptron
|
0bb57b6f4bf181c00b3855b81bff8cdd0fdd901d
|
[
"MIT"
] | 1
|
2021-11-28T17:35:44.000Z
|
2021-11-28T17:35:44.000Z
|
from geometric_vector_perceptron.geometric_vector_perceptron import GVP, GVPDropout, GVPLayerNorm
| 49
| 97
| 0.908163
| 11
| 98
| 7.727273
| 0.727273
| 0.352941
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061224
| 98
| 1
| 98
| 98
| 0.923913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e8440169ab6b1304cb155867cb0c8952ae9a4c3f
| 160
|
py
|
Python
|
ai_awesome/model.py
|
wiktorlazarski/ai-awesome-project-template
|
f4c93306e6ca7dc7f4fb1638e160eaa879a542b5
|
[
"Apache-2.0"
] | 15
|
2022-01-22T13:58:10.000Z
|
2022-03-29T18:57:18.000Z
|
ai_awesome/model.py
|
wiktorlazarski/ai-awesome-project-template
|
f4c93306e6ca7dc7f4fb1638e160eaa879a542b5
|
[
"Apache-2.0"
] | null | null | null |
ai_awesome/model.py
|
wiktorlazarski/ai-awesome-project-template
|
f4c93306e6ca7dc7f4fb1638e160eaa879a542b5
|
[
"Apache-2.0"
] | null | null | null |
import torch
class NeuralNet(torch.nn.Module):
    """Identity network: ``forward`` returns its input unchanged.

    Placeholder model skeleton intended to be extended with real layers.
    """

    def __init__(self):
        # BUG FIX: the original body was `pass`, so torch.nn.Module's
        # internal state (parameter/buffer/hook registries) was never set
        # up and calling the instance raised AttributeError.  Module
        # subclasses must always invoke super().__init__().
        super().__init__()

    def forward(self, X: torch.Tensor) -> torch.Tensor:
        """Return *X* unchanged (identity mapping)."""
        return X
| 16
| 55
| 0.63125
| 21
| 160
| 4.619048
| 0.666667
| 0.226804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2625
| 160
| 9
| 56
| 17.777778
| 0.822034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
2cf3d14001280f6546128060e4f38c1ea967b05e
| 1,449
|
py
|
Python
|
library/test/test_measures.py
|
pupuis/drep
|
f5f8d8029321a2f2160f443dbfe1387edb651568
|
[
"MIT"
] | null | null | null |
library/test/test_measures.py
|
pupuis/drep
|
f5f8d8029321a2f2160f443dbfe1387edb651568
|
[
"MIT"
] | null | null | null |
library/test/test_measures.py
|
pupuis/drep
|
f5f8d8029321a2f2160f443dbfe1387edb651568
|
[
"MIT"
] | null | null | null |
import unittest
import library.measures as measures
import numpy as np
class DummyEstimator:
    """Test double that ignores its input and always predicts ``y``."""

    def __init__(self, y):
        # Canned prediction returned by every predict() call.
        self.y = y

    def predict(self, x, collapse=False):
        # `x` and `collapse` exist only to mirror the real estimator
        # interface; the stored answer is returned regardless.
        return self.y
class MeasuresTest(unittest.TestCase):
    """Exercises ``measures.diversity`` / ``measures.agreement`` on canned
    estimator ensembles built from :class:`DummyEstimator`."""

    def test_diversity_on_completely_agreeing_estimators(self):
        # Two identical predictors: diversity must be zero.
        ests = [DummyEstimator(np.ones(3)), DummyEstimator(np.ones(3))]
        self.assertEqual(measures.diversity(ests, "dataset_text"), 0)

    def test_agreement_on_completely_agreeing_estimators(self):
        # Two identical predictors: agreement is maximal (2).
        ests = [DummyEstimator(np.ones(3)), DummyEstimator(np.ones(3))]
        self.assertEqual(measures.agreement(ests, "dataset_text"), 2)

    def test_diversity_on_completely_disagreeing_estimators(self):
        # Opposite-sign predictors: diversity is maximal (2).
        ests = [DummyEstimator(np.ones(3)), DummyEstimator(-np.ones(3))]
        self.assertEqual(measures.diversity(ests, "dataset_text"), 2)

    def test_agreement_on_completely_disagreeing_estimators(self):
        # Opposite-sign predictors: agreement is minimal (-2).
        ests = [DummyEstimator(np.ones(3)), DummyEstimator(-np.ones(3))]
        self.assertEqual(measures.agreement(ests, "dataset_text"), -2)

    def test_diversity_with_only_one_estimator(self):
        # Degenerate single-member ensemble: diversity is defined as 2.
        ests = [DummyEstimator(np.ones(3))]
        self.assertEqual(measures.diversity(ests, "dataset_text"), 2)
| 34.5
| 78
| 0.709455
| 164
| 1,449
| 6.060976
| 0.237805
| 0.144869
| 0.181087
| 0.190141
| 0.808853
| 0.760563
| 0.746479
| 0.746479
| 0.746479
| 0.746479
| 0
| 0.011824
| 0.182885
| 1,449
| 41
| 79
| 35.341463
| 0.827703
| 0
| 0
| 0.37931
| 0
| 0
| 0.041408
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 1
| 0.241379
| false
| 0
| 0.103448
| 0.034483
| 0.448276
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2cfd67e6830225786889170f21bc719c86e4b8fd
| 130
|
py
|
Python
|
app/main/__init__.py
|
8by8-org/usvotes
|
e2af8b2d8b986bf36804bae1c784bc78b54dc412
|
[
"MIT"
] | null | null | null |
app/main/__init__.py
|
8by8-org/usvotes
|
e2af8b2d8b986bf36804bae1c784bc78b54dc412
|
[
"MIT"
] | null | null | null |
app/main/__init__.py
|
8by8-org/usvotes
|
e2af8b2d8b986bf36804bae1c784bc78b54dc412
|
[
"MIT"
] | 1
|
2021-12-18T21:57:40.000Z
|
2021-12-18T21:57:40.000Z
|
from flask import Blueprint
main = Blueprint('main', __name__)
from app.main import starter_views
from app.main import error_pages
| 32.5
| 34
| 0.823077
| 20
| 130
| 5.05
| 0.55
| 0.257426
| 0.217822
| 0.336634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 130
| 4
| 35
| 32.5
| 0.878261
| 0
| 0
| 0
| 0
| 0
| 0.030534
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
fa32e749dd81bc046ff3f6ec56f42d7afed2980b
| 258
|
py
|
Python
|
pynumdiff/augmented_data/__init__.py
|
fossabot/PyNumDiff
|
dccad2ad7a875f2ecccb0db2bb6e2afa392916d1
|
[
"MIT"
] | null | null | null |
pynumdiff/augmented_data/__init__.py
|
fossabot/PyNumDiff
|
dccad2ad7a875f2ecccb0db2bb6e2afa392916d1
|
[
"MIT"
] | null | null | null |
pynumdiff/augmented_data/__init__.py
|
fossabot/PyNumDiff
|
dccad2ad7a875f2ecccb0db2bb6e2afa392916d1
|
[
"MIT"
] | null | null | null |
from pynumdiff.augmented_data.__augmented_data__ import linearmodel as linearmodel
from pynumdiff.augmented_data.__augmented_data__ import nonlinearmodel as nonlinearmodel
from pynumdiff.augmented_data.__augmented_data__ import __integrate__ as __integrate__
| 86
| 88
| 0.910853
| 30
| 258
| 6.966667
| 0.3
| 0.373206
| 0.315789
| 0.373206
| 0.645933
| 0.645933
| 0.645933
| 0
| 0
| 0
| 0
| 0
| 0.065891
| 258
| 3
| 89
| 86
| 0.86722
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fa4154e77a344515245e030a0334d98d42085174
| 15,536
|
py
|
Python
|
controlimcap/decoders/memory.py
|
SikandarBakht/asg2cap
|
d8a6360eaccdb8c3add5f9c4f6fd72764e47e762
|
[
"MIT"
] | 169
|
2020-03-15T08:41:39.000Z
|
2022-03-30T09:36:17.000Z
|
controlimcap/decoders/memory.py
|
wtr850/asg2cap
|
97a1d866d4a2b86c1f474bb168518f97eb2f8b96
|
[
"MIT"
] | 25
|
2020-05-23T15:14:00.000Z
|
2022-03-10T06:20:31.000Z
|
controlimcap/decoders/memory.py
|
wtr850/asg2cap
|
97a1d866d4a2b86c1f474bb168518f97eb2f8b96
|
[
"MIT"
] | 25
|
2020-04-02T10:08:01.000Z
|
2021-12-09T12:10:10.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import caption.decoders.attention
from framework.modules.embeddings import Embedding
from framework.modules.global_attention import GlobalAttention
from controlimcap.decoders.cfattention import ContentFlowAttentionDecoder
class MemoryDecoder(caption.decoders.attention.BUTDAttnDecoder):
    """Bottom-up/top-down attention decoder with a writable external memory.

    At every decoding step the attended memory slots are rewritten with
    erase/add vectors derived from the language-LSTM state, gated per word
    by a scalar "sentinal" gate so that non-visual words can leave the
    memory untouched.
    """

    def __init__(self, config):
        super().__init__(config)
        # Memory slot width depends on whether keys and values share the
        # projected attention size or keep the raw encoder feature size.
        memory_size = self.config.attn_size if self.config.memory_same_key_value else self.config.attn_input_size
        # Emits concatenated [erase | add] vectors, hence the `* 2` output.
        self.memory_update_layer = nn.Sequential(
            nn.Linear(self.config.hidden_size + memory_size, memory_size),
            nn.ReLU(),
            nn.Linear(memory_size, memory_size * 2))
        # Scalar gate per step: how strongly this word writes to memory.
        self.sentinal_layer = nn.Sequential(
            nn.Linear(self.config.hidden_size, self.config.hidden_size),
            nn.ReLU(),
            nn.Linear(self.config.hidden_size, 1))

    def forward(self, inputs, enc_globals, enc_memories, enc_masks, return_attn=False):
        '''
        Args:
            inputs: (batch, dec_seq_len)
            enc_globals: (batch, hidden_size)
            enc_memories: (batch, enc_seq_len, attn_input_size)
            enc_masks: (batch, enc_seq_len)
        Returns:
            logits: (batch*seq_len, num_words)
        '''
        enc_seq_len = enc_memories.size(1)
        memory_keys, memory_values = self.gen_memory_key_value(enc_memories)
        states = self.init_dec_state(inputs.size(0)) # zero init state
        step_outs, step_attns = [], []
        for t in range(inputs.size(1)):
            wordids = inputs[:, t]
            # Scheduled sampling: with probability ss_rate replace the
            # ground-truth token by a sample from the previous step's output.
            if t > 0 and self.config.schedule_sampling:
                sample_rate = torch.rand(wordids.size(0)).to(wordids.device)
                sample_mask = sample_rate < self.config.ss_rate
                prob = self.softmax(step_outs[-1]).detach() # detach grad
                sampled_wordids = torch.multinomial(prob, 1).view(-1)
                wordids.masked_scatter_(sample_mask, sampled_wordids)
            embed = self.embedding(wordids)
            # Attention LSTM: fed with previous language state, the global
            # encoder feature and the current word embedding.
            h_attn_lstm, c_attn_lstm = self.attn_lstm(
                torch.cat([states[0][1], enc_globals, embed], dim=1),
                (states[0][0], states[1][0]))
            # attn_score: (batch, max_attn_len)
            attn_score, attn_memory = self.attn(h_attn_lstm,
                memory_keys, memory_values, enc_masks)
            step_attns.append(attn_score)
            # Language LSTM consumes the attended memory vector.
            h_lang_lstm, c_lang_lstm = self.lang_lstm(
                torch.cat([h_attn_lstm, attn_memory], dim=1),
                (states[0][1], states[1][1]))
            # write: update memory keys and values
            # (batch, enc_seq_len, hidden_size + attn_input_size)
            individual_vectors = torch.cat(
                [h_lang_lstm.unsqueeze(1).expand(-1, enc_seq_len, -1), enc_memories], 2)
            update_vectors = self.memory_update_layer(individual_vectors)
            memory_size = update_vectors.size(-1) // 2
            erase_gates = torch.sigmoid(update_vectors[:, :, :memory_size])
            add_vectors = update_vectors[:, :, memory_size:]
            # some words do not need to attend on visual nodes
            sentinal_gates = torch.sigmoid(self.sentinal_layer(h_lang_lstm))
            memory_attn_score = attn_score * sentinal_gates
            # Gated erase-then-add write back into the encoder memory.
            enc_memories = enc_memories * (1 - memory_attn_score.unsqueeze(2) * erase_gates) \
                + memory_attn_score.unsqueeze(2) * add_vectors
            memory_keys, memory_values = self.gen_memory_key_value(enc_memories)
            outs = h_lang_lstm
            logit = self.calc_logits_with_rnn_outs(outs)
            step_outs.append(logit)
            states = (torch.stack([h_attn_lstm, h_lang_lstm], dim=0),
                torch.stack([c_attn_lstm, c_lang_lstm], dim=0))
        logits = torch.stack(step_outs, 1)
        logits = logits.view(-1, self.config.num_words)
        if return_attn:
            return logits, step_attns
        return logits

    def step_fn(self, words, step, **kwargs):
        # Single-step decode used by sample/beam search; `kwargs` threads the
        # mutable decoding state (LSTM states, rewritten memories) between calls.
        states = kwargs['states']
        enc_globals = kwargs['enc_globals']
        enc_memories = kwargs['enc_memories']
        memory_masks = kwargs['memory_masks']
        enc_seq_len = enc_memories.size(1)
        embed = self.embedding(words.squeeze(1))
        h_attn_lstm, c_attn_lstm = self.attn_lstm(
            torch.cat([states[0][1], enc_globals, embed], dim=1),
            (states[0][0], states[1][0]))
        memory_keys, memory_values = self.gen_memory_key_value(enc_memories)
        attn_score, attn_memory = self.attn(h_attn_lstm,
            memory_keys, memory_values, memory_masks)
        h_lang_lstm, c_lang_lstm = self.lang_lstm(
            torch.cat([h_attn_lstm, attn_memory], dim=1),
            (states[0][1], states[1][1]))
        logits = self.calc_logits_with_rnn_outs(h_lang_lstm)
        logprobs = self.log_softmax(logits)
        states = (torch.stack([h_attn_lstm, h_lang_lstm], dim=0),
            torch.stack([c_attn_lstm, c_lang_lstm], dim=0))
        # write: update memory keys and values (same erase/add rule as forward)
        individual_vectors = torch.cat([h_lang_lstm.unsqueeze(1).expand(-1, enc_seq_len, -1), enc_memories], 2)
        update_vectors = self.memory_update_layer(individual_vectors)
        memory_size = update_vectors.size(-1) // 2
        erase_gates = torch.sigmoid(update_vectors[:, :, :memory_size])
        add_vectors = update_vectors[:, :, memory_size:]
        sentinal_gates = torch.sigmoid(self.sentinal_layer(h_lang_lstm))
        memory_attn_score = attn_score * sentinal_gates
        enc_memories = enc_memories * (1 - memory_attn_score.unsqueeze(2) * erase_gates) \
            + memory_attn_score.unsqueeze(2) * add_vectors
        kwargs['enc_memories'] = enc_memories
        kwargs['states'] = states
        return logprobs, kwargs

    def sample_decode(self, words, enc_globals, enc_memories, enc_masks, greedy=True, early_stop=True):
        '''
        Args
            words: (batch, )
            enc_states: (batch, hidden_size)
            enc_memories: (batch, enc_seq_len, attn_input_size)
            enc_masks: (batch, enc_seq_len)
        '''
        states = self.init_dec_state(words.size(0))
        seq_words, seq_word_logprobs = caption.utils.inference.sample_decode(
            words, self.step_fn, self.config.max_words_in_sent,
            greedy=greedy, early_stop=early_stop, states=states,
            enc_globals=enc_globals, enc_memories=enc_memories, memory_masks=enc_masks)
        return seq_words, seq_word_logprobs

    def beam_search_decode(self, words, enc_globals, enc_memories, enc_masks):
        # Beam search driven by step_fn; pool size / beam width from config.
        states = self.init_dec_state(words.size(0))
        sent_pool = caption.utils.inference.beam_search_decode(words, self.step_fn,
            self.config.max_words_in_sent, beam_width=self.config.beam_width,
            sent_pool_size=self.config.sent_pool_size,
            expand_fn=self.expand_fn, select_fn=self.select_fn,
            states=states, enc_globals=enc_globals,
            enc_memories=enc_memories, memory_masks=enc_masks)
        return sent_pool
class MemoryFlowDecoder(ContentFlowAttentionDecoder):
    """Memory-writing decoder that combines content attention with a
    graph-flow location attention (attention propagated along `flow_edges`),
    interpolated by a learned gate, plus the same sentinal-gated erase/add
    memory update as MemoryDecoder.
    """

    def __init__(self, config):
        super().__init__(config)
        # Memory slot width depends on whether keys and values share the
        # projected attention size or keep the raw encoder feature size.
        memory_size = self.config.attn_size if self.config.memory_same_key_value else self.config.attn_input_size
        # Emits concatenated [erase | add] vectors, hence the `* 2` output.
        self.memory_update_layer = nn.Sequential(
            nn.Linear(self.config.hidden_size + memory_size, memory_size),
            nn.ReLU(),
            nn.Linear(memory_size, memory_size * 2))
        # Scalar gate per step: how strongly this word writes to memory.
        self.sentinal_layer = nn.Sequential(
            nn.Linear(self.config.hidden_size, self.config.hidden_size),
            nn.ReLU(),
            nn.Linear(self.config.hidden_size, 1))

    def forward(self, inputs, enc_globals, enc_memories, enc_masks, flow_edges, return_attn=False):
        '''
        Args:
            inputs: (batch, dec_seq_len)
            enc_globals: (batch, hidden_size)
            enc_memories: (batch, enc_seq_len, attn_input_size)
            enc_masks: (batch, enc_seq_len)
            flow_edges: sparse matrix (num_nodes, num_nodes), num_nodes=batch*enc_seq_len
        Returns:
            logits: (batch*seq_len, num_words)
        '''
        batch_size, max_attn_len = enc_masks.size()
        device = inputs.device
        # initialize states
        states = self.init_dec_state(batch_size) # zero init state
        # location attention: (batch, max_attn_len); all mass starts on node 0
        prev_attn_score = torch.zeros((batch_size, max_attn_len)).to(device)
        prev_attn_score[:, 0] = 1
        step_outs, step_attns = [], []
        for t in range(inputs.size(1)):
            wordids = inputs[:, t]
            # Scheduled sampling: with probability ss_rate replace the
            # ground-truth token by a sample from the previous step's output.
            if t > 0 and self.config.schedule_sampling:
                sample_rate = torch.rand(wordids.size(0)).to(wordids.device)
                sample_mask = sample_rate < self.config.ss_rate
                prob = self.softmax(step_outs[-1]).detach() # detach grad
                sampled_wordids = torch.multinomial(prob, 1).view(-1)
                wordids.masked_scatter_(sample_mask, sampled_wordids)
            embed = self.embedding(wordids)
            h_attn_lstm, c_attn_lstm = self.attn_lstm(
                torch.cat([states[0][1], enc_globals, embed], dim=1),
                (states[0][0], states[1][0]))
            memory_keys, memory_values = self.gen_memory_key_value(enc_memories)
            prev_attn_memory = torch.sum(prev_attn_score.unsqueeze(2) * memory_values, 1)
            # address_layer: NOTE(review) presumably defined in
            # ContentFlowAttentionDecoder; outputs 1 interpolation logit + 3 flow logits.
            address_params = self.address_layer(torch.cat([h_attn_lstm, prev_attn_memory], 1))
            interpolate_gate = torch.sigmoid(address_params[:, :1])
            flow_gate = torch.softmax(address_params[:, 1:], dim=1)
            # content_attn_score: (batch, max_attn_len)
            content_attn_score, content_attn_memory = self.attn(h_attn_lstm,
                memory_keys, memory_values, enc_masks)
            # location attention flow: (batch, max_attn_len)
            # NOTE(review): 'bts,bs->bt' needs flow_edges to be batched 3-D
            # (batch, t, s), which the docstring's 2-D sparse description
            # contradicts — confirm the actual layout with the caller.
            flow_attn_score_1 = torch.einsum('bts,bs->bt', flow_edges, prev_attn_score)
            flow_attn_score_2 = torch.einsum('bts,bs->bt', flow_edges, flow_attn_score_1)
            # (batch, max_attn_len, 3): 0-, 1-, and 2-hop propagated scores
            flow_attn_score = torch.stack([x.view(batch_size, max_attn_len) \
                for x in [prev_attn_score, flow_attn_score_1, flow_attn_score_2]], 2)
            flow_attn_score = torch.sum(flow_gate.unsqueeze(1) * flow_attn_score, 2)
            # content + location interpolation
            attn_score = interpolate_gate * content_attn_score + (1 - interpolate_gate) * flow_attn_score
            # final attention
            step_attns.append(attn_score)
            prev_attn_score = attn_score
            attn_memory = torch.sum(attn_score.unsqueeze(2) * memory_values, 1)
            # next layer with attended context
            h_lang_lstm, c_lang_lstm = self.lang_lstm(
                torch.cat([h_attn_lstm, attn_memory], dim=1),
                (states[0][1], states[1][1]))
            # write: update memory keys and values
            individual_vectors = torch.cat([h_lang_lstm.unsqueeze(1).expand(-1, max_attn_len, -1), enc_memories], 2)
            update_vectors = self.memory_update_layer(individual_vectors)
            memory_size = update_vectors.size(-1) // 2
            erase_gates = torch.sigmoid(update_vectors[:, :, :memory_size])
            add_vectors = update_vectors[:, :, memory_size:]
            # some words do not need to attend on visual nodes
            sentinal_gates = torch.sigmoid(self.sentinal_layer(h_lang_lstm))
            memory_attn_score = attn_score * sentinal_gates
            # Gated erase-then-add write back into the encoder memory.
            enc_memories = enc_memories * (1 - memory_attn_score.unsqueeze(2) * erase_gates) \
                + memory_attn_score.unsqueeze(2) * add_vectors
            outs = h_lang_lstm
            logit = self.calc_logits_with_rnn_outs(outs)
            step_outs.append(logit)
            states = (torch.stack([h_attn_lstm, h_lang_lstm], dim=0),
                torch.stack([c_attn_lstm, c_lang_lstm], dim=0))
        logits = torch.stack(step_outs, 1)
        logits = logits.view(-1, self.config.num_words)
        if return_attn:
            return logits, step_attns
        return logits

    def step_fn(self, words, step, **kwargs):
        # Single-step decode mirroring one iteration of forward(); `kwargs`
        # threads states, memories and the previous attention between calls.
        states = kwargs['states']
        enc_globals = kwargs['enc_globals']
        enc_memories = kwargs['enc_memories']
        memory_masks = kwargs['memory_masks']
        prev_attn_score = kwargs['prev_attn_score']
        flow_edges = kwargs['flow_edges']
        batch_size, max_attn_len = memory_masks.size()
        memory_keys, memory_values = self.gen_memory_key_value(enc_memories)
        embed = self.embedding(words.squeeze(1))
        h_attn_lstm, c_attn_lstm = self.attn_lstm(
            torch.cat([states[0][1], enc_globals, embed], dim=1),
            (states[0][0], states[1][0]))
        prev_attn_memory = torch.sum(prev_attn_score.unsqueeze(2) * memory_values, 1)
        address_params = self.address_layer(torch.cat([h_attn_lstm, prev_attn_memory], 1))
        interpolate_gate = torch.sigmoid(address_params[:, :1])
        flow_gate = torch.softmax(address_params[:, 1:], dim=1)
        # content_attn_score: (batch, max_attn_len)
        content_attn_score, content_attn_memory = self.attn(h_attn_lstm,
            memory_keys, memory_values, memory_masks)
        # location attention flow: (batch, max_attn_len)
        flow_attn_score_1 = torch.einsum('bts,bs->bt', flow_edges, prev_attn_score)
        flow_attn_score_2 = torch.einsum('bts,bs->bt', flow_edges, flow_attn_score_1)
        flow_attn_score = torch.stack([x.view(batch_size, max_attn_len) \
            for x in [prev_attn_score, flow_attn_score_1, flow_attn_score_2]], 2)
        flow_attn_score = torch.sum(flow_gate.unsqueeze(1) * flow_attn_score, 2)
        # content + location interpolation
        attn_score = interpolate_gate * content_attn_score + (1 - interpolate_gate) * flow_attn_score
        # final attention
        attn_memory = torch.sum(attn_score.unsqueeze(2) * memory_values, 1)
        h_lang_lstm, c_lang_lstm = self.lang_lstm(
            torch.cat([h_attn_lstm, attn_memory], dim=1),
            (states[0][1], states[1][1]))
        logits = self.calc_logits_with_rnn_outs(h_lang_lstm)
        logprobs = self.log_softmax(logits)
        states = (torch.stack([h_attn_lstm, h_lang_lstm], dim=0),
            torch.stack([c_attn_lstm, c_lang_lstm], dim=0))
        # write: update memory keys and values (same erase/add rule as forward)
        individual_vectors = torch.cat([h_lang_lstm.unsqueeze(1).expand(-1, max_attn_len, -1), enc_memories], 2)
        update_vectors = self.memory_update_layer(individual_vectors)
        memory_size = update_vectors.size(-1) // 2
        erase_gates = torch.sigmoid(update_vectors[:, :, :memory_size])
        add_vectors = update_vectors[:, :, memory_size:]
        sentinal_gates = torch.sigmoid(self.sentinal_layer(h_lang_lstm))
        memory_attn_score = attn_score * sentinal_gates
        enc_memories = enc_memories * (1 - memory_attn_score.unsqueeze(2) * erase_gates) \
            + memory_attn_score.unsqueeze(2) * add_vectors
        kwargs['states'] = states
        kwargs['enc_memories'] = enc_memories
        kwargs['prev_attn_score'] = attn_score
        return logprobs, kwargs

    def sample_decode(self, words, enc_globals, enc_memories, enc_masks, flow_edges, greedy=True):
        # Greedy/stochastic decoding; initial location attention on node 0.
        batch_size, max_attn_len = enc_masks.size()
        device = enc_masks.device
        states = self.init_dec_state(batch_size)
        prev_attn_score = torch.zeros((batch_size, max_attn_len)).to(device)
        prev_attn_score[:, 0] = 1
        seq_words, seq_word_logprobs = caption.utils.inference.sample_decode(
            words, self.step_fn, self.config.max_words_in_sent,
            greedy=greedy, states=states, enc_globals=enc_globals,
            enc_memories=enc_memories, memory_masks=enc_masks,
            prev_attn_score=prev_attn_score, flow_edges=flow_edges)
        return seq_words, seq_word_logprobs

    def beam_search_decode(self, words, enc_globals, enc_memories, enc_masks, flow_edges):
        # Beam search driven by step_fn; initial location attention on node 0.
        batch_size, max_attn_len = enc_masks.size()
        device = enc_masks.device
        states = self.init_dec_state(batch_size)
        prev_attn_score = torch.zeros((batch_size, max_attn_len)).to(device)
        prev_attn_score[:, 0] = 1
        sent_pool = caption.utils.inference.beam_search_decode(words, self.step_fn,
            self.config.max_words_in_sent, beam_width=self.config.beam_width,
            sent_pool_size=self.config.sent_pool_size,
            expand_fn=self.expand_fn, select_fn=self.select_fn,
            enc_memories=enc_memories, memory_masks=enc_masks,
            states=states, enc_globals=enc_globals,
            prev_attn_score=prev_attn_score, flow_edges=flow_edges)
        return sent_pool
| 41.989189
| 110
| 0.706488
| 2,247
| 15,536
| 4.52826
| 0.076992
| 0.062801
| 0.01769
| 0.022408
| 0.927666
| 0.9143
| 0.902703
| 0.89199
| 0.880786
| 0.87715
| 0
| 0.012582
| 0.18145
| 15,536
| 369
| 111
| 42.102981
| 0.787529
| 0.091594
| 0
| 0.884921
| 0
| 0
| 0.014167
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039683
| false
| 0
| 0.027778
| 0
| 0.115079
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d7286ce807fa966a7dd7ef3ae79c567719b439d5
| 108
|
py
|
Python
|
models/storage/__init__.py
|
smbdsbrain/sick_bastard
|
e99b7a5eca1d8e5f52676aabbabefd9aca5cd446
|
[
"WTFPL"
] | null | null | null |
models/storage/__init__.py
|
smbdsbrain/sick_bastard
|
e99b7a5eca1d8e5f52676aabbabefd9aca5cd446
|
[
"WTFPL"
] | null | null | null |
models/storage/__init__.py
|
smbdsbrain/sick_bastard
|
e99b7a5eca1d8e5f52676aabbabefd9aca5cd446
|
[
"WTFPL"
] | null | null | null |
# Re-export MongoStorage as the package's storage backend.
# Fix: the identical import line was duplicated; one copy suffices.
from models.storage.mongo import MongoStorage # noqa
| 36
| 53
| 0.814815
| 14
| 108
| 6.285714
| 0.5
| 0.227273
| 0.386364
| 0.5
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0.12963
| 108
| 2
| 54
| 54
| 0.93617
| 0.083333
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
ad38508dfcafee00022ef62d4fc31b2630397290
| 356
|
py
|
Python
|
mod1-previous-concepts/logs.py
|
uoc-santiloopz/graphs-complexity
|
2bd75155a10a4d6caa09db4cdc3ac37fb18e850a
|
[
"Apache-2.0"
] | null | null | null |
mod1-previous-concepts/logs.py
|
uoc-santiloopz/graphs-complexity
|
2bd75155a10a4d6caa09db4cdc3ac37fb18e850a
|
[
"Apache-2.0"
] | null | null | null |
mod1-previous-concepts/logs.py
|
uoc-santiloopz/graphs-complexity
|
2bd75155a10a4d6caa09db4cdc3ac37fb18e850a
|
[
"Apache-2.0"
] | null | null | null |
def letUsCalculate():
    """Print the explanatory banner shown before the calculation starts.

    Output is unchanged: an intro line, four identical separator lines
    (each followed by a blank line, since the literals end in "\\n" and
    print appends its own newline), and a closing announcement.
    """
    print("Where n is the number of ELEMENTS in the SET, and R is the samples taken in the function\n")
    # Fix: the separator line was copy-pasted four times; loop instead.
    for _ in range(4):
        print("///////////////////////////////\n")
    print("NOW.... LET US CALCULATE!!!\n")
| 50.857143
| 103
| 0.390449
| 37
| 356
| 3.756757
| 0.567568
| 0.215827
| 0.201439
| 0.345324
| 0.215827
| 0.215827
| 0.215827
| 0.215827
| 0
| 0
| 0
| 0
| 0.146067
| 356
| 7
| 104
| 50.857143
| 0.457237
| 0
| 0
| 0.571429
| 0
| 0.142857
| 0.703081
| 0.369748
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| true
| 0
| 0
| 0
| 0.142857
| 0.857143
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
ad4e9582098f8dbd96fd4f1e4574056d47540639
| 44,585
|
py
|
Python
|
venv/Lib/site-packages/pyo/lib/randoms.py
|
mintzer/pupillometry-rf-back
|
cfa86fa984a49dce0123798f8de5b838c02e10d5
|
[
"CC-BY-4.0"
] | null | null | null |
venv/Lib/site-packages/pyo/lib/randoms.py
|
mintzer/pupillometry-rf-back
|
cfa86fa984a49dce0123798f8de5b838c02e10d5
|
[
"CC-BY-4.0"
] | null | null | null |
venv/Lib/site-packages/pyo/lib/randoms.py
|
mintzer/pupillometry-rf-back
|
cfa86fa984a49dce0123798f8de5b838c02e10d5
|
[
"CC-BY-4.0"
] | null | null | null |
"""
Set of objects that implement different kinds of random noise generators.
"""
from __future__ import absolute_import
"""
Copyright 2009-2015 Olivier Belanger
This file is part of pyo, a python module to help digital signal
processing script creation.
pyo is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
pyo is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with pyo. If not, see <http://www.gnu.org/licenses/>.
"""
from ._core import *
from ._maps import *
class Randi(PyoObject):
    """
    Periodic pseudo-random generator with interpolation.

    Randi generates a pseudo-random number between `min` and `max`
    values at a frequency specified by `freq` parameter. Randi will
    produce straight-line interpolation between current number and the next.

    :Parent: :py:class:`PyoObject`

    :Args:

        min: float or PyoObject, optional
            Minimum value for the random generation. Defaults to 0.
        max: float or PyoObject, optional
            Maximum value for the random generation. Defaults to 1.
        freq: float or PyoObject, optional
            Polling frequency. Defaults to 1.

    >>> s = Server().boot()
    >>> s.start()
    >>> freq = Randi(500, 3000, 4)
    >>> noze = Noise().mix(2)
    >>> a = Biquad(noze, freq=freq, q=5, type=2, mul=.5).out()

    """

    def __init__(self, min=0.0, max=1.0, freq=1.0, mul=1, add=0):
        pyoArgsAssert(self, "OOOOO", min, max, freq, mul, add)
        PyoObject.__init__(self, mul, add)
        self._min = min
        self._max = max
        self._freq = freq
        min, max, freq, mul, add, lmax = convertArgsToLists(min, max, freq, mul, add)
        self._base_objs = [
            Randi_base(wrap(min, i), wrap(max, i), wrap(freq, i), wrap(mul, i), wrap(add, i)) for i in range(lmax)
        ]
        self._init_play()

    def setMin(self, x):
        """
        Replace the `min` attribute.

        :Args:

            x: float or PyoObject
                new `min` attribute.

        """
        pyoArgsAssert(self, "O", x)
        self._min = x
        x, lmax = convertArgsToLists(x)
        # Fix: called for side effect only — use a loop, not a throwaway list.
        for i, obj in enumerate(self._base_objs):
            obj.setMin(wrap(x, i))

    def setMax(self, x):
        """
        Replace the `max` attribute.

        :Args:

            x: float or PyoObject
                new `max` attribute.

        """
        pyoArgsAssert(self, "O", x)
        self._max = x
        x, lmax = convertArgsToLists(x)
        # Fix: called for side effect only — use a loop, not a throwaway list.
        for i, obj in enumerate(self._base_objs):
            obj.setMax(wrap(x, i))

    def setFreq(self, x):
        """
        Replace the `freq` attribute.

        :Args:

            x: float or PyoObject
                new `freq` attribute.

        """
        pyoArgsAssert(self, "O", x)
        self._freq = x
        x, lmax = convertArgsToLists(x)
        # Fix: called for side effect only — use a loop, not a throwaway list.
        for i, obj in enumerate(self._base_objs):
            obj.setFreq(wrap(x, i))

    def ctrl(self, map_list=None, title=None, wxnoserver=False):
        self._map_list = [
            SLMap(0.0, 1.0, "lin", "min", self._min),
            SLMap(1.0, 2.0, "lin", "max", self._max),
            SLMap(0.1, 20.0, "lin", "freq", self._freq),
            SLMapMul(self._mul),
        ]
        PyoObject.ctrl(self, map_list, title, wxnoserver)

    @property
    def min(self):
        """float or PyoObject. Minimum value."""
        return self._min

    @min.setter
    def min(self, x):
        self.setMin(x)

    @property
    def max(self):
        """float or PyoObject. Maximum value."""
        return self._max

    @max.setter
    def max(self, x):
        self.setMax(x)

    @property
    def freq(self):
        """float or PyoObject. Polling frequency."""
        return self._freq

    @freq.setter
    def freq(self, x):
        self.setFreq(x)
class Randh(PyoObject):
    """
    Periodic pseudo-random generator.

    Randh generates a pseudo-random number between `min` and `max`
    values at a frequency specified by `freq` parameter. Randh will
    hold generated value until next generation.

    :Parent: :py:class:`PyoObject`

    :Args:

        min: float or PyoObject, optional
            Minimum value for the random generation. Defaults to 0.
        max: float or PyoObject, optional
            Maximum value for the random generation. Defaults to 1.
        freq: float or PyoObject, optional
            Polling frequency. Defaults to 1.

    >>> s = Server().boot()
    >>> s.start()
    >>> freq = Randh(500, 3000, 4)
    >>> noze = Noise().mix(2)
    >>> a = Biquad(noze, freq=freq, q=5, type=2, mul=.5).out()

    """

    def __init__(self, min=0.0, max=1.0, freq=1.0, mul=1, add=0):
        pyoArgsAssert(self, "OOOOO", min, max, freq, mul, add)
        PyoObject.__init__(self, mul, add)
        self._min = min
        self._max = max
        self._freq = freq
        min, max, freq, mul, add, lmax = convertArgsToLists(min, max, freq, mul, add)
        self._base_objs = [
            Randh_base(wrap(min, i), wrap(max, i), wrap(freq, i), wrap(mul, i), wrap(add, i)) for i in range(lmax)
        ]
        self._init_play()

    def setMin(self, x):
        """
        Replace the `min` attribute.

        :Args:

            x: float or PyoObject
                new `min` attribute.

        """
        pyoArgsAssert(self, "O", x)
        self._min = x
        x, lmax = convertArgsToLists(x)
        # Fix: called for side effect only — use a loop, not a throwaway list.
        for i, obj in enumerate(self._base_objs):
            obj.setMin(wrap(x, i))

    def setMax(self, x):
        """
        Replace the `max` attribute.

        :Args:

            x: float or PyoObject
                new `max` attribute.

        """
        pyoArgsAssert(self, "O", x)
        self._max = x
        x, lmax = convertArgsToLists(x)
        # Fix: called for side effect only — use a loop, not a throwaway list.
        for i, obj in enumerate(self._base_objs):
            obj.setMax(wrap(x, i))

    def setFreq(self, x):
        """
        Replace the `freq` attribute.

        :Args:

            x: float or PyoObject
                new `freq` attribute.

        """
        pyoArgsAssert(self, "O", x)
        self._freq = x
        x, lmax = convertArgsToLists(x)
        # Fix: called for side effect only — use a loop, not a throwaway list.
        for i, obj in enumerate(self._base_objs):
            obj.setFreq(wrap(x, i))

    def ctrl(self, map_list=None, title=None, wxnoserver=False):
        self._map_list = [
            SLMap(0.0, 1.0, "lin", "min", self._min),
            SLMap(1.0, 2.0, "lin", "max", self._max),
            SLMap(0.1, 20.0, "lin", "freq", self._freq),
            SLMapMul(self._mul),
        ]
        PyoObject.ctrl(self, map_list, title, wxnoserver)

    @property
    def min(self):
        """float or PyoObject. Minimum value."""
        return self._min

    @min.setter
    def min(self, x):
        self.setMin(x)

    @property
    def max(self):
        """float or PyoObject. Maximum value."""
        return self._max

    @max.setter
    def max(self, x):
        self.setMax(x)

    @property
    def freq(self):
        """float or PyoObject. Polling frequency."""
        return self._freq

    @freq.setter
    def freq(self, x):
        self.setFreq(x)
class Choice(PyoObject):
    """
    Periodically choose a new value from a user list.

    Choice chooses a new value from a predefined list of floats `choice`
    at a frequency specified by `freq` parameter. Choice will
    hold choosen value until next generation.

    :Parent: :py:class:`PyoObject`

    :Args:

        choice: list of floats or list of lists of floats
            Possible values for the random generation.
        freq: float or PyoObject, optional
            Polling frequency. Defaults to 1.

    >>> s = Server().boot()
    >>> s.start()
    >>> freqs = midiToHz([60,62,64,65,67,69,71,72])
    >>> rnd = Choice(choice=freqs, freq=[3,4])
    >>> a = SineLoop(rnd, feedback=0.05, mul=.2).out()

    """

    def __init__(self, choice, freq=1.0, mul=1, add=0):
        pyoArgsAssert(self, "lOOO", choice, freq, mul, add)
        PyoObject.__init__(self, mul, add)
        self._choice = choice
        self._freq = freq
        freq, mul, add, lmax = convertArgsToLists(freq, mul, add)
        # A flat list is shared by every voice; a list of lists is wrapped
        # per-voice and may itself extend the number of voices.
        if type(choice[0]) != list:
            self._base_objs = [Choice_base(choice, wrap(freq, i), wrap(mul, i), wrap(add, i)) for i in range(lmax)]
        else:
            choicelen = len(choice)
            lmax = max(choicelen, lmax)
            self._base_objs = [
                Choice_base(wrap(choice, i), wrap(freq, i), wrap(mul, i), wrap(add, i)) for i in range(lmax)
            ]
        self._init_play()

    def setChoice(self, x):
        """
        Replace the `choice` attribute.

        :Args:

            x: list of floats or list of lists of floats
                new `choice` attribute.

        """
        pyoArgsAssert(self, "l", x)
        self._choice = x
        # Fix: side-effect-only list comprehensions replaced with loops;
        # the flat-list branch also dropped an unused enumerate index.
        if type(x[0]) != list:
            for obj in self._base_objs:
                obj.setChoice(self._choice)
        else:
            for i, obj in enumerate(self._base_objs):
                obj.setChoice(wrap(self._choice, i))

    def setFreq(self, x):
        """
        Replace the `freq` attribute.

        :Args:

            x: float or PyoObject
                new `freq` attribute.

        """
        pyoArgsAssert(self, "O", x)
        self._freq = x
        x, lmax = convertArgsToLists(x)
        # Fix: called for side effect only — use a loop, not a throwaway list.
        for i, obj in enumerate(self._base_objs):
            obj.setFreq(wrap(x, i))

    def ctrl(self, map_list=None, title=None, wxnoserver=False):
        self._map_list = [SLMap(0.1, 20.0, "lin", "freq", self._freq), SLMapMul(self._mul)]
        PyoObject.ctrl(self, map_list, title, wxnoserver)

    @property
    def choice(self):
        """list of floats or list of lists of floats. Possible choices."""
        return self._choice

    @choice.setter
    def choice(self, x):
        self.setChoice(x)

    @property
    def freq(self):
        """float or PyoObject. Polling frequency."""
        return self._freq

    @freq.setter
    def freq(self, x):
        self.setFreq(x)
class RandInt(PyoObject):
    """
    Periodic pseudo-random integer generator.

    RandInt generates a pseudo-random integer number between 0 and `max`
    values at a frequency specified by `freq` parameter. RandInt will
    hold generated value until the next generation.

    :Parent: :py:class:`PyoObject`

    :Args:

        max: float or PyoObject, optional
            Maximum value for the random generation. Defaults to 100.
        freq: float or PyoObject, optional
            Polling frequency. Defaults to 1.

    >>> s = Server().boot()
    >>> s.start()
    >>> freq = RandInt(max=10, freq=5, mul=100, add=500)
    >>> jit = Randi(min=0.99, max=1.01, freq=[2.33,3.41])
    >>> a = SineLoop(freq*jit, feedback=0.03, mul=.2).out()

    """

    def __init__(self, max=100, freq=1.0, mul=1, add=0):
        pyoArgsAssert(self, "OOOO", max, freq, mul, add)
        PyoObject.__init__(self, mul, add)
        self._max = max
        self._freq = freq
        max, freq, mul, add, lmax = convertArgsToLists(max, freq, mul, add)
        self._base_objs = [RandInt_base(wrap(max, i), wrap(freq, i), wrap(mul, i), wrap(add, i)) for i in range(lmax)]
        self._init_play()

    def setMax(self, x):
        """
        Replace the `max` attribute.

        :Args:

            x: float or PyoObject
                new `max` attribute.

        """
        pyoArgsAssert(self, "O", x)
        self._max = x
        x, lmax = convertArgsToLists(x)
        # Fix: called for side effect only — use a loop, not a throwaway list.
        for i, obj in enumerate(self._base_objs):
            obj.setMax(wrap(x, i))

    def setFreq(self, x):
        """
        Replace the `freq` attribute.

        :Args:

            x: float or PyoObject
                new `freq` attribute.

        """
        pyoArgsAssert(self, "O", x)
        self._freq = x
        x, lmax = convertArgsToLists(x)
        # Fix: called for side effect only — use a loop, not a throwaway list.
        for i, obj in enumerate(self._base_objs):
            obj.setFreq(wrap(x, i))

    def ctrl(self, map_list=None, title=None, wxnoserver=False):
        self._map_list = [
            SLMap(1.0, 2.0, "lin", "max", self._max),
            SLMap(0.1, 20.0, "lin", "freq", self._freq),
            SLMapMul(self._mul),
        ]
        PyoObject.ctrl(self, map_list, title, wxnoserver)

    @property
    def max(self):
        """float or PyoObject. Maximum value."""
        return self._max

    @max.setter
    def max(self, x):
        self.setMax(x)

    @property
    def freq(self):
        """float or PyoObject. Polling frequency."""
        return self._freq

    @freq.setter
    def freq(self, x):
        self.setFreq(x)
class RandDur(PyoObject):
    """
    Recursive time varying pseudo-random generator.

    RandDur generates a pseudo-random number between `min` and `max`
    arguments and uses that number to set the delay time before the next
    generation. RandDur will hold the generated value until next generation.

    :Parent: :py:class:`PyoObject`

    :Args:

        min: float or PyoObject, optional
            Minimum value for the random generation. Defaults to 0.
        max: float or PyoObject, optional
            Maximum value for the random generation. Defaults to 1.

    >>> s = Server().boot()
    >>> s.start()
    >>> dur = RandDur(min=[.05,0.1], max=[.4,.5])
    >>> trig = Change(dur)
    >>> amp = TrigEnv(trig, table=HannTable(), dur=dur, mul=.2)
    >>> freqs = midiToHz([60,63,67,70,72])
    >>> freq = TrigChoice(trig, choice=freqs)
    >>> a = LFO(freq=freq, type=2, mul=amp).out()

    """

    def __init__(self, min=0.0, max=1.0, mul=1, add=0):
        pyoArgsAssert(self, "OOOO", min, max, mul, add)
        PyoObject.__init__(self, mul, add)
        self._min = min
        self._max = max
        min, max, mul, add, lmax = convertArgsToLists(min, max, mul, add)
        self._base_objs = [RandDur_base(wrap(min, i), wrap(max, i), wrap(mul, i), wrap(add, i)) for i in range(lmax)]
        self._init_play()

    def setMin(self, x):
        """
        Replace the `min` attribute.

        :Args:

            x: float or PyoObject
                new `min` attribute.

        """
        pyoArgsAssert(self, "O", x)
        self._min = x
        x, lmax = convertArgsToLists(x)
        # Fix: called for side effect only — use a loop, not a throwaway list.
        for i, obj in enumerate(self._base_objs):
            obj.setMin(wrap(x, i))

    def setMax(self, x):
        """
        Replace the `max` attribute.

        :Args:

            x: float or PyoObject
                new `max` attribute.

        """
        pyoArgsAssert(self, "O", x)
        self._max = x
        x, lmax = convertArgsToLists(x)
        # Fix: called for side effect only — use a loop, not a throwaway list.
        for i, obj in enumerate(self._base_objs):
            obj.setMax(wrap(x, i))

    def ctrl(self, map_list=None, title=None, wxnoserver=False):
        self._map_list = [
            SLMap(0.0, 1.0, "lin", "min", self._min),
            SLMap(1.0, 2.0, "lin", "max", self._max),
            SLMapMul(self._mul),
        ]
        PyoObject.ctrl(self, map_list, title, wxnoserver)

    @property
    def min(self):
        """float or PyoObject. Minimum value."""
        return self._min

    @min.setter
    def min(self, x):
        self.setMin(x)

    @property
    def max(self):
        """float or PyoObject. Maximum value."""
        return self._max

    @max.setter
    def max(self, x):
        self.setMax(x)
class Xnoise(PyoObject):
    """
    X-class pseudo-random generator.

    Xnoise implements a few of the most common noise distributions.
    Each distribution generates values between 0 and 1.

    :Parent: :py:class:`PyoObject`

    :Args:

        dist: string or int, optional
            Distribution type. Defaults to 0.
        freq: float or PyoObject, optional
            Polling frequency. Defaults to 1.
        x1: float or PyoObject, optional
            First parameter. Defaults to 0.5.
        x2: float or PyoObject, optional
            Second parameter. Defaults to 0.5.

    .. note::

        Available distributions are:
            0. uniform
            1. linear minimum
            2. linear maximum
            3. triangular
            4. exponential minimum
            5. exponential maximum
            6. double (bi)exponential
            7. cauchy
            8. weibull
            9. gaussian
            10. poisson
            11. walker (drunk)
            12. loopseg (drunk with looped segments)

        Depending on the distribution, `x1` and `x2` parameters are applied
        as follow (names as string, or associated number can be used as `dist`
        parameter):
            0. uniform
                - x1: not used
                - x2: not used
            1. linear_min
                - x1: not used
                - x2: not used
            2. linear_max
                - x1: not used
                - x2: not used
            3. triangle
                - x1: not used
                - x2: not used
            4. expon_min
                - x1: slope {0 = no slope -> 10 = sharp slope}
                - x2: not used
            5. expon_max
                - x1: slope {0 = no slope -> 10 = sharp slope}
                - x2: not used
            6. biexpon
                - x1: bandwidth {0 = huge bandwidth -> 10 = narrow bandwidth}
                - x2: not used
            7. cauchy
                - x1: bandwidth {0 = narrow bandwidth -> 10 = huge bandwidth}
                - x2: not used
            8. weibull
                - x1: mean location {0 -> 1}
                - x2: shape {0.5 = linear min, 1.5 = expon min, 3.5 = gaussian}
            9. gaussian
                - x1: mean location {0 -> 1}
                - x2: bandwidth {0 = narrow bandwidth -> 10 = huge bandwidth}
            10. poisson
                - x1: gravity center {0 = low values -> 10 = high values}
                - x2: compress/expand range {0.1 = full compress -> 4 full expand}
            11. walker
                - x1: maximum value {0.1 -> 1}
                - x2: maximum step {0.1 -> 1}
            12. loopseg
                - x1: maximum value {0.1 -> 1}
                - x2: maximum step {0.1 -> 1}

    >>> s = Server().boot()
    >>> s.start()
    >>> lfo = Phasor(.1, 0, .5, .15)
    >>> freq = Xnoise(dist=12, freq=8, x1=1, x2=lfo, mul=1000, add=500)
    >>> jit = Randi(min=0.99, max=1.01, freq=[2.33,3.41])
    >>> a = SineLoop(freq*jit, feedback=0.03, mul=.2).out()
    """

    def __init__(self, dist=0, freq=1.0, x1=0.5, x2=0.5, mul=1, add=0):
        # `dist` may be a string name or an int code, so it is deliberately
        # left out of the "OOOOO" argument signature check below.
        pyoArgsAssert(self, "OOOOO", freq, x1, x2, mul, add)
        PyoObject.__init__(self, mul, add)
        self._dist = dist
        self._freq = freq
        self._x1 = x1
        self._x2 = x2
        dist, freq, x1, x2, mul, add, lmax = convertArgsToLists(dist, freq, x1, x2, mul, add)
        # Translate distribution names to their int codes; unknown names
        # fall back to 0 (uniform).
        for i, t in enumerate(dist):
            if type(t) in [bytes_t, unicode_t]:
                dist[i] = XNOISE_DICT.get(t, 0)
        self._base_objs = [
            Xnoise_base(wrap(dist, i), wrap(freq, i), wrap(x1, i), wrap(x2, i), wrap(mul, i), wrap(add, i))
            for i in range(lmax)
        ]
        self._init_play()

    def setDist(self, x):
        """
        Replace the `dist` attribute.

        :Args:

            x: string or int
                new `dist` attribute.
        """
        self._dist = x
        x, lmax = convertArgsToLists(x)
        for i, t in enumerate(x):
            if type(t) in [bytes_t, unicode_t]:
                x[i] = XNOISE_DICT.get(t, 0)
        # The C-level base object names this setter setType().
        [obj.setType(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setX1(self, x):
        """
        Replace the `x1` attribute.

        :Args:

            x: float or PyoObject
                new `x1` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._x1 = x
        x, lmax = convertArgsToLists(x)
        [obj.setX1(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setX2(self, x):
        """
        Replace the `x2` attribute.

        :Args:

            x: float or PyoObject
                new `x2` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._x2 = x
        x, lmax = convertArgsToLists(x)
        [obj.setX2(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setFreq(self, x):
        """
        Replace the `freq` attribute.

        :Args:

            x: float or PyoObject
                new `freq` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._freq = x
        x, lmax = convertArgsToLists(x)
        [obj.setFreq(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def ctrl(self, map_list=None, title=None, wxnoserver=False):
        self._map_list = [
            SLMap(0, 12, "lin", "dist", self._dist, res="int", dataOnly=True),
            SLMap(0.001, 200.0, "log", "freq", self._freq),
            SLMap(0, 1, "lin", "x1", self._x1),
            SLMap(0, 1, "lin", "x2", self._x2),
            SLMap(0, 2500, "lin", "mul", self._mul),
            SLMap(0, 2500, "lin", "add", self._add),
        ]
        PyoObject.ctrl(self, map_list, title, wxnoserver)

    @property
    def dist(self):
        """string or int. Distribution type."""
        return self._dist

    @dist.setter
    def dist(self, x):
        self.setDist(x)

    @property
    def freq(self):
        """float or PyoObject. Polling frequency."""
        return self._freq

    @freq.setter
    def freq(self, x):
        self.setFreq(x)

    @property
    def x1(self):
        """float or PyoObject. First parameter."""
        return self._x1

    @x1.setter
    def x1(self, x):
        self.setX1(x)

    @property
    def x2(self):
        """float or PyoObject. Second parameter."""
        return self._x2

    @x2.setter
    def x2(self, x):
        self.setX2(x)
class XnoiseMidi(PyoObject):
    """
    X-class midi notes pseudo-random generator.

    XnoiseMidi implements a few of the most common noise distributions.
    Each distribution generates integer values in the range defined with
    `mrange` parameter and output can be scaled on midi notes, hertz or
    transposition factor.

    :Parent: :py:class:`PyoObject`

    :Args:

        dist: string or int, optional
            Distribution type. Defaults to 0.
        freq: float or PyoObject, optional
            Polling frequency. Defaults to 1.
        x1: float or PyoObject, optional
            First parameter. Defaults to 0.5.
        x2: float or PyoObject, optional
            Second parameter. Defaults to 0.5.
        scale: int {0, 1, 2}, optional
            Output format. 0 = Midi, 1 = Hertz, 2 = transposition factor.
            In the transposition mode, the central key (the key where there
            is no transposition) is (`minrange` + `maxrange`) / 2. Defaults
            to 0.
        mrange: tuple of int, optional
            Minimum and maximum possible values, in Midi notes. Available
            only at initialization time. Defaults to (0, 127).

    .. note::

        Available distributions are:
            0. uniform
            1. linear minimum
            2. linear maximum
            3. triangular
            4. exponential minimum
            5. exponential maximum
            6. double (bi)exponential
            7. cauchy
            8. weibull
            9. gaussian
            10. poisson
            11. walker (drunk)
            12. loopseg (drunk with looped segments)

        Depending on the distribution, `x1` and `x2` parameters are applied
        as follow (names as string, or associated number can be used as `dist`
        parameter):
            0. uniform
                - x1: not used
                - x2: not used
            1. linear_min
                - x1: not used
                - x2: not used
            2. linear_max
                - x1: not used
                - x2: not used
            3. triangle
                - x1: not used
                - x2: not used
            4. expon_min
                - x1: slope {0 = no slope -> 10 = sharp slope}
                - x2: not used
            5. expon_max
                - x1: slope {0 = no slope -> 10 = sharp slope}
                - x2: not used
            6. biexpon
                - x1: bandwidth {0 = huge bandwidth -> 10 = narrow bandwidth}
                - x2: not used
            7. cauchy
                - x1: bandwidth {0 = narrow bandwidth -> 10 = huge bandwidth}
                - x2: not used
            8. weibull
                - x1: mean location {0 -> 1}
                - x2: shape {0.5 = linear min, 1.5 = expon min, 3.5 = gaussian}
            9. gaussian
                - x1: mean location {0 -> 1}
                - x2: bandwidth {0 = narrow bandwidth -> 10 = huge bandwidth}
            10. poisson
                - x1: gravity center {0 = low values -> 10 = high values}
                - x2: compress/expand range {0.1 = full compress -> 4 full expand}
            11. walker
                - x1: maximum value {0.1 -> 1}
                - x2: maximum step {0.1 -> 1}
            12. loopseg
                - x1: maximum value {0.1 -> 1}
                - x2: maximum step {0.1 -> 1}

    >>> s = Server().boot()
    >>> s.start()
    >>> l = Phasor(.4)
    >>> rnd = XnoiseMidi('loopseg', freq=8, x1=1, x2=l, scale=0, mrange=(60,96))
    >>> freq = Snap(rnd, choice=[0, 2, 3, 5, 7, 8, 11], scale=1)
    >>> jit = Randi(min=0.99, max=1.01, freq=[2.33,3.41])
    >>> a = SineLoop(freq*jit, feedback=0.03, mul=.2).out()
    """

    def __init__(self, dist=0, freq=1.0, x1=0.5, x2=0.5, scale=0, mrange=(0, 127), mul=1, add=0):
        # `dist` may be a string name or an int code, so it is deliberately
        # left out of the argument signature check below.
        pyoArgsAssert(self, "OOOixOO", freq, x1, x2, scale, mrange, mul, add)
        PyoObject.__init__(self, mul, add)
        self._dist = dist
        self._freq = freq
        self._x1 = x1
        self._x2 = x2
        self._scale = scale
        self._mrange = mrange
        dist, freq, x1, x2, scale, mrange, mul, add, lmax = convertArgsToLists(
            dist, freq, x1, x2, scale, mrange, mul, add
        )
        # Translate distribution names to their int codes; unknown names
        # fall back to 0 (uniform).
        for i, t in enumerate(dist):
            if type(t) in [bytes_t, unicode_t]:
                dist[i] = XNOISE_DICT.get(t, 0)
        self._base_objs = [
            XnoiseMidi_base(
                wrap(dist, i),
                wrap(freq, i),
                wrap(x1, i),
                wrap(x2, i),
                wrap(scale, i),
                wrap(mrange, i),
                wrap(mul, i),
                wrap(add, i),
            )
            for i in range(lmax)
        ]
        self._init_play()

    def setDist(self, x):
        """
        Replace the `dist` attribute.

        :Args:

            x: string or int
                new `dist` attribute.
        """
        self._dist = x
        x, lmax = convertArgsToLists(x)
        for i, t in enumerate(x):
            if type(t) in [bytes_t, unicode_t]:
                x[i] = XNOISE_DICT.get(t, 0)
        # The C-level base object names this setter setType().
        [obj.setType(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setScale(self, x):
        """
        Replace the `scale` attribute.

        Possible values are:
            0. Midi notes
            1. Hertz
            2. transposition factor (central key is (`minrange` + `maxrange`) / 2)

        :Args:

            x: int {0, 1, 2}
                new `scale` attribute.
        """
        pyoArgsAssert(self, "i", x)
        self._scale = x
        x, lmax = convertArgsToLists(x)
        [obj.setScale(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setRange(self, mini, maxi):
        """
        Replace the `mrange` attribute.

        :Args:

            mini: int
                minimum output midi range.
            maxi: int
                maximum output midi range.
        """
        pyoArgsAssert(self, "ii", mini, maxi)
        self._mrange = (mini, maxi)
        mini, maxi, lmax = convertArgsToLists(mini, maxi)
        [obj.setRange(wrap(mini, i), wrap(maxi, i)) for i, obj in enumerate(self._base_objs)]

    def setX1(self, x):
        """
        Replace the `x1` attribute.

        :Args:

            x: float or PyoObject
                new `x1` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._x1 = x
        x, lmax = convertArgsToLists(x)
        [obj.setX1(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setX2(self, x):
        """
        Replace the `x2` attribute.

        :Args:

            x: float or PyoObject
                new `x2` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._x2 = x
        x, lmax = convertArgsToLists(x)
        [obj.setX2(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setFreq(self, x):
        """
        Replace the `freq` attribute.

        :Args:

            x: float or PyoObject
                new `freq` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._freq = x
        x, lmax = convertArgsToLists(x)
        [obj.setFreq(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def ctrl(self, map_list=None, title=None, wxnoserver=False):
        self._map_list = [
            SLMap(0, 12, "lin", "dist", self._dist, res="int", dataOnly=True),
            SLMap(0.001, 200.0, "log", "freq", self._freq),
            SLMap(0, 1, "lin", "x1", self._x1),
            SLMap(0, 1, "lin", "x2", self._x2),
            SLMap(0, 2, "lin", "scale", self._scale, res="int", dataOnly=True),
        ]
        PyoObject.ctrl(self, map_list, title, wxnoserver)

    @property
    def dist(self):
        """string or int. Distribution type."""
        return self._dist

    @dist.setter
    def dist(self, x):
        self.setDist(x)

    @property
    def freq(self):
        """float or PyoObject. Polling frequency."""
        return self._freq

    @freq.setter
    def freq(self, x):
        self.setFreq(x)

    @property
    def x1(self):
        """float or PyoObject. First parameter."""
        return self._x1

    @x1.setter
    def x1(self, x):
        self.setX1(x)

    @property
    def x2(self):
        """float or PyoObject. Second parameter."""
        return self._x2

    @x2.setter
    def x2(self, x):
        self.setX2(x)

    @property
    def scale(self):
        """int. Output format."""
        return self._scale

    @scale.setter
    def scale(self, x):
        self.setScale(x)
class XnoiseDur(PyoObject):
    """
    Recursive time varying X-class pseudo-random generator.

    XnoiseDur implements a few of the most common noise distributions.
    Each distribution generates values in the range 0 to 1, which are
    then rescaled between `min` and `max` arguments. The object uses
    the generated value to set the delay time before the next generation.
    XnoiseDur will hold the value until next generation.

    :Parent: :py:class:`PyoObject`

    :Args:

        dist: string or int, optional
            Distribution type. Can be the name of the distribution as a string
            or its associated number. Defaults to 0.
        min: float or PyoObject, optional
            Minimum value for the random generation. Defaults to 0.
        max: float or PyoObject, optional
            Maximum value for the random generation. Defaults to 1.
        x1: float or PyoObject, optional
            First parameter. Defaults to 0.5.
        x2: float or PyoObject, optional
            Second parameter. Defaults to 0.5.

    .. note::

        Available distributions are:
            0. uniform
            1. linear minimum
            2. linear maximum
            3. triangular
            4. exponential minimum
            5. exponential maximum
            6. double (bi)exponential
            7. cauchy
            8. weibull
            9. gaussian
            10. poisson
            11. walker (drunk)
            12. loopseg (drunk with looped segments)

        Depending on the distribution, `x1` and `x2` parameters are applied
        as follow (names as string, or associated number can be used as `dist`
        parameter):
            0. uniform
                - x1: not used
                - x2: not used
            1. linear_min
                - x1: not used
                - x2: not used
            2. linear_max
                - x1: not used
                - x2: not used
            3. triangle
                - x1: not used
                - x2: not used
            4. expon_min
                - x1: slope {0 = no slope -> 10 = sharp slope}
                - x2: not used
            5. expon_max
                - x1: slope {0 = no slope -> 10 = sharp slope}
                - x2: not used
            6. biexpon
                - x1: bandwidth {0 = huge bandwidth -> 10 = narrow bandwidth}
                - x2: not used
            7. cauchy
                - x1: bandwidth {0 = narrow bandwidth -> 10 = huge bandwidth}
                - x2: not used
            8. weibull
                - x1: mean location {0 -> 1}
                - x2: shape {0.5 = linear min, 1.5 = expon min, 3.5 = gaussian}
            9. gaussian
                - x1: mean location {0 -> 1}
                - x2: bandwidth {0 = narrow bandwidth -> 10 = huge bandwidth}
            10. poisson
                - x1: gravity center {0 = low values -> 10 = high values}
                - x2: compress/expand range {0.1 = full compress -> 4 full expand}
            11. walker
                - x1: maximum value {0.1 -> 1}
                - x2: maximum step {0.1 -> 1}
            12. loopseg
                - x1: maximum value {0.1 -> 1}
                - x2: maximum step {0.1 -> 1}

    >>> s = Server().boot()
    >>> s.start()
    >>> dur = XnoiseDur(dist="expon_min", min=[.05,0.1], max=[.4,.5], x1=3)
    >>> trig = Change(dur)
    >>> amp = TrigEnv(trig, table=HannTable(), dur=dur, mul=.2)
    >>> freqs = midiToHz([60,63,67,70,72])
    >>> freq = TrigChoice(trig, choice=freqs)
    >>> a = LFO(freq=freq, type=2, mul=amp).out()
    """

    def __init__(self, dist=0, min=0.0, max=1.0, x1=0.5, x2=0.5, mul=1, add=0):
        # `dist` may be a string name or an int code, so it is deliberately
        # left out of the "OOOOOO" argument signature check below.
        pyoArgsAssert(self, "OOOOOO", min, max, x1, x2, mul, add)
        PyoObject.__init__(self, mul, add)
        self._dist = dist
        self._min = min
        self._max = max
        self._x1 = x1
        self._x2 = x2
        dist, min, max, x1, x2, mul, add, lmax = convertArgsToLists(dist, min, max, x1, x2, mul, add)
        # Translate distribution names to their int codes; unknown names
        # fall back to 0 (uniform).
        for i, t in enumerate(dist):
            if type(t) in [bytes_t, unicode_t]:
                dist[i] = XNOISE_DICT.get(t, 0)
        self._base_objs = [
            XnoiseDur_base(
                wrap(dist, i), wrap(min, i), wrap(max, i), wrap(x1, i), wrap(x2, i), wrap(mul, i), wrap(add, i)
            )
            for i in range(lmax)
        ]
        self._init_play()

    def setDist(self, x):
        """
        Replace the `dist` attribute.

        :Args:

            x: string or int
                new `dist` attribute.
        """
        self._dist = x
        x, lmax = convertArgsToLists(x)
        for i, t in enumerate(x):
            if type(t) in [bytes_t, unicode_t]:
                x[i] = XNOISE_DICT.get(t, 0)
        # The C-level base object names this setter setType().
        [obj.setType(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setMin(self, x):
        """
        Replace the `min` attribute.

        :Args:

            x: float or PyoObject
                new `min` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._min = x
        x, lmax = convertArgsToLists(x)
        [obj.setMin(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setMax(self, x):
        """
        Replace the `max` attribute.

        :Args:

            x: float or PyoObject
                new `max` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._max = x
        x, lmax = convertArgsToLists(x)
        [obj.setMax(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setX1(self, x):
        """
        Replace the `x1` attribute.

        :Args:

            x: float or PyoObject
                new `x1` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._x1 = x
        x, lmax = convertArgsToLists(x)
        [obj.setX1(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setX2(self, x):
        """
        Replace the `x2` attribute.

        :Args:

            x: float or PyoObject
                new `x2` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._x2 = x
        x, lmax = convertArgsToLists(x)
        [obj.setX2(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def ctrl(self, map_list=None, title=None, wxnoserver=False):
        self._map_list = [
            SLMap(0, 12, "lin", "dist", self._dist, res="int", dataOnly=True),
            SLMap(0, 20, "lin", "min", self._min),
            SLMap(0, 20, "lin", "max", self._max),
            SLMap(0, 1, "lin", "x1", self._x1),
            SLMap(0, 1, "lin", "x2", self._x2),
        ]
        PyoObject.ctrl(self, map_list, title, wxnoserver)

    @property
    def dist(self):
        """string or int. Distribution type."""
        return self._dist

    @dist.setter
    def dist(self, x):
        self.setDist(x)

    @property
    def min(self):
        """float or PyoObject. Minimum value."""
        return self._min

    @min.setter
    def min(self, x):
        self.setMin(x)

    @property
    def max(self):
        """float or PyoObject. Maximum value."""
        return self._max

    @max.setter
    def max(self, x):
        self.setMax(x)

    @property
    def x1(self):
        """float or PyoObject. First parameter."""
        return self._x1

    @x1.setter
    def x1(self, x):
        self.setX1(x)

    @property
    def x2(self):
        """float or PyoObject. Second parameter."""
        return self._x2

    @x2.setter
    def x2(self, x):
        self.setX2(x)
class Urn(PyoObject):
    """
    Periodic pseudo-random integer generator without duplicates.

    Urn generates a pseudo-random integer number between 0 and `max`
    values at a frequency specified by `freq` parameter. Urn will
    hold generated value until the next generation. Urn works like RandInt,
    except that it keeps track of each number which has been generated. After
    all numbers have been output, the pool is reset and the object sends
    a trigger signal.

    :Parent: :py:class:`PyoObject`

    :Args:

        max: int, optional
            Maximum value for the random generation. Defaults to 100.
        freq: float or PyoObject, optional
            Polling frequency. Defaults to 1.

    .. note::

        Urn will send a trigger signal when the pool is empty.
        User can retrieve the trigger streams by calling obj['trig'].
        Useful to synchronize other processes.

    >>> s = Server().boot()
    >>> s.start()
    >>> mid = Urn(max=12, freq=10, add=60)
    >>> fr = MToF(mid)
    >>> sigL = SineLoop(freq=fr, feedback=.08, mul=0.3).out()
    >>> amp = TrigExpseg(mid["trig"], [(0,0),(.01,.25),(1,0)])
    >>> sigR = SineLoop(midiToHz(84), feedback=0.05, mul=amp).out(1)
    """

    def __init__(self, max=100, freq=1.0, mul=1, add=0):
        pyoArgsAssert(self, "iOOO", max, freq, mul, add)
        PyoObject.__init__(self, mul, add)
        self._max = max
        self._freq = freq
        max, freq, mul, add, lmax = convertArgsToLists(max, freq, mul, add)
        self._base_objs = [Urn_base(wrap(max, i), wrap(freq, i), wrap(mul, i), wrap(add, i)) for i in range(lmax)]
        # One trigger stream per base stream, retrievable via obj['trig'].
        self._trig_objs = Dummy([TriggerDummy_base(obj) for obj in self._base_objs])
        self._init_play()

    def out(self, chnl=0, inc=1, dur=0, delay=0):
        # Control-rate generator: routing to the audio output is disabled;
        # this override is a no-op that simply returns self for chaining.
        return self

    def setMax(self, x):
        """
        Replace the `max` attribute.

        :Args:

            x: int
                new `max` attribute.
        """
        pyoArgsAssert(self, "i", x)
        self._max = x
        x, lmax = convertArgsToLists(x)
        [obj.setMax(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setFreq(self, x):
        """
        Replace the `freq` attribute.

        :Args:

            x: float or PyoObject
                new `freq` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._freq = x
        x, lmax = convertArgsToLists(x)
        [obj.setFreq(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def ctrl(self, map_list=None, title=None, wxnoserver=False):
        self._map_list = [
            SLMap(1, 1000, "lin", "max", self._max, res="int", dataOnly=True),
            SLMap(0.1, 20.0, "lin", "freq", self._freq),
            SLMapMul(self._mul),
        ]
        PyoObject.ctrl(self, map_list, title, wxnoserver)

    @property
    def max(self):
        """int. Maximum value."""
        return self._max

    @max.setter
    def max(self, x):
        self.setMax(x)

    @property
    def freq(self):
        """float or PyoObject. Polling frequency."""
        return self._freq

    @freq.setter
    def freq(self, x):
        self.setFreq(x)
class LogiMap(PyoObject):
    """
    Random generator based on the logistic map.

    The logistic equation (sometimes called the Verhulst model or logistic
    growth curve) is a model of population growth first published by Pierre
    Verhulst (1845, 1847). The logistic map is a discrete quadratic recurrence
    equation derived from the logistic equation that can be effectively used
    as a number generator that exhibit chaotic behavior. This object uses the
    following equation:

        x[n] = (r + 3) * x[n-1] * (1.0 - x[n-1])

    where 'r' is the randomization factor between 0 and 1.

    :Parent: :py:class:`PyoObject`

    :Args:

        chaos: float or PyoObject, optional
            Randomization factor, 0.0 < chaos < 1.0. Defaults to 0.6.
        freq: float or PyoObject, optional
            Polling frequency. Defaults to 1.
        init: float, optional
            Initial value, 0.0 < init < 1.0. Defaults to 0.5.

    .. note::

        The method play() resets the internal state to the initial value.

    >>> s = Server().boot()
    >>> s.start()
    >>> val = LogiMap([0.6,0.65], [4,8])
    >>> mid = Round(Scale(val, 0, 1, [36,48], [72,84]))
    >>> hz = Snap(mid, [0,2,4,5,7,9,11], 1)
    >>> env = CosTable([(0,0), (32,1), (4064,1), (4096,0), (8192,0)])
    >>> amp = TrigEnv(Change(val), table=env, dur=[.25,.125], mul=0.3)
    >>> osc = RCOsc(hz, mul=amp).out()
    """

    def __init__(self, chaos=0.6, freq=1.0, init=0.5, mul=1, add=0):
        pyoArgsAssert(self, "OOnOO", chaos, freq, init, mul, add)
        PyoObject.__init__(self, mul, add)
        self._chaos = chaos
        self._freq = freq
        # `init` is consumed only by the base objects at construction time;
        # it is intentionally not kept as a Python-level attribute.
        chaos, freq, init, mul, add, lmax = convertArgsToLists(chaos, freq, init, mul, add)
        self._base_objs = [
            LogiMap_base(wrap(chaos, i), wrap(freq, i), wrap(init, i), wrap(mul, i), wrap(add, i)) for i in range(lmax)
        ]
        self._init_play()

    def out(self, chnl=0, inc=1, dur=0, delay=0):
        # Control-rate generator: routing to the audio output is disabled;
        # this override is a no-op that simply returns self for chaining.
        return self

    def setChaos(self, x):
        """
        Replace the `chaos` attribute.

        :Args:

            x: float or PyoObject
                new `chaos` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._chaos = x
        x, lmax = convertArgsToLists(x)
        [obj.setChaos(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def setFreq(self, x):
        """
        Replace the `freq` attribute.

        :Args:

            x: float or PyoObject
                new `freq` attribute.
        """
        pyoArgsAssert(self, "O", x)
        self._freq = x
        x, lmax = convertArgsToLists(x)
        [obj.setFreq(wrap(x, i)) for i, obj in enumerate(self._base_objs)]

    def ctrl(self, map_list=None, title=None, wxnoserver=False):
        self._map_list = [
            SLMap(0.001, 0.999, "lin", "chaos", self._chaos),
            SLMap(0.1, 20.0, "lin", "freq", self._freq),
            SLMapMul(self._mul),
        ]
        PyoObject.ctrl(self, map_list, title, wxnoserver)

    @property
    def chaos(self):
        """float or PyoObject. Randomization factor."""
        return self._chaos

    @chaos.setter
    def chaos(self, x):
        self.setChaos(x)

    @property
    def freq(self):
        """float or PyoObject. Polling frequency."""
        return self._freq

    @freq.setter
    def freq(self, x):
        self.setFreq(x)
| 29.274458
| 119
| 0.537064
| 5,689
| 44,585
| 4.136579
| 0.081561
| 0.021417
| 0.048953
| 0.012238
| 0.815068
| 0.794034
| 0.775507
| 0.761186
| 0.745931
| 0.731823
| 0
| 0.034167
| 0.339599
| 44,585
| 1,522
| 120
| 29.293693
| 0.765079
| 0.434137
| 0
| 0.788091
| 0
| 0
| 0.013618
| 0
| 0
| 0
| 0
| 0
| 0.06655
| 1
| 0.197898
| false
| 0
| 0.005254
| 0.003503
| 0.276708
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a8dc309af88502161d08131386290afd3f714a3d
| 34,449
|
py
|
Python
|
sdk/python/pulumi_gcp/compute/managed_ssl_certificate.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/compute/managed_ssl_certificate.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/compute/managed_ssl_certificate.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ManagedSslCertificateArgs', 'ManagedSslCertificate']
@pulumi.input_type
class ManagedSslCertificateArgs:
    def __init__(__self__, *,
                 certificate_id: Optional[pulumi.Input[int]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 managed: Optional[pulumi.Input['ManagedSslCertificateManagedArgs']] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a ManagedSslCertificate resource.
        :param pulumi.Input[int] certificate_id: The unique identifier for the resource.
        :param pulumi.Input[str] description: An optional description of this resource.
        :param pulumi.Input['ManagedSslCertificateManagedArgs'] managed: Properties relevant to a managed certificate. These will be used if the
               certificate is managed (as indicated by a value of `MANAGED` in `type`).
               Structure is documented below.
        :param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
               created. The name must be 1-63 characters long, and comply with
               RFC1035. Specifically, the name must be 1-63 characters long and match
               the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
               first character must be a lowercase letter, and all following
               characters must be a dash, lowercase letter, or digit, except the last
               character, which cannot be a dash.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[str] type: Enum field whose value is always `MANAGED` - used to signal to the API
               which type this is.
               Default value is `MANAGED`.
               Possible values are `MANAGED`.
        """
        if certificate_id is not None:
            pulumi.set(__self__, "certificate_id", certificate_id)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if managed is not None:
            pulumi.set(__self__, "managed", managed)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if project is not None:
            pulumi.set(__self__, "project", project)
        if type is not None:
            pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="certificateId")
    def certificate_id(self) -> Optional[pulumi.Input[int]]:
        """
        The unique identifier for the resource.
        """
        return pulumi.get(self, "certificate_id")

    @certificate_id.setter
    def certificate_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "certificate_id", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        An optional description of this resource.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def managed(self) -> Optional[pulumi.Input['ManagedSslCertificateManagedArgs']]:
        """
        Properties relevant to a managed certificate. These will be used if the
        certificate is managed (as indicated by a value of `MANAGED` in `type`).
        Structure is documented below.
        """
        return pulumi.get(self, "managed")

    @managed.setter
    def managed(self, value: Optional[pulumi.Input['ManagedSslCertificateManagedArgs']]):
        pulumi.set(self, "managed", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the resource. Provided by the client when the resource is
        created. The name must be 1-63 characters long, and comply with
        RFC1035. Specifically, the name must be 1-63 characters long and match
        the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the
        first character must be a lowercase letter, and all following
        characters must be a dash, lowercase letter, or digit, except the last
        character, which cannot be a dash.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def project(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")

    @project.setter
    def project(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        Enum field whose value is always `MANAGED` - used to signal to the API
        which type this is.
        Default value is `MANAGED`.
        Possible values are `MANAGED`.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class _ManagedSslCertificateState:
def __init__(__self__, *,
certificate_id: Optional[pulumi.Input[int]] = None,
creation_timestamp: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
expire_time: Optional[pulumi.Input[str]] = None,
managed: Optional[pulumi.Input['ManagedSslCertificateManagedArgs']] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
self_link: Optional[pulumi.Input[str]] = None,
subject_alternative_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
type: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering ManagedSslCertificate resources.
:param pulumi.Input[int] certificate_id: The unique identifier for the resource.
:param pulumi.Input[str] creation_timestamp: Creation timestamp in RFC3339 text format.
:param pulumi.Input[str] description: An optional description of this resource.
:param pulumi.Input[str] expire_time: Expire time of the certificate.
:param pulumi.Input['ManagedSslCertificateManagedArgs'] managed: Properties relevant to a managed certificate. These will be used if the
certificate is managed (as indicated by a value of `MANAGED` in `type`).
Structure is documented below.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
the regular expression `a-z?` which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] self_link: The URI of the created resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] subject_alternative_names: Domains associated with the certificate via Subject Alternative Name.
:param pulumi.Input[str] type: Enum field whose value is always `MANAGED` - used to signal to the API
which type this is.
Default value is `MANAGED`.
Possible values are `MANAGED`.
"""
if certificate_id is not None:
pulumi.set(__self__, "certificate_id", certificate_id)
if creation_timestamp is not None:
pulumi.set(__self__, "creation_timestamp", creation_timestamp)
if description is not None:
pulumi.set(__self__, "description", description)
if expire_time is not None:
pulumi.set(__self__, "expire_time", expire_time)
if managed is not None:
pulumi.set(__self__, "managed", managed)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if self_link is not None:
pulumi.set(__self__, "self_link", self_link)
if subject_alternative_names is not None:
pulumi.set(__self__, "subject_alternative_names", subject_alternative_names)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="certificateId")
def certificate_id(self) -> Optional[pulumi.Input[int]]:
"""
The unique identifier for the resource.
"""
return pulumi.get(self, "certificate_id")
@certificate_id.setter
def certificate_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "certificate_id", value)
@property
@pulumi.getter(name="creationTimestamp")
def creation_timestamp(self) -> Optional[pulumi.Input[str]]:
"""
Creation timestamp in RFC3339 text format.
"""
return pulumi.get(self, "creation_timestamp")
@creation_timestamp.setter
def creation_timestamp(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "creation_timestamp", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
An optional description of this resource.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="expireTime")
def expire_time(self) -> Optional[pulumi.Input[str]]:
    """
    Expire time of the certificate.
    """
    return pulumi.get(self, "expire_time")

@expire_time.setter
def expire_time(self, value: Optional[pulumi.Input[str]]):
    # Delegates storage to the Pulumi runtime property bag.
    pulumi.set(self, "expire_time", value)
@property
@pulumi.getter
def managed(self) -> Optional[pulumi.Input['ManagedSslCertificateManagedArgs']]:
    """
    Properties relevant to a managed certificate. These will be used if the
    certificate is managed (as indicated by a value of `MANAGED` in `type`).
    Structure is documented below.
    """
    return pulumi.get(self, "managed")

@managed.setter
def managed(self, value: Optional[pulumi.Input['ManagedSslCertificateManagedArgs']]):
    # Delegates storage to the Pulumi runtime property bag.
    pulumi.set(self, "managed", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
    """
    Name of the resource. Provided by the client when the resource is
    created. The name must be 1-63 characters long, and comply with
    RFC1035. Specifically, the name must be 1-63 characters long and match
    the regular expression `a-z?` which means the
    first character must be a lowercase letter, and all following
    characters must be a dash, lowercase letter, or digit, except the last
    character, which cannot be a dash.
    """
    return pulumi.get(self, "name")

@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
    # Delegates storage to the Pulumi runtime property bag.
    pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
    """
    The ID of the project in which the resource belongs.
    If it is not provided, the provider project is used.
    """
    return pulumi.get(self, "project")

@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
    # Delegates storage to the Pulumi runtime property bag.
    pulumi.set(self, "project", value)
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> Optional[pulumi.Input[str]]:
    """
    The URI of the created resource.
    """
    return pulumi.get(self, "self_link")

@self_link.setter
def self_link(self, value: Optional[pulumi.Input[str]]):
    # Delegates storage to the Pulumi runtime property bag.
    pulumi.set(self, "self_link", value)
@property
@pulumi.getter(name="subjectAlternativeNames")
def subject_alternative_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """
    Domains associated with the certificate via Subject Alternative Name.
    """
    return pulumi.get(self, "subject_alternative_names")

@subject_alternative_names.setter
def subject_alternative_names(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
    # Delegates storage to the Pulumi runtime property bag.
    pulumi.set(self, "subject_alternative_names", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
    """
    Enum field whose value is always `MANAGED` - used to signal to the API
    which type this is.
    Default value is `MANAGED`.
    Possible values are `MANAGED`.
    """
    return pulumi.get(self, "type")

@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
    # Delegates storage to the Pulumi runtime property bag.
    pulumi.set(self, "type", value)
class ManagedSslCertificate(pulumi.CustomResource):
    # NOTE(review): auto-generated Pulumi resource class. The overload pair below
    # exists only for type checkers/IDE help; the real constructor is the
    # untyped ``__init__`` that dispatches to ``_internal_init``.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 certificate_id: Optional[pulumi.Input[int]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 managed: Optional[pulumi.Input[pulumi.InputType['ManagedSslCertificateManagedArgs']]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        An SslCertificate resource, used for HTTPS load balancing. This resource
        represents a certificate for which the certificate secrets are created and
        managed by Google.
        For a resource where you provide the key, see the
        SSL Certificate resource.
        To get more information about ManagedSslCertificate, see:
        * [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/sslCertificates)
        * How-to Guides
            * [Official Documentation](https://cloud.google.com/load-balancing/docs/ssl-certificates)
        > **Warning:** This resource should be used with extreme caution! Provisioning an SSL
        certificate is complex. Ensure that you understand the lifecycle of a
        certificate before attempting complex tasks like cert rotation automatically.
        This resource will "return" as soon as the certificate object is created,
        but post-creation the certificate object will go through a "provisioning"
        process. The provisioning process can complete only when the domain name
        for which the certificate is created points to a target pool which, itself,
        points at the certificate. Depending on your DNS provider, this may take
        some time, and migrating from self-managed certificates to Google-managed
        certificates may entail some downtime while the certificate provisions.
        In conclusion: Be extremely cautious.
        ## Example Usage
        ### Managed Ssl Certificate Basic
        ```python
        import pulumi
        import pulumi_gcp as gcp
        default_managed_ssl_certificate = gcp.compute.ManagedSslCertificate("defaultManagedSslCertificate", managed=gcp.compute.ManagedSslCertificateManagedArgs(
            domains=["sslcert.tf-test.club."],
        ))
        default_http_health_check = gcp.compute.HttpHealthCheck("defaultHttpHealthCheck",
            request_path="/",
            check_interval_sec=1,
            timeout_sec=1)
        default_backend_service = gcp.compute.BackendService("defaultBackendService",
            port_name="http",
            protocol="HTTP",
            timeout_sec=10,
            health_checks=[default_http_health_check.id])
        default_url_map = gcp.compute.URLMap("defaultURLMap",
            description="a description",
            default_service=default_backend_service.id,
            host_rules=[gcp.compute.URLMapHostRuleArgs(
                hosts=["sslcert.tf-test.club"],
                path_matcher="allpaths",
            )],
            path_matchers=[gcp.compute.URLMapPathMatcherArgs(
                name="allpaths",
                default_service=default_backend_service.id,
                path_rules=[gcp.compute.URLMapPathMatcherPathRuleArgs(
                    paths=["/*"],
                    service=default_backend_service.id,
                )],
            )])
        default_target_https_proxy = gcp.compute.TargetHttpsProxy("defaultTargetHttpsProxy",
            url_map=default_url_map.id,
            ssl_certificates=[default_managed_ssl_certificate.id])
        zone = gcp.dns.ManagedZone("zone", dns_name="sslcert.tf-test.club.")
        default_global_forwarding_rule = gcp.compute.GlobalForwardingRule("defaultGlobalForwardingRule",
            target=default_target_https_proxy.id,
            port_range="443")
        set = gcp.dns.RecordSet("set",
            name="sslcert.tf-test.club.",
            type="A",
            ttl=3600,
            managed_zone=zone.name,
            rrdatas=[default_global_forwarding_rule.ip_address])
        ```
        ## Import
        ManagedSslCertificate can be imported using any of these accepted formats
        ```sh
         $ pulumi import gcp:compute/managedSslCertificate:ManagedSslCertificate default projects/{{project}}/global/sslCertificates/{{name}}
        ```
        ```sh
         $ pulumi import gcp:compute/managedSslCertificate:ManagedSslCertificate default {{project}}/{{name}}
        ```
        ```sh
         $ pulumi import gcp:compute/managedSslCertificate:ManagedSslCertificate default {{name}}
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[int] certificate_id: The unique identifier for the resource.
        :param pulumi.Input[str] description: An optional description of this resource.
        :param pulumi.Input[pulumi.InputType['ManagedSslCertificateManagedArgs']] managed: Properties relevant to a managed certificate. These will be used if the
               certificate is managed (as indicated by a value of `MANAGED` in `type`).
               Structure is documented below.
        :param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
               created. The name must be 1-63 characters long, and comply with
               RFC1035. Specifically, the name must be 1-63 characters long and match
               the regular expression `a-z?` which means the
               first character must be a lowercase letter, and all following
               characters must be a dash, lowercase letter, or digit, except the last
               character, which cannot be a dash.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[str] type: Enum field whose value is always `MANAGED` - used to signal to the API
               which type this is.
               Default value is `MANAGED`.
               Possible values are `MANAGED`.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: Optional[ManagedSslCertificateArgs] = None,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        An SslCertificate resource, used for HTTPS load balancing. This resource
        represents a certificate for which the certificate secrets are created and
        managed by Google.
        For a resource where you provide the key, see the
        SSL Certificate resource.
        To get more information about ManagedSslCertificate, see:
        * [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/sslCertificates)
        * How-to Guides
            * [Official Documentation](https://cloud.google.com/load-balancing/docs/ssl-certificates)
        > **Warning:** This resource should be used with extreme caution! Provisioning an SSL
        certificate is complex. Ensure that you understand the lifecycle of a
        certificate before attempting complex tasks like cert rotation automatically.
        This resource will "return" as soon as the certificate object is created,
        but post-creation the certificate object will go through a "provisioning"
        process. The provisioning process can complete only when the domain name
        for which the certificate is created points to a target pool which, itself,
        points at the certificate. Depending on your DNS provider, this may take
        some time, and migrating from self-managed certificates to Google-managed
        certificates may entail some downtime while the certificate provisions.
        In conclusion: Be extremely cautious.
        ## Example Usage
        ### Managed Ssl Certificate Basic
        ```python
        import pulumi
        import pulumi_gcp as gcp
        default_managed_ssl_certificate = gcp.compute.ManagedSslCertificate("defaultManagedSslCertificate", managed=gcp.compute.ManagedSslCertificateManagedArgs(
            domains=["sslcert.tf-test.club."],
        ))
        default_http_health_check = gcp.compute.HttpHealthCheck("defaultHttpHealthCheck",
            request_path="/",
            check_interval_sec=1,
            timeout_sec=1)
        default_backend_service = gcp.compute.BackendService("defaultBackendService",
            port_name="http",
            protocol="HTTP",
            timeout_sec=10,
            health_checks=[default_http_health_check.id])
        default_url_map = gcp.compute.URLMap("defaultURLMap",
            description="a description",
            default_service=default_backend_service.id,
            host_rules=[gcp.compute.URLMapHostRuleArgs(
                hosts=["sslcert.tf-test.club"],
                path_matcher="allpaths",
            )],
            path_matchers=[gcp.compute.URLMapPathMatcherArgs(
                name="allpaths",
                default_service=default_backend_service.id,
                path_rules=[gcp.compute.URLMapPathMatcherPathRuleArgs(
                    paths=["/*"],
                    service=default_backend_service.id,
                )],
            )])
        default_target_https_proxy = gcp.compute.TargetHttpsProxy("defaultTargetHttpsProxy",
            url_map=default_url_map.id,
            ssl_certificates=[default_managed_ssl_certificate.id])
        zone = gcp.dns.ManagedZone("zone", dns_name="sslcert.tf-test.club.")
        default_global_forwarding_rule = gcp.compute.GlobalForwardingRule("defaultGlobalForwardingRule",
            target=default_target_https_proxy.id,
            port_range="443")
        set = gcp.dns.RecordSet("set",
            name="sslcert.tf-test.club.",
            type="A",
            ttl=3600,
            managed_zone=zone.name,
            rrdatas=[default_global_forwarding_rule.ip_address])
        ```
        ## Import
        ManagedSslCertificate can be imported using any of these accepted formats
        ```sh
         $ pulumi import gcp:compute/managedSslCertificate:ManagedSslCertificate default projects/{{project}}/global/sslCertificates/{{name}}
        ```
        ```sh
         $ pulumi import gcp:compute/managedSslCertificate:ManagedSslCertificate default {{project}}/{{name}}
        ```
        ```sh
         $ pulumi import gcp:compute/managedSslCertificate:ManagedSslCertificate default {{name}}
        ```

        :param str resource_name: The name of the resource.
        :param ManagedSslCertificateArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two typed overloads above: if the caller
        # passed a ManagedSslCertificateArgs object, expand its fields into
        # keyword arguments; otherwise forward positional/keyword args as-is.
        resource_args, opts = _utilities.get_resource_args_opts(ManagedSslCertificateArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 certificate_id: Optional[pulumi.Input[int]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 managed: Optional[pulumi.Input[pulumi.InputType['ManagedSslCertificateManagedArgs']]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 project: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Register the resource with the Pulumi engine.  ``__props__`` may only
        # be supplied by ``get()`` (together with ``opts.id``) when rehydrating
        # an existing resource's state.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ManagedSslCertificateArgs.__new__(ManagedSslCertificateArgs)
            # Caller-supplied inputs.
            __props__.__dict__["certificate_id"] = certificate_id
            __props__.__dict__["description"] = description
            __props__.__dict__["managed"] = managed
            __props__.__dict__["name"] = name
            __props__.__dict__["project"] = project
            __props__.__dict__["type"] = type
            # Output-only attributes, populated by the provider after creation.
            __props__.__dict__["creation_timestamp"] = None
            __props__.__dict__["expire_time"] = None
            __props__.__dict__["self_link"] = None
            __props__.__dict__["subject_alternative_names"] = None
        # Alias for the historical misspelled token so existing stacks keep working.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="gcp:compute/mangedSslCertificate:MangedSslCertificate")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(ManagedSslCertificate, __self__).__init__(
            'gcp:compute/managedSslCertificate:ManagedSslCertificate',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            certificate_id: Optional[pulumi.Input[int]] = None,
            creation_timestamp: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            expire_time: Optional[pulumi.Input[str]] = None,
            managed: Optional[pulumi.Input[pulumi.InputType['ManagedSslCertificateManagedArgs']]] = None,
            name: Optional[pulumi.Input[str]] = None,
            project: Optional[pulumi.Input[str]] = None,
            self_link: Optional[pulumi.Input[str]] = None,
            subject_alternative_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            type: Optional[pulumi.Input[str]] = None) -> 'ManagedSslCertificate':
        """
        Get an existing ManagedSslCertificate resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[int] certificate_id: The unique identifier for the resource.
        :param pulumi.Input[str] creation_timestamp: Creation timestamp in RFC3339 text format.
        :param pulumi.Input[str] description: An optional description of this resource.
        :param pulumi.Input[str] expire_time: Expire time of the certificate.
        :param pulumi.Input[pulumi.InputType['ManagedSslCertificateManagedArgs']] managed: Properties relevant to a managed certificate. These will be used if the
               certificate is managed (as indicated by a value of `MANAGED` in `type`).
               Structure is documented below.
        :param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
               created. The name must be 1-63 characters long, and comply with
               RFC1035. Specifically, the name must be 1-63 characters long and match
               the regular expression `a-z?` which means the
               first character must be a lowercase letter, and all following
               characters must be a dash, lowercase letter, or digit, except the last
               character, which cannot be a dash.
        :param pulumi.Input[str] project: The ID of the project in which the resource belongs.
               If it is not provided, the provider project is used.
        :param pulumi.Input[str] self_link: The URI of the created resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] subject_alternative_names: Domains associated with the certificate via Subject Alternative Name.
        :param pulumi.Input[str] type: Enum field whose value is always `MANAGED` - used to signal to the API
               which type this is.
               Default value is `MANAGED`.
               Possible values are `MANAGED`.
        """
        # Rehydrate state: bind the provider ID, then build a state bag from the
        # caller-supplied qualifiers and construct the resource around it.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ManagedSslCertificateState.__new__(_ManagedSslCertificateState)
        __props__.__dict__["certificate_id"] = certificate_id
        __props__.__dict__["creation_timestamp"] = creation_timestamp
        __props__.__dict__["description"] = description
        __props__.__dict__["expire_time"] = expire_time
        __props__.__dict__["managed"] = managed
        __props__.__dict__["name"] = name
        __props__.__dict__["project"] = project
        __props__.__dict__["self_link"] = self_link
        __props__.__dict__["subject_alternative_names"] = subject_alternative_names
        __props__.__dict__["type"] = type
        return ManagedSslCertificate(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="certificateId")
    def certificate_id(self) -> pulumi.Output[int]:
        """
        The unique identifier for the resource.
        """
        return pulumi.get(self, "certificate_id")
    @property
    @pulumi.getter(name="creationTimestamp")
    def creation_timestamp(self) -> pulumi.Output[str]:
        """
        Creation timestamp in RFC3339 text format.
        """
        return pulumi.get(self, "creation_timestamp")
    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        An optional description of this resource.
        """
        return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="expireTime")
    def expire_time(self) -> pulumi.Output[str]:
        """
        Expire time of the certificate.
        """
        return pulumi.get(self, "expire_time")
    @property
    @pulumi.getter
    def managed(self) -> pulumi.Output[Optional['outputs.ManagedSslCertificateManaged']]:
        """
        Properties relevant to a managed certificate. These will be used if the
        certificate is managed (as indicated by a value of `MANAGED` in `type`).
        Structure is documented below.
        """
        return pulumi.get(self, "managed")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Name of the resource. Provided by the client when the resource is
        created. The name must be 1-63 characters long, and comply with
        RFC1035. Specifically, the name must be 1-63 characters long and match
        the regular expression `a-z?` which means the
        first character must be a lowercase letter, and all following
        characters must be a dash, lowercase letter, or digit, except the last
        character, which cannot be a dash.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def project(self) -> pulumi.Output[str]:
        """
        The ID of the project in which the resource belongs.
        If it is not provided, the provider project is used.
        """
        return pulumi.get(self, "project")
    @property
    @pulumi.getter(name="selfLink")
    def self_link(self) -> pulumi.Output[str]:
        """
        The URI of the created resource.
        """
        return pulumi.get(self, "self_link")
    @property
    @pulumi.getter(name="subjectAlternativeNames")
    def subject_alternative_names(self) -> pulumi.Output[Sequence[str]]:
        """
        Domains associated with the certificate via Subject Alternative Name.
        """
        return pulumi.get(self, "subject_alternative_names")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[Optional[str]]:
        """
        Enum field whose value is always `MANAGED` - used to signal to the API
        which type this is.
        Default value is `MANAGED`.
        Possible values are `MANAGED`.
        """
        return pulumi.get(self, "type")
| 45.268068
| 163
| 0.647305
| 3,895
| 34,449
| 5.570732
| 0.087291
| 0.056779
| 0.050327
| 0.048668
| 0.894737
| 0.870311
| 0.851184
| 0.840538
| 0.834132
| 0.813163
| 0
| 0.004372
| 0.263056
| 34,449
| 760
| 164
| 45.327632
| 0.850317
| 0.49386
| 0
| 0.716172
| 1
| 0
| 0.111149
| 0.047921
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161716
| false
| 0.0033
| 0.023102
| 0
| 0.283828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a8e26eb6189ec035dc6752547efc06b547b37256
| 10,749
|
py
|
Python
|
bayesian_quadrature/tests/test_gauss_c.py
|
jhamrick/bayesian-quadrature
|
b7245cb6ccb96606da7caf1c516631c410f5dc1b
|
[
"MIT"
] | 13
|
2015-02-04T15:21:32.000Z
|
2020-11-20T23:12:12.000Z
|
bayesian_quadrature/tests/test_gauss_c.py
|
jhamrick/bayesian-quadrature
|
b7245cb6ccb96606da7caf1c516631c410f5dc1b
|
[
"MIT"
] | 1
|
2018-07-20T20:02:55.000Z
|
2018-07-20T20:02:55.000Z
|
bayesian_quadrature/tests/test_gauss_c.py
|
jhamrick/bayesian-quadrature
|
b7245cb6ccb96606da7caf1c516631c410f5dc1b
|
[
"MIT"
] | 3
|
2017-04-24T06:48:36.000Z
|
2021-12-12T19:05:10.000Z
|
import numpy as np
import scipy.stats
import pytest
from .. import gauss_c
from .. import linalg_c as la
from . import util
import logging
# Package logger: DEBUG level so test runs surface the library's internal detail.
logger = logging.getLogger("bayesian_quadrature")
logger.setLevel("DEBUG")
# Shared fixtures/constants re-exported from the test util helper module.
DTYPE = util.DTYPE
options = util.options
def test_mvn_logpdf():
    """mvn_logpdf should match log(scipy.stats.norm.pdf) on random 1-D points."""
    util.npseed()
    x_mean = options['x_mean']
    x_var = options['x_var']
    mu = np.array([x_mean], order='F')
    cov = np.array([[x_var]], order='F')
    la.cho_factor(cov, cov)
    logdet = la.logdet(cov)
    n = 20
    x = np.array(np.random.uniform(-10, 10, n)[None], order='F')
    y = np.log(np.array(
        scipy.stats.norm.pdf(x, x_mean, np.sqrt(x_var)), order='F'))
    pdf = np.empty(n, order='F')
    # FIX: use range (xrange is Python-2-only and raises NameError on Python 3).
    for i in range(n):
        pdf[i] = gauss_c.mvn_logpdf(x[:, i], mu, cov, logdet)
    assert np.allclose(y, pdf)
def test_mvn_logpdf_same():
    """mvn_logpdf should be deterministic: repeated calls give identical rows."""
    util.npseed()
    mu = np.array([options['x_mean']], order='F')
    cov = np.array([[options['x_var']]], order='F')
    la.cho_factor(cov, cov)
    logdet = la.logdet(cov)
    n = 20
    m = 20
    x = np.array(np.random.uniform(-10, 10, n)[None], order='F')
    pdf = np.empty((m, n), order='F')
    # FIX: use range (xrange is Python-2-only and raises NameError on Python 3).
    for i in range(m):
        for j in range(n):
            pdf[i, j] = gauss_c.mvn_logpdf(x[:, j], mu, cov, logdet)
    assert (pdf[0] == pdf).all()
def test_int_exp_norm():
    """int_exp_norm's closed form should agree with a trapezoidal estimate."""
    def numeric_estimate(grid, c, m, S):
        # E[exp(c*x)] under N(m, S), approximated on a dense grid.
        weights = scipy.stats.norm.pdf(grid, m, np.sqrt(S))
        return np.trapz(np.exp(grid * c) * weights, grid)

    grid = np.linspace(-20, 20, 1000)
    for c, m, S in [(2, 0, 1), (1, 0, 1), (2, 1, 1), (2, 1, 2)]:
        expected = numeric_estimate(grid, c, m, S)
        actual = gauss_c.int_exp_norm(c, m, S)
        assert np.allclose(expected, actual)
def test_int_K():
    """Closed-form int_K should agree with its quadrature approximation."""
    util.npseed()
    bq = util.make_bq()
    xo = util.make_xo()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    Kxxo = np.array(bq.gp_l.Kxxo(xo), order='F')
    n_pts = bq.gp_l.x.shape[0]
    # Numeric estimate via the approximate integrator.
    numeric = np.empty(n_pts, order='F')
    gauss_c.approx_int_K(
        numeric, np.array(xo[None], order='F'),
        Kxxo, x_mean, x_cov)
    # Analytic value via the exact integrator.
    analytic = np.empty(n_pts, order='F')
    gauss_c.int_K(
        analytic, np.array(bq.gp_l.x[None], order='F'),
        bq.gp_l.K.h, np.array([bq.gp_l.K.w]),
        x_mean, x_cov)
    assert np.allclose(analytic, numeric, atol=1e-5)
def test_int_K_same():
    """int_K should be deterministic across repeated invocations."""
    util.npseed()
    bq = util.make_bq()
    xo = util.make_xo()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    vals = np.empty((bq.gp_l.x.shape[0], 20), order='F')
    # FIX: use range (xrange is Python-2-only and raises NameError on Python 3).
    for i in range(20):
        gauss_c.int_K(
            vals[:, i], np.array(bq.gp_l.x[None], order='F'),
            bq.gp_l.K.h, np.array([bq.gp_l.K.w]),
            x_mean, x_cov)
    assert (vals[:, [0]] == vals).all()
def test_approx_int_K_same():
    """approx_int_K should be deterministic across repeated invocations."""
    util.npseed()
    bq = util.make_bq()
    xo = util.make_xo()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    Kxxo = np.array(bq.gp_l.Kxxo(xo), order='F')
    vals = np.empty((bq.gp_l.x.shape[0], 20), order='F')
    xo = np.array(xo[None], order='F')
    # FIX: use range (xrange is Python-2-only and raises NameError on Python 3).
    for i in range(20):
        gauss_c.approx_int_K(
            vals[:, i], xo,
            np.array(Kxxo, order='F'),
            x_mean, x_cov)
    assert (vals[:, [0]] == vals).all()
def test_int_K1_K2():
    """Closed-form int_K1_K2 should agree with its quadrature approximation."""
    util.npseed()
    bq = util.make_bq()
    xo = util.make_xo()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    K1xxo = np.array(bq.gp_l.Kxxo(xo), order='F')
    K2xxo = np.array(bq.gp_log_l.Kxxo(xo), order='F')
    out_shape = (bq.gp_l.x.shape[0], bq.gp_log_l.x.shape[0])
    # Numeric estimate via the approximate integrator.
    numeric = np.empty(out_shape, order='F')
    gauss_c.approx_int_K1_K2(
        numeric, np.array(xo[None], order='F'),
        K1xxo, K2xxo, x_mean, x_cov)
    # Analytic value via the exact integrator.
    analytic = np.empty(out_shape, order='F')
    gauss_c.int_K1_K2(
        analytic,
        np.array(bq.gp_l.x[None], order='F'),
        np.array(bq.gp_log_l.x[None], order='F'),
        bq.gp_l.K.h, np.array([bq.gp_l.K.w], order='F'),
        bq.gp_log_l.K.h, np.array([bq.gp_log_l.K.w], order='F'),
        x_mean, x_cov)
    assert np.allclose(analytic, numeric, atol=1e-3)
def test_int_K1_K2_same():
    """int_K1_K2 should be deterministic across repeated invocations."""
    util.npseed()
    bq = util.make_bq()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    vals = np.empty((bq.gp_l.x.shape[0], bq.gp_log_l.x.shape[0], 20), order='F')
    # FIX: use range (xrange is Python-2-only and raises NameError on Python 3).
    for i in range(vals.shape[-1]):
        gauss_c.int_K1_K2(
            vals[:, :, i],
            np.array(bq.gp_l.x[None], order='F'),
            np.array(bq.gp_log_l.x[None], order='F'),
            bq.gp_l.K.h, np.array([bq.gp_l.K.w], order='F'),
            bq.gp_log_l.K.h, np.array([bq.gp_log_l.K.w], order='F'),
            x_mean, x_cov)
    assert (vals[:, :, [0]] == vals).all()
def test_approx_int_K1_K2_same():
    """approx_int_K1_K2 should be deterministic across repeated invocations."""
    util.npseed()
    bq = util.make_bq()
    xo = util.make_xo()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    K1xxo = np.array(bq.gp_l.Kxxo(xo), order='F')
    K2xxo = np.array(bq.gp_log_l.Kxxo(xo), order='F')
    vals = np.empty((bq.gp_l.x.shape[0], bq.gp_log_l.x.shape[0], 20), order='F')
    # FIX: use range (xrange is Python-2-only and raises NameError on Python 3).
    for i in range(vals.shape[-1]):
        gauss_c.approx_int_K1_K2(
            vals[:, :, i], np.array(xo[None], order='F'),
            K1xxo, K2xxo, x_mean, x_cov)
    assert (vals[:, :, [0]] == vals).all()
def test_int_int_K1_K2_K1():
    """Closed-form int_int_K1_K2_K1 should agree with its quadrature approximation."""
    util.npseed()
    bq = util.make_bq()
    xo = util.make_xo()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    K1xxo = np.array(bq.gp_l.Kxxo(xo), order='F')
    K2xoxo = np.array(bq.gp_log_l.Kxoxo(xo), order='F')
    n_pts = bq.gp_l.x.shape[0]
    # Numeric estimate via the approximate integrator.
    numeric = np.empty((n_pts, n_pts), order='F')
    gauss_c.approx_int_int_K1_K2_K1(
        numeric, np.array(xo[None], order='F'),
        K1xxo, K2xoxo, x_mean, x_cov)
    # Analytic value via the exact integrator.
    analytic = np.empty((n_pts, n_pts), order='F')
    gauss_c.int_int_K1_K2_K1(
        analytic, np.array(bq.gp_l.x[None], order='F'),
        bq.gp_l.K.h, np.array([bq.gp_l.K.w]),
        bq.gp_log_l.K.h, np.array([bq.gp_log_l.K.w]),
        x_mean, x_cov)
    assert np.allclose(analytic, numeric, atol=1e-5)
def test_int_int_K1_K2_K1_same():
    """int_int_K1_K2_K1 should be deterministic across repeated invocations."""
    util.npseed()
    bq = util.make_bq()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    vals = np.empty((bq.gp_l.x.shape[0], bq.gp_l.x.shape[0], 20), order='F')
    # FIX: use range (xrange is Python-2-only and raises NameError on Python 3).
    for i in range(vals.shape[-1]):
        gauss_c.int_int_K1_K2_K1(
            vals[:, :, i], np.array(bq.gp_l.x[None], order='F'),
            bq.gp_l.K.h, np.array([bq.gp_l.K.w]),
            bq.gp_log_l.K.h, np.array([bq.gp_log_l.K.w]),
            x_mean, x_cov)
    assert (vals[:, :, [0]] == vals).all()
def test_approx_int_int_K1_K2_K1_same():
    """approx_int_int_K1_K2_K1 should be deterministic across repeated invocations."""
    util.npseed()
    bq = util.make_bq()
    xo = util.make_xo()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    K1xxo = np.array(bq.gp_l.Kxxo(xo), order='F')
    K2xoxo = np.array(bq.gp_log_l.Kxoxo(xo), order='F')
    vals = np.empty((bq.gp_l.x.shape[0], bq.gp_l.x.shape[0], 20), order='F')
    # FIX: use range (xrange is Python-2-only and raises NameError on Python 3).
    for i in range(vals.shape[-1]):
        gauss_c.approx_int_int_K1_K2_K1(
            vals[:, :, i], np.array(xo[None], order='F'),
            K1xxo, K2xoxo, x_mean, x_cov)
    assert (vals[:, :, [0]] == vals).all()
def test_int_int_K1_K2():
    """Closed-form int_int_K1_K2 should agree with its quadrature approximation."""
    util.npseed()
    bq = util.make_bq()
    xo = util.make_xo()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    K1xoxo = np.array(bq.gp_l.Kxoxo(xo), order='F')
    K2xxo = np.array(bq.gp_log_l.Kxxo(xo), order='F')
    n_pts = bq.gp_log_l.x.shape[0]
    # Numeric estimate via the approximate integrator.
    numeric = np.empty(n_pts, order='F')
    gauss_c.approx_int_int_K1_K2(
        numeric, np.array(xo[None], order='F'),
        K1xoxo, K2xxo, x_mean, x_cov)
    # Analytic value via the exact integrator.
    analytic = np.empty(n_pts, order='F')
    gauss_c.int_int_K1_K2(
        analytic, np.array(bq.gp_log_l.x[None], order='F'),
        bq.gp_l.K.h, np.array([bq.gp_l.K.w]),
        bq.gp_log_l.K.h, np.array([bq.gp_log_l.K.w]),
        x_mean, x_cov)
    assert np.allclose(analytic, numeric, atol=1e-5)
def test_int_int_K1_K2_same():
    """int_int_K1_K2 should be deterministic across repeated invocations."""
    util.npseed()
    bq = util.make_bq()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    vals = np.empty((bq.gp_log_l.x.shape[0], 20), order='F')
    # FIX: use range (xrange is Python-2-only and raises NameError on Python 3).
    for i in range(vals.shape[-1]):
        gauss_c.int_int_K1_K2(
            vals[:, i], np.array(bq.gp_log_l.x[None], order='F'),
            bq.gp_l.K.h, np.array([bq.gp_l.K.w]),
            bq.gp_log_l.K.h, np.array([bq.gp_log_l.K.w]),
            x_mean, x_cov)
    assert (vals[:, [0]] == vals).all()
def test_approx_int_int_K1_K2_same():
    """approx_int_int_K1_K2 should be deterministic across repeated invocations."""
    util.npseed()
    bq = util.make_bq()
    xo = util.make_xo()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    K1xoxo = np.array(bq.gp_l.Kxoxo(xo), order='F')
    K2xxo = np.array(bq.gp_log_l.Kxxo(xo), order='F')
    vals = np.empty((bq.gp_log_l.x.shape[0], 20), order='F')
    # FIX: use range (xrange is Python-2-only and raises NameError on Python 3).
    for i in range(vals.shape[-1]):
        gauss_c.approx_int_int_K1_K2(
            vals[:, i], np.array(xo[None], order='F'),
            K1xoxo, K2xxo, x_mean, x_cov)
    assert (vals[:, [0]] == vals).all()
def test_int_int_K():
    """Closed-form int_int_K should agree with its quadrature approximation."""
    util.npseed()
    bq = util.make_bq()
    xo = util.make_xo()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    Kxoxo = np.array(bq.gp_l.Kxoxo(xo), order='F')
    # Numeric estimate via the approximate integrator.
    numeric = gauss_c.approx_int_int_K(
        np.array(xo[None], order='F'),
        Kxoxo, x_mean, x_cov)
    # Analytic value via the exact integrator.
    analytic = gauss_c.int_int_K(
        1, bq.gp_l.K.h, np.array([bq.gp_l.K.w]),
        x_mean, x_cov)
    assert np.allclose(analytic, numeric, atol=1e-6)
def test_int_int_K_same():
    """int_int_K should be deterministic across repeated invocations."""
    util.npseed()
    bq = util.make_bq()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    vals = np.empty(20)
    # FIX: use range (xrange is Python-2-only and raises NameError on Python 3).
    for i in range(vals.shape[-1]):
        vals[i] = gauss_c.int_int_K(
            1, bq.gp_l.K.h, np.array([bq.gp_l.K.w]),
            x_mean, x_cov)
    assert (vals[0] == vals).all()
def test_approx_int_int_K_same():
    """approx_int_int_K should be deterministic across repeated invocations."""
    util.npseed()
    bq = util.make_bq()
    xo = util.make_xo()
    x_mean = bq.options['x_mean']
    x_cov = bq.options['x_cov']
    Kxoxo = np.array(bq.gp_l.Kxoxo(xo), order='F')
    vals = np.empty(20)
    # FIX: use range (xrange is Python-2-only and raises NameError on Python 3).
    for i in range(vals.shape[-1]):
        vals[i] = gauss_c.approx_int_int_K(
            np.array(xo[None], order='F'),
            Kxoxo, x_mean, x_cov)
    assert (vals[0] == vals).all()
| 27.491049
| 82
| 0.581077
| 1,977
| 10,749
| 2.924633
| 0.049064
| 0.056728
| 0.044967
| 0.079903
| 0.901245
| 0.876167
| 0.872708
| 0.8523
| 0.831546
| 0.815981
| 0
| 0.023893
| 0.229045
| 10,749
| 390
| 83
| 27.561538
| 0.673826
| 0
| 0
| 0.700704
| 0
| 0
| 0.02577
| 0
| 0
| 0
| 0
| 0
| 0.073944
| 1
| 0.066901
| false
| 0
| 0.024648
| 0
| 0.09507
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0f2d17d68964a29ad200326d5b751864001a4e22
| 10,944
|
py
|
Python
|
test/test_attestation.py
|
verifiably/checker
|
7fafed98b61c99196a21924082c6f47f530186db
|
[
"Apache-2.0"
] | null | null | null |
test/test_attestation.py
|
verifiably/checker
|
7fafed98b61c99196a21924082c6f47f530186db
|
[
"Apache-2.0"
] | null | null | null |
test/test_attestation.py
|
verifiably/checker
|
7fafed98b61c99196a21924082c6f47f530186db
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
import sys
import os
import base64
import cbor2
import json
from OpenSSL import crypto
import pytest
from verifiably_checker import attestation
EXAMPLE_ATT_DOC = "hEShATgioFkQ36lpbW9kdWxlX2lkeCdpLTBkZGIwM2Y2ZjFmMTVjOGFmLWVuYzAxN2ZiOGIyOTc0MDc2ZTlmZGlnZXN0ZlNIQTM4NGl0aW1lc3RhbXAbAAABf7izKm5kcGNyc7AAWDBFWkIIdBlHeZ3Sik7ThjLPMQOcu6nsRfL2WKGnW9xSRk0i8iT10GC1xJdXQsTzdyMBWDC83wX+/Mqo5VvyyNbe6eebv/MeNL8oqZqhnmspw37oCyFKQUt2ByNu3yb8t4ZU5j8CWDD4b48YTg7EkF9W6uRMwog5HbVv1aNZnaXMQP+Jy/Uqd2gJBHEboeJCWkvj0ImWjg0DWDAhe44hkudiea3SeEKfbUrZOBZ9Yebf+uOf+3by5KKKlucDmwjXLs7A75UNZdEofF8EWDC3EpkZyNxuEEgQ+QGAE2iU7+rYfeWtZ31H+9SOhI4bdXGVd4wm/I1aQuhC9TVgnMIFWDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGWDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHWDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIWDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJWDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKWDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALWDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMWDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANWDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOWDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPWDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABrY2VydGlmaWNhdGVZAoAwggJ8MIICAaADAgECAhABf7iyl0B26QAAAABiO5I3MAoGCCqGSM49BAMDMIGOMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3RvbjEQMA4GA1UEBwwHU2VhdHRsZTEPMA0GA1UECgwGQW1hem9uMQwwCgYDVQQLDANBV1MxOTA3BgNVBAMMMGktMGRkYjAzZjZmMWYxNWM4YWYudXMtZWFzdC0yLmF3cy5uaXRyby1lbmNsYXZlczAeFw0yMjAzMjMyMTMzNDBaFw0yMjAzMjQwMDMzNDNaMIGTMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3RvbjEQMA4GA1UEBwwHU2VhdHRsZTEPMA0GA1UECgwGQW1hem9uMQwwCgYDVQQLDANBV1MxPjA8BgNVBAMMNWktMGRkYjAzZjZmMWYxNWM4YWYtZW5jMDE3ZmI4YjI5NzQwNzZlOS51cy1lYXN0LTIuYXdzMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEYVAdHBvcYG74rkX9Np+SEypVdd2cH5P+GRP4Mq2TJKSuHoPWEQRbhNN7eQeZAj1oAamHAiq0afoUJDS/+fAk0zoiorQSSA0GxK2OIW4p8llMMImXk/8IGtK5DXMDfnaCox0wGzAMBgNVHRMBAf8EAjAAMAsGA1UdDwQEAwIGwDAKBggqhkjOPQQDAwNpADBmAjEAwEHSHiRNnWtpTG3tu
XtU+T0I0eyipNsR5V1nkafxoAP/rSCq/Vo/kKqjLzkv2FawAjEAxKXISuHizzs92jC25UgF12ulZekUrXXa7ht6a7B+NzkSwj2fc3XSGYyijIiBiyNlaGNhYnVuZGxlhFkCFTCCAhEwggGWoAMCAQICEQD5MXVoG5Cv4R1GzLTk5/hWMAoGCCqGSM49BAMDMEkxCzAJBgNVBAYTAlVTMQ8wDQYDVQQKDAZBbWF6b24xDDAKBgNVBAsMA0FXUzEbMBkGA1UEAwwSYXdzLm5pdHJvLWVuY2xhdmVzMB4XDTE5MTAyODEzMjgwNVoXDTQ5MTAyODE0MjgwNVowSTELMAkGA1UEBhMCVVMxDzANBgNVBAoMBkFtYXpvbjEMMAoGA1UECwwDQVdTMRswGQYDVQQDDBJhd3Mubml0cm8tZW5jbGF2ZXMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT8AlTrpgjB82hw4prakL5GODKSc26JS//2ctmJREtQUeU0pLH22+PAvFgaMrexdgcO3hLWmj/qIRtm51LPfdHdCV9vE3D0FwhD2dwQASHkz2MBKAlmRIfJeWKEME3FP/SjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFJAltQ3ZBUfnlsOW+nKdz5mp30uWMA4GA1UdDwEB/wQEAwIBhjAKBggqhkjOPQQDAwNpADBmAjEAo38vkaHJvV7nuGJ8FpjSVQOOHwND+VtjqWKMPTmAlUWhHry/LjtV2K7ucbTD1q3zAjEAovObFgWycCil3UugabUBbmW0+96P4AYdalMZf5za9dlDvGH8K+sDy2/ujSMC89/2WQLCMIICvjCCAkSgAwIBAgIQQve3yZ2T44rLpUYkxjMl9TAKBggqhkjOPQQDAzBJMQswCQYDVQQGEwJVUzEPMA0GA1UECgwGQW1hem9uMQwwCgYDVQQLDANBV1MxGzAZBgNVBAMMEmF3cy5uaXRyby1lbmNsYXZlczAeFw0yMjAzMjIxOTIxMDBaFw0yMjA0MTEyMDIxMDBaMGQxCzAJBgNVBAYTAlVTMQ8wDQYDVQQKDAZBbWF6b24xDDAKBgNVBAsMA0FXUzE2MDQGA1UEAwwtZWYwN2Q2NmIxYTFhOGVhNC51cy1lYXN0LTIuYXdzLm5pdHJvLWVuY2xhdmVzMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE+n10EzfrTy9GPFkeyyEdg2tdHb1O5zF9Wxcpm+b0lrpJJfrfXfOa260ffyiaTj5oL9Huvm/tAztWEl3XfH0uCVkUHHVKh15r5wLABh3U9B/RDjCEPaaevaTuMzDH00p8o4HVMIHSMBIGA1UdEwEB/wQIMAYBAf8CAQIwHwYDVR0jBBgwFoAUkCW1DdkFR+eWw5b6cp3PmanfS5YwHQYDVR0OBBYEFCANb7uC3SNJHQSr4qht/aG6TVVLMA4GA1UdDwEB/wQEAwIBhjBsBgNVHR8EZTBjMGGgX6BdhltodHRwOi8vYXdzLW5pdHJvLWVuY2xhdmVzLWNybC5zMy5hbWF6b25hd3MuY29tL2NybC9hYjQ5NjBjYy03ZDYzLTQyYmQtOWU5Zi01OTMzOGNiNjdmODQuY3JsMAoGCCqGSM49BAMDA2gAMGUCMQDjFW4xmDnpdxAcsENgL7iaKoo0wXno0y6vTZMcO15Co2oJ/K5ofFviOrW/lHoZklQCMAZQWEFRjHI6V3pCP1vIo+2aeOy7Hxq+SO8H9gIyYVF/LJGz0SjBvjSPNau+4NhX01kDFzCCAxMwggKaoAMCAQICEEIDVYGE9Du1Rg/6g2FrJrgwCgYIKoZIzj0EAwMwZDELMAkGA1UEBhMCVVMxDzANBgNVBAoMBkFtYXpvbjEMMAoGA1UECwwDQVdTMTYwNAYDVQQDDC1lZjA3ZDY2YjFhMWE4ZWE0LnVzLWVhc3QtMi5hd3Mubml0cm8tZW5jbGF2ZXMwHhcNMjIwMzIzM
DI0MTQ2WhcNMjIwMzI4MjM0MTQ2WjCBiTE8MDoGA1UEAwwzM2UzNzRiMTc3ODcyZjdkZS56b25hbC51cy1lYXN0LTIuYXdzLm5pdHJvLWVuY2xhdmVzMQwwCgYDVQQLDANBV1MxDzANBgNVBAoMBkFtYXpvbjELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAldBMRAwDgYDVQQHDAdTZWF0dGxlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEZNXcZp5fnJCyAnZaBRF9BwizwA298vNGPSepHz1uR2z3OdkWfwEliAIYO7mddPRslaxwGSpoip4p8l3oCBUM4MfIWlVfIQJHwXSL+lZ0UcAey+BwmTPTaVmFbSGj4fbUo4HqMIHnMBIGA1UdEwEB/wQIMAYBAf8CAQEwHwYDVR0jBBgwFoAUIA1vu4LdI0kdBKviqG39obpNVUswHQYDVR0OBBYEFOgZbOhqmAjQQ/1mJkxtmZaQhW8XMA4GA1UdDwEB/wQEAwIBhjCBgAYDVR0fBHkwdzB1oHOgcYZvaHR0cDovL2NybC11cy1lYXN0LTItYXdzLW5pdHJvLWVuY2xhdmVzLnMzLnVzLWVhc3QtMi5hbWF6b25hd3MuY29tL2NybC81ZTcyMWUyYi1kOGJlLTRkYTItYTc4Zi0yMzg4NDhlYTI4NmEuY3JsMAoGCCqGSM49BAMDA2cAMGQCMCAjnlyIcyP6/tu3e/kCx9UVBUlqlPXq9VwobxunHrJNNgOWwV0+PWLacpEwdipx/QIwC6/OZnlgBYpPUHPdaEc/XPOnS/Yme2M1xNOgWWarKESOIBxcmE1/iNsjNeRcVgUIWQKCMIICfjCCAgWgAwIBAgIVAMofM8igEeqo6uODaku9GkTQia5aMAoGCCqGSM49BAMDMIGJMTwwOgYDVQQDDDMzZTM3NGIxNzc4NzJmN2RlLnpvbmFsLnVzLWVhc3QtMi5hd3Mubml0cm8tZW5jbGF2ZXMxDDAKBgNVBAsMA0FXUzEPMA0GA1UECgwGQW1hem9uMQswCQYDVQQGEwJVUzELMAkGA1UECAwCV0ExEDAOBgNVBAcMB1NlYXR0bGUwHhcNMjIwMzIzMDk1NzQxWhcNMjIwMzI0MDk1NzQxWjCBjjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxDzANBgNVBAoMBkFtYXpvbjEMMAoGA1UECwwDQVdTMTkwNwYDVQQDDDBpLTBkZGIwM2Y2ZjFmMTVjOGFmLnVzLWVhc3QtMi5hd3Mubml0cm8tZW5jbGF2ZXMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATD7s57eHTBp5Da5qbzsEKKB3tlH3HxYMNPjymWWgc1vcLbmCYcfbULft4P6yCdRmCHqSKZaIufc42nlR0oYL8K2MLMtD2imjJYHw/Qmg/ga2385njRKIkZ/adojlI2+2yjJjAkMBIGA1UdEwEB/wQIMAYBAf8CAQAwDgYDVR0PAQH/BAQDAgIEMAoGCCqGSM49BAMDA2cAMGQCMF39MTCngcnUGhAf7xehXgoHkTLkcYjinbCM2zbfvysUx/EER0hUdfHHz5jYDJrBmQIwFhVYbeF59zIRh2zqb7u8m8A9IXLvShmUKdWFuPaH0OlQKBDhnyrtsRfiyVAKO+kranB1YmxpY19rZXn2aXVzZXJfZGF0YVggeyJyZXN1bHQiOiBmYWxzZSwgImJhbGFuY2UiOiAxNX1lbm9uY2X2WGBJXNbQEOCpXf5bFTUbdTpG9Ay61YHfqUj0CLu/SJCMuXitkDYDVPIP/ucmlYCYS5GvtLfNK/vlsP4vpdyKyL2NWUCdZ6w2r2rXNGBPROO9ONh6jY4TBV20cpowKHYwdAo="
def test_check_correct_pcr():
    """verify_pcrs accepts a document whose PCR values match exactly."""
    measured = {
        0: bytes.fromhex('a123'),
        1: bytes.fromhex('b456'),
    }
    doc_obj = {'pcrs': measured}
    expected_pcrs = {'0': 'a123', '1': 'b456'}
    assert attestation.verify_pcrs(doc_obj, expected_pcrs) == True
def test_check_wrong_pcr():
    """verify_pcrs rejects a document when an expected PCR differs."""
    measured = {
        0: bytes.fromhex('a123'),
        1: bytes.fromhex('b456'),
    }
    doc_obj = {'pcrs': measured}
    # PCR 0 is deliberately wrong: 'a12' instead of the measured 'a123'.
    expected_pcrs = {'0': 'a12', '1': 'b456'}
    assert attestation.verify_pcrs(doc_obj, expected_pcrs) == False
def test_validate_signature():
    """A pristine example attestation document passes signature validation."""
    raw = base64.b64decode(EXAMPLE_ATT_DOC)
    # The document decodes to a CBOR array; index 2 is the signed payload,
    # which is itself CBOR-encoded.
    data = cbor2.loads(raw)
    doc = data[2]
    doc_obj = cbor2.loads(doc)
    assert attestation.validate_signature(data, doc, doc_obj) == True
def test_validate_signature_modify_user_data():
    """Tampering with the payload's user_data must break validation."""
    raw = base64.b64decode(EXAMPLE_ATT_DOC)
    data = cbor2.loads(raw)
    doc = data[2]
    doc_obj = cbor2.loads(doc)
    # Flip the embedded result flag inside user_data, then re-serialize
    # the payload so the signature no longer matches it.
    tampered = json.loads(doc_obj['user_data'])
    tampered['result'] = True
    doc_obj['user_data'] = json.dumps(tampered)
    doc = cbor2.dumps(doc_obj)
    assert attestation.validate_signature(data, doc, doc_obj) == False
def test_validate_signature_modify_signature():
    """Corrupting the signature bytes must make validation fail.

    The previous version overwrote the first hex digit with '0', which is
    a no-op whenever the signature already begins with '0' — in that case
    the document would still validate and the test would fail spuriously.
    XOR-ing the first byte guarantees the signature actually changes.
    """
    attestation_doc = base64.b64decode(EXAMPLE_ATT_DOC)
    data = cbor2.loads(attestation_doc)
    doc = data[2]
    doc_obj = cbor2.loads(doc)
    # Flip every bit of the first signature byte (data[3] is the raw
    # signature) — always a real modification, regardless of its value.
    signature = data[3]
    data[3] = bytes([signature[0] ^ 0xFF]) + signature[1:]
    assert attestation.validate_signature(data, doc, doc_obj) == False
def test_validate_certificate():
    """A well-formed leaf certificate passes PKI validation."""
    # DER-encoded X.509 certificate, hex-encoded. It appears to be the
    # example document's own leaf certificate (same instance id and
    # validity dates as EXAMPLE_ATT_DOC) — TODO confirm against the doc.
    certificate = '3082027c30820201a0030201020210017fb8b2974076e900000000623b9237300a06082a8648ce3d04030330818e310b30090603550406130255533113301106035504080c0a57617368696e67746f6e3110300e06035504070c0753656174746c65310f300d060355040a0c06416d617a6f6e310c300a060355040b0c034157533139303706035504030c30692d30646462303366366631663135633861662e75732d656173742d322e6177732e6e6974726f2d656e636c61766573301e170d3232303332333231333334305a170d3232303332343030333334335a308193310b30090603550406130255533113301106035504080c0a57617368696e67746f6e3110300e06035504070c0753656174746c65310f300d060355040a0c06416d617a6f6e310c300a060355040b0c03415753313e303c06035504030c35692d30646462303366366631663135633861662d656e63303137666238623239373430373665392e75732d656173742d322e6177733076301006072a8648ce3d020106052b810400220362000461501d1c1bdc606ef8ae45fd369f92132a5575dd9c1f93fe1913f832ad9324a4ae1e83d611045b84d37b790799023d6801a987022ab469fa142434bff9f024d33a22a2b412480d06c4ad8e216e29f2594c30899793ff081ad2b90d73037e7682a31d301b300c0603551d130101ff04023000300b0603551d0f0404030206c0300a06082a8648ce3d0403030369003066023100c041d21e244d9d6b694c6dedb97b54f93d08d1eca2a4db11e55d6791a7f1a003ffad20aafd5a3f90aaa32f392fd856b0023100c4a5c84ae1e2cf3b3dda30b6e54805d76ba565e914ad75daee1b7a6bb07e373912c23d9f7375d2198ca28c88818b2365'
    attestation_doc = base64.b64decode(EXAMPLE_ATT_DOC)
    data = cbor2.loads(attestation_doc)
    doc = data[2]
    doc_obj = cbor2.loads(doc)
    # Substitute the certificate into the decoded payload and check that
    # PKI validation still succeeds against the bundled CA chain.
    doc_obj['certificate'] = bytes.fromhex(certificate)
    assert attestation.validate_pki(doc_obj) == True
def test_validate_wrong_certificate():
    """A corrupted certificate must raise during PKI validation."""
    # Same certificate as in test_validate_certificate but with the first
    # hex digit changed from '3' to '1', which breaks the DER SEQUENCE
    # header — pyOpenSSL is expected to raise crypto.Error on parse.
    certificate = '1082027c30820201a0030201020210017fb8b2974076e900000000623b9237300a06082a8648ce3d04030330818e310b30090603550406130255533113301106035504080c0a57617368696e67746f6e3110300e06035504070c0753656174746c65310f300d060355040a0c06416d617a6f6e310c300a060355040b0c034157533139303706035504030c30692d30646462303366366631663135633861662e75732d656173742d322e6177732e6e6974726f2d656e636c61766573301e170d3232303332333231333334305a170d3232303332343030333334335a308193310b30090603550406130255533113301106035504080c0a57617368696e67746f6e3110300e06035504070c0753656174746c65310f300d060355040a0c06416d617a6f6e310c300a060355040b0c03415753313e303c06035504030c35692d30646462303366366631663135633861662d656e63303137666238623239373430373665392e75732d656173742d322e6177733076301006072a8648ce3d020106052b810400220362000461501d1c1bdc606ef8ae45fd369f92132a5575dd9c1f93fe1913f832ad9324a4ae1e83d611045b84d37b790799023d6801a987022ab469fa142434bff9f024d33a22a2b412480d06c4ad8e216e29f2594c30899793ff081ad2b90d73037e7682a31d301b300c0603551d130101ff04023000300b0603551d0f0404030206c0300a06082a8648ce3d0403030369003066023100c041d21e244d9d6b694c6dedb97b54f93d08d1eca2a4db11e55d6791a7f1a003ffad20aafd5a3f90aaa32f392fd856b0023100c4a5c84ae1e2cf3b3dda30b6e54805d76ba565e914ad75daee1b7a6bb07e373912c23d9f7375d2198ca28c88818b2365'
    attestation_doc = base64.b64decode(EXAMPLE_ATT_DOC)
    data = cbor2.loads(attestation_doc)
    doc = data[2]
    doc_obj = cbor2.loads(doc)
    doc_obj['certificate'] = bytes.fromhex(certificate)
    with pytest.raises(crypto.Error):
        attestation.validate_pki(doc_obj)
| 100.40367
| 5,924
| 0.92288
| 396
| 10,944
| 25.292929
| 0.335859
| 0.011382
| 0.007788
| 0.014477
| 0.113818
| 0.103834
| 0.099441
| 0.099441
| 0.094748
| 0.094748
| 0
| 0.260799
| 0.04386
| 10,944
| 108
| 5,925
| 101.333333
| 0.696388
| 0.005026
| 0
| 0.447761
| 0
| 0.014925
| 0.78578
| 0.777512
| 0
| 1
| 0
| 0
| 0.089552
| 1
| 0.104478
| false
| 0
| 0.119403
| 0
| 0.223881
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0f6d45df57dca5b17eed065e874e0d6a7e4c827c
| 127
|
py
|
Python
|
CA117/Lab_8/swapletters_51.py
|
PRITI1999/OneLineWonders
|
91a7368e0796e5a3b5839c9165f9fbe5460879f5
|
[
"MIT"
] | 6
|
2016-02-04T00:15:20.000Z
|
2019-10-13T13:53:16.000Z
|
CA117/Lab_8/swapletters_51.py
|
PRITI1999/OneLineWonders
|
91a7368e0796e5a3b5839c9165f9fbe5460879f5
|
[
"MIT"
] | 2
|
2016-03-14T04:01:36.000Z
|
2019-10-16T12:45:34.000Z
|
CA117/Lab_8/swapletters_51.py
|
PRITI1999/OneLineWonders
|
91a7368e0796e5a3b5839c9165f9fbe5460879f5
|
[
"MIT"
] | 10
|
2016-02-09T14:38:32.000Z
|
2021-05-25T08:16:26.000Z
|
#!/usr/bin/env python3
"""Swap each adjacent pair of characters in the first command-line argument.

Rewritten from an obfuscated immediately-invoked lambda that used an
``__import__("sys")`` hack and printed as an import-time side effect.
"""
import sys


def swap_pairs(text):
    """Return *text* with every adjacent pair of characters swapped.

    An odd trailing character is kept in place, e.g. 'abcde' -> 'badce'.
    An empty string is returned unchanged.
    """
    swapped = [text[i + 1] + text[i] for i in range(0, len(text) - 1, 2)]
    tail = text[-1] if len(text) % 2 else ''
    return ''.join(swapped) + tail


if __name__ == '__main__':
    print(swap_pairs(sys.argv[1]))
| 63.5
| 126
| 0.582677
| 29
| 127
| 2.413793
| 0.62069
| 0.057143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057851
| 0.047244
| 127
| 1
| 127
| 127
| 0.520661
| 0
| 0
| 0
| 0
| 0
| 0.023622
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
7e859b407f053ddd022ae4460df909ffd88041ed
| 59,341
|
py
|
Python
|
luma/oled/const.py
|
morpheus200/luma.oled
|
74492ef4ea3374c02d139731855fff7a43c79f7a
|
[
"MIT"
] | 582
|
2017-01-17T00:01:33.000Z
|
2022-03-30T17:46:42.000Z
|
luma/oled/const.py
|
morpheus200/luma.oled
|
74492ef4ea3374c02d139731855fff7a43c79f7a
|
[
"MIT"
] | 182
|
2017-01-12T15:01:19.000Z
|
2022-03-24T21:06:10.000Z
|
luma/oled/const.py
|
morpheus200/luma.oled
|
74492ef4ea3374c02d139731855fff7a43c79f7a
|
[
"MIT"
] | 112
|
2017-01-15T02:29:26.000Z
|
2022-03-30T17:36:56.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014-18 Richard Hull and contributors
# See LICENSE.rst for details.
from luma.core.const import common
class ssd1306(common):
    """Command opcodes for the SSD1306 OLED controller.

    Extends the shared command set in ``luma.core.const.common``. The
    values appear to match the SSD1306 datasheet command table — each
    constant is a one-byte command sent during display setup.
    """
    CHARGEPUMP = 0x8D            # charge-pump setting (followed by enable/disable arg)
    COLUMNADDR = 0x21            # set column start/end address
    COMSCANDEC = 0xC8            # COM output scan direction: remapped (decrement)
    COMSCANINC = 0xC0            # COM output scan direction: normal (increment)
    EXTERNALVCC = 0x1            # arg value: external display voltage supply
    MEMORYMODE = 0x20            # set memory addressing mode
    PAGEADDR = 0x22              # set page start/end address
    SETCOMPINS = 0xDA            # COM pins hardware configuration
    SETDISPLAYCLOCKDIV = 0xD5    # display clock divide ratio / oscillator frequency
    SETDISPLAYOFFSET = 0xD3      # vertical display offset
    SETHIGHCOLUMN = 0x10         # higher column start address (page addressing)
    SETLOWCOLUMN = 0x00          # lower column start address (page addressing)
    SETPRECHARGE = 0xD9          # pre-charge period
    SETSEGMENTREMAP = 0xA1       # segment remap (column 127 mapped to SEG0)
    SETSTARTLINE = 0x40          # display start line
    SETVCOMDETECT = 0xDB         # VCOMH deselect level
    SWITCHCAPVCC = 0x2           # arg value: generate display voltage internally
# The SH1106 driver reuses the SSD1306 command constants unchanged.
sh1106 = ssd1306
class ssd1322(common):
    """Command opcodes for the SSD1322 OLED controller (extends ``common``)."""
    DISPLAYON = 0xAF     # turn display on
    DISPLAYOFF = 0xAE    # turn display off (sleep)
    SETCONTRAST = 0xC1   # set contrast current
class ssd1362(common):
    """Command opcodes for the SSD1362 OLED controller (extends ``common``).

    Same on/off opcodes as ssd1322, but a different contrast command (0x81).
    """
    DISPLAYON = 0xAF     # turn display on
    DISPLAYOFF = 0xAE    # turn display off (sleep)
    SETCONTRAST = 0x81   # set contrast level
class ws0010(object):
"""
Values to be used by the ws0010 class during initialization of the display.
Contains FONTDATA to enable the ws0010 class to embed the same fonts that
are contained within any display that uses the ws0010 controller
.. versionadded:: 3.6.0
"""
CLEAR = 0x01
HOME = 0x02
ENTRY = 0x06
DISPLAYOFF = 0x08
DISPLAYON = 0x0C
POWEROFF = 0x13
POWERON = 0x17
GRAPHIC = 0x08
CHAR = 0x00
FUNCTIONSET = 0x29
DL8 = 0x10
DL4 = 0x00
DDRAMADDR = 0x80
CGRAMADDR = 0x40
FONTDATA = {
'metrics': [
# FT00 ENGLISH_JAPANESE 5x8 METRICS
{
'name': 'FT00',
'index': range(16, 256),
'xwidth': 6,
'cell_size': (5, 10),
'glyph_size': (5, 8),
'table_size': (800, 20)
},
# FT01 WESTERN EUROPEAN 5x8 METRICS
{
'name': 'FT01',
'index': range(16, 256),
'xwidth': 6,
'cell_size': (5, 10),
'glyph_size': (5, 8),
'table_size': (800, 20)
},
# FT10 ENGLISH_RUSSIAN 5x8 METRICS
{
'name': 'FT10',
'index': range(16, 256),
'xwidth': 6,
'cell_size': (5, 10),
'glyph_size': (5, 8),
'table_size': (800, 20)
},
# FT11 WESTERN_EUROPEAN_II 5x8 METRICS
{
'name': 'FT11',
'index': range(16, 256),
'xwidth': 6,
'cell_size': (5, 10),
'glyph_size': (5, 8),
'table_size': (800, 20)
},
],
# TODO: Complete (and verify) FONT Mappings (Issue #296)
'mappings': [
{ # FT00 ENGLISH_JAPANESE CHARACTER FONT
# Missing maps for
# 10-1F, 80-9f, a0, a1, e7, e9, ea, f8, f9, fa, fc, fe
0x0410: 0x41, # А CYRILLIC CAPITAL LETTER A
0x0412: 0x42, # В CYRILLIC CAPITAL LETTER VE
0x0421: 0x43, # С CYRILLIC CAPITAL LETTER ES
0x0415: 0x45, # Е CYRILLIC CAPITAL LETTER IE
0x041d: 0x48, # Н CYRILLIC CAPITAL LETTER EN
0x041a: 0x4b, # К CYRILLIC CAPITAL LETTER KA
0x041c: 0x4d, # М CYRILLIC CAPITAL LETTER EM
0x041e: 0x4f, # О CYRILLIC CAPITAL LETTER O
0x0420: 0x50, # Р CYRILLIC CAPITAL LETTER ER
0x0422: 0x54, # Т CYRILLIC CAPITAL LETTER TE
0x0425: 0x58, # Х CYRILLIC CAPITAL LETTER HA
0x042c: 0x62, # Ь CYRILLIC CAPITAL LETTER SOFT SIGN
0x00a5: 0x5c, # ¥ YEN SIGN
0x2192: 0x7e, # → RIGHTWARDS ARROW
0x2190: 0x7f, # ← LEFTWARDS ARROW
0x300c: 0xa2, # 「 LEFT CORNER BRACKET
0x300d: 0xa3, # 」 RIGHT CORNER BRACKET
0x30fd: 0xa4, # ヽ KATAKANA ITERATION MARK
0x30f8: 0xa5, # ヸ KATAKANA LETTER VI
0x30f2: 0xa6, # ヲ KATAKANA LETTER WO
0x30a1: 0xa7, # ァ KATAKANA LETTER SMALL A
0x30a3: 0xa8, # ィ KATAKANA LETTER SMALL I
0x30a5: 0xa9, # ゥ KATAKANA LETTER SMALL U
0x30a7: 0xaa, # ェ KATAKANA LETTER SMALL E
0x30a9: 0xab, # ォ KATAKANA LETTER SMALL O
0x30e3: 0xac, # ャ KATAKANA LETTER SMALL YA
0x30e5: 0xad, # ュ KATAKANA LETTER SMALL YU
0x30e7: 0xae, # ョ KATAKANA LETTER SMALL YO
0x30c3: 0xaf, # ッ KATAKANA LETTER SMALL TU
0x30fc: 0xb0, # ー KATAKANA-HIRAGANA PROLONGED SOUND MARK
0x30a2: 0xb1, # ア KATAKANA LETTER A
0x30a4: 0xb2, # イ KATAKANA LETTER I
0x30a6: 0xb3, # ウ KATAKANA LETTER U
0x30a8: 0xb4, # エ KATAKANA LETTER E
0x30aa: 0xb5, # オ KATAKANA LETTER O
0x30ab: 0xb6, # カ KATAKANA LETTER KA
0x30ad: 0xb7, # キ KATAKANA LETTER KI
0x30af: 0xb8, # ク KATAKANA LETTER KU
0x30b1: 0xb9, # ケ KATAKANA LETTER KE
0x30b3: 0xba, # コ KATAKANA LETTER KO
0x30b5: 0xbb, # サ KATAKANA LETTER SA
0x30b7: 0xbc, # シ KATAKANA LETTER SI
0x30b9: 0xbd, # ス KATAKANA LETTER SU
0x30bb: 0xbe, # セ KATAKANA LETTER SE
0x30bd: 0xbf, # ソ KATAKANA LETTER SO
0x30bf: 0xc0, # タ KATAKANA LETTER TA
0x30c1: 0xc1, # チ KATAKANA LETTER TI
0x30c4: 0xc2, # ツ KATAKANA LETTER TU
0x30c6: 0xc3, # テ KATAKANA LETTER TE
0x30c8: 0xc4, # ト KATAKANA LETTER TO
0x30ca: 0xc5, # ナ KATAKANA LETTER NA
0x30cb: 0xc6, # ニ KATAKANA LETTER NI
0x30cc: 0xc7, # ヌ KATAKANA LETTER NU
0x30cd: 0xc8, # ネ KATAKANA LETTER NE
0x30ce: 0xc9, # ノ KATAKANA LETTER NO
0x30cf: 0xca, # ハ KATAKANA LETTER HA
0x30d2: 0xcb, # ヒ KATAKANA LETTER HI
0x30d5: 0xcc, # フ KATAKANA LETTER HU
0x30d8: 0xcd, # ヘ KATAKANA LETTER HE
0x30db: 0xce, # ホ KATAKANA LETTER HO
0x30de: 0xcf, # マ KATAKANA LETTER MA
0x30df: 0xd0, # ミ KATAKANA LETTER MI
0x30e0: 0xd1, # ム KATAKANA LETTER MU
0x30e1: 0xd2, # メ KATAKANA LETTER ME
0x30e2: 0xd3, # モ KATAKANA LETTER MO
0x30e4: 0xd4, # ヤ KATAKANA LETTER YA
0x30e6: 0xd5, # ユ KATAKANA LETTER YU
0x30e8: 0xd6, # ヨ KATAKANA LETTER YO
0x30e9: 0xd7, # ラ KATAKANA LETTER RA
0x30ea: 0xd8, # リ KATAKANA LETTER RI
0x30eb: 0xd9, # ル KATAKANA LETTER RU
0x30ec: 0xda, # レ KATAKANA LETTER RE
0x30ed: 0xdb, # ロ KATAKANA LETTER RO
0x30ef: 0xdc, # ワ KATAKANA LETTER WA
0x30f3: 0xdd, # ン KATAKANA LETTER N
0x309b: 0xde, # ゛ KATAKANA-HIRAGANA VOICED SOUND MARK
0x309c: 0xdf, # ゜ KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
0x03b1: 0xe0, # α GREEK SMALL LETTER ALPHA
0x00e4: 0xe1, # ä LATIN SMALL LETTER A WITH DIAERESIS
0x03b2: 0xe2, # β GREEK SMALL LETTER BETA (5x10)
0x0385: 0xe3, # ΅ GREEK DIALYTIKA TONOS
0x00b5: 0xe4, # µ MICRO SIGN (5x10)
0x03bc: 0xe4, # μ GREEK SMALL LETTER MU (5x10)
0x03c3: 0xe5, # σ GREEK SMALL LETTER SIGMA
0x03c1: 0xe6, # ρ GREEK SMALL LETTER RHO (5x10)
0x221a: 0xe8, # √ SQUARE ROOT
0x02e3: 0xeb, # ˣ MODIFIER LETTER SMALL X
0x00a2: 0xec, # ¢ CENT SIGN
0x2c60: 0xed, # Ⱡ LATIN CAPITAL LETTER L WITH DOUBLE BAR
0x00f1: 0xee, # ñ LATIN SMALL LETTER N WITH TILDE
0x00f6: 0xef, # ö LATIN SMALL LETTER O WITH DIAERESIS
0x0398: 0xf2, # Θ GREEK CAPITAL LETTER THETA
0x03f4: 0xf2, # ϴ GREEK CAPITAL THETA SYMBOL
0x221e: 0xf3, # ∞ INFINITY
0x03a9: 0xf4, # Ω GREEK CAPITAL LETTER OMEGA
0x00fc: 0xf5, # ü LATIN SMALL LETTER U WITH DIAERESIS
0x03a3: 0xf6, # Σ GREEK CAPITAL LETTER SIGMA
0x03c0: 0xf7, # π GREEK SMALL LETTER PI
0xa68b: 0xfb, # ꚋ CYRILLIC SMALL LETTER TE WITH MIDDLE HOOK
0x00f7: 0xfd, # ÷ DIVISION SIGN
0x25ae: 0xff, # ▮ BLACK VERTICAL RECTANGLE
},
{ # FT01 WESTERN EUROPEAN CHARACTER FONT
# Missing mappings 9f, a0, aa, ab, ac, ad, ae, c5, c8
# Dropping e1, e2, e3 conflict with 5x8 versions at be, bd, bf
0x0410: 0x41, # А CYRILLIC CAPITAL LETTER A
0x0412: 0x42, # В CYRILLIC CAPITAL LETTER VE
0x0421: 0x43, # С CYRILLIC CAPITAL LETTER ES
0x0415: 0x45, # Е CYRILLIC CAPITAL LETTER IE
0x041d: 0x48, # Н CYRILLIC CAPITAL LETTER EN
0x041a: 0x4b, # К CYRILLIC CAPITAL LETTER KA
0x041c: 0x4d, # М CYRILLIC CAPITAL LETTER EM
0x041e: 0x4f, # О CYRILLIC CAPITAL LETTER O
0x0420: 0x50, # Р CYRILLIC CAPITAL LETTER ER
0x0422: 0x54, # Т CYRILLIC CAPITAL LETTER TE
0x0425: 0x58, # Х CYRILLIC CAPITAL LETTER HA
0x00a5: 0x5c, # ¥ YEN SIGN
0x042c: 0x62, # Ь CYRILLIC CAPITAL LETTER SOFT SIGN
0x2192: 0x7e, # → RIGHTWARDS ARROW
0x2190: 0x7f, # ← LEFTWARDS ARROW
0x00db: 0x80, # Û LATIN CAPITAL LETTER U WITH CIRCUMFLEX
0x00d9: 0x81, # Ù LATIN CAPITAL LETTER U WITH GRAVE
0x00da: 0x82, # Ú LATIN CAPITAL LETTER U WITH ACUTE
0x00dc: 0x83, # Ü LATIN CAPITAL LETTER U WITH DIAERESIS
0x00fb: 0x84, # û LATIN SMALL LETTER U WITH CIRCUMFLEX
0x00f9: 0x85, # ù LATIN SMALL LETTER U WITH GRAVE
0x00fa: 0x86, # ú LATIN SMALL LETTER U WITH ACUTE
0x00d4: 0x87, # Ô LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00d2: 0x88, # Ò LATIN CAPITAL LETTER O WITH GRAVE
0x00d3: 0x89, # Ó LATIN CAPITAL LETTER O WITH ACUTE
0x00f4: 0x8b, # ô LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f2: 0x8c, # ò LATIN SMALL LETTER O WITH GRAVE
0x00f3: 0x8d, # ó LATIN SMALL LETTER O WITH ACUTE
0x00f6: 0x8e, # ö LATIN SMALL LETTER O WITH DIAERESIS
0x00bf: 0x8f, # ¿ INVERTED QUESTION MARK
0x00ca: 0x90, # Ê LATIN CAPITAL LETTER E WITH CIRCUMFLEX
0x00c8: 0x91, # È LATIN CAPITAL LETTER E WITH GRAVE
0x00c9: 0x92, # É LATIN CAPITAL LETTER E WITH ACUTE
0x00cb: 0x93, # Ë LATIN CAPITAL LETTER E WITH DIAERESIS
0x00ea: 0x94, # ê LATIN SMALL LETTER E WITH CIRCUMFLEX
0x00e8: 0x95, # è LATIN SMALL LETTER E WITH GRAVE
0x00e9: 0x96, # é LATIN SMALL LETTER E WITH ACUTE
0x00eb: 0x97, # ë LATIN SMALL LETTER E WITH DIAERESIS
0x00c1: 0x98, # Á LATIN CAPITAL LETTER A WITH ACUTE
0x00c4: 0x99, # Ä LATIN CAPITAL LETTER A WITH DIAERESIS
0x0227: 0x9a, # ȧ LATIN SMALL LETTER A WITH DOT ABOVE
0x00e2: 0x9b, # â LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e0: 0x9c, # à LATIN SMALL LETTER A WITH GRAVE
0x00e1: 0x9d, # á LATIN SMALL LETTER A WITH ACUTE
0x00e4: 0x9e, # ä LATIN SMALL LETTER A WITH DIAERESIS
0x00ee: 0xa1, # î LATIN SMALL LETTER I WITH CIRCUMFLEX
0x00ec: 0xa2, # ì LATIN SMALL LETTER I WITH GRAVE
0x00ed: 0xa3, # í LATIN SMALL LETTER I WITH ACUTE
0x00ef: 0xa4, # ï LATIN SMALL LETTER I WITH DIAERESIS
0x00a1: 0xa5, # ¡ INVERTED EXCLAMATION MARK
0x00d1: 0xa6, # Ñ LATIN CAPITAL LETTER N WITH TILDE
0x00f1: 0xa7, # ñ LATIN SMALL LETTER N WITH TILDE
0x00c6: 0xaf, # Æ LATIN CAPITAL LETTER AE
0x00a7: 0xb0, # § SECTION SIGN
0x00b1: 0xb1, # ± PLUS-MINUS SIGN
0x0133: 0xb2, # ij LATIN SMALL LIGATURE IJ
0x2191: 0xb3, # ↑ UPWARDS ARROW
0x2193: 0xb4, # ↓ DOWNWARDS ARROW
0x21b2: 0xb5, # ↲ DOWNWARDS ARROW WITH TIP LEFTWARDS
0x2a0d: 0xb6, # ⨍ FINITE PART INTEGRAL
0x00a3: 0xb7, # £ POUND SIGN
0x21e5: 0xb8, # ⇥ RIGHTWARDS ARROW TO BAR
0x2908: 0xb9, # ⤈ DOWNWARDS ARROW WITH HORIZONTAL STROKE
0x2909: 0xba, # ⤉ UPWARDS ARROW WITH HORIZONTAL STROKE
0x2913: 0xbb, # ⤓ DOWNWARDS ARROW TO BAR
0x00b6: 0xbc, # ¶ PILCROW SIGN
0x00bd: 0xbd, # ½ VULGAR FRACTION ONE HALF
0x2153: 0xbe, # ⅓ VULGAR FRACTION ONE THIRD
0x00bc: 0xbf, # ¼ VULGAR FRACTION ONE QUARTER
0x00f8: 0xc0, # ø LATIN SMALL LETTER O WITH STROKE
0x013f: 0xc1, # Ŀ LATIN CAPITAL LETTER L WITH MIDDLE DOT
0x00d0: 0xc2, # Ð LATIN CAPITAL LETTER ETH
0x00df: 0xc3, # ß LATIN SMALL LETTER SHARP S
0x00e7: 0xc4, # ç LATIN SMALL LETTER C WITH CEDILLA
0x00a4: 0xc6, # ¤ CURRENCY SIGN
0x26f6: 0xc7, # ⛶ SQUARE FOUR CORNERS
0x00d8: 0xc9, # Ø LATIN CAPITAL LETTER O WITH STROKE
0x00ff: 0xca, # ÿ LATIN SMALL LETTER Y WITH DIAERESIS
0x00c3: 0xcb, # Ã LATIN CAPITAL LETTER A WITH TILDE
0x00a2: 0xcc, # ¢ CENT SIGN
0x00e3: 0xcd, # ã LATIN SMALL LETTER A WITH TILDE
0x00d5: 0xce, # Õ LATIN CAPITAL LETTER O WITH TILDE
0x00f5: 0xcf, # õ LATIN SMALL LETTER O WITH TILDE
0x0307: 0xd0, # ̇ COMBINING DOT ABOVE
0x0308: 0xd1, # ̈ COMBINING DIAERESIS
0x030a: 0xd2, # ̊ COMBINING RING ABOVE
0x0300: 0xd3, # ̀ COMBINING GRAVE ACCENT
0x0301: 0xd4, # ́ COMBINING ACUTE ACCENT
0x0303: 0xd5, # ̃ COMBINING TILDE
0x00f7: 0xd6, # ÷ DIVISION SIGN
0x2aa1: 0xd7, # ⪡ DOUBLE NESTED LESS-THAN
0x2aa2: 0xd8, # ⪢ DOUBLE NESTED GREATER-THAN
0x0140: 0xd9, # ŀ LATIN SMALL LETTER L WITH MIDDLE DOT
0x005c: 0xda, # ⧵ REVERSE SOLIDUS
0x29f5: 0xda, # ⧵ REVERSE SOLIDUS OPERATOR
0x00d7: 0xdb, # × MULTIPLICATION SIGN
0x00ae: 0xdc, # ® REGISTERED SIGN
0x00a9: 0xdd, # © COPYRIGHT SIGN
0x2122: 0xde, # ™ TRADE MARK SIGN
0x039e: 0xdf, # Ξ GREEK CAPITAL LETTER XI
0x03b1: 0xe0, # α GREEK SMALL LETTER ALPHA
0x2154: 0xe4, # ⅔ VULGAR FRACTION TWO THIRDS
0x00be: 0xe5, # ¾ VULGAR FRACTION THREE QUARTERS
0x2195: 0xe8, # ↕ UP DOWN ARROW
0x2938: 0xe9, # ⤸ RIGHT-SIDE ARC CLOCKWISE ARROW
0x25af: 0xeb, # ▯ WHITE VERTICAL RECTANGLE
0x1d19: 0xed, # ᴙ LATIN LETTER SMALL CAPITAL REVERSED R
0x042e: 0xee, # Ю CYRILLIC CAPITAL LETTER YU
0x03f4: 0xf2, # ϴ GREEK CAPITAL THETA SYMBOL
0x221e: 0xf3, # ∞ INFINITY
0x03a9: 0xf4, # Ω GREEK CAPITAL LETTER OMEGA
0x03b2: 0xf5, # β GREEK SMALL LETTER BETA
0x03a3: 0xf6, # Σ GREEK CAPITAL LETTER SIGMA
0x1d540: 0xfb, # 𝕀 MATHEMATICAL DOUBLE-STRUCK CAPITAL I
0x0439: 0xfc, # й CYRILLIC SMALL LETTER SHORT I
0x25ae: 0xff, # ▮ BLACK VERTICAL RECTANGLE
# CONFLICT 0x2153: 0xe1, # ⅓ VULGAR FRACTION ONE THIRD (5x10)
# CONFLICT 0x00bd: 0xbd, # ½ VULGAR FRACTION ONE HALF (5x10)
# CONFLICT 0x00bc: 0xe3, # ¼ VULGAR FRACTION ONE QUARTER (5x10)
# CONFLICT 0x00f8: 0xf8, # ø LATIN SMALL LETTER O WITH STROKE (5x10)
},
{ # FT10 ENGLISH_RUSSIAN CHARACTER FONT
0x00c0: 0x10, # À LATIN CAPITAL LETTER A WITH GRAVE
0x00c1: 0x11, # Á LATIN CAPITAL LETTER A WITH ACUTE
0x00c2: 0x12, # Â LATIN CAPITAL LETTER A WITH CIRCUMFLEX
0x00c3: 0x13, # Ã LATIN CAPITAL LETTER A WITH TILDE
0x00c4: 0x14, # Ä LATIN CAPITAL LETTER A WITH DIAERESIS
0x00c5: 0x15, # Å LATIN CAPITAL LETTER A WITH RING ABOVE
0x00c6: 0x16, # Æ LATIN CAPITAL LETTER AE
0x00c7: 0x17, # Ç LATIN CAPITAL LETTER C WITH CEDILLA
0x00c8: 0x18, # È LATIN CAPITAL LETTER E WITH GRAVE
0x00c9: 0x19, # É LATIN CAPITAL LETTER E WITH ACUTE
0x00ca: 0x1a, # Ê LATIN CAPITAL LETTER E WITH CIRCUMFLEX
0x00cb: 0x1b, # Ë LATIN CAPITAL LETTER E WITH DIAERESIS
0x00cc: 0x1c, # Ì LATIN CAPITAL LETTER I WITH GRAVE
0x00cd: 0x1d, # Í LATIN CAPITAL LETTER I WITH ACUTE
0x00ce: 0x1e, # Î LATIN CAPITAL LETTER I WITH CIRCUMFLEX
0x00cf: 0x1f, # Ï LATIN CAPITAL LETTER I WITH DIAERESIS
0x0410: 0x41, # А CYRILLIC CAPITAL LETTER A
0x0412: 0x42, # В CYRILLIC CAPITAL LETTER VE
0x0421: 0x43, # С CYRILLIC CAPITAL LETTER ES
0x0415: 0x45, # Е CYRILLIC CAPITAL LETTER IE
0x041d: 0x48, # Н CYRILLIC CAPITAL LETTER EN
0x041a: 0x4b, # К CYRILLIC CAPITAL LETTER KA
0x041c: 0x4d, # М CYRILLIC CAPITAL LETTER EM
0x041e: 0x4f, # О CYRILLIC CAPITAL LETTER O
0x0420: 0x50, # Р CYRILLIC CAPITAL LETTER ER
0x0422: 0x54, # Т CYRILLIC CAPITAL LETTER TE
0x0425: 0x58, # Х CYRILLIC CAPITAL LETTER HA
0x042c: 0x62, # Ь CYRILLIC CAPITAL LETTER SOFT SIGN
0x00a2: 0x5c, # ¢ CENT SIGN
0x21b2: 0x7e, # ↲ DOWNWARDS ARROW WITH TIP LEFTWARDS
0x00d0: 0x80, # Ð LATIN CAPITAL LETTER ETH
0x00d1: 0x81, # Ñ LATIN CAPITAL LETTER N WITH TILDE
0x00d2: 0x82, # Ò LATIN CAPITAL LETTER O WITH GRAVE
0x00d3: 0x83, # Ó LATIN CAPITAL LETTER O WITH ACUTE
0x00f4: 0x84, # ô LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f5: 0x85, # õ LATIN SMALL LETTER O WITH TILDE
0x00f6: 0x86, # ö LATIN SMALL LETTER O WITH DIAERESIS
0x00d7: 0x87, # × MULTIPLICATION SIGN
0x00d8: 0x88, # Ø LATIN CAPITAL LETTER O WITH STROKE
0x00d9: 0x89, # Ù LATIN CAPITAL LETTER U WITH GRAVE
0x00da: 0x8a, # Ú LATIN CAPITAL LETTER U WITH ACUTE
0x00db: 0x8b, # Û LATIN CAPITAL LETTER U WITH CIRCUMFLEX
0x00dc: 0x8c, # Ü LATIN CAPITAL LETTER U WITH DIAERESIS
0x00dd: 0x8d, # Ý LATIN CAPITAL LETTER Y WITH ACUTE
0x00de: 0x8e, # Þ LATIN CAPITAL LETTER THORN
0x00df: 0x8f, # ß LATIN SMALL LETTER SHARP S
0x00e0: 0x90, # à LATIN SMALL LETTER A WITH GRAVE
0x00e1: 0x91, # á LATIN SMALL LETTER A WITH ACUTE
0x00e2: 0x92, # â LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e3: 0x93, # ã LATIN SMALL LETTER A WITH TILDE
0x00e4: 0x94, # ä LATIN SMALL LETTER A WITH DIAERESIS
0x00e5: 0x95, # å LATIN SMALL LETTER A WITH RING ABOVE
0x00e6: 0x96, # æ LATIN SMALL LETTER AE
0x00e7: 0x97, # ç LATIN SMALL LETTER C WITH CEDILLA
0x00e8: 0x98, # è LATIN SMALL LETTER E WITH GRAVE
0x00e9: 0x99, # é LATIN SMALL LETTER E WITH ACUTE
0x00ea: 0x9a, # ê LATIN SMALL LETTER E WITH CIRCUMFLEX
0x00eb: 0x9b, # ë LATIN SMALL LETTER E WITH DIAERESIS
0x00ec: 0x9c, # ì LATIN SMALL LETTER I WITH GRAVE
0x00ed: 0x9d, # í LATIN SMALL LETTER I WITH ACUTE
0x00ee: 0x9e, # î LATIN SMALL LETTER I WITH CIRCUMFLEX
0x00ef: 0x9f, # ï LATIN SMALL LETTER I WITH DIAERESIS
0x042e: 0xb0, # Ю CYRILLIC CAPITAL LETTER YU
0x2aa1: 0xc8, # ⪡ DOUBLE NESTED LESS-THAN
0x2aa2: 0xc9, # ⪢ DOUBLE NESTED GREATER-THAN
0x2193: 0xda, # ↓ DOWNWARDS ARROW
0x0301: 0xe7, # ́ COMBINING ACUTE ACCENT
0x0308: 0xe8, # ̈ COMBINING DIAERESIS
0x0303: 0xe9, # ̃ COMBINING TILDE
0x00bc: 0xf0, # ¼ VULGAR FRACTION ONE QUARTER
0x2153: 0xf1, # ⅓ VULGAR FRACTION ONE THIRD
0x00bd: 0xf2, # ½ VULGAR FRACTION ONE HALF
0x00be: 0xf3, # ¾ VULGAR FRACTION THREE QUARTERS
0x00a7: 0xfd, # § SECTION SIGN
0x00b6: 0xfe, # ¶ PILCROW SIGN
0x25ae: 0xff, # ▮ BLACK VERTICAL RECTANGLE
# conflict 0x00e9: 0xea, # 'é' LATIN SMALL LETTER E WITH ACUTE
# duplicate 0x00e7: 0xeb, # 'ç' LATIN SMALL LETTER C WITH CEDILLA
},
{ # FT11 WESTERN_EUROPEAN_II CHARACTER FONT
0x00b1: 0x10, # ± PLUS-MINUS SIGN
0x039e: 0x11, # Ξ GREEK CAPITAL LETTER XI
0x2248: 0x1a, # ≈ ALMOST EQUAL TO
0x222b: 0x1b, # ∫ INTEGRAL
0x2017: 0x1c, # ‗ DOUBLE LOW LINE
0x0303: 0x1d, # ̃ COMBINING TILDE
0x00b2: 0x1e, # ² SUPERSCRIPT TWO
0x00b3: 0x1f, # ³ SUPERSCRIPT THREE
0x00c7: 0x80, # Ç LATIN CAPITAL LETTER C WITH CEDILLA
0x00fc: 0x81, # ü LATIN SMALL LETTER U WITH DIAERESIS
0x00e9: 0x82, # é LATIN SMALL LETTER E WITH ACUTE
0x00e2: 0x83, # â LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e4: 0x84, # ä LATIN SMALL LETTER A WITH DIAERESIS
0x00e0: 0x85, # à LATIN SMALL LETTER A WITH GRAVE
0x00e7: 0x87, # ç LATIN SMALL LETTER C WITH CEDILLA
0x00ea: 0x88, # ê LATIN SMALL LETTER E WITH CIRCUMFLEX
0x00eb: 0x89, # ë LATIN SMALL LETTER E WITH DIAERESIS
0x00e8: 0x8a, # è LATIN SMALL LETTER E WITH GRAVE
0x00ef: 0x8b, # ï LATIN SMALL LETTER I WITH DIAERESIS
0x00ee: 0x8c, # î LATIN SMALL LETTER I WITH CIRCUMFLEX
0x00ec: 0x8d, # ì LATIN SMALL LETTER I WITH GRAVE
0x00c4: 0x8e, # Ä LATIN CAPITAL LETTER A WITH DIAERESIS
0x00c5: 0x8f, # Å LATIN CAPITAL LETTER A WITH RING ABOVE
0x00c9: 0x90, # É LATIN CAPITAL LETTER E WITH ACUTE
0x00e6: 0x91, # æ LATIN SMALL LETTER AE
0x00c6: 0x92, # Æ LATIN CAPITAL LETTER AE
0x00f4: 0x93, # ô LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f6: 0x94, # ö LATIN SMALL LETTER O WITH DIAERESIS
0x00f2: 0x95, # ò LATIN SMALL LETTER O WITH GRAVE
0x00fb: 0x96, # û LATIN SMALL LETTER U WITH CIRCUMFLEX
0x00f9: 0x97, # ù LATIN SMALL LETTER U WITH GRAVE
0x00ff: 0x98, # ÿ LATIN SMALL LETTER Y WITH DIAERESIS
0x00dc: 0x9a, # Ü LATIN CAPITAL LETTER U WITH DIAERESIS
0x00f1: 0x9b, # ñ LATIN SMALL LETTER N WITH TILDE
0x00d1: 0x9c, # Ñ LATIN CAPITAL LETTER N WITH TILDE
0x00bf: 0x9f, # ¿ INVERTED QUESTION MARK
0x00e1: 0xa0, # á LATIN SMALL LETTER A WITH ACUTE
0x00ed: 0xa1, # í LATIN SMALL LETTER I WITH ACUTE
0x00f3: 0xa2, # ó LATIN SMALL LETTER O WITH ACUTE
0x00fa: 0xa3, # ú LATIN SMALL LETTER U WITH ACUTE
0x00a2: 0xa4, # ¢ CENT SIGN
0x00a3: 0xa5, # £ POUND SIGN
0x00a5: 0xa6, # ¥ YEN SIGN
0x00b4: 0xa7, # ´ ACUTE ACCENT
0x2a0d: 0xa8, # ⨍ FINITE PART INTEGRAL
0x00a1: 0xa9, # ¡ INVERTED EXCLAMATION MARK
0x00c3: 0xaa, # Ã LATIN CAPITAL LETTER A WITH TILDE
0x00e3: 0xab, # ã LATIN SMALL LETTER A WITH TILDE
0x00d5: 0xac, # Õ LATIN CAPITAL LETTER O WITH TILDE
0x00f5: 0xad, # õ LATIN SMALL LETTER O WITH TILDE
0x00f8: 0xaf, # ø LATIN SMALL LETTER O WITH STROKE
0x0307: 0xb0, # ̇ COMBINING DOT ABOVE
0x0308: 0xb1, # ̈ COMBINING DIAERESIS
0x030a: 0xb2, # ̊ COMBINING RING ABOVE
0x0300: 0xb3, # ̀ COMBINING GRAVE ACCENT
0x0301: 0xb4, # ́ COMBINING ACUTE ACCENT
0x00bd: 0xb5, # ½ VULGAR FRACTION ONE HALF
0x00bc: 0xb6, # ¼ VULGAR FRACTION ONE QUARTER
0x00d7: 0xb7, # × MULTIPLICATION SIGN
0x00f7: 0xb8, # ÷ DIVISION SIGN
0x2264: 0xb9, # ≤ LESS-THAN OR EQUAL TO
0x2265: 0xba, # ≥ GREATER-THAN OR EQUAL TO
0x2aa1: 0xbb, # ⪡ DOUBLE NESTED LESS-THAN
0x2aa2: 0xbc, # ⪢ DOUBLE NESTED GREATER-THAN
0x2260: 0xbd, # ≠ NOT EQUAL TO
0x221a: 0xbe, # √ SQUARE ROOT
0x203e: 0xbf, # ‾ OVERLINE
0x221e: 0xc2, # ∞ INFINITY
0x25f8: 0xc3, # ◸ UPPER LEFT TRIANGLE
0x21b2: 0xc4, # ↲ DOWNWARDS ARROW WITH TIP LEFTWARDS
0x2191: 0xc5, # ↑ UPWARDS ARROW
0x2193: 0xc6, # ↓ DOWNWARDS ARROW
0x2192: 0xc7, # → RIGHTWARDS ARROW
0x2190: 0xc8, # ← LEFTWARDS ARROW
0x250c: 0xc9, # ┌ BOX DRAWINGS LIGHT DOWN AND RIGHT
0x231c: 0xc9, # ⌜ TOP LEFT CORNER
0x2510: 0xca, # ┐ BOX DRAWINGS LIGHT DOWN AND LEFT
0x231d: 0xca, # ⌝ TOP RIGHT CORNER
0x2514: 0xcb, # └ BOX DRAWINGS LIGHT UP AND RIGHT
0x231e: 0xcb, # ⌞ BOTTOM LEFT CORNER
0x2518: 0xcc, # ┘ BOX DRAWINGS LIGHT UP AND LEFT
0x231f: 0xcc, # ⌟ BOTTOM RIGHT CORNER
0x00b7: 0xcd, # · MIDDLE DOT
0x00ae: 0xce, # ® REGISTERED SIGN
0x00a9: 0xcf, # © COPYRIGHT SIGN
0x2122: 0xd0, # ™ TRADE MARK SIGN
0x2020: 0xd1, # † DAGGER
0x00a7: 0xd2, # § SECTION SIGN
0x00b6: 0xd3, # ¶ PILCROW SIGN
0x25ff: 0xd5, # ◿ LOWER RIGHT TRIANGLE
0x0398: 0xd6, # Θ GREEK CAPITAL LETTER THETA
0x03f4: 0xd6, # ϴ GREEK CAPITAL THETA SYMBOL
0x1d6b5: 0xd8, # 𝚵 MATHEMATICAL BOLD CAPITAL XI
0x2aea: 0xd9, # ⫪ DOUBLE DOWN TACK
0x03a3: 0xda, # Σ GREEK CAPITAL LETTER SIGMA
0x297e: 0xdb, # ⥾ UP FISH TAIL
0x00d8: 0xdc, # Ø LATIN CAPITAL LETTER O WITH STROKE
0x03a8: 0xdd, # Ψ GREEK CAPITAL LETTER PSI
0x03a9: 0xde, # Ω GREEK CAPITAL LETTER OMEGA
0x03b1: 0xdf, # α GREEK SMALL LETTER ALPHA
0x03b2: 0xe0, # β GREEK SMALL LETTER BETA
0x03b4: 0xe2, # δ GREEK SMALL LETTER DELTA
0x03b6: 0xe4, # ζ GREEK SMALL LETTER ZETA
0x03b7: 0xe5, # η GREEK SMALL LETTER ETA
0x00b5: 0xea, # µ MICRO SIGN
0x03b3: 0xec, # γ GREEK SMALL LETTER GAMMA
0x03c0: 0xed, # π GREEK SMALL LETTER PI
0x03c3: 0xef, # σ GREEK SMALL LETTER SIGMA
0x25bc: 0xf5, # ▼ BLACK DOWN-POINTING TRIANGLE
0x25b6: 0xf6, # ▶ BLACK RIGHT-POINTING TRIANGLE
0x25c0: 0xf7, # ◀ BLACK LEFT-POINTING TRIANGLE
0x21a4: 0xf9, # ↤ LEFTWARDS ARROW FROM BAR
0x21e5: 0xfb, # ⇥ RIGHTWARDS ARROW TO BAR
0x25af: 0xfc, # ▯ WHITE VERTICAL RECTANGLE
}
],
'fonts': [
# FT00 ENGLISH_JAPANESE CHARACTER FONT
b'\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x01\x14\xa2a\x8c\x12\x00\x00\x00\x00q\x1d\xf1|\xdfs\x80\x01\x01\x0es\xbc\xee\x7f\xee\x8b\x8f\x18F.\xf3\xbc\xff\xc61\x8c~\xe8\xb8\x80@ \x00\x80\xc0\x81\x05\x06\x00\x00\x00\x00\x04\x00\x00\x00\x00" \x00\x00\x00\x00\x00\x00\x00\x00@\x00@ \x00\x00\x10\x80\x00\x00@\x00\x06\x00\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x8f\x8d\xff\x99\xdf\xb9\x9f\xef\xfe\xc6\x01\x14\xa7\xe6D!\x08@\x00\x01\x8b"#A\x01\x8cX\xc2\x00\x91\x8cc\x19B\x11\x89\x05(n1\x8cc\x02F1\x8cB\x85\t@ \x00\x81/\x80\x01\x02\x00\x00\x00\x00\x04\x00\x00\x00\x00B\x10\x847\xe2\xff\xa6?\xfd^\x91\x00\x84(\x04\x06\x01%\x02\x80\x02\x84\x1d\x00\x08\x00\x03\xe0\x00\x00\x00\x00\x00H\x84 \x88\x91\xa8\x82\x10\xc5"\x01\x15\xfa\n\x88@\xaa@\x00\x02\x99\x02Ez\x02\x8cX\xc4|A\x0cc\x08\xc2\x10\x89\x05HW1\x8cc\x02F1TD\x8f\x8a \x13\xac\xe6\xb9\x11\xb3\r"j\xce\xf3l\xeeF1\x8c~B\x10H\x14b\x94\x99A\rS\xd0\x01\xc0\xe1\x00\x01?\xa0\x00\x1e@\x04\x01\x00\x08\x00\x00?\x11\x00$\x03\xc0p\x8c(\x89\x11\xa8\x02\x10\xc6"\x01\x00\xa7\x11\x00@\x9d\xf0|\x04\xa9\x04)\x07\xc4s\xc0\x08\x00"l}\x08\xfb\xd7\xf9\x05\x88V\xb1\xf4|\xe2F5"\x88\x82\x08\x00\x00s\t\xc7\x91\xc9\x05BW1\x8c\xf3\x04F1TD\x82\x0b\xff\x14R\xd6\x88\x91\rR\x11\x08\x06\x02\xa6t\xa4"c\x12\xa9\x84\x81\x00\x08@3\xe1\'\xff\xff\xb8U\x90\x94(\x88\x91\x88\x02\x10\xc6"\x00\x01\xf2\xa2\xa0@\xaaF\x00\x08\xc9\x08\x1f\x86(\x88X\xc4|D\xaf\xe3\x08\xc2\x11\x89\x05HFq\x85h\x12F5Q\x10\x8f\x88\x00\x03\xe3\x08\xfd\x0f\x89\x05\x82V1\xf3\xe0\xe4F5#\xc8B\x10H\x14J\x10\x84V\rS\x12\x94H fZ\x98%R\x8c\x19\x85A\x07\x00H0&dHd\x8b\xd5\x8f\xd4(\x88Qx\x03\xe3~\xe2\x00\x00\xafN@!\x08B\x01\x90\x89\x11\x11F(\x88\x98B\x00\x80\xacc\x19B\x11\x89%(F1\x84\xa4\x12EU\x89 
\x82\x08\x00\x04c\x18\xc1\x01\x89%BF1\x80`\x14\xcdUPPB\x10\x84s\xbc\x1f\x87\xf0\x0b\xb3\xefy\x9e\xff\xff\xbf\xe4>ks\xff\xf9\xc1\x05\x00D\x00D\xa0H\xa5\x08A\x00\x00\x00\x00\x00\x00\x02\x00\x00\x02\x01\x00\xa2\r\xa0\x12\x00\x04\x01\x80s\xbe\xe19\xc8s\x00\x81\x01\x04t|\xee~\x0f\x8b\x99\x1f\xc6.\x83c\xe28\x8a\x89>\xe28\x1f\x03\xfc\xe7\xb9\x0e\x8b\x99\'F.\x80a\xe34\x8a\x8b\xbe" \x00\x00\x00\x10\x04\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x01\xc2\x00\x88!\xbf$\x7f\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc2@\t\x04\x02\x00\xa0\x01\x00\x00\x80\xe4\x10\x00 
\x81\x00\x00\x80\x00\x00\x04\x00\x0e\x90\x00\x00\x00\x9c\x02\x80\x00\x00\x00\x00\x04\x00!\xca\x00\x00\x00+\xe0\xf8\x00\x00\x00\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00E\xff\xff\xff{\xff\xfc\x7f\xf1\x7f*\x04\x11\xdf\xf8\x89\x0f\xa3\xffq\x03\xf4;\xe0\x91!\xff\xe2T\x00\x00\x00\x00\x00\x00\x81B \x00\x00\x1c\x00\x02\x00\x00\x02\x00\x00\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01I\x12\t$L\x82\xa0\x851I+\xf4|\x01\x10\x85\xf0\xd0\x81\x02\x02\x8f\x88?\x95!\x18\x81\x1cK\x9c\xe8\xbc\xcf>\x8c\x87r\xce\xb3b\x07E\x1f\x8c}\xff\x90\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf9\x99\x12\x19?\x88\x82\xac\x89I\xaf\xeaF\x10\n \xa3\x00\x88\x81t\x15\xf4\x8b\xe1\x95#\x18\x84\x00\xa8c\x08\xd11 \x85J#1\xcc\xfe\xb8\xc4\x8aTH\x8a\x80\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01(\x12)$\x08\x82 \x91\x01\x19\x02E\x10\x04p\xa3\x00\x86\xaa\x04H\x85\x08!\x15e\x10\x84\x00\x93\xfc\xc8\xca1 \x04\n\xf21\x8ccX\xc5\n$~\xff\xfc\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x08"I$\x10\x82A)\x02\x11\x04D 
\n\xa9#\x01\x06\xa4w\xd4\x84\x08"%i\x11\x08\x00\x94c\x19\xc61\xa0\x04\x07"1\x8cc\xa5N\nTH\x98\x80\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x08O\x8aDa>\x8eD\xecb\x08\x84C\xf0""\xf6\x00\x82\x08`t\x7f\xe4E\xb1\xf2p\x00k\xfc\xee\xbb\xcf@\x04\x02>.\xf3\xdc\r\xb7\xf3\x8b\xc9\x18\x90\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 \x08\x02\x01\x00\x04\x00\x00\x00\x80@\x00\x00\x00\x00@\x00\x00\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 \x08\x02\x01\x00$\x00\x00\x00\x80@\x00\x00\x00\x00@\x00\x00\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 \x08\x02\x0e\x00\x18\x00\x00\x00\x80@\x00\x00\x00\x03\x80\x00\x00\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
# FT01 WESTERN EUROPEAN CHARACTER FONT
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x14\xa2a\x8c\x12\x00\x00\x00\x00q\x1d\xf1|\xdfs\x80\x01\x01\x0es\xbc\xee\x7f\xee\x8b\x8f\x18F.\xf3\xbc\xff\xc61\x8c~\xe8\xb8\x80@ \x00\x80\xc0\x81\x05\x06\x00\x00\x00\x00\x04\x00\x00\x00\x00" \x00"\x04\xa2 D@\x9cD\tD"\x04\xa2 J"\x88D\tNq\x10%\x11)\x1b\x9d\x18\xc5O\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x14\xa7\xe6D!\x08@\x00\x01\x8b"#A\x01\x8cX\xc2\x00\x91\x8cc\x19B\x11\x89\x05(n1\x8cc\x02F1\x8cB\x85\t@ \x00\x81/\x80\x01\x02\x00\x00\x00\x00\x04\x00\x00\x00\x00B\x10\x84Q\x08\x05\x10\x8a!"\xa2\x10\x00Q\x08\x05\x10\x80\x00\x00\xa2\x10\x11\n\x88@\x02\xd6\x04\xa5\x18\xd6\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x15\xfa\n\x88@\xaa@\x00\x02\x99\x02Ez\x02\x8cX\xc4|A\x0cc\x08\xc2\x10\x89\x05HW1\x8cc\x02F1TD\x8f\x8a \x13\xac\xe6\xb9\x11\xb3\r"j\xce\xf3l\xeeF1\x8c~B\x10H\x8cc\x10\x00\x0es\xa2\x00\x00\x04\xff\xff\xf79\xces\x9c\xe79\xdf{\x18\xc6\x10\x00\x9b\x9c\xa5V\xb4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xa7\x11\x00@\x9d\xf0|\x04\xa9\x04)\x07\xc4s\xc0\x08\x00"l}\x08\xfb\xd7\xf9\x05\x88V\xb1\xf4|\xe2F5"\x88\x82\x08\x00\x00s\t\xc7\x91\xc9\x05BW1\x8c\xf3\x04F1TD\x82\x0b\xff\x8cc\x18\xc61\x8cb\xe79\xc8\x84!\x08\xc61\x8cB\x10\x841\x89\x08B\x136\xd0\x84B*>\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xf2\xa2\xa0@\xaaF\x00\x08\xc9\x08\x1f\x86(\x88X\xc4|D\xaf\xe3\x08\xc2\x11\x89\x05HFq\x85h\x12F5Q\x10\x8f\x88\x00\x03\xe3\x08\xfd\x0f\x89\x05\x82V1\xf3\xe0\xe4F5#\xc8B\x10H\x8cc\x18\xc61\x8c]\x18\xc60\xf7\xbd\xef\xff\xff\xff\xde\xf7\xbd\xe0y\x08B\x12\xb9\xb3\x18\x00\x00\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xafN@!\x08B\x01\x90\x89\x11\x11F(\x88\x98B\x00\x80\xacc\x19B\x11\x89%(F1\x84\xa4\x12EU\x89 
\x82\x08\x00\x04c\x18\xc1\x01\x89%BF1\x80`\x14\xcdUPPB\x10\x84\x8cc\x19\xceq\x8cA\x18\xc61\x84!\x08B\x10\x8cc\x18\xc6?\x01\x08B\x12q\x97\xc0\x0f\xff\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xa2\r\xa0\x12\x00\x04\x01\x80s\xbe\xe19\xc8s\x00\x81\x01\x04t|\xee~\x0f\x8b\x99\x1f\xc6.\x83c\xe28\x8a\x89>\xe28\x1f\x03\xfc\xe7\xb9\x0e\x8b\x99\'F.\x80a\xe34\x8a\x8b\xbe" \x00s\x9c\xe6\xb5\xaes\xbe\xe79\xce\xff\xff\xf79\xce\x8c^\xf7\xbd\xe0\xfb\x9c\xe7\x121\x90\x00\x00\x00\x17\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00q\x14B\x04fI\x08G\xc61\x04\x18\xc78\x1b\x00T\x92%)"\x88\x81$\x00\x06\x00\x07;\xff\x04c\x1c`\x00 
\x05\xf8\x00\x00\x00\x00\x00\x03\xe0p=\xf5H\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x81\x00\xe2\x14\x89)\x1cN\xcaR\x0c\x15(v1K\x81g\xda\xd6\x00\x14BX\x85\xa2!\x19\xc6 \x04\xa5$ \x00s\x81\x1a\x1eU\x00\x1c\x00:\x00 \x12\xa2I\x9f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00w\xd5R$\x88\x1f\xeaN\xde\xf5t\x13(m\xc0L\xe2\xea9\xc0\x00\x08\x00\x00\nR\x10\xaa\xd7`M)H\xe4\xcf\xa8M\x1c&\xb5\xb3b\x07E\x0etP\xa8\xca\x9f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x89\x14B}\xdc\xf9\tV\xa5+\xac\xbbHv Mc\x1a\x06.\x00\x00\x00\x03\xf4*\x88I\xdf\x7f\xaa\x10\x9d)1 E\x1a&\xb5\xcc\xfe\xb8\xf8\x90\xacP\xa9\xc8\x9f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00q\x14J\xa0\x88\x1d~\xe2\xcew\xac\x13\'n v_\xfa>1\x00\x00\x00\x00\nR\x04\xaa\xd7`\x95\xef2v1 
E\x17\'\xb5\x8ccX\xc5\x00\xac\\\xaa\xc8\x9f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x04G\x10\x88+\x88B\x88!t\x15"E\xd1C\x83\x17\xc61\x00\x00\x00\x00\x85\xa2\x03\x1a\xc7`\x90BU\xae1!E\x12\x1e\xb5\x8cc\xa5z\x00\xacR\xac\xc8\xdf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00w\xd8B\x03\x1fI\t\xf2\x8ca\x87\xd9F:;\x84\x1d\x12]\xce\x00\x00\x00\x00\x00\x07\x00\x07;\xffi\xce\xf8\xd7\xcf"E\x13\x96\xb5\xf3\xdc\r\xc3\xe0\xab\xd2\xa8\xc8\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00H\x11\xbe\x01\xaf\xc5\x11&_\x80@\x00@\x00pR\xa0H\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xce\x00\x86\x01r%\x10\x80\x00\x80@\x00@\x00 
S\xf0x\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x82\x0e!\x19\xf1\x80\x00\x80@\x00@\x00s\xbc\x00\x04\xdf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
# FT10 ENGLISH_RUSSIAN CHARACTER FONT
b'@\x88\xd5\x10\x0e@\x88\x04\x08\x80\x01\x14\xa2a\x8c\x12\x00\x00\x00\x00q\x1d\xf1|\xdfs\x80\x01\x01\x0es\xbc\xee\x7f\xee\x8b\x8f\x18F.\xf3\xbc\xff\xc61\x8c~\xe08\x80@ \x00\x80\xc0\x81\x05\x06\x00\x00\x00\x00\x04\x00\x00\x00\x01{\xdc \x03P"5@\x02\x04E\x0b\x00@\x88\xd0\x10\x00@\x88\x04\x08\x80\xff\xd5_EO\xfcI\x1a\xe2.!\x15 (\xf1!\x14\xa2\x11J\x01\x14\xa7\xe6D!\x08@\x00\x01\x8b"#A\x01\x8cX\xc2\x00\x91\x8cc\x19B\x11\x89\x05(n1\x8cc\x02F1\x8cB\x82\t@ \x00\x81 \x80\x01\x02\x00\x00\x00\x00\x04\x00\x00\x00\x01X\xd0\xa7t\x88EH\x00q\x08\xa0\x11\x06!\x15%(\x00!\x14\xa2\x11J\x8cAP\xc4\x85\x8c]\x1a\xa21!\x00\x02\x11\x90\x00\x00\x00\x00\x00\x01\x15\xfa\n\x88@\xaa@\x00\x02\x99\x02Ez\x02\x8cX\xc4|A\x0cc\x08\xc2\x10\x89\x05HW1\x8cc\x02F1TD\x87\n \x03\xac\xe6\xb9\x0f\xb3\r"j\xce\xf3\xec\xeeF1\x8c\x7f[\xdd.H\x1c\xe0\x01\xd1$b\x08\xc5\xc9\x00\x00\x00\x13N\x00\x00\x00\x00\x00\x84=P\xce%\x8ck\x1a\xa2!R\x9c\xe5:\x90\xff\xff\xf79\xce\x01\x00\xa7\x11\x00@\x9d\xf0|\x04\xa9\x04)\x07\xc4s\xc0\x08\x00"l}\x08\xfb\xd7\xf9\x05\x88V\xb1\xf4|\xe2F5"\x88\x8a\x08\x00\x00s\t\xc7\x91\xc9\x05BW1\x8cs\x04F1TEZG\xfcLc\x17:*tc\x18\xa9)s\x9c\xe78\xb0s\x9c\xe2\x10\x84\xf4 \xe7Ve\x8a\xaa\xfa\xbb\'\x8cc\x18\xc6\xf1\x84!\x02\x10\x84\x00\x01\xf2\xa2\xa0@\xaaF\x00\x08\xc9\x08\x1f\x86(\x88X\xc4|D\xaf\xe3\x08\xc2\x11\x89\x05HFq\x85h\x12F5Q\x10\x8a\x88\x00\x03\xe3\x08\xfd\x0f\x89\x05\x82V1\xf3\xe0\xe4F5#\xc9{\xdd\x11\xeec\x18\xc6$\xacc\x18\x91.\x08B\x10\x85\xf1\x8cc\x161\x8c\x8c9P\xe6\xa5\x89*\x1a\xa6\xa1\xff\xff\xff\xff\x8e\xf7\xbd\xe2\x10\x84\x00\x00\xafN@!\x08B\x01\x90\x89\x11\x11F(\x88\x98B\x00\x80\xacc\x19B\x11\x89%(F1\x84\xa4\x12EU\x89 
\x87\x08\x00\x04c\x18\xc1\x01\x89%BF1\x80`\x14\xcdUPP\x00\x00\x9eMc\x18\xc6*tc\x18\x91\xc9{\xde\xf7\xbe\x8e\xff\xff\xf2\x10\x84\x8c!P\xc75\x8a\x1c\x1a\xa6\xb1\x8cc\x18\xc6\x82\x84!\x02\x10\x84\x01\x00\xa2\r\xa0\x12\x00\x04\x01\x80s\xbe\xe19\xc8s\x00\x81\x01\x04t|\xee~\x0f\x8b\x99\x1f\xc6.\x83c\xe28\x8a\x89>\xe28\x1f\x03\xfc\xe7\xb9\x0e\x8b\x99\'F.\x80a\xe34\x8a\x8b\xbe\x00\x00\x00L\xe3\x18\xc61$c\x18\x91\t\x8cc\x18\xc6\xa4\x84!\x02\x10\x84\xf4=_F)\x8c\x08\x1f\xbb.\x8cc\x18\xc6\xe6\xff\xff\xf79\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00t\\\xe79\xc0s\x9c\xe7\x13\x96{\xde\xf7\xbdLs\x9c\xe79\xce\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x93\xc6\x00(\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xb9\x10d\x00\x00\n\x02\x8e\xf9\t\x00\xc2\x00|j\x00\x00\x02RD\x05\x10\x00\x8cc\x88\xe1\x08 
\x04A\x19\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xacX\x00\x00\x00\x02\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x9d\x00\x8a\x00\x00\x00m\x04S\x89$\xc7\x80,j\x02\x00\x04\x05\x88\x009N\x94\xa4\x8f\x82\x8es\\\xe9&\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xaca\xcf\xba\xbe\x89$\xf8\xc7\xff\x8dq\x18:O!\x01+\x10\x88\x00\x14\xae\xba\xa4UIf\xd6&,j\xf2F\xa0\x00\x1c\xe58\x11\xad\xef\x88\x8f\x88 \x04E\x12\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xeb\xfd(\xc6\xa1\x9ch]\xc6$\x8dQ\x18F\xb1L\x80\t!\xde\x01\x14\xa1\x10DQ\t\xff\xfcF,jWF\xa0\x00#\x05:1ZR\x9f\x82\xaf\xff}\xf3)\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa9c\xc8}\xc6\xac\xf0Z\xfe$}]\x9e\x1f\xaf\x92R\x03\xc0\x88\x01\x14\xa28\x84Q+f\xd4\x80Lj\x9a\xc6\xa0\x00?\x15|\x11\xbc\xe7\xb8\xb0( \x04E(\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xaac(B\xa1\xcdiX\xc6$\rSYF\xa5L\xa4\x02\xc4\x88!\x00\x04m\x0e\xf9\x1d$\xc5\x00\x8ck\x1a\xc6\xa0\x00 
\xe1\x11N\x08H\x7f\x80\xaes\\\xe9\x10\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94]\xc8:\xbe\x8ed\x98\xc6$\x0f\xdd\x9e:I!6\x03\xbb\x0f!\x1c\xa8\x02\x00\x01\t\x00\xc2\x00\xff\xff\xf7\x7f\xe0\x00\x1cF\x00\x00\t\xce\x18\x81\xe8 \x84AH\xbf\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x88C\x12\x04 \x00\x00\xc0\x00\x00\x00\x00\x10\x00\x9cq\xce\x03\xb0\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
# FT11 WESTERN_EUROPEAN_II CHARACTER FONT
b' > \x92\x04\x0c\x000\x03\x9c\x01\x14\xa2a\x0c\x12\x08\x00\x00\x00q\x1d\xf1|\xdfs\x80\x01\x01\x0eq<\xee\x7f\xee\x8b\x9f\x18F.\xf3\xbc\xef\xc61\x8c~\xe08\x800 \x00\x80`\x81\x04\x86\x00\x00\x00\x00\x02\x00\x00\x00\x002a\x00x\x04E \x80"\x90\xa2!D\x10\x1e@ \x88TT\xd6\xb9\xc4\x10\x84"\x1a<\x11\x1a\xd6\xb4 \'\xe2A\x11\x04\x12\x14P\x00\x84\x01\x14\xa7\xe6\x84!*@\x00\x01\x8b"#A\x01\x8cX\xc2\x00\x91\x8a\xa3\x19B\x11\x89\x05(n1\x8cc\x12F1\x8cB\x88\t@ \x00\x80\x80\x80\x00\x82\x00\x00\x00\x00\x02\x00\x00\x00\x00B\x12\xa4\x82\x88\xa0\x10\x0fP\x08\x05\x10\n (\xa5\x11D\x03\x81)\x06 !\x08G\xa5R(%)I\xc2\xf8"A\x11\x04!(@#\x9c\x01\x15\xfa\n\x88@\x9c@\x00\x02\x99\x02Ez\x02\x8cX\xc4|A\xbcc\x08\xc2\x10\x89\x05HW1\x8cc\x02F1R\x84\x84\n \x13\xbc\xf7\xbb\xef\xb3\x0c\x92j\xce\xf3\xd6\xef\xc61\x8c~B\x10D\x80\x1c\xe79\xd0s\x9c\xc0\x00\x84\xfe\xa8\x00\x00\x00\x8cb\x00>$p\x00\n#\xfc \x00g\x02n\'\xd2B\x10\x84!\x00@V\x04\x01\x00\xa7\x11 @\x89\xf0|\x04\xa9\x04)\x07\xc2s\xc0\x08\x00"\xac}\x08\xfb\xd7\xf9\x05\x88V\xb1\xf4|\xe2F5!\x08\x82\x08\x00\x00c\x08\xc4\x91\xc9\x04\xa2W1\x8cY\x02F1TD\x82\x08\n\x84b\x10\x840\x8cbF1N\x81~\xe7:1\x8ccl\xc6(\x0b\x1d\x1ax\x92q\x1c\x18\xba\xb5 \x10\x82\x10\x84!\x14O\xd7\x9c\x00\x01\xf2\xa2\xa0@\x9cF\x00\x08\xc9\x08\x1f\x86$\x88X\xc4|D\xbf\xe3\x08\xc2\x11\x89\x05HFq\x85h\x12F5Q\x10\x81\x08\x00\x03\xe3\x08\xfc\x8f\x89\x04\xc2V1\xf3\xd0\xe2F5#\xc8B\x10\n|~\xf7\xbd\xef\xff\xfeB\x121\xf3\xe9\x18\xc61|c\x9a\xbd\xd0y#\x1a#\xf7!"\xf8\xc75\x07\xd0\x82\x10\x84!(@\x08\x00\x00\x00\xafN@!*B\x01\x90\x89\x11\x11F$\x88\x98B\x00\x80\x84c\x19B\x11\x89%(F1\x84\xa5\x12EU\x89 \x80\x88\x00\x04c\x08\xc0\x81\x89$\xa2V1\x80P\x12MUPPB\x10\x11\x14\xe1\x18\xc6"\x84 
B\x13\xff\x85)\x18\xc6s\x0cc\x19\x80\x11\x89#7\xa0\x92\xa1?\x18\xc5\xce\xf8\x08\x92\x10\x84!\x00O\x80\x00\x01\x00\xa2\r\xa0\x12\x08\x04\x01\x80s\xbe\xe19\xc4s\x00\x81\x01\x04t|\xee~\x0f\x8b\x99\x1f\xc6.\x83b\xe28\x8a\x89>\xe08\x1f\x03\xfc\xf7\xb8\x8e\x8b\x98\x97V.\x80Q\xe1\xb4\x8a\x8b\xbe2`\x1fs\\\xf7\xbd\xees\x9c\xe7:1\xfb\xee\xe79\xads\x9d\x18\xff\xee{\x9c\xd2|\x93A"\xf7:\x08\x00\t\x12\x08\x88!\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\t\x12\x08\x88@\x81@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\xf2\x04\x90\x80A\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"\x88\x81B\x00\x00\x90\x00\x08\xff\x11\x01\xf0\x90\x80\x07\xff\x00\x81\xce\xf9\x1c\xff\x81\xc4\xff\xfe\xa7U\xc0`\x0c\x0f\x81\x80\x03\x00\x0f\x80\x00\x00\x00\x00\x00\x00\xf0~\x1f\x83\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14BB\x11!\x08Z\x08\x80)\x01\x10\xb8\x84$\x03\x00
\x82{!!\xd8\x86$\x8a\x91RV \x90\x12\x01\x02@\x01\x00\x02\x00\x00\x00\x00\x00~\x01\xf9~X\x831\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00J\x8a\x02\x04\xa5|\x80!\x15"\xd4\x82D\x03\x00\x82\xb5\'\xdd\xd8\x8e*\x02\x89WV-\x94H\xf2ZDI%4}\xcf\xfc\xb2E\x7f\x87\xdap8\x82\xf5\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00V\xa4\xf9\tB\x90\x80!+D\x90\x9f\xfc\x03\x00\xb2w\x89"\xd8\x17\xear\x84J\xd62\xb2\x9d\x04\'\xc4Q%\x13\xaa2$KX\xbb\xff\xff\xfd\xf8\xfe1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\xaa\x00\x90\xa5~\x80!+\x8f\x92\xa2D\x03\x00\xb2\xb5\xd9\x1cX&1\x02\x88J\xb9R\x8a\xa3\xe8&Db\xa5$*2$MZ\xbb\x87\xf2|8\xff\xb7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\xf1 \x00Z!\x80%\x15\x04\x11\xc4$\x03\x00\x82\xbb\xa9\x02XF1\x8a\x90G\x11R\x89#\x08&DR\xa5H*2,\x8dZ\x92\x01\xd9pX\x82w\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1c \x07\xfe\x00 
\x80"\x00\x02\x10\x80\x04\x03\xff\x81\xce\x89\x1cX}\xd1\xfa\xbeB\x13m\xb1\x1c\xf7%\x83Lz\x88O\xcc\x13\x18\xe5\x10\x00\xd8p\x18\x83\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x82\x00\x00\x84\x00\x00 \x07\x02\x00\x00\x18@\x00\x00\x00\x00\x0f\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x82\x00\x00\x84\x00\x00 \x00\x82\x00\x00(@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x01\x00\x00\x00&\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
]
}
# Replicate the FT00/FT01/FT10/FT11 data to support making 5x10 versions.
# The mapping and bitmap data are shared with the 5x8 originals; only the
# metrics entry is copied (so the original is not mutated) and then renamed
# and resized.  Each base font at index _i gains its 5x10 clone at _i + 4.
for _i, _name in enumerate(('FT00', 'FT01', 'FT10', 'FT11')):
    FONTDATA['mappings'].append(FONTDATA['mappings'][_i])
    FONTDATA['fonts'].append(FONTDATA['fonts'][_i])
    FONTDATA['metrics'].append(dict(FONTDATA['metrics'][_i]))
    FONTDATA['metrics'][_i + 4]['name'] = _name + '_5x10'
    FONTDATA['metrics'][_i + 4]['glyph_size'] = (5, 10)
del _i, _name
class winstar_weh(ws0010):
    """
    Values to be used by the winstar_weh class during initialization of the display.
    Contains FONTDATA to enable the winstar_weh class to embed the same fonts that
    are contained within any character-based display that uses the ws0010 controller.
    .. versionadded:: 3.6.0
    """
    # Share the parent's mapping and bitmap data unchanged; only the metrics
    # differ for this display variant.  Each metric dict is copied so the
    # ws0010 entries are not mutated, with the advance width ('xwidth')
    # narrowed to 5 pixels.  A comprehension is used instead of a class-body
    # for-loop so the loop variables do not leak into the class namespace
    # as attributes (the previous implementation left `i` and `m` behind).
    FONTDATA = {
        'mappings': ws0010.FONTDATA['mappings'],
        'fonts': ws0010.FONTDATA['fonts'],
        'metrics': [dict(m, xwidth=5) for m in ws0010.FONTDATA['metrics']]
    }
| 99.565436
| 7,053
| 0.624829
| 10,320
| 59,341
| 3.603973
| 0.141279
| 0.619794
| 0.895572
| 1.158928
| 0.635523
| 0.589385
| 0.568171
| 0.480252
| 0.448256
| 0.440863
| 0
| 0.314314
| 0.220286
| 59,341
| 595
| 7,054
| 99.732773
| 0.485898
| 0.224432
| 0
| 0.156309
| 0
| 0.030132
| 0.385371
| 0.370038
| 0
| 1
| 0.092652
| 0.001681
| 0
| 1
| 0
| false
| 0
| 0.001883
| 0
| 0.084746
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0e52800152dc4132c09824b1c29076f289c161e3
| 6,796
|
py
|
Python
|
Server-Test-PDP-Exd/2.Ballot/run.py
|
Parwatsingh/OptSmart
|
0564abdd04e7bc37a3586982a1d7ca5a97be88d5
|
[
"Apache-2.0"
] | 1
|
2021-03-13T08:55:13.000Z
|
2021-03-13T08:55:13.000Z
|
Server-Test-PDP-Exd/2.Ballot/run.py
|
Parwatsingh/OptSmart
|
0564abdd04e7bc37a3586982a1d7ca5a97be88d5
|
[
"Apache-2.0"
] | null | null | null |
Server-Test-PDP-Exd/2.Ballot/run.py
|
Parwatsingh/OptSmart
|
0564abdd04e7bc37a3586982a1d7ca5a97be88d5
|
[
"Apache-2.0"
] | null | null | null |
import subprocess

#subprocess.call("make")

# Compile the atomic-unit generator once before running any workload.
GenAUs = ["g++", "-std=c++17", "GenAUs.cpp", "-o", "genAUs", "-O3", "-g"]
subprocess.call(GenAUs)

# Sub-directories whose run.py benchmark is executed after each generation
# step, in this fixed order. "Static-Bin" was commented out in the original
# script and stays disabled here.
SYSTEMS = ["Serial", "BTO-STM", "MVTO", "Spec-Bin"]

# Each workload is a title plus a list of ./genAUs argument vectors.
# NOTE(review): argument meanings inferred from usage only — confirm against
# GenAUs.cpp before changing any of these values.
WORKLOADS = [
    ("Workload 1", [
        ["100", "900", "50", "1000", "0"],
        ["100", "900", "50", "2000", "0"],
        ["100", "900", "50", "3000", "0"],
        ["100", "900", "50", "4000", "0"],
        ["100", "900", "50", "5000", "0"],
        ["100", "900", "50", "6000", "0"],
    ]),
    ("Workload 2", [
        ["100", "900", "10", "3000", "0"],
        ["100", "900", "20", "2000", "0"],
        ["100", "900", "30", "3000", "0"],
        ["100", "900", "40", "4000", "0"],
        ["100", "900", "60", "5000", "0"],
    ]),
    ("Workload 3", [
        ["200", "1800", "50", "2000", "0"],
        ["300", "2700", "50", "3000", "0"],
        ["400", "3600", "50", "4000", "0"],
        ["500", "4500", "50", "5000", "0"],
        ["600", "5400", "50", "6000", "0"],
    ]),
]


def _run_case(args):
    """Generate one AU workload with ./genAUs *args* and benchmark every system on it."""
    subprocess.call(["./genAUs"] + args)
    for system in SYSTEMS:
        subprocess.call(["python", "run.py"], cwd=system)
    #subprocess.call(["python", "run.py"], cwd="Static-Bin")
    print("\n-------------------------------------------------\n")


# Drive all workloads; call/print sequence matches the original flat script.
for title, cases in WORKLOADS:
    print("\n\n\n---------------- %s ----------------\n" % title)
    for args in cases:
        _run_case(args)
| 40.452381
| 73
| 0.534579
| 856
| 6,796
| 4.244159
| 0.064252
| 0.377649
| 0.440407
| 0.506468
| 0.959262
| 0.950454
| 0.9455
| 0.9455
| 0.9455
| 0.9455
| 0
| 0.034548
| 0.067245
| 6,796
| 167
| 74
| 40.694611
| 0.538571
| 0.132872
| 0
| 0.813559
| 0
| 0
| 0.433799
| 0.155548
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008475
| 0
| 0.008475
| 0.161017
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0e53eefb00a151b2108cafb122e3c212edc3e7f0
| 4,257
|
py
|
Python
|
code/sentiment.py
|
JonaBenja/lad-assignment1
|
d4eeafda1032dd8dbe2b049b3008f3b4ad8c6c74
|
[
"Apache-2.0"
] | null | null | null |
code/sentiment.py
|
JonaBenja/lad-assignment1
|
d4eeafda1032dd8dbe2b049b3008f3b4ad8c6c74
|
[
"Apache-2.0"
] | null | null | null |
code/sentiment.py
|
JonaBenja/lad-assignment1
|
d4eeafda1032dd8dbe2b049b3008f3b4ad8c6c74
|
[
"Apache-2.0"
] | null | null | null |
from polyglot.text import Text
from statistics import mean
import pandas as pd
from collections import defaultdict, Counter
import matplotlib.pyplot as plt
import numpy as np


def article_sentiments(tsv_file, language_code, publisher_aliases=None):
    """Read articles from *tsv_file* and score their sentiment per publisher.

    For each article, non-printable characters are stripped, the text is split
    into sentences with polyglot, and the mean sentence polarity is recorded
    under the article's publisher (after applying *publisher_aliases*, a
    mapping used to merge spelling variants such as 'la Repubblica').

    Returns a defaultdict mapping publisher -> list of per-article mean
    polarities.
    """
    aliases = publisher_aliases or {}
    sents_sentiment = defaultdict(list)
    content = pd.read_csv(tsv_file, sep="\t", keep_default_na=False, header=0, encoding='utf-8')
    for text, publisher in zip(content['Text'], content['Publisher']):
        publisher = aliases.get(publisher, publisher)
        text = ''.join(ch for ch in text if ch.isprintable())
        sentences = Text(text, hint_language_code=language_code).sentences
        sents_sentiment[publisher].append(float(mean(sent.polarity for sent in sentences)))
    return sents_sentiment


def plot_publisher_sentiment(sents_sentiment, title, out_path):
    """Bar-plot mean article sentiment for the 10 publishers with most articles.

    Each bar is annotated with the publisher's article count. The figure is
    shown interactively and then saved to *out_path*.
    """
    art_pub_sent = {pub: mean(vals) for pub, vals in sents_sentiment.items()}
    # Top 10 publishers by number of articles.
    top10 = sorted(sents_sentiment, key=lambda k: len(sents_sentiment[k]), reverse=True)[:10]
    sentiment = [art_pub_sent[pub] for pub in top10]
    x = np.arange(len(top10))  # the label locations
    width = 0.50  # the width of the bars
    fig, ax = plt.subplots(1, 1, figsize=(16, 6))
    rects = ax.bar(x - width/2, sentiment, width, label='Men')
    # Add some text for labels, title and custom x-axis tick labels, etc.
    ax.set_ylabel('SENTIMENT')
    ax.set_xlabel('PUBLISHER')
    ax.set_title(title)
    ax.set_xticks(x)
    ax.set_xticklabels(top10)
    # Attach the article count above each bar.
    for rect, publisher in zip(rects, top10):
        label = len(sents_sentiment[publisher])
        height = rect.get_height()
        ax.annotate('{}'.format(label),
                    xy=(rect.get_x() + rect.get_width() / 2, height),
                    xytext=(0, 3),  # 3 points vertical offset
                    textcoords="offset points",
                    ha='center', va='bottom')
    fig.tight_layout()
    plt.show()
    fig.savefig(out_path)


"""
DUTCH
"""
nl_sentiment = article_sentiments("../data/nl/decoded_nl_greta_overview.tsv", 'nl')
plot_publisher_sentiment(nl_sentiment,
                         'MEAN ARTICLE SENTIMENT OF DUTCH PUBLISHERS',
                         "../data/plots/nl_publisher_sentiment.png")

"""
ITALIAN
"""
# Merge the lowercase spelling variant into the canonical publisher name.
it_sentiment = article_sentiments("../data/it/it_greta_overview.tsv", 'it',
                                  {'la Repubblica': 'La Repubblica'})
plot_publisher_sentiment(it_sentiment,
                         'MEAN ARTICLE SENTIMENT OF ITALIAN PUBLISHERS',
                         "../data/plots/it_publisher_sentiment.png")
| 32.746154
| 94
| 0.698379
| 598
| 4,257
| 4.842809
| 0.247492
| 0.058011
| 0.047652
| 0.026243
| 0.861188
| 0.861188
| 0.861188
| 0.861188
| 0.861188
| 0.861188
| 0
| 0.015367
| 0.174536
| 4,257
| 130
| 95
| 32.746154
| 0.808765
| 0.12544
| 0
| 0.813953
| 0
| 0
| 0.110624
| 0.041621
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| false
| 0
| 0.069767
| 0
| 0.093023
| 0.023256
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0e7a792774694d631aa13835df29f4d31c3043a8
| 72
|
py
|
Python
|
Python 3/juego_aun_sin_nombre/nombredejuego.py
|
DarkShadow4/python
|
4cd94e0cf53ee06c9c31e9272572ca9656697c30
|
[
"MIT"
] | null | null | null |
Python 3/juego_aun_sin_nombre/nombredejuego.py
|
DarkShadow4/python
|
4cd94e0cf53ee06c9c31e9272572ca9656697c30
|
[
"MIT"
] | null | null | null |
Python 3/juego_aun_sin_nombre/nombredejuego.py
|
DarkShadow4/python
|
4cd94e0cf53ee06c9c31e9272572ca9656697c30
|
[
"MIT"
] | 1
|
2020-08-19T17:25:22.000Z
|
2020-08-19T17:25:22.000Z
|
import sys, pygame
# Main Game
def GetMovement():
    """Read and return the player's movement input.

    NOTE(review): stub — the body is empty and always returns None;
    implement before wiring this into the game loop.
    """
    pass
| 10.285714
| 18
| 0.569444
| 8
| 72
| 5.125
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.277778
| 72
| 6
| 19
| 12
| 0.788462
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
0e899df236b62cac8c3e686ca972c178ab27afa8
| 198
|
py
|
Python
|
temboo/core/Library/Stripe/Account/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Stripe/Account/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Stripe/Account/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Stripe.Account.RetrieveAccountDetails import RetrieveAccountDetails, RetrieveAccountDetailsInputSet, RetrieveAccountDetailsResultSet, RetrieveAccountDetailsChoreographyExecution
| 99
| 197
| 0.929293
| 11
| 198
| 16.727273
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035354
| 198
| 1
| 198
| 198
| 0.963351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7ebdaee9ca66e441cbb82f950e139148327a97d8
| 14,766
|
py
|
Python
|
test/python/pulse/test_utils.py
|
Sahar2/qiskit-terra
|
19fbaeb68f2b279c9748384e919e1d1b006860f2
|
[
"Apache-2.0"
] | null | null | null |
test/python/pulse/test_utils.py
|
Sahar2/qiskit-terra
|
19fbaeb68f2b279c9748384e919e1d1b006860f2
|
[
"Apache-2.0"
] | null | null | null |
test/python/pulse/test_utils.py
|
Sahar2/qiskit-terra
|
19fbaeb68f2b279c9748384e919e1d1b006860f2
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Test cases for the pulse utilities."""
import unittest
import numpy as np
from qiskit import pulse
from qiskit.pulse.cmd_def import CmdDef
from qiskit.pulse.commands import AcquireInstruction
from qiskit.pulse.exceptions import PulseError
from qiskit.test import QiskitTestCase
from qiskit.test.mock import FakeOpenPulse2Q
from qiskit.pulse.utils import add_implicit_acquires, align_measures
class TestAutoMerge(QiskitTestCase):
    """Test the helper function which aligns acquires."""

    def setUp(self):
        """Build a 2-qubit mock backend, its command defs, and a short sample pulse."""
        self.backend = FakeOpenPulse2Q()
        self.device = pulse.PulseChannelSpec.from_backend(self.backend)
        self.config = self.backend.configuration()
        self.defaults = self.backend.defaults()
        self.cmd_def = CmdDef.from_defaults(self.defaults.cmd_def,
                                            self.defaults.pulse_library)
        self.short_pulse = pulse.SamplePulse(samples=np.array([0.02739068], dtype=np.complex128),
                                             name='p0')

    def test_align_measures(self):
        """Test that one acquire is delayed to match the time of the later acquire."""
        acquire = pulse.Acquire(5)
        sched = pulse.Schedule(name='fake_experiment')
        sched = sched.insert(0, self.short_pulse(self.device.drives[0]))
        sched = sched.insert(1, acquire(self.device.acquires[0], self.device.memoryslots[0]))
        sched = sched.insert(10, acquire(self.device.acquires[1], self.device.memoryslots[1]))
        sched = align_measures([sched], self.cmd_def)[0]
        # Both acquires must now start at the later of the two original times.
        for time, inst in sched.instructions:
            if isinstance(inst, AcquireInstruction):
                self.assertEqual(time, 10)
        # An explicit align_time overrides the inferred alignment.
        sched = align_measures([sched], self.cmd_def, align_time=20)[0]
        for time, inst in sched.instructions:
            if isinstance(inst, AcquireInstruction):
                self.assertEqual(time, 20)

    def test_align_post_u3(self):
        """Test that acquires are scheduled no sooner than the duration of the longest X gate.
        """
        acquire = pulse.Acquire(5)
        sched = pulse.Schedule(name='fake_experiment')
        sched = sched.insert(0, self.short_pulse(self.device.drives[0]))
        sched = sched.insert(1, acquire(self.device.acquires[0], self.device.memoryslots[0]))
        sched = align_measures([sched], self.cmd_def)[0]
        for time, inst in sched.instructions:
            if isinstance(inst, AcquireInstruction):
                self.assertEqual(time, 4)
        # A larger explicit calibration duration pushes the acquire later.
        sched = align_measures([sched], self.cmd_def, max_calibration_duration=10)[0]
        for time, inst in sched.instructions:
            if isinstance(inst, AcquireInstruction):
                self.assertEqual(time, 10)

    def test_error_multi_acquire(self):
        """Test that an error is raised if multiple acquires occur on the same channel."""
        acquire = pulse.Acquire(5)
        sched = pulse.Schedule(name='fake_experiment')
        sched = sched.insert(0, self.short_pulse(self.device.drives[0]))
        sched = sched.insert(4, acquire(self.device.acquires[0], self.device.memoryslots[0]))
        sched = sched.insert(10, acquire(self.device.acquires[0], self.device.memoryslots[0]))
        with self.assertRaises(PulseError):
            align_measures([sched], self.cmd_def)

    def test_error_post_acquire_pulse(self):
        """Test that an error is raised if a pulse occurs on a channel after an acquire."""
        acquire = pulse.Acquire(5)
        sched = pulse.Schedule(name='fake_experiment')
        sched = sched.insert(0, self.short_pulse(self.device.drives[0]))
        sched = sched.insert(4, acquire(self.device.acquires[0], self.device.memoryslots[0]))
        # No error with separate channel
        sched = sched.insert(10, self.short_pulse(self.device.drives[1]))
        align_measures([sched], self.cmd_def)
        sched = sched.insert(10, self.short_pulse(self.device.drives[0]))
        with self.assertRaises(PulseError):
            align_measures([sched], self.cmd_def)

    def test_align_across_schedules(self):
        """Test that acquires are aligned together across multiple schedules."""
        acquire = pulse.Acquire(5)
        sched1 = pulse.Schedule(name='fake_experiment')
        sched1 = sched1.insert(0, self.short_pulse(self.device.drives[0]))
        sched1 = sched1.insert(10, acquire(self.device.acquires[0], self.device.memoryslots[0]))
        sched2 = pulse.Schedule(name='fake_experiment')
        sched2 = sched2.insert(3, self.short_pulse(self.device.drives[0]))
        sched2 = sched2.insert(25, acquire(self.device.acquires[0], self.device.memoryslots[0]))
        schedules = align_measures([sched1, sched2], self.cmd_def)
        for time, inst in schedules[0].instructions:
            if isinstance(inst, AcquireInstruction):
                self.assertEqual(time, 25)
        # BUG FIX: this loop previously re-checked schedules[0], so the second
        # schedule was never verified; it must inspect schedules[1].
        for time, inst in schedules[1].instructions:
            if isinstance(inst, AcquireInstruction):
                self.assertEqual(time, 25)
class TestAddImplicitAcquires(QiskitTestCase):
    """Test the helper function which makes implicit acquires explicit."""

    def setUp(self):
        """Create a 2-qubit mock backend and a schedule acquiring all qubits at t=5."""
        self.backend = FakeOpenPulse2Q()
        self.device = pulse.PulseChannelSpec.from_backend(self.backend)
        self.config = self.backend.configuration()
        self.defaults = self.backend.defaults()
        self.cmd_def = CmdDef.from_defaults(self.defaults.cmd_def,
                                            self.defaults.pulse_library)
        self.short_pulse = pulse.SamplePulse(samples=np.array([0.02739068], dtype=np.complex128),
                                             name='p0')
        acquire = pulse.Acquire(5)
        sched = pulse.Schedule(name='fake_experiment')
        sched = sched.insert(0, self.short_pulse(self.device.drives[0]))
        self.sched = sched.insert(5, acquire(self.device.acquires, self.device.memoryslots))

    def _acquired_qubits(self, sched):
        """Return the set of qubit indices acquired anywhere in *sched*."""
        qubits = set()
        for _, inst in sched.instructions:
            if isinstance(inst, AcquireInstruction):
                qubits.update(a.index for a in inst.acquires)
        return qubits

    def test_add_implicit(self):
        """Test that implicit acquires are made explicit according to the meas map."""
        sched = add_implicit_acquires(self.sched, [[0, 1]])
        self.assertEqual(self._acquired_qubits(sched), {0, 1})

    def test_add_across_meas_map_sublists(self):
        """Test that implicit acquires in separate meas map sublists are all added."""
        sched = add_implicit_acquires(self.sched, [[0, 2], [1, 3]])
        self.assertEqual(self._acquired_qubits(sched), {0, 1, 2, 3})

    def test_dont_add_all(self):
        """Test that acquires aren't added if no qubits in the sublist aren't being acquired."""
        sched = add_implicit_acquires(self.sched, [[4, 5], [0, 2], [1, 3]])
        self.assertEqual(self._acquired_qubits(sched), {0, 1, 2, 3})
class TestAutoMergeWithDeviceSpecification(QiskitTestCase):
    """Test the helper function which aligns acquires."""
    # TODO: This test will be deprecated in future update.

    def setUp(self):
        """Build a 2-qubit mock backend via DeviceSpecification plus a short pulse."""
        self.backend = FakeOpenPulse2Q()
        self.device = pulse.DeviceSpecification.create_from(self.backend)
        self.config = self.backend.configuration()
        self.defaults = self.backend.defaults()
        self.cmd_def = CmdDef.from_defaults(self.defaults.cmd_def,
                                            self.defaults.pulse_library)
        self.short_pulse = pulse.SamplePulse(samples=np.array([0.02739068], dtype=np.complex128),
                                             name='p0')

    def test_align_measures(self):
        """Test that one acquire is delayed to match the time of the later acquire."""
        acquire = pulse.Acquire(5)
        sched = pulse.Schedule(name='fake_experiment')
        sched = sched.insert(0, self.short_pulse(self.device.q[0].drive))
        sched = sched.insert(1, acquire(self.device.q[0], self.device.mem[0]))
        sched = sched.insert(10, acquire(self.device.q[1], self.device.mem[1]))
        sched = align_measures([sched], self.cmd_def)[0]
        # Both acquires must now start at the later of the two original times.
        for time, inst in sched.instructions:
            if isinstance(inst, AcquireInstruction):
                self.assertEqual(time, 10)
        # An explicit align_time overrides the inferred alignment.
        sched = align_measures([sched], self.cmd_def, align_time=20)[0]
        for time, inst in sched.instructions:
            if isinstance(inst, AcquireInstruction):
                self.assertEqual(time, 20)

    def test_align_post_u3(self):
        """Test that acquires are scheduled no sooner than the duration of the longest X gate.
        """
        acquire = pulse.Acquire(5)
        sched = pulse.Schedule(name='fake_experiment')
        sched = sched.insert(0, self.short_pulse(self.device.q[0].drive))
        sched = sched.insert(1, acquire(self.device.q[0], self.device.mem[0]))
        sched = align_measures([sched], self.cmd_def)[0]
        for time, inst in sched.instructions:
            if isinstance(inst, AcquireInstruction):
                self.assertEqual(time, 4)
        # A larger explicit calibration duration pushes the acquire later.
        sched = align_measures([sched], self.cmd_def, max_calibration_duration=10)[0]
        for time, inst in sched.instructions:
            if isinstance(inst, AcquireInstruction):
                self.assertEqual(time, 10)

    def test_error_multi_acquire(self):
        """Test that an error is raised if multiple acquires occur on the same channel."""
        acquire = pulse.Acquire(5)
        sched = pulse.Schedule(name='fake_experiment')
        sched = sched.insert(0, self.short_pulse(self.device.q[0].drive))
        sched = sched.insert(4, acquire(self.device.q[0], self.device.mem[0]))
        sched = sched.insert(10, acquire(self.device.q[0], self.device.mem[0]))
        with self.assertRaises(PulseError):
            align_measures([sched], self.cmd_def)

    def test_error_post_acquire_pulse(self):
        """Test that an error is raised if a pulse occurs on a channel after an acquire."""
        acquire = pulse.Acquire(5)
        sched = pulse.Schedule(name='fake_experiment')
        sched = sched.insert(0, self.short_pulse(self.device.q[0].drive))
        sched = sched.insert(4, acquire(self.device.q[0], self.device.mem[0]))
        # No error with separate channel
        sched = sched.insert(10, self.short_pulse(self.device.q[1].drive))
        align_measures([sched], self.cmd_def)
        sched = sched.insert(10, self.short_pulse(self.device.q[0].drive))
        with self.assertRaises(PulseError):
            align_measures([sched], self.cmd_def)

    def test_align_across_schedules(self):
        """Test that acquires are aligned together across multiple schedules."""
        acquire = pulse.Acquire(5)
        sched1 = pulse.Schedule(name='fake_experiment')
        sched1 = sched1.insert(0, self.short_pulse(self.device.q[0].drive))
        sched1 = sched1.insert(10, acquire(self.device.q[0], self.device.mem[0]))
        sched2 = pulse.Schedule(name='fake_experiment')
        sched2 = sched2.insert(3, self.short_pulse(self.device.q[0].drive))
        sched2 = sched2.insert(25, acquire(self.device.q[0], self.device.mem[0]))
        schedules = align_measures([sched1, sched2], self.cmd_def)
        for time, inst in schedules[0].instructions:
            if isinstance(inst, AcquireInstruction):
                self.assertEqual(time, 25)
        # BUG FIX: this loop previously re-checked schedules[0], so the second
        # schedule was never verified; it must inspect schedules[1].
        for time, inst in schedules[1].instructions:
            if isinstance(inst, AcquireInstruction):
                self.assertEqual(time, 25)
class TestAddImplicitAcquiresWithDeviceSpecification(QiskitTestCase):
    """Test the helper function which makes implicit acquires explicit."""
    # TODO: This test will be deprecated in future update.

    def setUp(self):
        """Create a 2-qubit mock backend and a schedule acquiring all qubits at t=5."""
        self.backend = FakeOpenPulse2Q()
        self.device = pulse.DeviceSpecification.create_from(self.backend)
        self.config = self.backend.configuration()
        self.defaults = self.backend.defaults()
        self.cmd_def = CmdDef.from_defaults(self.defaults.cmd_def,
                                            self.defaults.pulse_library)
        self.short_pulse = pulse.SamplePulse(samples=np.array([0.02739068], dtype=np.complex128),
                                             name='p0')
        acquire = pulse.Acquire(5)
        sched = pulse.Schedule(name='fake_experiment')
        sched = sched.insert(0, self.short_pulse(self.device.q[0].drive))
        self.sched = sched.insert(5, acquire(self.device.q, self.device.mem))

    def _acquired_qubits(self, sched):
        """Return the set of qubit indices acquired anywhere in *sched*."""
        qubits = set()
        for _, inst in sched.instructions:
            if isinstance(inst, AcquireInstruction):
                qubits.update(a.index for a in inst.acquires)
        return qubits

    def test_add_implicit(self):
        """Test that implicit acquires are made explicit according to the meas map."""
        sched = add_implicit_acquires(self.sched, [[0, 1]])
        self.assertEqual(self._acquired_qubits(sched), {0, 1})

    def test_add_across_meas_map_sublists(self):
        """Test that implicit acquires in separate meas map sublists are all added."""
        sched = add_implicit_acquires(self.sched, [[0, 2], [1, 3]])
        self.assertEqual(self._acquired_qubits(sched), {0, 1, 2, 3})

    def test_dont_add_all(self):
        """Test that acquires aren't added if no qubits in the sublist aren't being acquired."""
        sched = add_implicit_acquires(self.sched, [[4, 5], [0, 2], [1, 3]])
        self.assertEqual(self._acquired_qubits(sched), {0, 1, 2, 3})
# Allow running this test module directly with `python test_utils.py`.
if __name__ == '__main__':
    unittest.main()
| 49.885135
| 97
| 0.652851
| 1,869
| 14,766
| 5.05511
| 0.107544
| 0.061389
| 0.047417
| 0.034293
| 0.900614
| 0.900614
| 0.900614
| 0.893099
| 0.872248
| 0.863146
| 0
| 0.025451
| 0.233645
| 14,766
| 295
| 98
| 50.054237
| 0.809473
| 0.144995
| 0
| 0.850679
| 0
| 0
| 0.018074
| 0
| 0
| 0
| 0
| 0.00678
| 0.099548
| 1
| 0.090498
| false
| 0
| 0.040724
| 0
| 0.149321
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ec2ef9277ea8e85846f07500d7f39944caaee4b
| 462
|
py
|
Python
|
src/cronos/constants.py
|
fentas/staketaxcsv
|
ad37a32d8864111dbf88e926b80eb4ccacb921c6
|
[
"MIT"
] | null | null | null |
src/cronos/constants.py
|
fentas/staketaxcsv
|
ad37a32d8864111dbf88e926b80eb4ccacb921c6
|
[
"MIT"
] | null | null | null |
src/cronos/constants.py
|
fentas/staketaxcsv
|
ad37a32d8864111dbf88e926b80eb4ccacb921c6
|
[
"MIT"
] | null | null | null |
# todo: make this configurable
# Exchange label used when reporting Cronos-chain transactions.
EXCHANGE_CRONOS_BLOCKCHAIN = "Cronos"
# Ticker symbol of the chain's native token.
CUR_CRONOS = "CRO"
# Divisor for converting base units to whole tokens.
# NOTE(review): the name says MILLION but the value is 1e8 (100 million) —
# confirm whether this is the intended basecro-per-CRO factor or a misnamed
# constant; callers depend on both the name and the value, so neither is
# changed here.
MILLION = 100000000.0
# Map of IBC denom hashes to human-readable currency symbols.
CURRENCIES = {
    "ibc/14F9BC3E44B8A9C1BE1FB08980FAB87034C9905EF17CF2F5008FC085218811CC": "OSMO",
    "ibc/EB2CED20AB0466F18BE49285E56B31306D4C60438A022EA995BA65D5E3CF7E09": "SCRT",
    "ibc/FA0006F056DB6719B8C16C551FC392B62F5729978FC0B125AC9A432DBB2AA1A5": "ATOM",
    "ibc/E7D5E9D0E9BF8B7354929A817DD28D4D017E745F638954764AA88522A7A409EC": "BTSG",
}
| 46.2
| 83
| 0.822511
| 27
| 462
| 13.962963
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.410926
| 0.088745
| 462
| 10
| 84
| 46.2
| 0.484561
| 0.060606
| 0
| 0
| 0
| 0
| 0.685912
| 0.628176
| 0
| 1
| 0
| 0.1
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7ed92b31c5181b2aa0b59a07c0666dce2d82d744
| 94,655
|
py
|
Python
|
figureFunctions.py
|
conbraun/hodgkinHuxleyModel_Research
|
336ca3276b681ee685fac70bd087f90a8c687b23
|
[
"MIT"
] | null | null | null |
figureFunctions.py
|
conbraun/hodgkinHuxleyModel_Research
|
336ca3276b681ee685fac70bd087f90a8c687b23
|
[
"MIT"
] | null | null | null |
figureFunctions.py
|
conbraun/hodgkinHuxleyModel_Research
|
336ca3276b681ee685fac70bd087f90a8c687b23
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib as mpl
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import math
def recallQuestion1Data():
data_1 = np.array([-124.07267743966287, -96.50906793839974, -83.69271938498048, -75.25080640630922, -68.94545843715301, -63.910735318666404, -59.719435437194456, -56.12910988368529, -52.988831887629374, -50.198238696227584, -47.68719690502624, -45.40476242292407, -43.3127613301936, -41.38184893596217, -39.58898078868387, -37.91572897566998, -36.34712581743123, -34.87084835153414, -33.476629760456895, -32.15582593597418, -30.90109055942565, -29.706127643742555, -28.56550038245245, -27.47448159722701, -26.428935372861428, -25.425222386373253, -24.46012345689816, -23.530777263921518, -22.634629195016956, -21.769389018638716, -20.93299561587702, -20.12358740379266, -19.339477382484127, -18.579131965000617, -17.841152921710375, -17.124261905376745, -16.427287126310112, -15.749151828975075, -15.088864285261225, -14.445509071244953, -13.818239434701226, -13.206270593630295, -12.608873832889657, -12.025371287424246, -11.455131318499712, -10.897564403808156, -10.352119474437547, -9.818280641520703, -9.295564264117756, -8.783516316173266, -8.281710016941584, -7.7897436937496565, -7.307238850303566, -6.8338384172210995, -6.369205164653484, -5.913020259202737, -5.4649819496737955, -5.024804368219209, -4.5922164347294965, -4.166960854214552, -3.748793197636934, -3.3374810581736525, -2.9328032754857407, -2.534549221592515, -2.142518142514774, -1.7565185506000627, -1.3763676628516237, -1.0018908812173712, -0.6329213110621855, -0.2692993145610021, 0.08912790401522501, 0.4425066838006714, 0.7909772660655643, 1.1346741283715684, 1.4737262961500273, 1.808257633502012, 2.138387114866274, 2.464229079050824, 2.7858934669716087, 3.103486044342034, 3.4171086104560837, 3.726859194062398, 4.032832237313563, 4.335118768624652, 4.633806565238834, 4.928980306228366, 5.220721716614433, 5.50910970318433, 5.794220482609578, 6.07612770233808, 6.354902554807213, 6.630613885356588, 6.903328294284393, 7.173110233397077, 7.440022097455778, 7.704124310780899, 7.965475409369538, 8.224132118753605, 
8.480149427923747, 8.733580659465817])
data_1 = np.reshape(data_1, (1, data_1.shape[0]))
data_2 = np.array([-118.47412208518406, -94.50083813752971, -82.46859960536291, -74.37038348895803, -68.25803246075066, -63.346903082754366, -59.24152615719647, -55.71439831618992, -52.6225556220398, -49.8702669855909, -47.39027653321572, -45.133522174766064, -43.063112717246696, -41.150607864956356, -39.37361913503261, -37.71420597578691, -36.157770363155066, -34.692274796309036, -33.307676404160425, -31.99550923333333, -30.748570463006413, -29.56068098370402, -28.42650014550311, -27.341380602869016, -26.301253272382763, -25.302535205019723, -24.342055104440274, -23.416992585474922, -22.52482823816969, -21.66330227033244, -20.830380018452153, -20.024223002252814, -19.243164486487824, -18.48568873371017, -17.750413298029095, -17.0360738405902, -16.34151104756275, -15.665659310664672, -15.007536892832611, -14.36623735097204, -13.740922028081036, -13.130813458275345, -12.535189554973037, -11.953378473040049, -11.3847540532906, -10.82873177190609, -10.284765129033977, -9.752342420661215, -9.230983845977008, -8.72023890912491, -8.21968408028814, -7.72892068555418, -7.24757299912546, -6.77528651527263, -6.311726379846237, -5.85657596408734, -5.409535565573106, -4.970321222819858, -4.538663631913077, -4.114307154731066, -3.6970089097106467, -3.2865379369235415, -2.8826744304471257, -2.4852090314602053, -2.0939421765282313, -1.7086834958894437, -1.3292512572769999, -0.9554718511489688, -0.5871793137152751, -0.22421488446800392, 0.13357340473564433, 0.48633111170405763, 0.8341977394507037, 1.177307067424456, 1.515787460394455, 1.8497621567729383, 2.1793495379986823, 2.5046633804614125, 2.82581309129636, 3.1429039292703846, 3.456037211896857, 3.765310509781094, 4.070817829123122, 4.372649783249865, 4.670893753951736, 4.965634043346663, 5.256952016909033, 5.544926238314951, 5.829632596595089, 6.111144426223088, 6.389532620476846, 6.664865738593644, 6.9372101071266385, 7.206629915863896, 7.473187308621153, 7.73694246928, 7.997953703343554, 8.256277515327586, 
8.511968682150146, 8.765080322860738])
data_2 = np.reshape(data_2, (1, data_2.shape[0]))
data_3 = np.array([-113.89976928852847, -92.64150775643049, -81.30143666877629, -73.51981123390492, -67.58894004643618, -62.79546377312594, -58.77255028609312, -55.306430325842285, -52.26154970168583, -49.546527286135536, -47.096828993896274, -44.86518294737262, -42.81592373654297, -40.92147861409316, -39.160090368799736, -37.51428875396184, -35.96983331514433, -34.51496325552323, -33.13985317911433, -31.836210421402942, -30.59697194377966, -29.41607263757432, -28.28826575397067, -27.208981991673422, -26.174217666336844, -25.180445043912826, -24.224539767369446, -23.30372160935736, -22.415505718377048, -21.55766220437363, -20.728182408322056, -19.925250571745746, -19.14721990075246, -18.392592231381087, -17.660000664710132, -16.94819466635816, -16.256027221792305, -15.58244371664937, -14.926472271098968, -14.287215305820496, -13.663842156099985, -13.05558258153167, -12.461721043922308, -11.881591646969804, -11.314573647825226, -10.760087464796625, -10.217591116757923, -9.686577039384673, -9.166569231399862, -8.657120690687657, -8.157811105423715, -7.6682447706192445, -7.188048704071618, -6.716870939185014, -6.254378975226421, -5.800258367835567, -5.354211444820795, -4.9159561341688915, -4.485224892658339, -4.061763724855285, -3.6453312836225757, -3.2356980439555367, -2.8326455432915743, -2.435965681805107, -2.045460077198977, -1.6609394689299681, -1.2822231674146611, -0.9091385441754863, -0.541520559356804, -0.17921132335150877, 0.177940310365061, 0.5300791238264208, 0.8773438869634704, 1.2198676859343964, 1.557778229351966, 1.8911981341442035, 2.2202451926859714, 2.545032622633115, 2.8656693007813385, 3.1822599821947146, 3.49490550564412, 3.8037029864313823, 4.108745997484354, 4.410124739573787, 4.707926201405127, 5.00223431037435, 5.293130074510425, 5.580691716353152, 5.864994799197597, 6.146112346304187, 6.424114953444573, 6.699070895350488, 6.971046226340857, 7.240104875595417, 7.506308737324954, 7.769717756230097, 8.030390008565366, 8.288381778953365, 
8.543747633319477, 8.796540488228667])
data_3 = np.reshape(data_3, (1, data_3.shape[0]))
data_4 = np.array([-110.0322091065856, -90.91051258400996, -80.18616512324756, -72.69713167596393, -66.9372286362957, -62.25588429757933, -58.31217996426342, -54.90499010409803, -51.90566460569442, -49.226911771572176, -46.80677398773253, -44.59968334182487, -42.571146394040085, -40.694422959494965, -38.94836355454164, -37.31595192189148, -35.78329358154169, -34.33889601650266, -32.97314506679706, -31.677916655984244, -30.44628393169545, -29.27229299711588, -28.150788814958986, -27.077278389550365, -26.04782204085272, -25.058946120784785, -24.10757228934766, -23.190959718001604, -22.30665748432924, -21.452465075012103, -20.626399394121275, -19.826667031968803, -19.0516408190757, -18.299839894341574, -17.569912673502124, -16.860622226323265, -16.170833664229793, -15.499503216008943, -14.845668727530763, -14.208441367958994, -13.586998363724325, -12.980576610582407, -12.388467039790742, -11.81000963380661, -11.244589003851045, -10.691630454813943, -10.150596474559634, -9.620983593923064, -9.102319571211877, -8.594160861924848, -8.096090339737621, -7.607715239223983, -7.128665295059333, -6.658591055613759, -6.197162351582722, -5.744066902927773, -5.299009049401345, -4.8617085917503, -4.43189973210564, -4.009330103694082, -3.5937598808867137, -3.184960961759366, -2.7827162161767367, -2.3868187932296907, -1.9970714824593134, -1.6132861239386007, -1.2352830628049976, -0.8628906442684673, -0.4959447455609305, -0.13428834161774914, 0.2222288983799772, 0.5737509861938779, 0.9204159637995737, 1.2623562288501582, 1.5996988382612498, 1.9325657916536574, 2.261074296236772, 2.585337014585185, 2.9054622965870713, 3.221554396799089, 3.5337136782617784, 3.8420368038106987, 4.146616915764681, 4.4475438048145515, 4.744904068964963, 5.038781263079005, 5.329256039851981, 5.616406282659175, 5.90030723092305, 6.1810315984191035, 6.458649685111632, 6.73322948278144, 7.004836775013249, 7.273535231768653, 7.53938649899603, 7.802450283508166, 8.06278443345986, 8.320445014745927, 
8.57548638340308, 8.82796125451396])
data_4 = np.reshape(data_4, (1, data_4.shape[0]))
data_5 = np.array([-106.68197608953929, -89.2912747393856, -79.11836658831197, -71.90057338915594, -66.30201803475484, -61.727665238087354, -57.860105056088095, -54.50987203906052, -51.55475708702588, -48.91131668465519, -46.52003396848468, -44.33696388794061, -42.32873408705916, -40.4694037059372, -38.73840853353113, -37.119170688899395, -35.59813053743837, -34.164055736859, -32.807537346110266, -31.520615334475163, -30.29649555486, -29.12933261833011, -28.01406107279822, -26.946262537805865, -25.922059980059935, -24.938032736888374, -23.991147585792863, -23.07870235579858, -22.19827943849652, -21.347707183389407, -20.52502762550952, -19.728469338729894, -18.956424467262334, -18.207429187309934, -17.480147001388417, -16.773354386718903, -16.085928410287888, -15.416835995930949, -14.765124585823596, -14.129913984311921, -13.510389209259037, -12.905794204714073, -12.315426293591814, -11.738631268180844, -11.174799032302447, -10.623359722632163, -10.083780247071754, -9.555561187641517, -9.038234022708414, -8.531358629899984, -8.034521036219461, -7.547331386740544, -7.06942210674306, -6.600446235630054, -6.140075913766622, -5.688001005635787, -5.24392784485531, -4.807578088348108, -4.378687668475528, -3.957005833227426, -3.5422942657222247, -3.134326275360371, -2.732886053628665, -2.337767988561728, -1.9487760323294838, -1.565723117096637, -1.1884306148383803, -0.8167278371416486, -0.4504515715454046, -0.08944565122552109, 0.2664394447894944, 0.6173469634454004, 0.96341422384532, 1.3047729398751795, 1.6415495211846416, 1.9738653542225042, 2.301837064902656, 2.625576764343779, 2.945192278914242, 3.260787365847507, 3.5724619154538897, 3.880312140900118, 4.184430756523138, 4.48490714544788, 4.7818275172720135, 5.075275056558951, 5.3653300627350635, 5.652070081978352, 5.935570031663256, 6.215902317856794, 6.493136946344602, 6.7673416275328675, 7.038581875724758, 7.306921103093621, 7.572420708631871, 7.835140162523647, 8.095137086035164, 8.352467327444213, 
8.607185033987372, 8.859342720264069])
data_5 = np.reshape(data_5, (1, data_5.shape[0]))
data_6 = np.array([-103.72686113746713, -87.77023458789827, -78.09416403056701, -71.12852851768808, -65.68249325972127, -61.21033807316015, -57.416031895353434, -54.120880082799886, -51.20868982663006, -48.59964213499398, -46.23653401879284, -44.07696696442767, -42.08864155062336, -40.24638465000139, -38.53019589794045, -36.923920842820884, -35.41432401139285, -33.99042543423185, -32.64301558546023, -31.36429408985034, -30.147596135069406, -28.987182217724428, -27.878074406030624, -26.815927290942035, -25.79692516406306, -24.817699275559267, -23.87526064260291, -22.96694502804889, -22.09036753624861, -21.243384876754977, -20.424063792699105, -19.63065448345461, -18.86156810249345, -18.115357602888302, -17.390701350160693, -16.68638903588076, -16.00130951525258, -15.334440261473814, -14.684838185790458, -14.051631616294502, -13.434013264138454, -12.831234035140652, -12.242597567290543, -11.667455394637525, -11.105202653153862, -10.555274257254656, -10.017141486519627, -9.490308930837463, -8.974311749676891, -8.468713207604681, -7.973102453288945, -7.487092513639189, -7.010318478479877, -6.542435854653029, -6.0831190707009375, -5.632060116027622, -5.188967300214292, -4.753564120022945, -4.3255882230585065, -3.9047904582997237, -3.4909340050123343, -3.083793572282679, -2.6831546625271794, -2.288812892845667, -1.900573368904116, -1.5182501065361869, -1.1416654967399449, -0.770649810241299, -0.40504073816135466, -0.04468296567575255, 0.3105722241469251, 0.6608673188424039, 1.006338919680034, 1.3471180614804232, 1.6833305110167494, 2.0150970456621433, 2.342533713883719, 2.665752078924568, 2.9848594470096064, 3.2999590812149484, 3.6111504020660843, 3.9185291758522314, 4.22218769155762, 4.522214927185723, 4.818696706264547, 5.111715845239804, 5.401352292320732, 5.687683258425855, 5.9707833407324955, 6.250724639333823, 6.527576867461938, 6.801407455700211, 7.072281650559251, 7.340262607788385, 7.605411480770529, 7.867787504275347, 8.127448073881258, 8.384448821385906, 
8.63884368625992, 8.890684983665595])
data_6 = np.reshape(data_6, (1, data_6.shape[0]))
data_7 = np.array([-101.08342073512574, -86.33615978735642, -77.11013678739499, -70.37953323777744, -65.07789825515522, -60.703462680351485, -56.97968213882281, -53.73782716759499, -50.86733111062007, -48.29179190958785, -45.95620173262307, -43.81963672302664, -41.85082480706658, -40.02533054515407, -38.32369696604857, -36.73017873265576, -35.231854271849095, -33.817988476481005, -32.47956563543057, -31.208940784875278, -29.999575183036146, -28.845832668668677, -27.742820824603967, -26.686265614275907, -25.672411367100572, -24.69794020045315, -23.759906514819146, -22.855683299829916, -21.982917784890674, -21.13949454779567, -20.32350462564499, -19.53321949266058, -18.767069013017338, -18.0236226612302, -17.301573446137937, -16.59972408408499, -15.91697505401298, -15.252314235297522, -14.604807883190347, -13.973592739706149, -13.35786911284564, -12.756894784920192, -12.1699796335365, -11.596480867554462, -11.03579879534804, -10.487373055866925, -9.950679252708067, -9.425225940794506, -8.910551922229255, -8.40622381391532, -7.911833854848678, -7.426997925395408, -6.95135375428648, -6.484559292436984, -6.026291235320773, -5.576243677888709, -5.13412688800598, -4.699666186091338, -4.27260092016126, -3.8526835265997055, -3.439678668300962, -3.033362442549777, -2.6335216520887994, -2.2399531333015266, -1.8524631363377397, -1.4708667523298864, -1.094987383563489, -0.7246562527309871, -0.3597119478811818, -8e-323, 0.3546275095585562, 0.7043123142792782, 1.0491903025904856, 1.3893918349123076, 1.7250420394849975, 2.0562610886868424, 2.38316445733086, 2.7058631643594713, 3.024463999188179, 3.3390697338662076, 3.649779322081853, 3.956688086017451, 4.259887891883186, 4.559467315017958, 4.855511795179433, 5.1481037828642116, 5.437322877114627, 5.7232459555121835, 6.005947296867183, 6.285498697007687, 6.561969578256842, 6.835427092911477, 7.105936221129542, 7.373559863634523, 7.638358929487921, 7.900392419311649, 8.159717504176724, 8.416389600488062, 8.670462441039813, 
8.921988142560174])
data_7 = np.reshape(data_7, (1, data_7.shape[0]))
data_8 = np.array([-98.69213801900612, -84.97964139655502, -76.16325163630101, -69.65225105190164, -64.48753034409458, -60.206625086705806, -56.55079171897674, -53.360534665320735, -50.53055452928068, -47.98767329394963, -45.67896710414232, -43.56491901647111, -41.615241117541146, -39.80620706852079, -38.118883758466026, -36.53792125091425, -35.05070201485628, -33.64672857196108, -32.31717362126786, -31.054543506245167, -29.852422394090663, -28.705274997978044, -27.608292466919394, -26.55727058156438, -25.548512455578223, -24.578750054234952, -23.645080325385234, -22.74491279497351, -21.87592624280514, -21.03603263378598, -20.223346893443736, -19.436161427351976, -18.672924517580338, -17.93222190951847, -17.21276103978848, -16.51335746319109, -15.832923120814288, -15.17045615747332, -14.525032049377408, -13.895795844512882, -13.281955352674677, -12.682775148750075, -12.09757127565642, -11.525706550954677, -10.966586396711923, -10.419655123813195, -9.884392612840326, -9.360311341588506, -8.846953716829612, -8.343889673533692, -7.8507145101340585, -7.367046932485959, -6.892527282793711, -6.426815933006803, -5.969591824531372, -5.520551138716669, -5.079406084205464, -4.645883789086332, -4.219725287112171, -3.8006845886216087, -3.388527827759056, -2.9830324786402085, -2.5839866338352397, -2.191188339340408, -1.8044449808287717, -1.4235727164677978, -1.0483959521698702, -0.6787468554830687, -0.3144649047878734, 0.04460352925047536, 0.3986055726947619, 0.7476822102908283, 1.091968622579683, 1.4315945001951036, 1.7666843371699925, 2.0973577049132075, 2.4237295083589827, 2.745910225688791, 3.06400613282054, 3.3781195138826963, 3.6883488586474007, 3.9947890479236894, 4.297531527744459, 4.596664473195312, 4.892272942554765, 5.184439022516893, 5.473241964992087, 5.758758316146603, 6.04106203821465, 6.320224624478839, 6.596315207979029, 6.869400664241167, 7.139545708535306, 7.406812987911201, 7.671263168429734, 7.932955017779145, 8.191945483682684, 8.448289768282775, 
8.702041398755965, 8.953252294389838])
data_8 = np.reshape(data_8, (1, data_8.shape[0]))
data_9 = np.array([-96.50906793839974, -83.69271938498048, -75.25080640630922, -68.94545843715301, -63.910735318666404, -59.719435437194456, -56.12910988368529, -52.988831887629374, -50.198238696227584, -47.68719690502624, -45.40476242292407, -43.3127613301936, -41.38184893596217, -39.58898078868387, -37.91572897566998, -36.34712581743123, -34.87084835153414, -33.476629760456895, -32.15582593597418, -30.90109055942565, -29.706127643742555, -28.56550038245245, -27.47448159722701, -26.428935372861428, -25.425222386373253, -24.46012345689816, -23.530777263921518, -22.634629195016956, -21.769389018638716, -20.93299561587702, -20.12358740379266, -19.339477382484127, -18.579131965000617, -17.841152921710375, -17.124261905376745, -16.427287126310112, -15.749151828975075, -15.088864285261225, -14.445509071244953, -13.818239434701226, -13.206270593630295, -12.608873832889657, -12.025371287424246, -11.455131318499712, -10.897564403808156, -10.352119474437547, -9.818280641520703, -9.295564264117756, -8.783516316173266, -8.281710016941584, -7.7897436937496565, -7.307238850303566, -6.8338384172210995, -6.369205164653484, -5.913020259202737, -5.4649819496737955, -5.024804368219209, -4.5922164347294965, -4.166960854214552, -3.748793197636934, -3.3374810581736525, -2.9328032754857407, -2.534549221592515, -2.142518142514774, -1.7565185506000627, -1.3763676628516237, -1.0018908812173712, -0.6329213110621855, -0.2692993145610021, 0.08912790401522501, 0.4425066838006714, 0.7909772660655643, 1.1346741283715684, 1.4737262961500273, 1.808257633502012, 2.138387114866274, 2.464229079050824, 2.7858934669716087, 3.103486044342034, 3.4171086104560837, 3.726859194062398, 4.032832237313563, 4.335118768624652, 4.633806565238834, 4.928980306228366, 5.220721716614433, 5.50910970318433, 5.794220482609578, 6.07612770233808, 6.354902554807213, 6.630613885356588, 6.903328294284393, 7.173110233397077, 7.440022097455778, 7.704124310780899, 7.965475409369538, 8.224132118753605, 8.480149427923747, 
8.733580659465817, 8.984477536249694])
data_9 = np.reshape(data_9, (1, data_9.shape[0]))
data_10 = np.array([-94.50083813752971, -82.46859960536291, -74.37038348895803, -68.25803246075066, -63.346903082754366, -59.24152615719647, -55.71439831618992, -52.6225556220398, -49.8702669855909, -47.39027653321572, -45.133522174766064, -43.063112717246696, -41.150607864956356, -39.37361913503261, -37.71420597578691, -36.157770363155066, -34.692274796309036, -33.307676404160425, -31.99550923333333, -30.748570463006413, -29.56068098370402, -28.42650014550311, -27.341380602869016, -26.301253272382763, -25.302535205019723, -24.342055104440274, -23.416992585474922, -22.52482823816969, -21.66330227033244, -20.830380018452153, -20.024223002252814, -19.243164486487824, -18.48568873371017, -17.750413298029095, -17.0360738405902, -16.34151104756275, -15.665659310664672, -15.007536892832611, -14.36623735097204, -13.740922028081036, -13.130813458275345, -12.535189554973037, -11.953378473040049, -11.3847540532906, -10.82873177190609, -10.284765129033977, -9.752342420661215, -9.230983845977008, -8.72023890912491, -8.21968408028814, -7.72892068555418, -7.24757299912546, -6.77528651527263, -6.311726379846237, -5.85657596408734, -5.409535565573106, -4.970321222819858, -4.538663631913077, -4.114307154731066, -3.6970089097106467, -3.2865379369235415, -2.8826744304471257, -2.4852090314602053, -2.0939421765282313, -1.7086834958894437, -1.3292512572769999, -0.9554718511489688, -0.5871793137152751, -0.22421488446800392, 0.13357340473564433, 0.48633111170405763, 0.8341977394507037, 1.177307067424456, 1.515787460394455, 1.8497621567729383, 2.1793495379986823, 2.5046633804614125, 2.82581309129636, 3.1429039292703846, 3.456037211896857, 3.765310509781094, 4.070817829123122, 4.372649783249865, 4.670893753951736, 4.965634043346663, 5.256952016909033, 5.544926238314951, 5.829632596595089, 6.111144426223088, 6.389532620476846, 6.664865738593644, 6.9372101071266385, 7.206629915863896, 7.473187308621153, 7.73694246928, 7.997953703343554, 8.256277515327586, 8.511968682150146, 8.765080322860738, 
9.015663964869667])
data_10 = np.reshape(data_10, (1, data_10.shape[0]))
data_11 = np.array([-92.64150775643049, -81.30143666877629, -73.51981123390492, -67.58894004643618, -62.79546377312594, -58.77255028609312, -55.306430325842285, -52.26154970168583, -49.546527286135536, -47.096828993896274, -44.86518294737262, -42.81592373654297, -40.92147861409316, -39.160090368799736, -37.51428875396184, -35.96983331514433, -34.51496325552323, -33.13985317911433, -31.836210421402942, -30.59697194377966, -29.41607263757432, -28.28826575397067, -27.208981991673422, -26.174217666336844, -25.180445043912826, -24.224539767369446, -23.30372160935736, -22.415505718377048, -21.55766220437363, -20.728182408322056, -19.925250571745746, -19.14721990075246, -18.392592231381087, -17.660000664710132, -16.94819466635816, -16.256027221792305, -15.58244371664937, -14.926472271098968, -14.287215305820496, -13.663842156099985, -13.05558258153167, -12.461721043922308, -11.881591646969804, -11.314573647825226, -10.760087464796625, -10.217591116757923, -9.686577039384673, -9.166569231399862, -8.657120690687657, -8.157811105423715, -7.6682447706192445, -7.188048704071618, -6.716870939185014, -6.254378975226421, -5.800258367835567, -5.354211444820795, -4.9159561341688915, -4.485224892658339, -4.061763724855285, -3.6453312836225757, -3.2356980439555367, -2.8326455432915743, -2.435965681805107, -2.045460077198977, -1.6609394689299681, -1.2822231674146611, -0.9091385441754863, -0.541520559356804, -0.17921132335150877, 0.177940310365061, 0.5300791238264208, 0.8773438869634704, 1.2198676859343964, 1.557778229351966, 1.8911981341442035, 2.2202451926859714, 2.545032622633115, 2.8656693007813385, 3.1822599821947146, 3.49490550564412, 3.8037029864313823, 4.108745997484354, 4.410124739573787, 4.707926201405127, 5.00223431037435, 5.293130074510425, 5.580691716353152, 5.864994799197597, 6.146112346304187, 6.424114953444573, 6.699070895350488, 6.971046226340857, 7.240104875595417, 7.506308737324954, 7.769717756230097, 8.030390008565366, 8.288381778953365, 8.543747633319477, 
8.796540488228667, 9.046811676615397])
data_11 = np.reshape(data_11, (1, data_11.shape[0]))
data_12 = np.array([-90.91051258400996, -80.18616512324756, -72.69713167596393, -66.9372286362957, -62.25588429757933, -58.31217996426342, -54.90499010409803, -51.90566460569442, -49.226911771572176, -46.80677398773253, -44.59968334182487, -42.571146394040085, -40.694422959494965, -38.94836355454164, -37.31595192189148, -35.78329358154169, -34.33889601650266, -32.97314506679706, -31.677916655984244, -30.44628393169545, -29.27229299711588, -28.150788814958986, -27.077278389550365, -26.04782204085272, -25.058946120784785, -24.10757228934766, -23.190959718001604, -22.30665748432924, -21.452465075012103, -20.626399394121275, -19.826667031968803, -19.0516408190757, -18.299839894341574, -17.569912673502124, -16.860622226323265, -16.170833664229793, -15.499503216008943, -14.845668727530763, -14.208441367958994, -13.586998363724325, -12.980576610582407, -12.388467039790742, -11.81000963380661, -11.244589003851045, -10.691630454813943, -10.150596474559634, -9.620983593923064, -9.102319571211877, -8.594160861924848, -8.096090339737621, -7.607715239223983, -7.128665295059333, -6.658591055613759, -6.197162351582722, -5.744066902927773, -5.299009049401345, -4.8617085917503, -4.43189973210564, -4.009330103694082, -3.5937598808867137, -3.184960961759366, -2.7827162161767367, -2.3868187932296907, -1.9970714824593134, -1.6132861239386007, -1.2352830628049976, -0.8628906442684673, -0.4959447455609305, -0.13428834161774914, 0.2222288983799772, 0.5737509861938779, 0.9204159637995737, 1.2623562288501582, 1.5996988382612498, 1.9325657916536574, 2.261074296236772, 2.585337014585185, 2.9054622965870713, 3.221554396799089, 3.5337136782617784, 3.8420368038106987, 4.146616915764681, 4.4475438048145515, 4.744904068964963, 5.038781263079005, 5.329256039851981, 5.616406282659175, 5.90030723092305, 6.1810315984191035, 6.458649685111632, 6.73322948278144, 7.004836775013249, 7.273535231768653, 7.53938649899603, 7.802450283508166, 8.06278443345986, 8.320445014745927, 8.57548638340308, 
8.82796125451396, 9.077920767515655])
data_12 = np.reshape(data_12, (1, data_12.shape[0]))
data_13 = np.array([-89.2912747393856, -79.11836658831197, -71.90057338915594, -66.30201803475484, -61.727665238087354, -57.860105056088095, -54.50987203906052, -51.55475708702588, -48.91131668465519, -46.52003396848468, -44.33696388794061, -42.32873408705916, -40.4694037059372, -38.73840853353113, -37.119170688899395, -35.59813053743837, -34.164055736859, -32.807537346110266, -31.520615334475163, -30.29649555486, -29.12933261833011, -28.01406107279822, -26.946262537805865, -25.922059980059935, -24.938032736888374, -23.991147585792863, -23.07870235579858, -22.19827943849652, -21.347707183389407, -20.52502762550952, -19.728469338729894, -18.956424467262334, -18.207429187309934, -17.480147001388417, -16.773354386718903, -16.085928410287888, -15.416835995930949, -14.765124585823596, -14.129913984311921, -13.510389209259037, -12.905794204714073, -12.315426293591814, -11.738631268180844, -11.174799032302447, -10.623359722632163, -10.083780247071754, -9.555561187641517, -9.038234022708414, -8.531358629899984, -8.034521036219461, -7.547331386740544, -7.06942210674306, -6.600446235630054, -6.140075913766622, -5.688001005635787, -5.24392784485531, -4.807578088348108, -4.378687668475528, -3.957005833227426, -3.5422942657222247, -3.134326275360371, -2.732886053628665, -2.337767988561728, -1.9487760323294838, -1.565723117096637, -1.1884306148383803, -0.8167278371416486, -0.4504515715454046, -0.08944565122552109, 0.2664394447894944, 0.6173469634454004, 0.96341422384532, 1.3047729398751795, 1.6415495211846416, 1.9738653542225042, 2.301837064902656, 2.625576764343779, 2.945192278914242, 3.260787365847507, 3.5724619154538897, 3.880312140900118, 4.184430756523138, 4.48490714544788, 4.7818275172720135, 5.075275056558951, 5.3653300627350635, 5.652070081978352, 5.935570031663256, 6.215902317856794, 6.493136946344602, 6.7673416275328675, 7.038581875724758, 7.306921103093621, 7.572420708631871, 7.835140162523647, 8.095137086035164, 8.352467327444213, 8.607185033987372, 
8.859342720264069, 9.108991333223189])
data_13 = np.reshape(data_13, (1, data_13.shape[0]))
data_14 = np.array([-87.77023458789827, -78.09416403056701, -71.12852851768808, -65.68249325972127, -61.21033807316015, -57.416031895353434, -54.120880082799886, -51.20868982663006, -48.59964213499398, -46.23653401879284, -44.07696696442767, -42.08864155062336, -40.24638465000139, -38.53019589794045, -36.923920842820884, -35.41432401139285, -33.99042543423185, -32.64301558546023, -31.36429408985034, -30.147596135069406, -28.987182217724428, -27.878074406030624, -26.815927290942035, -25.79692516406306, -24.817699275559267, -23.87526064260291, -22.96694502804889, -22.09036753624861, -21.243384876754977, -20.424063792699105, -19.63065448345461, -18.86156810249345, -18.115357602888302, -17.390701350160693, -16.68638903588076, -16.00130951525258, -15.334440261473814, -14.684838185790458, -14.051631616294502, -13.434013264138454, -12.831234035140652, -12.242597567290543, -11.667455394637525, -11.105202653153862, -10.555274257254656, -10.017141486519627, -9.490308930837463, -8.974311749676891, -8.468713207604681, -7.973102453288945, -7.487092513639189, -7.010318478479877, -6.542435854653029, -6.0831190707009375, -5.632060116027622, -5.188967300214292, -4.753564120022945, -4.3255882230585065, -3.9047904582997237, -3.4909340050123343, -3.083793572282679, -2.6831546625271794, -2.288812892845667, -1.900573368904116, -1.5182501065361869, -1.1416654967399449, -0.770649810241299, -0.40504073816135466, -0.04468296567575255, 0.3105722241469251, 0.6608673188424039, 1.006338919680034, 1.3471180614804232, 1.6833305110167494, 2.0150970456621433, 2.342533713883719, 2.665752078924568, 2.9848594470096064, 3.2999590812149484, 3.6111504020660843, 3.9185291758522314, 4.22218769155762, 4.522214927185723, 4.818696706264547, 5.111715845239804, 5.401352292320732, 5.687683258425855, 5.9707833407324955, 6.250724639333823, 6.527576867461938, 6.801407455700211, 7.072281650559251, 7.340262607788385, 7.605411480770529, 7.867787504275347, 8.127448073881258, 8.384448821385906, 8.63884368625992, 
8.890684983665595, 9.140023469021038])
data_14 = np.reshape(data_14, (1, data_14.shape[0]))
data_15 = np.array([-86.33615978735642, -77.11013678739499, -70.37953323777744, -65.07789825515522, -60.703462680351485, -56.97968213882281, -53.73782716759499, -50.86733111062007, -48.29179190958785, -45.95620173262307, -43.81963672302664, -41.85082480706658, -40.02533054515407, -38.32369696604857, -36.73017873265576, -35.231854271849095, -33.817988476481005, -32.47956563543057, -31.208940784875278, -29.999575183036146, -28.845832668668677, -27.742820824603967, -26.686265614275907, -25.672411367100572, -24.69794020045315, -23.759906514819146, -22.855683299829916, -21.982917784890674, -21.13949454779567, -20.32350462564499, -19.53321949266058, -18.767069013017338, -18.0236226612302, -17.301573446137937, -16.59972408408499, -15.91697505401298, -15.252314235297522, -14.604807883190347, -13.973592739706149, -13.35786911284564, -12.756894784920192, -12.1699796335365, -11.596480867554462, -11.03579879534804, -10.487373055866925, -9.950679252708067, -9.425225940794506, -8.910551922229255, -8.40622381391532, -7.911833854848678, -7.426997925395408, -6.95135375428648, -6.484559292436984, -6.026291235320773, -5.576243677888709, -5.13412688800598, -4.699666186091338, -4.27260092016126, -3.8526835265997055, -3.439678668300962, -3.033362442549777, -2.6335216520887994, -2.2399531333015266, -1.8524631363377397, -1.4708667523298864, -1.094987383563489, -0.7246562527309871, -0.3597119478811818, -8e-323, 0.3546275095585562, 0.7043123142792782, 1.0491903025904856, 1.3893918349123076, 1.7250420394849975, 2.0562610886868424, 2.38316445733086, 2.7058631643594713, 3.024463999188179, 3.3390697338662076, 3.649779322081853, 3.956688086017451, 4.259887891883186, 4.559467315017958, 4.855511795179433, 5.1481037828642116, 5.437322877114627, 5.7232459555121835, 6.005947296867183, 6.285498697007687, 6.561969578256842, 6.835427092911477, 7.105936221129542, 7.373559863634523, 7.638358929487921, 7.900392419311649, 8.159717504176724, 8.416389600488062, 8.670462441039813, 8.921988142560174, 
9.171017269865834])
data_15 = np.reshape(data_15, (1, data_15.shape[0]))
data_16 = np.array([-84.97964139655502, -76.16325163630101, -69.65225105190164, -64.48753034409458, -60.206625086705806, -56.55079171897674, -53.360534665320735, -50.53055452928068, -47.98767329394963, -45.67896710414232, -43.56491901647111, -41.615241117541146, -39.80620706852079, -38.118883758466026, -36.53792125091425, -35.05070201485628, -33.64672857196108, -32.31717362126786, -31.054543506245167, -29.852422394090663, -28.705274997978044, -27.608292466919394, -26.55727058156438, -25.548512455578223, -24.578750054234952, -23.645080325385234, -22.74491279497351, -21.87592624280514, -21.03603263378598, -20.223346893443736, -19.436161427351976, -18.672924517580338, -17.93222190951847, -17.21276103978848, -16.51335746319109, -15.832923120814288, -15.17045615747332, -14.525032049377408, -13.895795844512882, -13.281955352674677, -12.682775148750075, -12.09757127565642, -11.525706550954677, -10.966586396711923, -10.419655123813195, -9.884392612840326, -9.360311341588506, -8.846953716829612, -8.343889673533692, -7.8507145101340585, -7.367046932485959, -6.892527282793711, -6.426815933006803, -5.969591824531372, -5.520551138716669, -5.079406084205464, -4.645883789086332, -4.219725287112171, -3.8006845886216087, -3.388527827759056, -2.9830324786402085, -2.5839866338352397, -2.191188339340408, -1.8044449808287717, -1.4235727164677978, -1.0483959521698702, -0.6787468554830687, -0.3144649047878734, 0.04460352925047536, 0.3986055726947619, 0.7476822102908283, 1.091968622579683, 1.4315945001951036, 1.7666843371699925, 2.0973577049132075, 2.4237295083589827, 2.745910225688791, 3.06400613282054, 3.3781195138826963, 3.6883488586474007, 3.9947890479236894, 4.297531527744459, 4.596664473195312, 4.892272942554765, 5.184439022516893, 5.473241964992087, 5.758758316146603, 6.04106203821465, 6.320224624478839, 6.596315207979029, 6.869400664241167, 7.139545708535306, 7.406812987911201, 7.671263168429734, 7.932955017779145, 8.191945483682684, 8.448289768282775, 8.702041398755965, 
8.953252294389838, 9.201972830362942])
data_16 = np.reshape(data_16, (1, data_16.shape[0]))
data_17 = np.array([-83.69271938498048, -75.25080640630922, -68.94545843715301, -63.910735318666404, -59.719435437194456, -56.12910988368529, -52.988831887629374, -50.198238696227584, -47.68719690502624, -45.40476242292407, -43.3127613301936, -41.38184893596217, -39.58898078868387, -37.91572897566998, -36.34712581743123, -34.87084835153414, -33.476629760456895, -32.15582593597418, -30.90109055942565, -29.706127643742555, -28.56550038245245, -27.47448159722701, -26.428935372861428, -25.425222386373253, -24.46012345689816, -23.530777263921518, -22.634629195016956, -21.769389018638716, -20.93299561587702, -20.12358740379266, -19.339477382484127, -18.579131965000617, -17.841152921710375, -17.124261905376745, -16.427287126310112, -15.749151828975075, -15.088864285261225, -14.445509071244953, -13.818239434701226, -13.206270593630295, -12.608873832889657, -12.025371287424246, -11.455131318499712, -10.897564403808156, -10.352119474437547, -9.818280641520703, -9.295564264117756, -8.783516316173266, -8.281710016941584, -7.7897436937496565, -7.307238850303566, -6.8338384172210995, -6.369205164653484, -5.913020259202737, -5.4649819496737955, -5.024804368219209, -4.5922164347294965, -4.166960854214552, -3.748793197636934, -3.3374810581736525, -2.9328032754857407, -2.534549221592515, -2.142518142514774, -1.7565185506000627, -1.3763676628516237, -1.0018908812173712, -0.6329213110621855, -0.2692993145610021, 0.08912790401522501, 0.4425066838006714, 0.7909772660655643, 1.1346741283715684, 1.4737262961500273, 1.808257633502012, 2.138387114866274, 2.464229079050824, 2.7858934669716087, 3.103486044342034, 3.4171086104560837, 3.726859194062398, 4.032832237313563, 4.335118768624652, 4.633806565238834, 4.928980306228366, 5.220721716614433, 5.50910970318433, 5.794220482609578, 6.07612770233808, 6.354902554807213, 6.630613885356588, 6.903328294284393, 7.173110233397077, 7.440022097455778, 7.704124310780899, 7.965475409369538, 8.224132118753605, 8.480149427923747, 8.733580659465817, 
8.984477536249694, 9.23289024474136])
data_17 = np.reshape(data_17, (1, data_17.shape[0]))
data_18 = np.array([-82.46859960536291, -74.37038348895803, -68.25803246075066, -63.346903082754366, -59.24152615719647, -55.71439831618992, -52.6225556220398, -49.8702669855909, -47.39027653321572, -45.133522174766064, -43.063112717246696, -41.150607864956356, -39.37361913503261, -37.71420597578691, -36.157770363155066, -34.692274796309036, -33.307676404160425, -31.99550923333333, -30.748570463006413, -29.56068098370402, -28.42650014550311, -27.341380602869016, -26.301253272382763, -25.302535205019723, -24.342055104440274, -23.416992585474922, -22.52482823816969, -21.66330227033244, -20.830380018452153, -20.024223002252814, -19.243164486487824, -18.48568873371017, -17.750413298029095, -17.0360738405902, -16.34151104756275, -15.665659310664672, -15.007536892832611, -14.36623735097204, -13.740922028081036, -13.130813458275345, -12.535189554973037, -11.953378473040049, -11.3847540532906, -10.82873177190609, -10.284765129033977, -9.752342420661215, -9.230983845977008, -8.72023890912491, -8.21968408028814, -7.72892068555418, -7.24757299912546, -6.77528651527263, -6.311726379846237, -5.85657596408734, -5.409535565573106, -4.970321222819858, -4.538663631913077, -4.114307154731066, -3.6970089097106467, -3.2865379369235415, -2.8826744304471257, -2.4852090314602053, -2.0939421765282313, -1.7086834958894437, -1.3292512572769999, -0.9554718511489688, -0.5871793137152751, -0.22421488446800392, 0.13357340473564433, 0.48633111170405763, 0.8341977394507037, 1.177307067424456, 1.515787460394455, 1.8497621567729383, 2.1793495379986823, 2.5046633804614125, 2.82581309129636, 3.1429039292703846, 3.456037211896857, 3.765310509781094, 4.070817829123122, 4.372649783249865, 4.670893753951736, 4.965634043346663, 5.256952016909033, 5.544926238314951, 5.829632596595089, 6.111144426223088, 6.389532620476846, 6.664865738593644, 6.9372101071266385, 7.206629915863896, 7.473187308621153, 7.73694246928, 7.997953703343554, 8.256277515327586, 8.511968682150146, 8.765080322860738, 9.015663964869667, 
9.263769606912087])
data_18 = np.reshape(data_18, (1, data_18.shape[0]))
data_19 = np.array([-81.30143666877629, -73.51981123390492, -67.58894004643618, -62.79546377312594, -58.77255028609312, -55.306430325842285, -52.26154970168583, -49.546527286135536, -47.096828993896274, -44.86518294737262, -42.81592373654297, -40.92147861409316, -39.160090368799736, -37.51428875396184, -35.96983331514433, -34.51496325552323, -33.13985317911433, -31.836210421402942, -30.59697194377966, -29.41607263757432, -28.28826575397067, -27.208981991673422, -26.174217666336844, -25.180445043912826, -24.224539767369446, -23.30372160935736, -22.415505718377048, -21.55766220437363, -20.728182408322056, -19.925250571745746, -19.14721990075246, -18.392592231381087, -17.660000664710132, -16.94819466635816, -16.256027221792305, -15.58244371664937, -14.926472271098968, -14.287215305820496, -13.663842156099985, -13.05558258153167, -12.461721043922308, -11.881591646969804, -11.314573647825226, -10.760087464796625, -10.217591116757923, -9.686577039384673, -9.166569231399862, -8.657120690687657, -8.157811105423715, -7.6682447706192445, -7.188048704071618, -6.716870939185014, -6.254378975226421, -5.800258367835567, -5.354211444820795, -4.9159561341688915, -4.485224892658339, -4.061763724855285, -3.6453312836225757, -3.2356980439555367, -2.8326455432915743, -2.435965681805107, -2.045460077198977, -1.6609394689299681, -1.2822231674146611, -0.9091385441754863, -0.541520559356804, -0.17921132335150877, 0.177940310365061, 0.5300791238264208, 0.8773438869634704, 1.2198676859343964, 1.557778229351966, 1.8911981341442035, 2.2202451926859714, 2.545032622633115, 2.8656693007813385, 3.1822599821947146, 3.49490550564412, 3.8037029864313823, 4.108745997484354, 4.410124739573787, 4.707926201405127, 5.00223431037435, 5.293130074510425, 5.580691716353152, 5.864994799197597, 6.146112346304187, 6.424114953444573, 6.699070895350488, 6.971046226340857, 7.240104875595417, 7.506308737324954, 7.769717756230097, 8.030390008565366, 8.288381778953365, 8.543747633319477, 8.796540488228667, 
9.046811676615397, 9.294611010411476])
data_19 = np.reshape(data_19, (1, data_19.shape[0]))
data_20 = np.array([-80.18616512324756, -72.69713167596393, -66.9372286362957, -62.25588429757933, -58.31217996426342, -54.90499010409803, -51.90566460569442, -49.226911771572176, -46.80677398773253, -44.59968334182487, -42.571146394040085, -40.694422959494965, -38.94836355454164, -37.31595192189148, -35.78329358154169, -34.33889601650266, -32.97314506679706, -31.677916655984244, -30.44628393169545, -29.27229299711588, -28.150788814958986, -27.077278389550365, -26.04782204085272, -25.058946120784785, -24.10757228934766, -23.190959718001604, -22.30665748432924, -21.452465075012103, -20.626399394121275, -19.826667031968803, -19.0516408190757, -18.299839894341574, -17.569912673502124, -16.860622226323265, -16.170833664229793, -15.499503216008943, -14.845668727530763, -14.208441367958994, -13.586998363724325, -12.980576610582407, -12.388467039790742, -11.81000963380661, -11.244589003851045, -10.691630454813943, -10.150596474559634, -9.620983593923064, -9.102319571211877, -8.594160861924848, -8.096090339737621, -7.607715239223983, -7.128665295059333, -6.658591055613759, -6.197162351582722, -5.744066902927773, -5.299009049401345, -4.8617085917503, -4.43189973210564, -4.009330103694082, -3.5937598808867137, -3.184960961759366, -2.7827162161767367, -2.3868187932296907, -1.9970714824593134, -1.6132861239386007, -1.2352830628049976, -0.8628906442684673, -0.4959447455609305, -0.13428834161774914, 0.2222288983799772, 0.5737509861938779, 0.9204159637995737, 1.2623562288501582, 1.5996988382612498, 1.9325657916536574, 2.261074296236772, 2.585337014585185, 2.9054622965870713, 3.221554396799089, 3.5337136782617784, 3.8420368038106987, 4.146616915764681, 4.4475438048145515, 4.744904068964963, 5.038781263079005, 5.329256039851981, 5.616406282659175, 5.90030723092305, 6.1810315984191035, 6.458649685111632, 6.73322948278144, 7.004836775013249, 7.273535231768653, 7.53938649899603, 7.802450283508166, 8.06278443345986, 8.320445014745927, 8.57548638340308, 8.82796125451396, 
9.077920767515655, 9.325414548475985])
data_20 = np.reshape(data_20, (1, data_20.shape[0]))
data_21 = np.array([-79.11836658831197, -71.90057338915594, -66.30201803475484, -61.727665238087354, -57.860105056088095, -54.50987203906052, -51.55475708702588, -48.91131668465519, -46.52003396848468, -44.33696388794061, -42.32873408705916, -40.4694037059372, -38.73840853353113, -37.119170688899395, -35.59813053743837, -34.164055736859, -32.807537346110266, -31.520615334475163, -30.29649555486, -29.12933261833011, -28.01406107279822, -26.946262537805865, -25.922059980059935, -24.938032736888374, -23.991147585792863, -23.07870235579858, -22.19827943849652, -21.347707183389407, -20.52502762550952, -19.728469338729894, -18.956424467262334, -18.207429187309934, -17.480147001388417, -16.773354386718903, -16.085928410287888, -15.416835995930949, -14.765124585823596, -14.129913984311921, -13.510389209259037, -12.905794204714073, -12.315426293591814, -11.738631268180844, -11.174799032302447, -10.623359722632163, -10.083780247071754, -9.555561187641517, -9.038234022708414, -8.531358629899984, -8.034521036219461, -7.547331386740544, -7.06942210674306, -6.600446235630054, -6.140075913766622, -5.688001005635787, -5.24392784485531, -4.807578088348108, -4.378687668475528, -3.957005833227426, -3.5422942657222247, -3.134326275360371, -2.732886053628665, -2.337767988561728, -1.9487760323294838, -1.565723117096637, -1.1884306148383803, -0.8167278371416486, -0.4504515715454046, -0.08944565122552109, 0.2664394447894944, 0.6173469634454004, 0.96341422384532, 1.3047729398751795, 1.6415495211846416, 1.9738653542225042, 2.301837064902656, 2.625576764343779, 2.945192278914242, 3.260787365847507, 3.5724619154538897, 3.880312140900118, 4.184430756523138, 4.48490714544788, 4.7818275172720135, 5.075275056558951, 5.3653300627350635, 5.652070081978352, 5.935570031663256, 6.215902317856794, 6.493136946344602, 6.7673416275328675, 7.038581875724758, 7.306921103093621, 7.572420708631871, 7.835140162523647, 8.095137086035164, 8.352467327444213, 8.607185033987372, 8.859342720264069, 
9.108991333223189, 9.35618031394396])
data_21 = np.reshape(data_21, (1, data_21.shape[0]))
data_22 = np.array([-78.09416403056701, -71.12852851768808, -65.68249325972127, -61.21033807316015, -57.416031895353434, -54.120880082799886, -51.20868982663006, -48.59964213499398, -46.23653401879284, -44.07696696442767, -42.08864155062336, -40.24638465000139, -38.53019589794045, -36.923920842820884, -35.41432401139285, -33.99042543423185, -32.64301558546023, -31.36429408985034, -30.147596135069406, -28.987182217724428, -27.878074406030624, -26.815927290942035, -25.79692516406306, -24.817699275559267, -23.87526064260291, -22.96694502804889, -22.09036753624861, -21.243384876754977, -20.424063792699105, -19.63065448345461, -18.86156810249345, -18.115357602888302, -17.390701350160693, -16.68638903588076, -16.00130951525258, -15.334440261473814, -14.684838185790458, -14.051631616294502, -13.434013264138454, -12.831234035140652, -12.242597567290543, -11.667455394637525, -11.105202653153862, -10.555274257254656, -10.017141486519627, -9.490308930837463, -8.974311749676891, -8.468713207604681, -7.973102453288945, -7.487092513639189, -7.010318478479877, -6.542435854653029, -6.0831190707009375, -5.632060116027622, -5.188967300214292, -4.753564120022945, -4.3255882230585065, -3.9047904582997237, -3.4909340050123343, -3.083793572282679, -2.6831546625271794, -2.288812892845667, -1.900573368904116, -1.5182501065361869, -1.1416654967399449, -0.770649810241299, -0.40504073816135466, -0.04468296567575255, 0.3105722241469251, 0.6608673188424039, 1.006338919680034, 1.3471180614804232, 1.6833305110167494, 2.0150970456621433, 2.342533713883719, 2.665752078924568, 2.9848594470096064, 3.2999590812149484, 3.6111504020660843, 3.9185291758522314, 4.22218769155762, 4.522214927185723, 4.818696706264547, 5.111715845239804, 5.401352292320732, 5.687683258425855, 5.9707833407324955, 6.250724639333823, 6.527576867461938, 6.801407455700211, 7.072281650559251, 7.340262607788385, 7.605411480770529, 7.867787504275347, 8.127448073881258, 8.384448821385906, 8.63884368625992, 8.890684983665595, 
9.140023469021038, 9.386908399347064])
data_22 = np.reshape(data_22, (1, data_22.shape[0]))
data_23 = np.array([-77.11013678739499, -70.37953323777744, -65.07789825515522, -60.703462680351485, -56.97968213882281, -53.73782716759499, -50.86733111062007, -48.29179190958785, -45.95620173262307, -43.81963672302664, -41.85082480706658, -40.02533054515407, -38.32369696604857, -36.73017873265576, -35.231854271849095, -33.817988476481005, -32.47956563543057, -31.208940784875278, -29.999575183036146, -28.845832668668677, -27.742820824603967, -26.686265614275907, -25.672411367100572, -24.69794020045315, -23.759906514819146, -22.855683299829916, -21.982917784890674, -21.13949454779567, -20.32350462564499, -19.53321949266058, -18.767069013017338, -18.0236226612302, -17.301573446137937, -16.59972408408499, -15.91697505401298, -15.252314235297522, -14.604807883190347, -13.973592739706149, -13.35786911284564, -12.756894784920192, -12.1699796335365, -11.596480867554462, -11.03579879534804, -10.487373055866925, -9.950679252708067, -9.425225940794506, -8.910551922229255, -8.40622381391532, -7.911833854848678, -7.426997925395408, -6.95135375428648, -6.484559292436984, -6.026291235320773, -5.576243677888709, -5.13412688800598, -4.699666186091338, -4.27260092016126, -3.8526835265997055, -3.439678668300962, -3.033362442549777, -2.6335216520887994, -2.2399531333015266, -1.8524631363377397, -1.4708667523298864, -1.094987383563489, -0.7246562527309871, -0.3597119478811818, -8e-323, 0.3546275095585562, 0.7043123142792782, 1.0491903025904856, 1.3893918349123076, 1.7250420394849975, 2.0562610886868424, 2.38316445733086, 2.7058631643594713, 3.024463999188179, 3.3390697338662076, 3.649779322081853, 3.956688086017451, 4.259887891883186, 4.559467315017958, 4.855511795179433, 5.1481037828642116, 5.437322877114627, 5.7232459555121835, 6.005947296867183, 6.285498697007687, 6.561969578256842, 6.835427092911477, 7.105936221129542, 7.373559863634523, 7.638358929487921, 7.900392419311649, 8.159717504176724, 8.416389600488062, 8.670462441039813, 8.921988142560174, 9.171017269865834, 
9.41759889686404])
data_23 = np.reshape(data_23, (1, data_23.shape[0]))
data_24 = np.array([-76.16325163630101, -69.65225105190164, -64.48753034409458, -60.206625086705806, -56.55079171897674, -53.360534665320735, -50.53055452928068, -47.98767329394963, -45.67896710414232, -43.56491901647111, -41.615241117541146, -39.80620706852079, -38.118883758466026, -36.53792125091425, -35.05070201485628, -33.64672857196108, -32.31717362126786, -31.054543506245167, -29.852422394090663, -28.705274997978044, -27.608292466919394, -26.55727058156438, -25.548512455578223, -24.578750054234952, -23.645080325385234, -22.74491279497351, -21.87592624280514, -21.03603263378598, -20.223346893443736, -19.436161427351976, -18.672924517580338, -17.93222190951847, -17.21276103978848, -16.51335746319109, -15.832923120814288, -15.17045615747332, -14.525032049377408, -13.895795844512882, -13.281955352674677, -12.682775148750075, -12.09757127565642, -11.525706550954677, -10.966586396711923, -10.419655123813195, -9.884392612840326, -9.360311341588506, -8.846953716829612, -8.343889673533692, -7.8507145101340585, -7.367046932485959, -6.892527282793711, -6.426815933006803, -5.969591824531372, -5.520551138716669, -5.079406084205464, -4.645883789086332, -4.219725287112171, -3.8006845886216087, -3.388527827759056, -2.9830324786402085, -2.5839866338352397, -2.191188339340408, -1.8044449808287717, -1.4235727164677978, -1.0483959521698702, -0.6787468554830687, -0.3144649047878734, 0.04460352925047536, 0.3986055726947619, 0.7476822102908283, 1.091968622579683, 1.4315945001951036, 1.7666843371699925, 2.0973577049132075, 2.4237295083589827, 2.745910225688791, 3.06400613282054, 3.3781195138826963, 3.6883488586474007, 3.9947890479236894, 4.297531527744459, 4.596664473195312, 4.892272942554765, 5.184439022516893, 5.473241964992087, 5.758758316146603, 6.04106203821465, 6.320224624478839, 6.596315207979029, 6.869400664241167, 7.139545708535306, 7.406812987911201, 7.671263168429734, 7.932955017779145, 8.191945483682684, 8.448289768282775, 8.702041398755965, 8.953252294389838, 
9.201972830362942, 9.448251898348738])
data_24 = np.reshape(data_24, (1, data_24.shape[0]))
data_25 = np.array([-75.25080640630922, -68.94545843715301, -63.910735318666404, -59.719435437194456, -56.12910988368529, -52.988831887629374, -50.198238696227584, -47.68719690502624, -45.40476242292407, -43.3127613301936, -41.38184893596217, -39.58898078868387, -37.91572897566998, -36.34712581743123, -34.87084835153414, -33.476629760456895, -32.15582593597418, -30.90109055942565, -29.706127643742555, -28.56550038245245, -27.47448159722701, -26.428935372861428, -25.425222386373253, -24.46012345689816, -23.530777263921518, -22.634629195016956, -21.769389018638716, -20.93299561587702, -20.12358740379266, -19.339477382484127, -18.579131965000617, -17.841152921710375, -17.124261905376745, -16.427287126310112, -15.749151828975075, -15.088864285261225, -14.445509071244953, -13.818239434701226, -13.206270593630295, -12.608873832889657, -12.025371287424246, -11.455131318499712, -10.897564403808156, -10.352119474437547, -9.818280641520703, -9.295564264117756, -8.783516316173266, -8.281710016941584, -7.7897436937496565, -7.307238850303566, -6.8338384172210995, -6.369205164653484, -5.913020259202737, -5.4649819496737955, -5.024804368219209, -4.5922164347294965, -4.166960854214552, -3.748793197636934, -3.3374810581736525, -2.9328032754857407, -2.534549221592515, -2.142518142514774, -1.7565185506000627, -1.3763676628516237, -1.0018908812173712, -0.6329213110621855, -0.2692993145610021, 0.08912790401522501, 0.4425066838006714, 0.7909772660655643, 1.1346741283715684, 1.4737262961500273, 1.808257633502012, 2.138387114866274, 2.464229079050824, 2.7858934669716087, 3.103486044342034, 3.4171086104560837, 3.726859194062398, 4.032832237313563, 4.335118768624652, 4.633806565238834, 4.928980306228366, 5.220721716614433, 5.50910970318433, 5.794220482609578, 6.07612770233808, 6.354902554807213, 6.630613885356588, 6.903328294284393, 7.173110233397077, 7.440022097455778, 7.704124310780899, 7.965475409369538, 8.224132118753605, 8.480149427923747, 8.733580659465817, 8.984477536249694, 
9.23289024474136, 9.478867495314548])
data_25 = np.reshape(data_25, (1, data_25.shape[0]))
data_26 = np.array([-74.37038348895803, -68.25803246075066, -63.346903082754366, -59.24152615719647, -55.71439831618992, -52.6225556220398, -49.8702669855909, -47.39027653321572, -45.133522174766064, -43.063112717246696, -41.150607864956356, -39.37361913503261, -37.71420597578691, -36.157770363155066, -34.692274796309036, -33.307676404160425, -31.99550923333333, -30.748570463006413, -29.56068098370402, -28.42650014550311, -27.341380602869016, -26.301253272382763, -25.302535205019723, -24.342055104440274, -23.416992585474922, -22.52482823816969, -21.66330227033244, -20.830380018452153, -20.024223002252814, -19.243164486487824, -18.48568873371017, -17.750413298029095, -17.0360738405902, -16.34151104756275, -15.665659310664672, -15.007536892832611, -14.36623735097204, -13.740922028081036, -13.130813458275345, -12.535189554973037, -11.953378473040049, -11.3847540532906, -10.82873177190609, -10.284765129033977, -9.752342420661215, -9.230983845977008, -8.72023890912491, -8.21968408028814, -7.72892068555418, -7.24757299912546, -6.77528651527263, -6.311726379846237, -5.85657596408734, -5.409535565573106, -4.970321222819858, -4.538663631913077, -4.114307154731066, -3.6970089097106467, -3.2865379369235415, -2.8826744304471257, -2.4852090314602053, -2.0939421765282313, -1.7086834958894437, -1.3292512572769999, -0.9554718511489688, -0.5871793137152751, -0.22421488446800392, 0.13357340473564433, 0.48633111170405763, 0.8341977394507037, 1.177307067424456, 1.515787460394455, 1.8497621567729383, 2.1793495379986823, 2.5046633804614125, 2.82581309129636, 3.1429039292703846, 3.456037211896857, 3.765310509781094, 4.070817829123122, 4.372649783249865, 4.670893753951736, 4.965634043346663, 5.256952016909033, 5.544926238314951, 5.829632596595089, 6.111144426223088, 6.389532620476846, 6.664865738593644, 6.9372101071266385, 7.206629915863896, 7.473187308621153, 7.73694246928, 7.997953703343554, 8.256277515327586, 8.511968682150146, 8.765080322860738, 9.015663964869667, 9.263769606912087, 
9.509445778916692])
data_26 = np.reshape(data_26, (1, data_26.shape[0]))
data_27 = np.array([-73.51981123390492, -67.58894004643618, -62.79546377312594, -58.77255028609312, -55.306430325842285, -52.26154970168583, -49.546527286135536, -47.096828993896274, -44.86518294737262, -42.81592373654297, -40.92147861409316, -39.160090368799736, -37.51428875396184, -35.96983331514433, -34.51496325552323, -33.13985317911433, -31.836210421402942, -30.59697194377966, -29.41607263757432, -28.28826575397067, -27.208981991673422, -26.174217666336844, -25.180445043912826, -24.224539767369446, -23.30372160935736, -22.415505718377048, -21.55766220437363, -20.728182408322056, -19.925250571745746, -19.14721990075246, -18.392592231381087, -17.660000664710132, -16.94819466635816, -16.256027221792305, -15.58244371664937, -14.926472271098968, -14.287215305820496, -13.663842156099985, -13.05558258153167, -12.461721043922308, -11.881591646969804, -11.314573647825226, -10.760087464796625, -10.217591116757923, -9.686577039384673, -9.166569231399862, -8.657120690687657, -8.157811105423715, -7.6682447706192445, -7.188048704071618, -6.716870939185014, -6.254378975226421, -5.800258367835567, -5.354211444820795, -4.9159561341688915, -4.485224892658339, -4.061763724855285, -3.6453312836225757, -3.2356980439555367, -2.8326455432915743, -2.435965681805107, -2.045460077198977, -1.6609394689299681, -1.2822231674146611, -0.9091385441754863, -0.541520559356804, -0.17921132335150877, 0.177940310365061, 0.5300791238264208, 0.8773438869634704, 1.2198676859343964, 1.557778229351966, 1.8911981341442035, 2.2202451926859714, 2.545032622633115, 2.8656693007813385, 3.1822599821947146, 3.49490550564412, 3.8037029864313823, 4.108745997484354, 4.410124739573787, 4.707926201405127, 5.00223431037435, 5.293130074510425, 5.580691716353152, 5.864994799197597, 6.146112346304187, 6.424114953444573, 6.699070895350488, 6.971046226340857, 7.240104875595417, 7.506308737324954, 7.769717756230097, 8.030390008565366, 8.288381778953365, 8.543747633319477, 8.796540488228667, 9.046811676615397, 
9.294611010411476, 9.539986840018745])
data_27 = np.reshape(data_27, (1, data_27.shape[0]))
data_28 = np.array([-72.69713167596393, -66.9372286362957, -62.25588429757933, -58.31217996426342, -54.90499010409803, -51.90566460569442, -49.226911771572176, -46.80677398773253, -44.59968334182487, -42.571146394040085, -40.694422959494965, -38.94836355454164, -37.31595192189148, -35.78329358154169, -34.33889601650266, -32.97314506679706, -31.677916655984244, -30.44628393169545, -29.27229299711588, -28.150788814958986, -27.077278389550365, -26.04782204085272, -25.058946120784785, -24.10757228934766, -23.190959718001604, -22.30665748432924, -21.452465075012103, -20.626399394121275, -19.826667031968803, -19.0516408190757, -18.299839894341574, -17.569912673502124, -16.860622226323265, -16.170833664229793, -15.499503216008943, -14.845668727530763, -14.208441367958994, -13.586998363724325, -12.980576610582407, -12.388467039790742, -11.81000963380661, -11.244589003851045, -10.691630454813943, -10.150596474559634, -9.620983593923064, -9.102319571211877, -8.594160861924848, -8.096090339737621, -7.607715239223983, -7.128665295059333, -6.658591055613759, -6.197162351582722, -5.744066902927773, -5.299009049401345, -4.8617085917503, -4.43189973210564, -4.009330103694082, -3.5937598808867137, -3.184960961759366, -2.7827162161767367, -2.3868187932296907, -1.9970714824593134, -1.6132861239386007, -1.2352830628049976, -0.8628906442684673, -0.4959447455609305, -0.13428834161774914, 0.2222288983799772, 0.5737509861938779, 0.9204159637995737, 1.2623562288501582, 1.5996988382612498, 1.9325657916536574, 2.261074296236772, 2.585337014585185, 2.9054622965870713, 3.221554396799089, 3.5337136782617784, 3.8420368038106987, 4.146616915764681, 4.4475438048145515, 4.744904068964963, 5.038781263079005, 5.329256039851981, 5.616406282659175, 5.90030723092305, 6.1810315984191035, 6.458649685111632, 6.73322948278144, 7.004836775013249, 7.273535231768653, 7.53938649899603, 7.802450283508166, 8.06278443345986, 8.320445014745927, 8.57548638340308, 8.82796125451396, 9.077920767515655, 
9.325414548475985, 9.570490769118123])
data_28 = np.reshape(data_28, (1, data_28.shape[0]))
data_29 = np.array([-71.90057338915594, -66.30201803475484, -61.727665238087354, -57.860105056088095, -54.50987203906052, -51.55475708702588, -48.91131668465519, -46.52003396848468, -44.33696388794061, -42.32873408705916, -40.4694037059372, -38.73840853353113, -37.119170688899395, -35.59813053743837, -34.164055736859, -32.807537346110266, -31.520615334475163, -30.29649555486, -29.12933261833011, -28.01406107279822, -26.946262537805865, -25.922059980059935, -24.938032736888374, -23.991147585792863, -23.07870235579858, -22.19827943849652, -21.347707183389407, -20.52502762550952, -19.728469338729894, -18.956424467262334, -18.207429187309934, -17.480147001388417, -16.773354386718903, -16.085928410287888, -15.416835995930949, -14.765124585823596, -14.129913984311921, -13.510389209259037, -12.905794204714073, -12.315426293591814, -11.738631268180844, -11.174799032302447, -10.623359722632163, -10.083780247071754, -9.555561187641517, -9.038234022708414, -8.531358629899984, -8.034521036219461, -7.547331386740544, -7.06942210674306, -6.600446235630054, -6.140075913766622, -5.688001005635787, -5.24392784485531, -4.807578088348108, -4.378687668475528, -3.957005833227426, -3.5422942657222247, -3.134326275360371, -2.732886053628665, -2.337767988561728, -1.9487760323294838, -1.565723117096637, -1.1884306148383803, -0.8167278371416486, -0.4504515715454046, -0.08944565122552109, 0.2664394447894944, 0.6173469634454004, 0.96341422384532, 1.3047729398751795, 1.6415495211846416, 1.9738653542225042, 2.301837064902656, 2.625576764343779, 2.945192278914242, 3.260787365847507, 3.5724619154538897, 3.880312140900118, 4.184430756523138, 4.48490714544788, 4.7818275172720135, 5.075275056558951, 5.3653300627350635, 5.652070081978352, 5.935570031663256, 6.215902317856794, 6.493136946344602, 6.7673416275328675, 7.038581875724758, 7.306921103093621, 7.572420708631871, 7.835140162523647, 8.095137086035164, 8.352467327444213, 8.607185033987372, 8.859342720264069, 9.108991333223189, 9.35618031394396, 
9.600957656402079])
data_29 = np.reshape(data_29, (1, data_29.shape[0]))
data_30 = np.array([-71.12852851768808, -65.68249325972127, -61.21033807316015, -57.416031895353434, -54.120880082799886, -51.20868982663006, -48.59964213499398, -46.23653401879284, -44.07696696442767, -42.08864155062336, -40.24638465000139, -38.53019589794045, -36.923920842820884, -35.41432401139285, -33.99042543423185, -32.64301558546023, -31.36429408985034, -30.147596135069406, -28.987182217724428, -27.878074406030624, -26.815927290942035, -25.79692516406306, -24.817699275559267, -23.87526064260291, -22.96694502804889, -22.09036753624861, -21.243384876754977, -20.424063792699105, -19.63065448345461, -18.86156810249345, -18.115357602888302, -17.390701350160693, -16.68638903588076, -16.00130951525258, -15.334440261473814, -14.684838185790458, -14.051631616294502, -13.434013264138454, -12.831234035140652, -12.242597567290543, -11.667455394637525, -11.105202653153862, -10.555274257254656, -10.017141486519627, -9.490308930837463, -8.974311749676891, -8.468713207604681, -7.973102453288945, -7.487092513639189, -7.010318478479877, -6.542435854653029, -6.0831190707009375, -5.632060116027622, -5.188967300214292, -4.753564120022945, -4.3255882230585065, -3.9047904582997237, -3.4909340050123343, -3.083793572282679, -2.6831546625271794, -2.288812892845667, -1.900573368904116, -1.5182501065361869, -1.1416654967399449, -0.770649810241299, -0.40504073816135466, -0.04468296567575255, 0.3105722241469251, 0.6608673188424039, 1.006338919680034, 1.3471180614804232, 1.6833305110167494, 2.0150970456621433, 2.342533713883719, 2.665752078924568, 2.9848594470096064, 3.2999590812149484, 3.6111504020660843, 3.9185291758522314, 4.22218769155762, 4.522214927185723, 4.818696706264547, 5.111715845239804, 5.401352292320732, 5.687683258425855, 5.9707833407324955, 6.250724639333823, 6.527576867461938, 6.801407455700211, 7.072281650559251, 7.340262607788385, 7.605411480770529, 7.867787504275347, 8.127448073881258, 8.384448821385906, 8.63884368625992, 8.890684983665595, 9.140023469021038, 
9.386908399347064, 9.631387591732436])
data_30 = np.reshape(data_30, (1, data_30.shape[0]))
data_31 = np.array([-71.12852851768808, -65.68249325972127, -61.21033807316015, -57.416031895353434, -54.120880082799886, -51.20868982663006, -48.59964213499398, -46.23653401879284, -44.07696696442767, -42.08864155062336, -40.24638465000139, -38.53019589794045, -36.923920842820884, -35.41432401139285, -33.99042543423185, -32.64301558546023, -31.36429408985034, -30.147596135069406, -28.987182217724428, -27.878074406030624, -26.815927290942035, -25.79692516406306, -24.817699275559267, -23.87526064260291, -22.96694502804889, -22.09036753624861, -21.243384876754977, -20.424063792699105, -19.63065448345461, -18.86156810249345, -18.115357602888302, -17.390701350160693, -16.68638903588076, -16.00130951525258, -15.334440261473814, -14.684838185790458, -14.051631616294502, -13.434013264138454, -12.831234035140652, -12.242597567290543, -11.667455394637525, -11.105202653153862, -10.555274257254656, -10.017141486519627, -9.490308930837463, -8.974311749676891, -8.468713207604681, -7.973102453288945, -7.487092513639189, -7.010318478479877, -6.542435854653029, -6.0831190707009375, -5.632060116027622, -5.188967300214292, -4.753564120022945, -4.3255882230585065, -3.9047904582997237, -3.4909340050123343, -3.083793572282679, -2.6831546625271794, -2.288812892845667, -1.900573368904116, -1.5182501065361869, -1.1416654967399449, -0.770649810241299, -0.40504073816135466, -0.04468296567575255, 0.3105722241469251, 0.6608673188424039, 1.006338919680034, 1.3471180614804232, 1.6833305110167494, 2.0150970456621433, 2.342533713883719, 2.665752078924568, 2.9848594470096064, 3.2999590812149484, 3.6111504020660843, 3.9185291758522314, 4.22218769155762, 4.522214927185723, 4.818696706264547, 5.111715845239804, 5.401352292320732, 5.687683258425855, 5.9707833407324955, 6.250724639333823, 6.527576867461938, 6.801407455700211, 7.072281650559251, 7.340262607788385, 7.605411480770529, 7.867787504275347, 8.127448073881258, 8.384448821385906, 8.63884368625992, 8.890684983665595, 9.140023469021038, 
9.386908399347064, 9.631387591732436])
data_31 = np.reshape(data_31, (1, data_31.shape[0]))
data_32 = np.array([-69.65225105190164, -64.48753034409458, -60.206625086705806, -56.55079171897674, -53.360534665320735, -50.53055452928068, -47.98767329394963, -45.67896710414232, -43.56491901647111, -41.615241117541146, -39.80620706852079, -38.118883758466026, -36.53792125091425, -35.05070201485628, -33.64672857196108, -32.31717362126786, -31.054543506245167, -29.852422394090663, -28.705274997978044, -27.608292466919394, -26.55727058156438, -25.548512455578223, -24.578750054234952, -23.645080325385234, -22.74491279497351, -21.87592624280514, -21.03603263378598, -20.223346893443736, -19.436161427351976, -18.672924517580338, -17.93222190951847, -17.21276103978848, -16.51335746319109, -15.832923120814288, -15.17045615747332, -14.525032049377408, -13.895795844512882, -13.281955352674677, -12.682775148750075, -12.09757127565642, -11.525706550954677, -10.966586396711923, -10.419655123813195, -9.884392612840326, -9.360311341588506, -8.846953716829612, -8.343889673533692, -7.8507145101340585, -7.367046932485959, -6.892527282793711, -6.426815933006803, -5.969591824531372, -5.520551138716669, -5.079406084205464, -4.645883789086332, -4.219725287112171, -3.8006845886216087, -3.388527827759056, -2.9830324786402085, -2.5839866338352397, -2.191188339340408, -1.8044449808287717, -1.4235727164677978, -1.0483959521698702, -0.6787468554830687, -0.3144649047878734, 0.04460352925047536, 0.3986055726947619, 0.7476822102908283, 1.091968622579683, 1.4315945001951036, 1.7666843371699925, 2.0973577049132075, 2.4237295083589827, 2.745910225688791, 3.06400613282054, 3.3781195138826963, 3.6883488586474007, 3.9947890479236894, 4.297531527744459, 4.596664473195312, 4.892272942554765, 5.184439022516893, 5.473241964992087, 5.758758316146603, 6.04106203821465, 6.320224624478839, 6.596315207979029, 6.869400664241167, 7.139545708535306, 7.406812987911201, 7.671263168429734, 7.932955017779145, 8.191945483682684, 8.448289768282775, 8.702041398755965, 8.953252294389838, 9.201972830362942, 
9.448251898348738, 9.69213696425811])
data_32 = np.reshape(data_32, (1, data_32.shape[0]))
data_33 = np.array([-68.94545843715301, -63.910735318666404, -59.719435437194456, -56.12910988368529, -52.988831887629374, -50.198238696227584, -47.68719690502624, -45.40476242292407, -43.3127613301936, -41.38184893596217, -39.58898078868387, -37.91572897566998, -36.34712581743123, -34.87084835153414, -33.476629760456895, -32.15582593597418, -30.90109055942565, -29.706127643742555, -28.56550038245245, -27.47448159722701, -26.428935372861428, -25.425222386373253, -24.46012345689816, -23.530777263921518, -22.634629195016956, -21.769389018638716, -20.93299561587702, -20.12358740379266, -19.339477382484127, -18.579131965000617, -17.841152921710375, -17.124261905376745, -16.427287126310112, -15.749151828975075, -15.088864285261225, -14.445509071244953, -13.818239434701226, -13.206270593630295, -12.608873832889657, -12.025371287424246, -11.455131318499712, -10.897564403808156, -10.352119474437547, -9.818280641520703, -9.295564264117756, -8.783516316173266, -8.281710016941584, -7.7897436937496565, -7.307238850303566, -6.8338384172210995, -6.369205164653484, -5.913020259202737, -5.4649819496737955, -5.024804368219209, -4.5922164347294965, -4.166960854214552, -3.748793197636934, -3.3374810581736525, -2.9328032754857407, -2.534549221592515, -2.142518142514774, -1.7565185506000627, -1.3763676628516237, -1.0018908812173712, -0.6329213110621855, -0.2692993145610021, 0.08912790401522501, 0.4425066838006714, 0.7909772660655643, 1.1346741283715684, 1.4737262961500273, 1.808257633502012, 2.138387114866274, 2.464229079050824, 2.7858934669716087, 3.103486044342034, 3.4171086104560837, 3.726859194062398, 4.032832237313563, 4.335118768624652, 4.633806565238834, 4.928980306228366, 5.220721716614433, 5.50910970318433, 5.794220482609578, 6.07612770233808, 6.354902554807213, 6.630613885356588, 6.903328294284393, 7.173110233397077, 7.440022097455778, 7.704124310780899, 7.965475409369538, 8.224132118753605, 8.480149427923747, 8.733580659465817, 8.984477536249694, 9.23289024474136, 
9.478867495314548, 9.722456579534674])
data_33 = np.reshape(data_33, (1, data_33.shape[0]))
data_34 = np.array([-68.25803246075066, -63.346903082754366, -59.24152615719647, -55.71439831618992, -52.6225556220398, -49.8702669855909, -47.39027653321572, -45.133522174766064, -43.063112717246696, -41.150607864956356, -39.37361913503261, -37.71420597578691, -36.157770363155066, -34.692274796309036, -33.307676404160425, -31.99550923333333, -30.748570463006413, -29.56068098370402, -28.42650014550311, -27.341380602869016, -26.301253272382763, -25.302535205019723, -24.342055104440274, -23.416992585474922, -22.52482823816969, -21.66330227033244, -20.830380018452153, -20.024223002252814, -19.243164486487824, -18.48568873371017, -17.750413298029095, -17.0360738405902, -16.34151104756275, -15.665659310664672, -15.007536892832611, -14.36623735097204, -13.740922028081036, -13.130813458275345, -12.535189554973037, -11.953378473040049, -11.3847540532906, -10.82873177190609, -10.284765129033977, -9.752342420661215, -9.230983845977008, -8.72023890912491, -8.21968408028814, -7.72892068555418, -7.24757299912546, -6.77528651527263, -6.311726379846237, -5.85657596408734, -5.409535565573106, -4.970321222819858, -4.538663631913077, -4.114307154731066, -3.6970089097106467, -3.2865379369235415, -2.8826744304471257, -2.4852090314602053, -2.0939421765282313, -1.7086834958894437, -1.3292512572769999, -0.9554718511489688, -0.5871793137152751, -0.22421488446800392, 0.13357340473564433, 0.48633111170405763, 0.8341977394507037, 1.177307067424456, 1.515787460394455, 1.8497621567729383, 2.1793495379986823, 2.5046633804614125, 2.82581309129636, 3.1429039292703846, 3.456037211896857, 3.765310509781094, 4.070817829123122, 4.372649783249865, 4.670893753951736, 4.965634043346663, 5.256952016909033, 5.544926238314951, 5.829632596595089, 6.111144426223088, 6.389532620476846, 6.664865738593644, 6.9372101071266385, 7.206629915863896, 7.473187308621153, 7.73694246928, 7.997953703343554, 8.256277515327586, 8.511968682150146, 8.765080322860738, 9.015663964869667, 9.263769606912087, 9.509445778916692, 
9.752739599025343])
data_34 = np.reshape(data_34, (1, data_34.shape[0]))
data_35 = np.array([-67.58894004643618, -62.79546377312594, -58.77255028609312, -55.306430325842285, -52.26154970168583, -49.546527286135536, -47.096828993896274, -44.86518294737262, -42.81592373654297, -40.92147861409316, -39.160090368799736, -37.51428875396184, -35.96983331514433, -34.51496325552323, -33.13985317911433, -31.836210421402942, -30.59697194377966, -29.41607263757432, -28.28826575397067, -27.208981991673422, -26.174217666336844, -25.180445043912826, -24.224539767369446, -23.30372160935736, -22.415505718377048, -21.55766220437363, -20.728182408322056, -19.925250571745746, -19.14721990075246, -18.392592231381087, -17.660000664710132, -16.94819466635816, -16.256027221792305, -15.58244371664937, -14.926472271098968, -14.287215305820496, -13.663842156099985, -13.05558258153167, -12.461721043922308, -11.881591646969804, -11.314573647825226, -10.760087464796625, -10.217591116757923, -9.686577039384673, -9.166569231399862, -8.657120690687657, -8.157811105423715, -7.6682447706192445, -7.188048704071618, -6.716870939185014, -6.254378975226421, -5.800258367835567, -5.354211444820795, -4.9159561341688915, -4.485224892658339, -4.061763724855285, -3.6453312836225757, -3.2356980439555367, -2.8326455432915743, -2.435965681805107, -2.045460077198977, -1.6609394689299681, -1.2822231674146611, -0.9091385441754863, -0.541520559356804, -0.17921132335150877, 0.177940310365061, 0.5300791238264208, 0.8773438869634704, 1.2198676859343964, 1.557778229351966, 1.8911981341442035, 2.2202451926859714, 2.545032622633115, 2.8656693007813385, 3.1822599821947146, 3.49490550564412, 3.8037029864313823, 4.108745997484354, 4.410124739573787, 4.707926201405127, 5.00223431037435, 5.293130074510425, 5.580691716353152, 5.864994799197597, 6.146112346304187, 6.424114953444573, 6.699070895350488, 6.971046226340857, 7.240104875595417, 7.506308737324954, 7.769717756230097, 8.030390008565366, 8.288381778953365, 8.543747633319477, 8.796540488228667, 9.046811676615397, 9.294611010411476, 
9.539986840018745, 9.782986110941723])
data_35 = np.reshape(data_35, (1, data_35.shape[0]))
data_36 = np.array([-66.9372286362957, -62.25588429757933, -58.31217996426342, -54.90499010409803, -51.90566460569442, -49.226911771572176, -46.80677398773253, -44.59968334182487, -42.571146394040085, -40.694422959494965, -38.94836355454164, -37.31595192189148, -35.78329358154169, -34.33889601650266, -32.97314506679706, -31.677916655984244, -30.44628393169545, -29.27229299711588, -28.150788814958986, -27.077278389550365, -26.04782204085272, -25.058946120784785, -24.10757228934766, -23.190959718001604, -22.30665748432924, -21.452465075012103, -20.626399394121275, -19.826667031968803, -19.0516408190757, -18.299839894341574, -17.569912673502124, -16.860622226323265, -16.170833664229793, -15.499503216008943, -14.845668727530763, -14.208441367958994, -13.586998363724325, -12.980576610582407, -12.388467039790742, -11.81000963380661, -11.244589003851045, -10.691630454813943, -10.150596474559634, -9.620983593923064, -9.102319571211877, -8.594160861924848, -8.096090339737621, -7.607715239223983, -7.128665295059333, -6.658591055613759, -6.197162351582722, -5.744066902927773, -5.299009049401345, -4.8617085917503, -4.43189973210564, -4.009330103694082, -3.5937598808867137, -3.184960961759366, -2.7827162161767367, -2.3868187932296907, -1.9970714824593134, -1.6132861239386007, -1.2352830628049976, -0.8628906442684673, -0.4959447455609305, -0.13428834161774914, 0.2222288983799772, 0.5737509861938779, 0.9204159637995737, 1.2623562288501582, 1.5996988382612498, 1.9325657916536574, 2.261074296236772, 2.585337014585185, 2.9054622965870713, 3.221554396799089, 3.5337136782617784, 3.8420368038106987, 4.146616915764681, 4.4475438048145515, 4.744904068964963, 5.038781263079005, 5.329256039851981, 5.616406282659175, 5.90030723092305, 6.1810315984191035, 6.458649685111632, 6.73322948278144, 7.004836775013249, 7.273535231768653, 7.53938649899603, 7.802450283508166, 8.06278443345986, 8.320445014745927, 8.57548638340308, 8.82796125451396, 9.077920767515655, 9.325414548475985, 
9.570490769118123, 9.81319620320427])
data_36 = np.reshape(data_36, (1, data_36.shape[0]))
data_37 = np.array([-66.30201803475484, -61.727665238087354, -57.860105056088095, -54.50987203906052, -51.55475708702588, -48.91131668465519, -46.52003396848468, -44.33696388794061, -42.32873408705916, -40.4694037059372, -38.73840853353113, -37.119170688899395, -35.59813053743837, -34.164055736859, -32.807537346110266, -31.520615334475163, -30.29649555486, -29.12933261833011, -28.01406107279822, -26.946262537805865, -25.922059980059935, -24.938032736888374, -23.991147585792863, -23.07870235579858, -22.19827943849652, -21.347707183389407, -20.52502762550952, -19.728469338729894, -18.956424467262334, -18.207429187309934, -17.480147001388417, -16.773354386718903, -16.085928410287888, -15.416835995930949, -14.765124585823596, -14.129913984311921, -13.510389209259037, -12.905794204714073, -12.315426293591814, -11.738631268180844, -11.174799032302447, -10.623359722632163, -10.083780247071754, -9.555561187641517, -9.038234022708414, -8.531358629899984, -8.034521036219461, -7.547331386740544, -7.06942210674306, -6.600446235630054, -6.140075913766622, -5.688001005635787, -5.24392784485531, -4.807578088348108, -4.378687668475528, -3.957005833227426, -3.5422942657222247, -3.134326275360371, -2.732886053628665, -2.337767988561728, -1.9487760323294838, -1.565723117096637, -1.1884306148383803, -0.8167278371416486, -0.4504515715454046, -0.08944565122552109, 0.2664394447894944, 0.6173469634454004, 0.96341422384532, 1.3047729398751795, 1.6415495211846416, 1.9738653542225042, 2.301837064902656, 2.625576764343779, 2.945192278914242, 3.260787365847507, 3.5724619154538897, 3.880312140900118, 4.184430756523138, 4.48490714544788, 4.7818275172720135, 5.075275056558951, 5.3653300627350635, 5.652070081978352, 5.935570031663256, 6.215902317856794, 6.493136946344602, 6.7673416275328675, 7.038581875724758, 7.306921103093621, 7.572420708631871, 7.835140162523647, 8.095137086035164, 8.352467327444213, 8.607185033987372, 8.859342720264069, 9.108991333223189, 9.35618031394396, 9.600957656402079, 
9.843369963411915])
data_37 = np.reshape(data_37, (1, data_37.shape[0]))
data_38 = np.array([-65.68249325972127, -61.21033807316015, -57.416031895353434, -54.120880082799886, -51.20868982663006, -48.59964213499398, -46.23653401879284, -44.07696696442767, -42.08864155062336, -40.24638465000139, -38.53019589794045, -36.923920842820884, -35.41432401139285, -33.99042543423185, -32.64301558546023, -31.36429408985034, -30.147596135069406, -28.987182217724428, -27.878074406030624, -26.815927290942035, -25.79692516406306, -24.817699275559267, -23.87526064260291, -22.96694502804889, -22.09036753624861, -21.243384876754977, -20.424063792699105, -19.63065448345461, -18.86156810249345, -18.115357602888302, -17.390701350160693, -16.68638903588076, -16.00130951525258, -15.334440261473814, -14.684838185790458, -14.051631616294502, -13.434013264138454, -12.831234035140652, -12.242597567290543, -11.667455394637525, -11.105202653153862, -10.555274257254656, -10.017141486519627, -9.490308930837463, -8.974311749676891, -8.468713207604681, -7.973102453288945, -7.487092513639189, -7.010318478479877, -6.542435854653029, -6.0831190707009375, -5.632060116027622, -5.188967300214292, -4.753564120022945, -4.3255882230585065, -3.9047904582997237, -3.4909340050123343, -3.083793572282679, -2.6831546625271794, -2.288812892845667, -1.900573368904116, -1.5182501065361869, -1.1416654967399449, -0.770649810241299, -0.40504073816135466, -0.04468296567575255, 0.3105722241469251, 0.6608673188424039, 1.006338919680034, 1.3471180614804232, 1.6833305110167494, 2.0150970456621433, 2.342533713883719, 2.665752078924568, 2.9848594470096064, 3.2999590812149484, 3.6111504020660843, 3.9185291758522314, 4.22218769155762, 4.522214927185723, 4.818696706264547, 5.111715845239804, 5.401352292320732, 5.687683258425855, 5.9707833407324955, 6.250724639333823, 6.527576867461938, 6.801407455700211, 7.072281650559251, 7.340262607788385, 7.605411480770529, 7.867787504275347, 8.127448073881258, 8.384448821385906, 8.63884368625992, 8.890684983665595, 9.140023469021038, 9.386908399347064, 
9.631387591732436, 9.873507478863733])
data_38 = np.reshape(data_38, (1, data_38.shape[0]))
data_39 = np.array([-65.07789825515522, -60.703462680351485, -56.97968213882281, -53.73782716759499, -50.86733111062007, -48.29179190958785, -45.95620173262307, -43.81963672302664, -41.85082480706658, -40.02533054515407, -38.32369696604857, -36.73017873265576, -35.231854271849095, -33.817988476481005, -32.47956563543057, -31.208940784875278, -29.999575183036146, -28.845832668668677, -27.742820824603967, -26.686265614275907, -25.672411367100572, -24.69794020045315, -23.759906514819146, -22.855683299829916, -21.982917784890674, -21.13949454779567, -20.32350462564499, -19.53321949266058, -18.767069013017338, -18.0236226612302, -17.301573446137937, -16.59972408408499, -15.91697505401298, -15.252314235297522, -14.604807883190347, -13.973592739706149, -13.35786911284564, -12.756894784920192, -12.1699796335365, -11.596480867554462, -11.03579879534804, -10.487373055866925, -9.950679252708067, -9.425225940794506, -8.910551922229255, -8.40622381391532, -7.911833854848678, -7.426997925395408, -6.95135375428648, -6.484559292436984, -6.026291235320773, -5.576243677888709, -5.13412688800598, -4.699666186091338, -4.27260092016126, -3.8526835265997055, -3.439678668300962, -3.033362442549777, -2.6335216520887994, -2.2399531333015266, -1.8524631363377397, -1.4708667523298864, -1.094987383563489, -0.7246562527309871, -0.3597119478811818, -8e-323, 0.3546275095585562, 0.7043123142792782, 1.0491903025904856, 1.3893918349123076, 1.7250420394849975, 2.0562610886868424, 2.38316445733086, 2.7058631643594713, 3.024463999188179, 3.3390697338662076, 3.649779322081853, 3.956688086017451, 4.259887891883186, 4.559467315017958, 4.855511795179433, 5.1481037828642116, 5.437322877114627, 5.7232459555121835, 6.005947296867183, 6.285498697007687, 6.561969578256842, 6.835427092911477, 7.105936221129542, 7.373559863634523, 7.638358929487921, 7.900392419311649, 8.159717504176724, 8.416389600488062, 8.670462441039813, 8.921988142560174, 9.171017269865834, 9.41759889686404, 9.661780664609582, 
9.90360883652427])
data_39 = np.reshape(data_39, (1, data_39.shape[0]))
data_40 = np.array([-64.48753034409458, -60.206625086705806, -56.55079171897674, -53.360534665320735, -50.53055452928068, -47.98767329394963, -45.67896710414232, -43.56491901647111, -41.615241117541146, -39.80620706852079, -38.118883758466026, -36.53792125091425, -35.05070201485628, -33.64672857196108, -32.31717362126786, -31.054543506245167, -29.852422394090663, -28.705274997978044, -27.608292466919394, -26.55727058156438, -25.548512455578223, -24.578750054234952, -23.645080325385234, -22.74491279497351, -21.87592624280514, -21.03603263378598, -20.223346893443736, -19.436161427351976, -18.672924517580338, -17.93222190951847, -17.21276103978848, -16.51335746319109, -15.832923120814288, -15.17045615747332, -14.525032049377408, -13.895795844512882, -13.281955352674677, -12.682775148750075, -12.09757127565642, -11.525706550954677, -10.966586396711923, -10.419655123813195, -9.884392612840326, -9.360311341588506, -8.846953716829612, -8.343889673533692, -7.8507145101340585, -7.367046932485959, -6.892527282793711, -6.426815933006803, -5.969591824531372, -5.520551138716669, -5.079406084205464, -4.645883789086332, -4.219725287112171, -3.8006845886216087, -3.388527827759056, -2.9830324786402085, -2.5839866338352397, -2.191188339340408, -1.8044449808287717, -1.4235727164677978, -1.0483959521698702, -0.6787468554830687, -0.3144649047878734, 0.04460352925047536, 0.3986055726947619, 0.7476822102908283, 1.091968622579683, 1.4315945001951036, 1.7666843371699925, 2.0973577049132075, 2.4237295083589827, 2.745910225688791, 3.06400613282054, 3.3781195138826963, 3.6883488586474007, 3.9947890479236894, 4.297531527744459, 4.596664473195312, 4.892272942554765, 5.184439022516893, 5.473241964992087, 5.758758316146603, 6.04106203821465, 6.320224624478839, 6.596315207979029, 6.869400664241167, 7.139545708535306, 7.406812987911201, 7.671263168429734, 7.932955017779145, 8.191945483682684, 8.448289768282775, 8.702041398755965, 8.953252294389838, 9.201972830362942, 9.448251898348738, 
9.69213696425811, 9.933674123043563])
data_40 = np.reshape(data_40, (1, data_40.shape[0]))
data_41 = np.array([-63.910735318666404, -59.719435437194456, -56.12910988368529, -52.988831887629374, -50.198238696227584, -47.68719690502624, -45.40476242292407, -43.3127613301936, -41.38184893596217, -39.58898078868387, -37.91572897566998, -36.34712581743123, -34.87084835153414, -33.476629760456895, -32.15582593597418, -30.90109055942565, -29.706127643742555, -28.56550038245245, -27.47448159722701, -26.428935372861428, -25.425222386373253, -24.46012345689816, -23.530777263921518, -22.634629195016956, -21.769389018638716, -20.93299561587702, -20.12358740379266, -19.339477382484127, -18.579131965000617, -17.841152921710375, -17.124261905376745, -16.427287126310112, -15.749151828975075, -15.088864285261225, -14.445509071244953, -13.818239434701226, -13.206270593630295, -12.608873832889657, -12.025371287424246, -11.455131318499712, -10.897564403808156, -10.352119474437547, -9.818280641520703, -9.295564264117756, -8.783516316173266, -8.281710016941584, -7.7897436937496565, -7.307238850303566, -6.8338384172210995, -6.369205164653484, -5.913020259202737, -5.4649819496737955, -5.024804368219209, -4.5922164347294965, -4.166960854214552, -3.748793197636934, -3.3374810581736525, -2.9328032754857407, -2.534549221592515, -2.142518142514774, -1.7565185506000627, -1.3763676628516237, -1.0018908812173712, -0.6329213110621855, -0.2692993145610021, 0.08912790401522501, 0.4425066838006714, 0.7909772660655643, 1.1346741283715684, 1.4737262961500273, 1.808257633502012, 2.138387114866274, 2.464229079050824, 2.7858934669716087, 3.103486044342034, 3.4171086104560837, 3.726859194062398, 4.032832237313563, 4.335118768624652, 4.633806565238834, 4.928980306228366, 5.220721716614433, 5.50910970318433, 5.794220482609578, 6.07612770233808, 6.354902554807213, 6.630613885356588, 6.903328294284393, 7.173110233397077, 7.440022097455778, 7.704124310780899, 7.965475409369538, 8.224132118753605, 8.480149427923747, 8.733580659465817, 8.984477536249694, 9.23289024474136, 9.478867495314548, 
9.722456579534674, 9.96370342475891])
data_41 = np.reshape(data_41, (1, data_41.shape[0]))
fullArray = np.concatenate((data_1, data_2, data_3, data_4, data_5, data_6, data_7, data_8, data_9, data_10, data_11, data_12, data_13, data_14, data_15, data_16, data_17, data_18, data_19, data_20, data_21, data_22, data_23, data_24, data_25, data_26, data_27, data_28, data_29, data_30, data_31, data_32, data_33, data_34, data_35, data_36, data_37, data_38, data_39, data_40, data_41), axis=0)
#print(fullArray)
return fullArray
def hh_figtype_1(Vm_resting_list, corresponding_concentration_list):
    """Plot resting membrane potential against log10-transformed ion concentration.

    Takes a list or array of resting membrane potentials from a stable
    neuronal membrane, along with a matching ion concentration list as the
    independent variable, and draws a line plot with a base-ten log
    transformed x axis on a transparent background.
    """
    # math.log10 is more accurate than math.log(x, 10), which computes
    # log(x)/log(10) and accumulates rounding error.
    log_concentrations = [math.log10(c) for c in corresponding_concentration_list]
    plt.close()
    fig, ax = plt.subplots()
    # Transparent figure and axes backgrounds.
    fig.patch.set_alpha(0.0)
    ax.patch.set_alpha(0.0)
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.plot(log_concentrations, Vm_resting_list, linewidth=0.8, color='deeppink')
    ax.set_ylabel("Membrane potential (mV)")
    ax.set_xlabel("$log_{10}([K^{+}]_{extracellular})$")
    plt.show()
def hh_figtype_2(Vm_resting_list, corresponding_concentration_list, Vm_0, workingDirectory):
    """Plot resting Vm against log10([K+]) and save the figure to disk.

    Takes a list or array of resting membrane potentials from a stable
    neuronal membrane, along with a matching ion concentration list as the
    independent variable, and creates a plot with a base-ten log
    transformed x axis.  The figure is saved under
    ``<workingDirectory>\\HodgkinHuxleyModel\\modelFigures`` (Windows-style
    path -- assumes this project runs on Windows, TODO confirm) with the
    initial membrane potential ``Vm_0`` in both the title and the filename.
    """
    log_concentrations = [math.log(concentration, 10) for concentration in corresponding_concentration_list]
    plt.close()
    fig, ax = plt.subplots()
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.plot(log_concentrations, Vm_resting_list, linewidth=0.8, color='deeppink')
    ax.set_ylabel("Membrane potential (mV)")
    ax.set_xlabel("$log_{10}([K^{+}]_{extracellular})$")
    ax.set_title("Initial membrane potential: {}".format(Vm_0))
    # BUG FIX: save BEFORE show.  With non-interactive backends plt.show()
    # tears the figure down, so calling savefig afterwards wrote a blank
    # image file.
    plt.savefig('{}\\HodgkinHuxleyModel\\modelFigures\\Resting_concentration_initialVm{}.png'.format(workingDirectory, int(Vm_0)), bbox_inches='tight')
    plt.show()
def hh_figtype_3():
    """Overlay recalled resting-Vm curves, one per sodium concentration.

    Recalls the precomputed question-1 data (rows = sodium concentrations,
    columns = potassium concentrations) and plots every row against the
    log10-transformed potassium axis, colour-coded by row index via a
    summer-colormap colorbar.
    """
    recalledData = recallQuestion1Data()
    # K+ axis: 1..199 mM in 2 mM steps, log10-transformed (matches the
    # column layout of the recalled data).
    logTransformedConcentrations = [math.log(x / 1.0, 10) for x in range(1, 200, 2)]
    plt.close()
    fig, ax = plt.subplots()
    fig.patch.set_alpha(0.0)
    ax.patch.set_alpha(0.0)
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    colormap = plt.cm.summer
    norm = mpl.colors.Normalize(vmin=0, vmax=200)
    n_traces = recalledData.shape[0]
    # Cap the colormap range at 0.75 so the lightest traces stay visible.
    colors = plt.cm.summer(np.linspace(0, 0.75, n_traces))
    for row in range(n_traces):
        ax.plot(logTransformedConcentrations, recalledData[row, :], linewidth=0.9, color=colors[row])
    ax.set_ylabel("Membrane potential (mV)")
    ax.set_xlabel("$log_{10}([K^{+}]_{extracellular})$")
    fig.colorbar(plt.cm.ScalarMappable(norm=norm, cmap=colormap), cax=None, orientation='vertical', label='$[Na^{+}]_{extracellular}$ (mM)', aspect=25)
    plt.show()
def hh_figtype_4():
    """3-D surface of resting membrane potential over log10([K+]) x [Na+].

    Recalls the precomputed question-1 data and renders it as a surface:
    x = log10 of extracellular K+ (1..199 mM step 2),
    y = extracellular Na+ (0..200 mM step 5),
    z = membrane potential (mV).
    """
    data = recallQuestion1Data()
    # K+ axis shaped (1, n_K) so it broadcasts against the (n_Na, 1) Na axis.
    log_K_concentrations = [math.log(x / 1.0, 10) for x in range(1, 200, 2)]
    plotReady_K_axis = np.reshape(np.array(log_K_concentrations), (1, len(log_K_concentrations)))
    print(plotReady_K_axis.shape)
    # NOTE: the original also built an (unused) empty list for
    # log-transformed Na concentrations; removed as dead code.
    Na_concentrations = [x / 1.0 for x in range(0, 205, 5)]
    plotReady_Na_axis = np.reshape(np.array(Na_concentrations), (len(Na_concentrations), 1))
    print(plotReady_Na_axis.shape)
    plt.close()
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')  # Axes3D object
    ax.set_xlabel('$log_{10}([K^{+}]_{extracellular})$')
    ax.set_ylabel('$[Na^{+}]_{extracellular}$(mM)')
    ax.set_zlabel('Membrane potential (mV)')
    surface = ax.plot_surface(plotReady_K_axis, plotReady_Na_axis, data, cmap=plt.cm.summer, linewidth=0, antialiased=False)
    ax.set_yticks([0, 50, 100, 150, 200])
    fig.colorbar(surface, label='Membrane potential (mV)', shrink=0.60, aspect=15)
    plt.show()
def hh_figtype_5(timeAxis, voltageTraces, hGateTraces, mGateTraces):
    """Plot voltage traces (top panel) and h/m gate traces (bottom panel).

    Each row of ``voltageTraces`` / ``hGateTraces`` / ``mGateTraces`` is one
    trace over ``timeAxis``; traces share a colour per row (dotted = h gate,
    dashed = m gate).
    """
    fig, axes = plt.subplots(nrows=2, ncols=1, sharex=True)
    fig.patch.set_alpha(0.0)
    for ax in axes:
        ax.patch.set_alpha(0.0)
        ax.spines['right'].set_visible(False)
        ax.spines['top'].set_visible(False)
    # NOTE: the original defined a colormap and a Normalize(vmin=1, vmax=13)
    # that were never used (no colorbar is drawn); removed as dead code.
    n_traces = voltageTraces.shape[0]
    colors = plt.cm.summer(np.linspace(0, 0.9, n_traces))
    for trace in range(n_traces):
        axes[0].plot(timeAxis, voltageTraces[trace, :], linewidth=0.8, color=colors[trace], label='Vm{}'.format(trace))
        axes[1].plot(timeAxis, hGateTraces[trace, :], linewidth=0.8, color=colors[trace], label='hGate{}'.format(trace), linestyle='dotted')
        axes[1].plot(timeAxis, mGateTraces[trace, :], linewidth=0.8, color=colors[trace], label='mGate{}'.format(trace), linestyle='dashed')
    axes[0].set_ylabel("Vm ($mV$)")
    axes[1].set_xlabel("Time (ms)")
    plt.show()
def hh_figtype_6():
    """Three stacked summary panels of HH-model behaviour vs extracellular [K+].

    Panel 0: stable resting Vm (only the first 7 concentrations are stable;
    a vertical line marks where oscillations begin).
    Panel 1: median h-gate and peak m-gate active fractions.
    Panel 2: peak sodium current.
    """
    plt.rcParams.update({'font.size': 12})
    # Extracellular K+ from 1 to 19 mM in 2 mM steps (10 values).
    extracellular_potassium_concentrations = [x / 1.0 for x in range(1, 21, 2)]
    median_hGate_active_fractions = [0.869, 0.767, 0.655, 0.542, 0.422, 0.326, 0.231, 0.141, 0.092, 0.079]
    peak_mGate_active_fractions = [0.995, 0.994, 0.991, 0.991, 0.987, 0.984, 0.943, 0.836, 0.677, 0.485]
    peak_sodium_currents = [1230.9, 974.5, 844.2, 777.2, 655.0, 614.0, 419.6, 274.8, 166.4, 82.8]
    # Shorter than the concentration list on purpose: beyond the 7th
    # concentration the membrane oscillates and has no stable resting Vm.
    stable_Vm = [-70.77, -65.68, -61.81, -58.34, -55.24, -52.03, -48.70]
    plt.close()
    fig, axes = plt.subplots(nrows=3, ncols=1, sharex=True)
    fig.patch.set_alpha(0.0)
    for i, ax in enumerate(axes, start=0):
        ax.patch.set_alpha(0.0)
        ax.spines['right'].set_visible(False)
        ax.spines['top'].set_visible(False)
        if i == 0:
            ax.plot(extracellular_potassium_concentrations[:len(stable_Vm)], stable_Vm, color='deeppink', linewidth=0.8, label='Vm')
            ax.axvline(x=extracellular_potassium_concentrations[len(stable_Vm) - 1], color='k')
            ax.text(extracellular_potassium_concentrations[len(stable_Vm) - 1] + 0.1, -70, 'Oscillations', rotation=90, fontsize=22)
            ax.set_ylabel("Resting Vm ($mV$)")
        elif i == 1:
            ax.plot(extracellular_potassium_concentrations, median_hGate_active_fractions, color='gold', linewidth=0.8, label='$h$ (median)')
            ax.plot(extracellular_potassium_concentrations, peak_mGate_active_fractions, color='red', linewidth=0.8, label='$m$ (maximum)')
            ax.legend(title='Gate', edgecolor='w', framealpha=0.0, bbox_to_anchor=(1.0, 1), loc='upper left')
            ax.set_ylabel("Active fraction")
        else:
            ax.plot(extracellular_potassium_concentrations, peak_sodium_currents, color='orangered', linewidth=0.8, label='max $Na^{+}$ current')
            # BUG FIX: raw string -- '\m' in a normal string literal is an
            # invalid escape sequence (SyntaxWarning on modern Python).
            # The rendered label text is unchanged.
            ax.set_ylabel(r"$Na^{+}$ Current ($\mu A/cm^{2}$)")
    # After the loop `ax` is the bottom panel, which carries the x label.
    ax.set_xlabel("$[K^{+}]_{E}$ $(mM)$")
    plt.show()
def hh_figtype_7(embedVector_1, embedVector_2, embedVector_3):
    """3-D scatter of a delay-embedded trajectory on a black background.

    Points are coloured along the summer colormap by their index in the
    embedding vectors; axes, ticks, and panes are stripped for a clean look.
    """
    # NOTE: the original defined a colormap and Normalize(vmin=-100, vmax=60)
    # that were never used (no colorbar is drawn); removed as dead code.
    colors = plt.cm.summer(np.linspace(0, 0.9, len(embedVector_1)))
    plt.close()
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')  # Axes3D object
    # BUG FIX: raw strings -- '\m' in a normal string literal is an invalid
    # escape sequence (SyntaxWarning on modern Python); rendered text unchanged.
    ax.set_xlabel(r"0$\mu$")
    ax.set_ylabel(r"1$\mu$")
    ax.set_zlabel(r"2$\mu$")
    ax.grid(False)
    # NOTE(review): w_xaxis/w_yaxis/w_zaxis are deprecated in matplotlib
    # >= 3.4 (removed in 3.8); modern code uses ax.xaxis.pane etc.  Left
    # as-is to stay compatible with whatever matplotlib this project pins
    # -- confirm the installed version before modernising.
    ax.w_xaxis.pane.fill = False
    ax.w_yaxis.pane.fill = False
    ax.w_zaxis.pane.fill = False
    ax.scatter(embedVector_1, embedVector_2, embedVector_3, color=colors, marker='.')
    # (removed redundant `ax = plt.gca()` -- it re-fetched the same axes)
    fig.set_facecolor('black')
    ax.set_facecolor('black')
    ax.xaxis.set_ticklabels([])
    ax.yaxis.set_ticklabels([])
    ax.zaxis.set_ticklabels([])
    # Hide the tick marks on all three axes.
    for axis in (ax.xaxis, ax.yaxis, ax.zaxis):
        for line in axis.get_ticklines():
            line.set_visible(False)
    plt.show()
def main():
    """Script entry point: render summary figure type 6."""
    hh_figtype_6()


if __name__ == '__main__':
    main()
| 334.469965
| 2,036
| 0.795996
| 10,127
| 94,655
| 7.400711
| 0.116323
| 0.003522
| 0.007112
| 0.001868
| 0.934701
| 0.930844
| 0.925974
| 0.925334
| 0.923466
| 0.921505
| 0
| 0.767703
| 0.066494
| 94,655
| 283
| 2,037
| 334.469965
| 0.080488
| 0.007195
| 0
| 0.259414
| 0
| 0
| 0.008227
| 0.002884
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037657
| false
| 0
| 0.020921
| 0
| 0.062762
| 0.008368
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
7ef64e3728f5f0c8aca85346e0af93007c894ac3
| 240
|
py
|
Python
|
Testes Basicos/format.py
|
gustavoLuuD/estudos_python
|
c8d3e97913d8fc2d046c7a1942b24800779438f5
|
[
"MIT"
] | null | null | null |
Testes Basicos/format.py
|
gustavoLuuD/estudos_python
|
c8d3e97913d8fc2d046c7a1942b24800779438f5
|
[
"MIT"
] | null | null | null |
Testes Basicos/format.py
|
gustavoLuuD/estudos_python
|
c8d3e97913d8fc2d046c7a1942b24800779438f5
|
[
"MIT"
] | null | null | null |
# Examples of str.format() / f-string numeric formatting specifiers.
# (Comment fixes: {:7.2f} pads to width 7 with a leading space, and {:d}
# prints plain "123" -- the original comments claimed otherwise.)
print("{:f}".format(123.5))     # 123.500000  (default: 6 decimal places)
print("{:.2f}".format(123.5))   # 123.50      (2 decimal places)
print("{:7.2f}".format(123.5))  # ' 123.50'   (min width 7, space-padded)
print("{:07.2f}".format(123.5)) # 0123.50     (min width 7, zero-padded)
print("{:d}".format(123))       # 123         (integer, no padding)
print(f"Eu sei contar até {1234}")
| 40
| 44
| 0.575
| 42
| 240
| 3.285714
| 0.404762
| 0.326087
| 0.289855
| 0.282609
| 0.318841
| 0.318841
| 0.318841
| 0
| 0
| 0
| 0
| 0.27619
| 0.125
| 240
| 6
| 45
| 40
| 0.380952
| 0.158333
| 0
| 0
| 0
| 0
| 0.269036
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
add1bfb790d36509c7d34c46a8a5bb503cd25703
| 519
|
py
|
Python
|
venv/Lib/site-packages/konlpy/hangul.py
|
movierecommend-chatbot/chat-bot
|
fc40c1937e8f597230578c1957305ad22f8280e4
|
[
"bzip2-1.0.6"
] | null | null | null |
venv/Lib/site-packages/konlpy/hangul.py
|
movierecommend-chatbot/chat-bot
|
fc40c1937e8f597230578c1957305ad22f8280e4
|
[
"bzip2-1.0.6"
] | null | null | null |
venv/Lib/site-packages/konlpy/hangul.py
|
movierecommend-chatbot/chat-bot
|
fc40c1937e8f597230578c1957305ad22f8280e4
|
[
"bzip2-1.0.6"
] | null | null | null |
#! /usr/bin/python
# -*- coding: utf-8 -*-
def compose_char(char):
    """Compose a single hangul syllable from its jamo components.

    Not implemented yet.

    Raises:
        NotImplementedError: always (stub).
    """
    # BUG FIX: the original *returned* the NotImplementedError class instead
    # of raising it, silently handing callers an exception type as a value.
    raise NotImplementedError
def compose(string):
    """Compose a string of hangul syllables from jamo components.

    Not implemented yet.

    Raises:
        NotImplementedError: always (stub).
    """
    # BUG FIX: the original *returned* the NotImplementedError class instead
    # of raising it.
    raise NotImplementedError
def decompose_char(char):
    """Decompose a single hangul syllable into its jamo components.

    Not implemented yet.

    Raises:
        NotImplementedError: always (stub).
    """
    # BUG FIX: the original *returned* the NotImplementedError class instead
    # of raising it.
    raise NotImplementedError
def decompose(string, aslist=False):
    """Decompose every character of *string* into jamo components.

    Delegates per-character work to decompose_char (currently a stub that
    raises NotImplementedError).  With ``aslist=True``, returns the list of
    per-character decompositions; otherwise flattens them and joins into a
    single string, dropping ``' '`` placeholder entries.
    """
    per_char = (decompose_char(c) for c in string)
    if aslist:
        return list(per_char)
    # BUG FIX: the original called filter(' ', ...).  filter()'s first
    # argument must be a callable (or None), so passing the string ' '
    # raised TypeError at iteration time.  The apparent intent -- dropping
    # space placeholders while flattening -- is done here directly.
    # (Presumably decompose_char returns a list of jamo with ' ' fillers;
    # confirm once it is implemented.)
    flattened = (jamo for parts in per_char for jamo in parts if jamo != ' ')
    return ''.join(flattened)
def to_unicode():
    """Convert to a unicode representation.

    TODO
    NOTE
    FIXME
    """
    # BUG FIX: the original *returned* the NotImplementedError class instead
    # of raising it.
    raise NotImplementedError
| 14.828571
| 43
| 0.620424
| 57
| 519
| 5.578947
| 0.54386
| 0.314465
| 0.264151
| 0.207547
| 0.226415
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002604
| 0.260116
| 519
| 34
| 44
| 15.264706
| 0.825521
| 0.105973
| 0
| 0.266667
| 0
| 0
| 0.002278
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.2
| 0.733333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
adffcbf2f391b71d8fde39d0d9e499ecc83df09b
| 2,802
|
py
|
Python
|
tests/utils/test_utils_has_lost.py
|
battleforcastile/battleforcastile
|
65223fcb56ecc550f1a7c7b70beadff22c866d85
|
[
"MIT"
] | null | null | null |
tests/utils/test_utils_has_lost.py
|
battleforcastile/battleforcastile
|
65223fcb56ecc550f1a7c7b70beadff22c866d85
|
[
"MIT"
] | 1
|
2021-08-21T10:16:03.000Z
|
2021-08-21T10:16:03.000Z
|
tests/utils/test_utils_has_lost.py
|
battleforcastile/battleforcastile
|
65223fcb56ecc550f1a7c7b70beadff22c866d85
|
[
"MIT"
] | null | null | null |
from battleforcastile.utils.has_lost import has_lost
def test_if_has_lost_is_true_when_character_has_lower_value_and_an_empty_hand():
    # A hero trailing in value (10 < 20) with no cards left has lost.
    state = {
        'hero': {'value': 10},
        'board': [[], []],
        'enemy': {'value': 20},
    }
    hero_value = state['hero']['value']
    enemy_value = state['enemy']['value']
    assert has_lost(hero_value, enemy_value, 0, 0) is True
def test_if_has_lost_is_false_when_character_has_lower_value_but_not_an_empty_hand():
    # A hero trailing in value (10 < 20) but still holding a card has not lost.
    state = {
        'hero': {'value': 10},
        'board': [[], []],
        'enemy': {'value': 20},
    }
    hero_value = state['hero']['value']
    enemy_value = state['enemy']['value']
    assert has_lost(hero_value, enemy_value, 1, 0) is False
def test_if_has_lost_is_false_when_character_has_bigger_value_and_an_empty_hand():
    # A hero leading in value (20 > 10) has not lost, even with no cards left.
    state = {
        'hero': {'value': 20},
        'board': [[], []],
        'enemy': {'value': 10},
    }
    hero_value = state['hero']['value']
    enemy_value = state['enemy']['value']
    assert has_lost(hero_value, enemy_value, 0, 0) is False
def test_if_has_lost_is_false_when_character_has_bigger_value_and_not_an_empty_hand():
    # A hero leading in value (20 > 10) and still holding a card has not lost.
    state = {
        'hero': {'value': 20},
        'board': [[], []],
        'enemy': {'value': 10},
    }
    hero_value = state['hero']['value']
    enemy_value = state['enemy']['value']
    assert has_lost(hero_value, enemy_value, 1, 0) is False
def test_if_has_lost_is_true_when_character_has_equal_value_an_empty_hand_and_wins_tie_false():
    """On a tie with an empty hand, the hero loses when wins_tie is False."""
    state = {
        'hero': {'value': 20},
        'board': [[], []],
        'enemy': {'value': 20},
    }
    cards_hero, cards_enemy = 0, 0
    lost = has_lost(
        state['hero']['value'],
        state['enemy']['value'],
        cards_hero,
        cards_enemy,
        wins_tie=False,
    )
    assert lost is True
def test_if_has_lost_is_false_when_character_has_equal_value_an_empty_hand_and_wins_tie_true():
    """On a tie with an empty hand, the hero survives when wins_tie is True."""
    state = {
        'hero': {'value': 20},
        'board': [[], []],
        'enemy': {'value': 20},
    }
    cards_hero, cards_enemy = 0, 0
    lost = has_lost(
        state['hero']['value'],
        state['enemy']['value'],
        cards_hero,
        cards_enemy,
        wins_tie=True,
    )
    assert lost is False
| 28.30303
| 95
| 0.577802
| 364
| 2,802
| 3.903846
| 0.104396
| 0.135116
| 0.168895
| 0.236453
| 0.948628
| 0.938072
| 0.938072
| 0.933849
| 0.92822
| 0.92822
| 0
| 0.018302
| 0.298001
| 2,802
| 98
| 96
| 28.591837
| 0.704118
| 0
| 0
| 0.670588
| 0
| 0
| 0.092077
| 0
| 0
| 0
| 0
| 0
| 0.070588
| 1
| 0.070588
| false
| 0
| 0.011765
| 0
| 0.082353
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bc039ad3cf1ece508f29574d0c504b375b924109
| 17,915
|
py
|
Python
|
bLib/trash.py
|
scottwedge/blib
|
9a035dafad46fb2723562f19b4549a4216c2cf7d
|
[
"MIT"
] | 7
|
2020-07-15T08:40:34.000Z
|
2022-03-02T07:00:02.000Z
|
bLib/trash.py
|
scottwedge/blib
|
9a035dafad46fb2723562f19b4549a4216c2cf7d
|
[
"MIT"
] | 2
|
2020-05-20T01:37:38.000Z
|
2020-05-20T01:55:35.000Z
|
bLib/trash.py
|
scottwedge/blib
|
9a035dafad46fb2723562f19b4549a4216c2cf7d
|
[
"MIT"
] | 2
|
2020-05-20T01:32:47.000Z
|
2020-11-25T07:59:24.000Z
|
self.cur_inp = radamsa.radamsa(testcase.fullpath)
while not self.cur_inp:
self.cur_inp = radamsa.radamsa(testcase.fullpath)
self.logger.info('mutating ' + testcase)
r = self.executor.exec_one(self.cur_inp)
if r == OK:
pass
elif r == CRASH:
self.logger.info('testcase result in a crash')
self._crash_handler()
elif r == TIMEOUT:
self.logger.info('testcase timeout')
self._timeout_handler()
elif r == CLIENT_FUCKUP:
'''
TODO
fix client so we don't have to handle this case anymore\
handle it like timeout for now
'''
self.logger.error('client fuckup')
self._timeout_handler()
''' for android '''
''' everything is through rpc, slow but works for non root device '''
if len(inp) < 5:
return
len_p2 = next_pow2(len(inp))
self.logger.debug('len_p2: %d' % len_p2)
''' the number of bytes we gonna remove '''
remove_len = max(len_p2 / self.TRIM_START_STEPS, self.TRIM_MIN_BYTES)
while remove_len >= max(len_p2 / self.TRIM_END_STEPS, self.TRIM_MIN_BYTES):
''' our starting position '''
remove_pos = remove_len
self.stage_name = 'trim %d' % remove_len
self.stage_cur = 0
self.stage_max = len(inp) / remove_len
while remove_pos < len(inp):
trim_avail = min(remove_len, len(inp) - remove_pos)
class frida_bCov():
	"""Coverage collector backed by a Frida agent script (cov_event.js).

	Loads the script into a Frida session, relays its messages, and tracks
	the reported coverage length in ``self.covlen``.

	Fixes over the previous revision:
	- removed duplicate ``get_covlen``/``on_crash`` definitions (the later
	  copies silently shadowed the earlier, identical ones);
	- removed the trailing ``covlen`` property: it had no setter, so
	  ``self.covlen = 0`` in ``__init__`` raised ``AttributeError`` (the
	  class could not be instantiated), and it referenced ``self.shm`` /
	  ``u32_pointer`` which are not defined anywhere in this class;
	- ``__init__`` now accepts a ``type`` keyword for compatibility with
	  the in-file callers that pass ``frida_bCov(session, type='shm')``.
	"""
	def __init__(self, session, log_level=logging.DEBUG, type=None):
		"""Store the Frida session and set up logging.

		session: a frida Session (only used later by load()).
		log_level: logging level for this collector's logger.
		type: accepted for caller compatibility (callers pass 'shm');
		      currently informational only. Name kept (despite shadowing
		      the builtin) because callers use it as a keyword.
		"""
		self.session = session
		self.cov_type = type
		self.runtime = 'duk'  # JS runtime passed to create_script
		self.options = None
		self.crash_cb = None  # optional callable(details) invoked on crash
		self.covlen = 0       # last coverage length reported by the agent
		self.logger = logging.getLogger('frida_bCov')
		self.logger.setLevel(log_level)
	def set_options(self, options):
		"""Set agent options; requires 'cov_module_names' when options is truthy."""
		self.options = options
		if options:
			if 'cov_module_names' not in self.options:
				raise Exception('Missing cov_module_names')
	def load(self):
		"""Create and load the coverage agent script into the session."""
		if not self.options:
			raise Exception('set bCov.options first')
		script_path = join(BCOV_SCRIPTDIR, 'cov_event.js')
		with open(script_path, 'r') as scriptfile:
			self.script = self.session.create_script(scriptfile.read(), runtime=self.runtime)
		self.script.on('message', self.on_message)
		self.script.load()
		#self.script.exports.init(self.options)
		self.loaded = True
	def get_options(self):
		"""Return the options as seen by the agent script."""
		return self.script.exports.get_options()
	def get_modules(self):
		"""Return agent-reported modules, with 'base' parsed from hex to int."""
		modules = self.script.exports.get_modules()
		for module in modules:
			module['base'] = int(module['base'], 16)
		return modules
	def get_cov(self):
		"""Return coverage as a list of address lists (one per module)."""
		covs = []
		for temp in self.script.exports.get_cov():
			cov = []
			for addr in temp.keys():
				cov.append(int(addr))
			covs.append(cov)
		return covs
	def load_cov(self, covs):
		"""Restore previously captured coverage (not implemented yet)."""
		raise Exception('not implemented')
	def get_covlen(self):
		"""Return the coverage length as reported by the agent script."""
		return self.script.exports.get_covlen()
	def on_crash(self):
		"""Fetch crash details from the agent and dispatch to crash_cb (or log)."""
		details = self.script.exports.get_crash()
		if self.crash_cb:
			self.crash_cb(details)
		else:
			self.logger.info(details)
	def on_message(self, message, data):
		"""Frida message handler: decode agent messages by 1-byte type prefix."""
		MSG_DEBUG = '\x01'
		MSG_COVLEN = '\x03'
		MSG_CRASH = '\x04'
		if (message['type'] == 'send'):
			payload = message['payload']
			if payload[0] == MSG_DEBUG:
				self.logger.debug(payload)
			elif payload[0] == MSG_COVLEN:
				# payload is the type byte followed by a decimal length
				self.covlen = int(payload[1:])
		elif (message['type'] == 'error'):
			self.logger.error(message)
class bFuzz_AFL():
	"""AFL-style coverage-guided fuzzer driven by Frida instrumentation.

	Dry-runs the seed queue, then mutates queue entries with radamsa and
	keeps inputs that grow coverage (tracked via a frida_bCov helper).
	Relies on module-level names defined elsewhere in this file/module:
	frida, radamsa, frida_bCov, STATE_DRYRUN, STATE_FUZZING, windll and the
	ctypes types (LPVOID, BOOL, LPCSTR, HANDLE, c_char_p) — Windows only.

	Fix over the previous revision: save_state wrote the key 'crash_id:'
	(stray colon) while load_state read 'crash_id', so every resume raised
	KeyError. The key is now 'crash_id' in both places.

	NOTE(review): this class is a byte-for-byte duplicate of class bFuzz
	below — confirm which copy is canonical.
	"""
	def __init__(self, args, log_level=logging.DEBUG):
		"""args: argv list handed to frida.spawn; log_level: logger level."""
		self.fuzzer_id = 'f1'
		# self.bin_path = path
		self.args = args
		self.options = None
		self.covlen = 0          # best coverage length seen so far
		self.queue = []          # file names in the seed/queue directory
		self.hang_id = 0
		self.crash_id = 0
		self.dryrun_idx = 0      # next queue index to dry-run
		self.new_path_count = 0
		self.timeout = 60*2      # seconds before an exec is declared hung
		self.timer_started = 0
		self.nexecs = 0
		self.logger = logging.getLogger('bFuzzer')
		self.logger.setLevel(log_level)
		self.lock = threading.Lock()
	def kill(self):
		"""Kill the spawned target process."""
		frida.kill(self.pid)
	def start_process(self, covs=None):
		"""Spawn and attach to the target; optionally restore coverage."""
		self.pid = frida.spawn(self.args)
		self.session = frida.attach(self.pid)
		# NOTE(review): frida_bCov.__init__ must accept a `type` kwarg for
		# this call to work — confirm against the helper's signature.
		self.bcov = frida_bCov(self.session, type='shm')
		self.bcov.crash_cb = self.on_crash
		self.bcov.set_options(
			{
				'fuzzer_id': self.fuzzer_id,
				'cov_module_names': ['AdobeXMP.dll'],
				'target_module_name': 'EScript.api',
				'target_offset': 0xa8020
			}
		)
		self.bcov.load()
		if covs:
			self.bcov.load_cov(covs)
	def post_input(self, inp):
		"""Write the next input to cur_inp and signal the in-process agent."""
		with open(self.cur_inp, 'wb') as f:
			f.write(inp)
		self.set_event()
	def get_next_dryrun_input(self):
		"""Return the next queued seed's bytes, or None when dry-run is done."""
		if self.dryrun_idx < len(self.queue):
			fname = self.queue[self.dryrun_idx]
			self.logger.debug('dryrun: ' + fname)
			r = self.read_input(fname)
			self.dryrun_idx += 1
			return r
		else:
			return None
	'''for detect timeout and unexpected crash '''
	def timeout_handler(self):
		"""Hung exec: save the input to hangdir and restart the target."""
		covs = self.bcov.get_cov()
		fname = 'hang_%d' % self.hang_id
		self.hang_id += 1
		shutil.copy(self.cur_inp, join(self.hangdir, fname))
		self.logger.info('timeout')
		self.kill()
		self.start_process(covs)
		self.load_inpscript()
		frida.resume(self.pid)
	def get_next_fuzzing_input(self):
		"""Pick a random queue entry and mutate it with radamsa."""
		fname = random.choice(self.queue)
		self.logger.debug('mutating %s' % fname)
		return radamsa.radamsa(join(self.queuedir, fname))
	def found_new_path(self):
		"""Persist the current input into the queue (it grew coverage)."""
		self.new_path_count += 1
		fname = 'id_%06d' % self.new_path_count
		shutil.copy(self.cur_inp, join(self.queuedir, fname))
		self.queue.append(fname)
	def state_dryrun(self):
		"""Feed seeds one by one; switch to fuzzing when the queue is drained."""
		inp = self.get_next_dryrun_input()
		if inp == None:
			self.covlen = self.bcov.covlen
			self.logger.info('dryrun finished')
			self.state = STATE_FUZZING
			self.state_fuzzing()
		else:
			self.post_input(inp)
	def state_fuzzing(self):
		"""Record new paths when coverage grew, then post a fresh mutation."""
		if self.bcov.covlen > self.covlen:
			self.covlen = self.bcov.covlen
			self.found_new_path()
		inp = self.get_next_fuzzing_input()
		self.post_input(inp)
	def inp_script_cb(self):
		"""Per-exec callback from the agent: book-keeping + next input + watchdog."""
		self.logger.debug('>>')
		self.lock.acquire()
		self.nexecs += 1
		if self.nexecs%10==0:
			self.runtime = time.time() - self.starttime
			self.logger.info('exec speed: %f' % (self.nexecs/self.runtime))
		if self.timer_started == 1:
			self.timer.cancel()
			self.timer_started = 0
		self.logger.info('covlen: %d' % self.bcov.covlen)
		if self.state == STATE_DRYRUN:
			self.state_dryrun()
		elif self.state == STATE_FUZZING:
			self.state_fuzzing()
		# (re)arm the hang watchdog for the exec we just posted
		self.timer = threading.Timer(self.timeout, self.timeout_handler)
		self.timer.start()
		self.timer_started = 1
		self.logger.debug('<<')
		self.lock.release()
	def on_crash(self, details):
		"""Crash: save the offending input to crashdir and restart the target."""
		self.timer.cancel()
		self.timer_started = 0
		covs = self.bcov.get_cov()
		shutil.copyfile(self.cur_inp, join(self.crashdir, 'crash_%d'%self.crash_id))
		self.crash_id += 1
		self.logger.info('crash')
		self.kill()
		self.start_process(covs)
		self.load_inpscript()
		frida.resume(self.pid)
	def on_message(self, message, data):
		"""Message handler for the input-feeding script."""
		# self.logger.debug(message)
		MSG_INPUT = '\x01'
		if message['type'] == 'send':
			if message['payload'] == MSG_INPUT:
				self.inp_script_cb()
		elif message['type'] == 'error':
			self.logger.error(message)
	def reload_queue(self):
		"""Sync self.queue with the on-disk queue directory."""
		for fname in os.listdir(self.queuedir):
			if fname not in self.queue:
				self.queue.append(fname)
	def save_state(self):
		"""Pickle fuzzer counters and current coverage into odir."""
		fuzzer_state = {
			'new_path_count': self.new_path_count,
			'state': self.state,
			# ugly hack, we are not sure if the current dryrun is finished
			'dryrun_idx': self.dryrun_idx-1,
			'hang_id': self.hang_id,
			# key fixed: was 'crash_id:' which load_state could not read
			'crash_id': self.crash_id
		}
		with open(join(self.odir, 'state.pkl'), 'wb') as f:
			f.write(pickle.dumps(fuzzer_state))
		with open(join(self.odir, 'cov.pkl'), 'wb') as f:
			f.write(pickle.dumps(self.bcov.get_cov(), protocol=2))
	def load_state(self):
		"""Restore coverage and counters saved by save_state()."""
		with open(join(self.odir, 'cov.pkl'), 'rb') as f:
			covs = pickle.loads(f.read())
			self.bcov.load_cov(covs)
		with open(join(self.odir, 'state.pkl'), 'rb') as f:
			fuzzer_state = pickle.loads(f.read())
		self.reload_queue()
		self.state = fuzzer_state['state']
		self.new_path_count = fuzzer_state['new_path_count']
		self.dryrun_idx = fuzzer_state['dryrun_idx']
		self.hang_id = fuzzer_state['hang_id']
		self.crash_id = fuzzer_state['crash_id']
	def setup_event(self):
		"""Create the named Win32 event used to signal the in-target agent."""
		CreateEvent = windll.kernel32.CreateEventA
		CreateEvent.argtypes = (LPVOID, BOOL, BOOL, LPCSTR)
		CreateEvent.restype = HANDLE
		self.event = CreateEvent(0, 1, 0, c_char_p(bytes(self.fuzzer_id, 'ansi')))
	def set_event(self):
		"""Signal the named event: tells the agent a new input is ready."""
		SetEvent = windll.kernel32.SetEvent
		SetEvent.argtypes = (HANDLE,)
		SetEvent(self.event)
	def load_inpscript(self):
		"""Load the user-supplied input-feeding Frida script, if configured."""
		input_script = self.options.get('input_script')
		if input_script:
			self.logger.debug('using input_script: %s' % input_script)
			with open(input_script, 'r') as scriptfile:
				self.input_script = self.session.create_script(scriptfile.read())
			self.input_script.on('message', self.on_message)
			self.input_script.load()
			self.setup_event()
			options = {
				'input_path': self.cur_inp,
				'event_name': self.fuzzer_id
			}
			self.input_script.exports.init(options)
	def start(self):
		"""Entry point: spawn the target and begin (or resume) fuzzing."""
		if not self.options:
			raise Exception('options not set')
		self.start_process()
		radamsa.init(self.fuzzer_id)
		self.load_inpscript()
		if self.options.get('skip_dryrun'):
			self.state = STATE_FUZZING
		else:
			self.state = STATE_DRYRUN
		if os.path.exists(join(self.odir, 'cov.pkl')):
			self.logger.info('resuming')
			self.load_state()
		else:
			self.logger.info('new fuzzing session')
			if os.path.exists(self.odir):
				shutil.rmtree(self.odir)
			os.makedirs(self.odir)
			os.mkdir(self.queuedir)
			os.mkdir(self.crashdir)
			os.mkdir(self.hangdir)
			for fname in os.listdir(self.idir):
				self.add_file_to_queue(join(self.idir, fname))
		self.starttime = time.time()
		frida.resume(self.pid)
	def set_options(self, options):
		"""Apply options and derive the working-directory layout."""
		self.options = options
		if options:
			if 'idir' not in options:
				options['idir'] = 'in'
				self.logger.info('idir not specified, using default value `in`')
			if 'odir' not in options:
				options['odir'] = 'out'
				self.logger.info('odir not specified, using default value `out`')
			self.idir = options['idir']
			self.odir = join(options['odir'], self.fuzzer_id)
			self.queuedir = join(self.odir, 'queue')
			self.crashdir = join(self.odir, 'crash')
			self.hangdir = join(self.odir, 'hang')
			self.cur_inp = join(self.odir, 'cur_inp')
	def add_file_to_queue(self, path):
		"""Copy a seed file into the queue directory and register it."""
		fname = os.path.basename(path)
		''' copy file to queue follder '''
		shutil.copyfile(path, join(self.queuedir, fname))
		''' add fname to queue '''
		self.queue.append(fname)
	def read_input(self, fname):
		"""Read and return the raw bytes of a queue entry."""
		with open(join(self.queuedir, fname), 'rb') as f:
			d = f.read()
			return d
class bFuzz():
	"""AFL-style coverage-guided fuzzer driven by Frida instrumentation.

	Dry-runs the seed queue, then mutates queue entries with radamsa and
	keeps inputs that grow coverage (tracked via a frida_bCov helper).
	Relies on module-level names defined elsewhere in this file/module:
	frida, radamsa, frida_bCov, STATE_DRYRUN, STATE_FUZZING, windll and the
	ctypes types (LPVOID, BOOL, LPCSTR, HANDLE, c_char_p) — Windows only.

	Fix over the previous revision: save_state wrote the key 'crash_id:'
	(stray colon) while load_state read 'crash_id', so every resume raised
	KeyError. The key is now 'crash_id' in both places.

	NOTE(review): this class is a byte-for-byte duplicate of class
	bFuzz_AFL above — confirm which copy is canonical.
	"""
	def __init__(self, args, log_level=logging.DEBUG):
		"""args: argv list handed to frida.spawn; log_level: logger level."""
		self.fuzzer_id = 'f1'
		# self.bin_path = path
		self.args = args
		self.options = None
		self.covlen = 0          # best coverage length seen so far
		self.queue = []          # file names in the seed/queue directory
		self.hang_id = 0
		self.crash_id = 0
		self.dryrun_idx = 0      # next queue index to dry-run
		self.new_path_count = 0
		self.timeout = 60*2      # seconds before an exec is declared hung
		self.timer_started = 0
		self.nexecs = 0
		self.logger = logging.getLogger('bFuzzer')
		self.logger.setLevel(log_level)
		self.lock = threading.Lock()
	def kill(self):
		"""Kill the spawned target process."""
		frida.kill(self.pid)
	def start_process(self, covs=None):
		"""Spawn and attach to the target; optionally restore coverage."""
		self.pid = frida.spawn(self.args)
		self.session = frida.attach(self.pid)
		# NOTE(review): frida_bCov.__init__ must accept a `type` kwarg for
		# this call to work — confirm against the helper's signature.
		self.bcov = frida_bCov(self.session, type='shm')
		self.bcov.crash_cb = self.on_crash
		self.bcov.set_options(
			{
				'fuzzer_id': self.fuzzer_id,
				'cov_module_names': ['AdobeXMP.dll'],
				'target_module_name': 'EScript.api',
				'target_offset': 0xa8020
			}
		)
		self.bcov.load()
		if covs:
			self.bcov.load_cov(covs)
	def post_input(self, inp):
		"""Write the next input to cur_inp and signal the in-process agent."""
		with open(self.cur_inp, 'wb') as f:
			f.write(inp)
		self.set_event()
	def get_next_dryrun_input(self):
		"""Return the next queued seed's bytes, or None when dry-run is done."""
		if self.dryrun_idx < len(self.queue):
			fname = self.queue[self.dryrun_idx]
			self.logger.debug('dryrun: ' + fname)
			r = self.read_input(fname)
			self.dryrun_idx += 1
			return r
		else:
			return None
	'''for detect timeout and unexpected crash '''
	def timeout_handler(self):
		"""Hung exec: save the input to hangdir and restart the target."""
		covs = self.bcov.get_cov()
		fname = 'hang_%d' % self.hang_id
		self.hang_id += 1
		shutil.copy(self.cur_inp, join(self.hangdir, fname))
		self.logger.info('timeout')
		self.kill()
		self.start_process(covs)
		self.load_inpscript()
		frida.resume(self.pid)
	def get_next_fuzzing_input(self):
		"""Pick a random queue entry and mutate it with radamsa."""
		fname = random.choice(self.queue)
		self.logger.debug('mutating %s' % fname)
		return radamsa.radamsa(join(self.queuedir, fname))
	def found_new_path(self):
		"""Persist the current input into the queue (it grew coverage)."""
		self.new_path_count += 1
		fname = 'id_%06d' % self.new_path_count
		shutil.copy(self.cur_inp, join(self.queuedir, fname))
		self.queue.append(fname)
	def state_dryrun(self):
		"""Feed seeds one by one; switch to fuzzing when the queue is drained."""
		inp = self.get_next_dryrun_input()
		if inp == None:
			self.covlen = self.bcov.covlen
			self.logger.info('dryrun finished')
			self.state = STATE_FUZZING
			self.state_fuzzing()
		else:
			self.post_input(inp)
	def state_fuzzing(self):
		"""Record new paths when coverage grew, then post a fresh mutation."""
		if self.bcov.covlen > self.covlen:
			self.covlen = self.bcov.covlen
			self.found_new_path()
		inp = self.get_next_fuzzing_input()
		self.post_input(inp)
	def inp_script_cb(self):
		"""Per-exec callback from the agent: book-keeping + next input + watchdog."""
		self.logger.debug('>>')
		self.lock.acquire()
		self.nexecs += 1
		if self.nexecs%10==0:
			self.runtime = time.time() - self.starttime
			self.logger.info('exec speed: %f' % (self.nexecs/self.runtime))
		if self.timer_started == 1:
			self.timer.cancel()
			self.timer_started = 0
		self.logger.info('covlen: %d' % self.bcov.covlen)
		if self.state == STATE_DRYRUN:
			self.state_dryrun()
		elif self.state == STATE_FUZZING:
			self.state_fuzzing()
		# (re)arm the hang watchdog for the exec we just posted
		self.timer = threading.Timer(self.timeout, self.timeout_handler)
		self.timer.start()
		self.timer_started = 1
		self.logger.debug('<<')
		self.lock.release()
	def on_crash(self, details):
		"""Crash: save the offending input to crashdir and restart the target."""
		self.timer.cancel()
		self.timer_started = 0
		covs = self.bcov.get_cov()
		shutil.copyfile(self.cur_inp, join(self.crashdir, 'crash_%d'%self.crash_id))
		self.crash_id += 1
		self.logger.info('crash')
		self.kill()
		self.start_process(covs)
		self.load_inpscript()
		frida.resume(self.pid)
	def on_message(self, message, data):
		"""Message handler for the input-feeding script."""
		# self.logger.debug(message)
		MSG_INPUT = '\x01'
		if message['type'] == 'send':
			if message['payload'] == MSG_INPUT:
				self.inp_script_cb()
		elif message['type'] == 'error':
			self.logger.error(message)
	def reload_queue(self):
		"""Sync self.queue with the on-disk queue directory."""
		for fname in os.listdir(self.queuedir):
			if fname not in self.queue:
				self.queue.append(fname)
	def save_state(self):
		"""Pickle fuzzer counters and current coverage into odir."""
		fuzzer_state = {
			'new_path_count': self.new_path_count,
			'state': self.state,
			# ugly hack, we are not sure if the current dryrun is finished
			'dryrun_idx': self.dryrun_idx-1,
			'hang_id': self.hang_id,
			# key fixed: was 'crash_id:' which load_state could not read
			'crash_id': self.crash_id
		}
		with open(join(self.odir, 'state.pkl'), 'wb') as f:
			f.write(pickle.dumps(fuzzer_state))
		with open(join(self.odir, 'cov.pkl'), 'wb') as f:
			f.write(pickle.dumps(self.bcov.get_cov(), protocol=2))
	def load_state(self):
		"""Restore coverage and counters saved by save_state()."""
		with open(join(self.odir, 'cov.pkl'), 'rb') as f:
			covs = pickle.loads(f.read())
			self.bcov.load_cov(covs)
		with open(join(self.odir, 'state.pkl'), 'rb') as f:
			fuzzer_state = pickle.loads(f.read())
		self.reload_queue()
		self.state = fuzzer_state['state']
		self.new_path_count = fuzzer_state['new_path_count']
		self.dryrun_idx = fuzzer_state['dryrun_idx']
		self.hang_id = fuzzer_state['hang_id']
		self.crash_id = fuzzer_state['crash_id']
	def setup_event(self):
		"""Create the named Win32 event used to signal the in-target agent."""
		CreateEvent = windll.kernel32.CreateEventA
		CreateEvent.argtypes = (LPVOID, BOOL, BOOL, LPCSTR)
		CreateEvent.restype = HANDLE
		self.event = CreateEvent(0, 1, 0, c_char_p(bytes(self.fuzzer_id, 'ansi')))
	def set_event(self):
		"""Signal the named event: tells the agent a new input is ready."""
		SetEvent = windll.kernel32.SetEvent
		SetEvent.argtypes = (HANDLE,)
		SetEvent(self.event)
	def load_inpscript(self):
		"""Load the user-supplied input-feeding Frida script, if configured."""
		input_script = self.options.get('input_script')
		if input_script:
			self.logger.debug('using input_script: %s' % input_script)
			with open(input_script, 'r') as scriptfile:
				self.input_script = self.session.create_script(scriptfile.read())
			self.input_script.on('message', self.on_message)
			self.input_script.load()
			self.setup_event()
			options = {
				'input_path': self.cur_inp,
				'event_name': self.fuzzer_id
			}
			self.input_script.exports.init(options)
	def start(self):
		"""Entry point: spawn the target and begin (or resume) fuzzing."""
		if not self.options:
			raise Exception('options not set')
		self.start_process()
		radamsa.init(self.fuzzer_id)
		self.load_inpscript()
		if self.options.get('skip_dryrun'):
			self.state = STATE_FUZZING
		else:
			self.state = STATE_DRYRUN
		if os.path.exists(join(self.odir, 'cov.pkl')):
			self.logger.info('resuming')
			self.load_state()
		else:
			self.logger.info('new fuzzing session')
			if os.path.exists(self.odir):
				shutil.rmtree(self.odir)
			os.makedirs(self.odir)
			os.mkdir(self.queuedir)
			os.mkdir(self.crashdir)
			os.mkdir(self.hangdir)
			for fname in os.listdir(self.idir):
				self.add_file_to_queue(join(self.idir, fname))
		self.starttime = time.time()
		frida.resume(self.pid)
	def set_options(self, options):
		"""Apply options and derive the working-directory layout."""
		self.options = options
		if options:
			if 'idir' not in options:
				options['idir'] = 'in'
				self.logger.info('idir not specified, using default value `in`')
			if 'odir' not in options:
				options['odir'] = 'out'
				self.logger.info('odir not specified, using default value `out`')
			self.idir = options['idir']
			self.odir = join(options['odir'], self.fuzzer_id)
			self.queuedir = join(self.odir, 'queue')
			self.crashdir = join(self.odir, 'crash')
			self.hangdir = join(self.odir, 'hang')
			self.cur_inp = join(self.odir, 'cur_inp')
	def add_file_to_queue(self, path):
		"""Copy a seed file into the queue directory and register it."""
		fname = os.path.basename(path)
		''' copy file to queue follder '''
		shutil.copyfile(path, join(self.queuedir, fname))
		''' add fname to queue '''
		self.queue.append(fname)
	def read_input(self, fname):
		"""Read and return the raw bytes of a queue entry."""
		with open(join(self.queuedir, fname), 'rb') as f:
			d = f.read()
			return d
| 26.580119
| 84
| 0.688585
| 2,688
| 17,915
| 4.431548
| 0.098214
| 0.039456
| 0.027032
| 0.013432
| 0.873741
| 0.862576
| 0.85779
| 0.837727
| 0.837727
| 0.834201
| 0
| 0.007034
| 0.166788
| 17,915
| 674
| 85
| 26.580119
| 0.790983
| 0
| 0
| 0.8473
| 0
| 0
| 0.085125
| 0
| 0
| 0
| 0.000816
| 0.001484
| 0
| 0
| null | null | 0.001862
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bc3ac99bf8f85c9552b873d1229ecaaef282d0e7
| 99,172
|
py
|
Python
|
dlkit/manager_impls/repository/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/manager_impls/repository/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/manager_impls/repository/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Manager utility implementations of repository managers."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from ..osid import managers as osid_managers
from ..osid.osid_errors import NullArgument
from ..osid.osid_errors import Unimplemented
from ..type.objects import TypeList
from dlkit.abstract_osid.repository import managers as abc_repository_managers
class RepositoryProfile(abc_repository_managers.RepositoryProfile, osid_managers.OsidProfile):
"""The repository profile describes interoperability among repository services."""
def supports_visible_federation(self):
"""Tests if federation is visible.
return: (boolean) - ``true`` if visible federation is supported
``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_lookup(self):
"""Tests if asset lookup is supported.
return: (boolean) - ``true`` if asset lookup is supported ``,``
``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_query(self):
"""Tests if asset query is supported.
return: (boolean) - ``true`` if asset query is supported ``,``
``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_search(self):
"""Tests if asset search is supported.
return: (boolean) - ``true`` if asset search is supported ``,``
``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_admin(self):
"""Tests if asset administration is supported.
return: (boolean) - ``true`` if asset administration is
supported, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_notification(self):
"""Tests if asset notification is supported.
A repository may send messages when assets are created,
modified, or deleted.
return: (boolean) - ``true`` if asset notification is supported
``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_repository(self):
"""Tests if retrieving mappings of assets and repositories is supported.
return: (boolean) - ``true`` if asset repository mapping
retrieval is supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_repository_assignment(self):
"""Tests if managing mappings of assets and repositories is supported.
return: (boolean) - ``true`` if asset repository assignment is
supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_smart_repository(self):
"""Tests if asset smart repository is supported.
return: (boolean) - ``true`` if asset smart repository is
supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_temporal(self):
"""Tests if retrieving mappings of assets and time coverage is supported.
return: (boolean) - ``true`` if asset temporal mapping retrieval
is supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_temporal_assignment(self):
"""Tests if managing mappings of assets and time ocverage is supported.
return: (boolean) - ``true`` if asset temporal assignment is
supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_spatial(self):
"""Tests if retrieving mappings of assets and spatial coverage is supported.
return: (boolean) - ``true`` if asset spatial mapping retrieval
is supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_spatial_assignment(self):
"""Tests if managing mappings of assets and spatial ocverage is supported.
return: (boolean) - ``true`` if asset spatial assignment is
supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_composition(self):
"""Tests if assets are included in compositions.
return: (boolean) - ``true`` if asset composition supported
``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_asset_composition_design(self):
"""Tests if mapping assets to compositions is supported.
return: (boolean) - ``true`` if designing asset compositions is
supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_composition_lookup(self):
"""Tests if composition lookup is supported.
return: (boolean) - ``true`` if composition lookup is supported
``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_composition_query(self):
"""Tests if composition query is supported.
return: (boolean) - ``true`` if composition query is supported
``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_composition_search(self):
"""Tests if composition search is supported.
return: (boolean) - ``true`` if composition search is supported
``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_composition_admin(self):
"""Tests if composition administration is supported.
return: (boolean) - ``true`` if composition administration is
supported, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_composition_notification(self):
"""Tests if composition notification is supported.
return: (boolean) - ``true`` if composition notification is
supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_composition_repository(self):
"""Tests if retrieval of composition to repository mappings is supported.
return: (boolean) - ``true`` if composition to repository
mapping is supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_composition_repository_assignment(self):
"""Tests if assigning composition to repository mappings is supported.
return: (boolean) - ``true`` if composition to repository
assignment is supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_composition_smart_repository(self):
"""Tests if composition smart repository is supported.
return: (boolean) - ``true`` if composition smart repository is
supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_repository_lookup(self):
"""Tests if repository lookup is supported.
return: (boolean) - ``true`` if repository lookup is supported
``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_repository_query(self):
"""Tests if repository query is supported.
return: (boolean) - ``true`` if repository query is supported
``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_repository_search(self):
"""Tests if repository search is supported.
return: (boolean) - ``true`` if repository search is supported
``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_repository_admin(self):
"""Tests if repository administration is supported.
return: (boolean) - ``true`` if repository administration is
supported, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_repository_notification(self):
"""Tests if repository notification is supported.
Messages may be sent when ``Repository`` objects are created,
deleted or updated. Notifications for assets within repositories
are sent via the asset notification session.
return: (boolean) - ``true`` if repository notification is
supported ``,`` ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_repository_hierarchy(self):
"""Tests if a repository hierarchy traversal is supported.
return: (boolean) - ``true`` if a repository hierarchy traversal
is supported, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_repository_hierarchy_design(self):
"""Tests if a repository hierarchy design is supported.
return: (boolean) - ``true`` if a repository hierarchy design is
supported, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_repository_batch(self):
"""Tests if a repository batch service is supported.
return: (boolean) - ``true`` if a repository batch service is
supported, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_repository_rules(self):
"""Tests if a repository rules service is supported.
return: (boolean) - ``true`` if a repository rules service is
supported, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def get_asset_record_types(self):
"""Gets all the asset record types supported.
return: (osid.type.TypeList) - the list of supported asset
record types
*compliance: mandatory -- This method must be implemented.*
"""
return TypeList([])
asset_record_types = property(fget=get_asset_record_types)
def supports_asset_record_type(self, asset_record_type=None):
"""Tests if a given asset type is supported.
arg: asset_record_type (osid.type.Type): the asset record
type
return: (boolean) - ``true`` if the asset record type is
supported ``,`` ``false`` otherwise
raise: NullArgument - ``asset_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
if asset_record_type is None:
raise NullArgument()
return False
def get_asset_search_record_types(self):
"""Gets all the asset search record types supported.
return: (osid.type.TypeList) - the list of supported asset
search record types
*compliance: mandatory -- This method must be implemented.*
"""
return TypeList([])
asset_search_record_types = property(fget=get_asset_search_record_types)
def supports_asset_search_record_type(self, asset_search_record_type=None):
"""Tests if a given asset search record type is supported.
arg: asset_search_record_type (osid.type.Type): the asset
search record type
return: (boolean) - ``true`` if the asset search record type is
supported ``,`` ``false`` otherwise
raise: NullArgument - ``asset_search_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
if asset_search_record_type is None:
raise NullArgument()
return False
def get_asset_content_record_types(self):
"""Gets all the asset content record types supported.
return: (osid.type.TypeList) - the list of supported asset
content record types
*compliance: mandatory -- This method must be implemented.*
"""
return TypeList([])
asset_content_record_types = property(fget=get_asset_content_record_types)
def supports_asset_content_record_type(self, asset_content_record_type=None):
"""Tests if a given asset content record type is supported.
arg: asset_content_record_type (osid.type.Type): the asset
content record type
return: (boolean) - ``true`` if the asset content record type is
supported ``,`` ``false`` otherwise
raise: NullArgument - ``asset_content_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
if asset_content_record_type is None:
raise NullArgument()
return False
def get_composition_record_types(self):
"""Gets all the composition record types supported.
return: (osid.type.TypeList) - the list of supported composition
record types
*compliance: mandatory -- This method must be implemented.*
"""
return TypeList([])
composition_record_types = property(fget=get_composition_record_types)
def supports_composition_record_type(self, composition_record_type=None):
"""Tests if a given composition record type is supported.
arg: composition_record_type (osid.type.Type): the
composition record type
return: (boolean) - ``true`` if the composition record type is
supported ``,`` ``false`` otherwise
raise: NullArgument - ``composition_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
if composition_record_type is None:
raise NullArgument()
return False
def get_composition_search_record_types(self):
    """Return every composition search record type this profile supports.

    return: (osid.type.TypeList) - the list of supported composition
        search record types
    *compliance: mandatory -- This method must be implemented.*

    """
    # This stub profile advertises no supported record types.
    supported = []
    return TypeList(supported)
composition_search_record_types = property(fget=get_composition_search_record_types)
def supports_composition_search_record_type(self, composition_search_record_type=None):
    """Check whether a given composition search record type is supported.

    arg:    composition_search_record_type (osid.type.Type): the
        composition search record type
    return: (boolean) - ``true`` if the composition search record
        type is supported, ``false`` otherwise
    raise:  NullArgument - ``composition_search_record_type`` is
        ``null``
    *compliance: mandatory -- This method must be implemented.*

    """
    if composition_search_record_type is not None:
        # No record types are supported by this stub implementation.
        return False
    raise NullArgument()
def get_repository_record_types(self):
    """Return every repository record type this profile supports.

    return: (osid.type.TypeList) - the list of supported repository
        record types
    *compliance: mandatory -- This method must be implemented.*

    """
    # This stub profile advertises no supported record types.
    supported = []
    return TypeList(supported)
repository_record_types = property(fget=get_repository_record_types)
def supports_repository_record_type(self, repository_record_type=None):
    """Check whether a given repository record type is supported.

    arg:    repository_record_type (osid.type.Type): the repository
        record type
    return: (boolean) - ``true`` if the repository record type is
        supported, ``false`` otherwise
    raise:  NullArgument - ``repository_record_type`` is ``null``
    *compliance: mandatory -- This method must be implemented.*

    """
    if repository_record_type is not None:
        # No record types are supported by this stub implementation.
        return False
    raise NullArgument()
def get_repository_search_record_types(self):
    """Return every repository search record type this profile supports.

    return: (osid.type.TypeList) - the list of supported repository
        search record types
    *compliance: mandatory -- This method must be implemented.*

    """
    # This stub profile advertises no supported record types.
    supported = []
    return TypeList(supported)
repository_search_record_types = property(fget=get_repository_search_record_types)
def supports_repository_search_record_type(self, repository_search_record_type=None):
    """Check whether a given repository search record type is supported.

    arg:    repository_search_record_type (osid.type.Type): the
        repository search record type
    return: (boolean) - ``true`` if the repository search record
        type is supported, ``false`` otherwise
    raise:  NullArgument - ``repository_search_record_type`` is
        ``null``
    *compliance: mandatory -- This method must be implemented.*

    """
    if repository_search_record_type is not None:
        # No record types are supported by this stub implementation.
        return False
    raise NullArgument()
def get_spatial_unit_record_types(self):
    """Return every spatial unit record type this profile supports.

    return: (osid.type.TypeList) - the list of supported spatial
        unit record types
    *compliance: mandatory -- This method must be implemented.*

    """
    # This stub profile advertises no supported record types.
    supported = []
    return TypeList(supported)
spatial_unit_record_types = property(fget=get_spatial_unit_record_types)
def supports_spatial_unit_record_type(self, spatial_unit_record_type=None):
    """Check whether a given spatial unit record type is supported.

    arg:    spatial_unit_record_type (osid.type.Type): the spatial
        unit record type
    return: (boolean) - ``true`` if the spatial unit record type is
        supported, ``false`` otherwise
    raise:  NullArgument - ``spatial_unit_record_type`` is ``null``
    *compliance: mandatory -- This method must be implemented.*

    """
    if spatial_unit_record_type is not None:
        # No record types are supported by this stub implementation.
        return False
    raise NullArgument()
def get_coordinate_types(self):
    """Return every coordinate type this profile supports.

    return: (osid.type.TypeList) - the list of supported coordinate
        types
    *compliance: mandatory -- This method must be implemented.*

    """
    # This stub profile advertises no supported coordinate types.
    supported = []
    return TypeList(supported)
coordinate_types = property(fget=get_coordinate_types)
def supports_coordinate_type(self, coordinate_type=None):
    """Check whether a given coordinate type is supported.

    arg:    coordinate_type (osid.type.Type): the coordinate type
    return: (boolean) - ``true`` if the coordinate type is
        supported, ``false`` otherwise
    raise:  NullArgument - ``coordinate_type`` is ``null``
    *compliance: mandatory -- This method must be implemented.*

    """
    if coordinate_type is not None:
        # No coordinate types are supported by this stub implementation.
        return False
    raise NullArgument()
class RepositoryManager(abc_repository_managers.RepositoryManager, osid_managers.OsidManager, RepositoryProfile):
"""The repository manager provides access to asset lookup and creation session and provides interoperability tests for various aspects of this service.
The sessions included in this manager are:
* ``AssetLookupSession:`` a session to retrieve assets
* ``AssetQuerySession:`` a session to query assets
* ``AssetSearchSession:`` a session to search for assets
* ``AssetAdminSession:`` a session to create and delete assets
* ``AssetNotificationSession:`` a session to receive notifications
pertaining to asset changes
* ``AssetRepositorySession:`` a session to look up asset to
repository mappings
* ``AssetRepositoryAssignmentSession:`` a session to manage asset
to repository mappings
* ``AssetSmartRepositorySession:`` a session to manage dynamic
repositories of assets
* ``AssetTemporalSession:`` a session to access the temporal
coverage of an asset
* ``AssetTemporalAssignmentSession:`` a session to manage the
temporal coverage of an asset
* ``AssetSpatialSession:`` a session to access the spatial
coverage of an asset
* ``AssetSpatialAssignmentSession:`` a session to manage the
spatial coverage of an asset
* ``AssetCompositionSession:`` a session to look up asset
composition mappings
* ``AssetCompositionDesignSession:`` a session to map assets to
compositions
* ``CompositionLookupSession: a`` session to retrieve compositions
* ``CompositionQuerySession:`` a session to query compositions
* ``CompositionSearchSession:`` a session to search for
compositions
* ``CompositionAdminSession:`` a session to create, update and
delete compositions
* ``CompositionNotificationSession:`` a session to receive
notifications pertaining to changes in compositions
* ``CompositionRepositorySession:`` a session to retrieve
composition repository mappings
* ``CompositionRepositoryAssignmentSession:`` a session to manage
composition repository mappings
* ``CompositionSmartRepositorySession:`` a session to manage
dynamic repositories of compositions
* ``RepositoryLookupSession: a`` session to retrieve repositories
* ``RepositoryQuerySession:`` a session to query repositories
* ``RepositorySearchSession:`` a session to search for
repositories
* ``RepositoryAdminSession:`` a session to create, update and
delete repositories
* ``RepositoryNotificationSession:`` a session to receive
notifications pertaining to changes in repositories
* ``RepositoryHierarchySession:`` a session to traverse repository
hierarchies
* ``RepositoryHierarchyDesignSession:`` a session to manage
repository hierarchies
"""
def get_asset_lookup_session(self):
    """Return the ``OsidSession`` for the asset lookup service.

    return: (osid.repository.AssetLookupSession) - the new
        ``AssetLookupSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_lookup()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_lookup()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
asset_lookup_session = property(fget=get_asset_lookup_session)
def get_asset_lookup_session_for_repository(self, repository_id=None):
    """Gets the ``OsidSession`` associated with the asset lookup service for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.AssetLookupSession) - the new
        ``AssetLookupSession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_lookup()`` or
        ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_lookup()`` and
        ``supports_visible_federation()`` are ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_asset_query_session(self):
    """Return an asset query session.

    return: (osid.repository.AssetQuerySession) - an
        ``AssetQuerySession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_query()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_query()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
asset_query_session = property(fget=get_asset_query_session)
def get_asset_query_session_for_repository(self, repository_id=None):
    """Gets an asset query session for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.AssetQuerySession) - an
        ``AssetQuerySession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_query()`` or
        ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_query()`` and ``supports_visible_federation()``
        are ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_asset_search_session(self):
    """Return an asset search session.

    return: (osid.repository.AssetSearchSession) - an
        ``AssetSearchSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_search()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_search()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
asset_search_session = property(fget=get_asset_search_session)
def get_asset_search_session_for_repository(self, repository_id=None):
    """Gets an asset search session for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.AssetSearchSession) - an
        ``AssetSearchSession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_search()`` or
        ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_search()`` and
        ``supports_visible_federation()`` are ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_asset_admin_session(self):
    """Return an asset administration session for creating, updating and deleting assets.

    return: (osid.repository.AssetAdminSession) - an
        ``AssetAdminSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_admin()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_admin()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
asset_admin_session = property(fget=get_asset_admin_session)
def get_asset_admin_session_for_repository(self, repository_id=None):
    """Gets an asset administration session for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.AssetAdminSession) - an
        ``AssetAdminSession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_admin()`` or
        ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_admin()`` and ``supports_visible_federation()``
        are ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_asset_notification_session(self, asset_receiver=None):
    """Return the notification session for notifications pertaining to asset changes.

    arg:    asset_receiver (osid.repository.AssetReceiver): the
        notification callback
    return: (osid.repository.AssetNotificationSession) - an
        ``AssetNotificationSession``
    raise:  NullArgument - ``asset_receiver`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_notification()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_notification()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
def get_asset_notification_session_for_repository(self, asset_receiver=None, repository_id=None):
    """Gets the asset notification session for the given repository.

    arg:    asset_receiver (osid.repository.AssetReceiver): the
        notification callback
    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.AssetNotificationSession) - an
        ``AssetNotificationSession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``asset_receiver`` or ``repository_id``
        is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_notification()`` or
        ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_notification()`` and
        ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required; raise an instance (was a bare class),
    # consistent with the supports_* methods elsewhere in this module.
    if asset_receiver is None or repository_id is None:
        raise NullArgument()
    raise Unimplemented()
def get_asset_repository_session(self):
    """Return the session for retrieving asset to repository mappings.

    return: (osid.repository.AssetRepositorySession) - an
        ``AssetRepositorySession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_repository()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_repository()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
asset_repository_session = property(fget=get_asset_repository_session)
def get_asset_repository_assignment_session(self):
    """Return the session for assigning asset to repository mappings.

    return: (osid.repository.AssetRepositoryAssignmentSession) - an
        ``AssetRepositoryAsignmentSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
        ``supports_asset_repository_assignment()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_repository_assignment()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
asset_repository_assignment_session = property(fget=get_asset_repository_assignment_session)
def get_asset_smart_repository_session(self, repository_id=None):
    """Gets an asset smart repository session for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.AssetSmartRepositorySession) - an
        ``AssetSmartRepositorySession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_smart_repository()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_smart_repository()`` is ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_asset_temporal_session(self):
    """Return the session for retrieving temporal coverage of an asset.

    return: (osid.repository.AssetTemporalSession) - an
        ``AssetTemporalSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_temporal()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_temporal()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
asset_temporal_session = property(fget=get_asset_temporal_session)
def get_asset_temporal_session_for_repository(self, repository_id=None):
    """Gets the session for retrieving temporal coverage of an asset for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.AssetTemporalSession) - an
        ``AssetTemporalSession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_temporal()`` or
        ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_temporal()`` and
        ``supports_visible_federation()`` are ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_asset_temporal_assignment_session(self):
    """Return the session for assigning temporal coverage to an asset.

    return: (osid.repository.AssetTemporalAssignmentSession) - an
        ``AssetTemporalAssignmentSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_temporal_assignment()``
        is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_temporal_assignment()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
asset_temporal_assignment_session = property(fget=get_asset_temporal_assignment_session)
def get_asset_temporal_assignment_session_for_repository(self, repository_id=None):
    """Gets the session for assigning temporal coverage of an asset for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.AssetTemporalAssignmentSession) - an
        ``AssetTemporalAssignmentSession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_temporal_assignment()``
        or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_temporal_assignment()`` and
        ``supports_visible_federation()`` are ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_asset_spatial_session(self):
    """Return the session for retrieving spatial coverage of an asset.

    return: (osid.repository.AssetSpatialSession) - an
        ``AssetSpatialSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_spatial_assets()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_spatial_assets()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
asset_spatial_session = property(fget=get_asset_spatial_session)
def get_asset_spatial_session_for_repository(self, repository_id=None):
    """Gets the session for retrieving spatial coverage of an asset for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.AssetSpatialSession) - an
        ``AssetSpatialSession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_spatial()`` or
        ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_spatial()`` and
        ``supports_visible_federation()`` are ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_asset_spatial_assignment_session(self):
    """Return the session for assigning spatial coverage to an asset.

    return: (osid.repository.AssetSpatialAssignmentSession) - an
        ``AssetSpatialAssignmentSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_spatial_assignment()``
        is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_spatial_assignment()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
asset_spatial_assignment_session = property(fget=get_asset_spatial_assignment_session)
def get_asset_spatial_assignment_session_for_repository(self, repository_id=None):
    """Gets the session for assigning spatial coverage of an asset for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.AssetSpatialAssignmentSession) - an
        ``AssetSpatialAssignmentSession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_spatial_assignment()``
        or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_spatial_assignment()`` and
        ``supports_visible_federation()`` are ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_asset_composition_session(self):
    """Return the session for retrieving asset compositions.

    return: (osid.repository.AssetCompositionSession) - an
        ``AssetCompositionSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_composition()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_composition()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
asset_composition_session = property(fget=get_asset_composition_session)
def get_asset_composition_design_session(self):
    """Return the session for creating asset compositions.

    return: (osid.repository.AssetCompositionDesignSession) - an
        ``AssetCompositionDesignSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_asset_composition_design()``
        is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_asset_composition_design()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
asset_composition_design_session = property(fget=get_asset_composition_design_session)
def get_composition_lookup_session(self):
    """Return the ``OsidSession`` for the composition lookup service.

    return: (osid.repository.CompositionLookupSession) - the new
        ``CompositionLookupSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_composition_lookup()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_lookup()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
composition_lookup_session = property(fget=get_composition_lookup_session)
def get_composition_lookup_session_for_repository(self, repository_id=None):
    """Gets the ``OsidSession`` associated with the composition lookup service for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.CompositionLookupSession) - the new
        ``CompositionLookupSession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_composition_lookup()`` or
        ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_lookup()`` and
        ``supports_visible_federation()`` are ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_composition_query_session(self):
    """Return a composition query session.

    return: (osid.repository.CompositionQuerySession) - a
        ``CompositionQuerySession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_composition_query()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_query()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
composition_query_session = property(fget=get_composition_query_session)
def get_composition_query_session_for_repository(self, repository_id=None):
    """Gets a composition query session for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.CompositionQuerySession) - a
        ``CompositionQuerySession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_composition_query()`` or
        ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_query()`` and
        ``supports_visible_federation()`` are ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_composition_search_session(self):
    """Return a composition search session.

    return: (osid.repository.CompositionSearchSession) - a
        ``CompositionSearchSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_composition_search()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_search()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
composition_search_session = property(fget=get_composition_search_session)
def get_composition_search_session_for_repository(self, repository_id=None):
    """Gets a composition search session for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.CompositionSearchSession) - a
        ``CompositionSearchSession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_composition_search()`` or
        ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_search()`` and
        ``supports_visible_federation()`` are ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_composition_admin_session(self):
    """Return a composition administration session for creating, updating and deleting compositions.

    return: (osid.repository.CompositionAdminSession) - a
        ``CompositionAdminSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_composition_admin()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_admin()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
composition_admin_session = property(fget=get_composition_admin_session)
def get_composition_admin_session_for_repository(self, repository_id=None):
    """Gets a composition administrative session for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.CompositionAdminSession) - a
        ``CompositionAdminSession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_composition_admin()`` or
        ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_admin()`` and
        ``supports_visible_federation()`` are ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_composition_notification_session(self, composition_receiver=None):
    """Return the notification session for notifications pertaining to composition changes.

    arg:    composition_receiver
        (osid.repository.CompositionReceiver): the notification
        callback
    return: (osid.repository.CompositionNotificationSession) - a
        ``CompositionNotificationSession``
    raise:  NullArgument - ``composition_receiver`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_composition_notification()``
        is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_notification()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
def get_composition_notification_session_for_repository(self, composition_receiver=None, repository_id=None):
    """Gets the composition notification session for the given repository.

    arg:    composition_receiver
        (osid.repository.CompositionReceiver): the notification
        callback
    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.CompositionNotificationSession) - a
        ``CompositionNotificationSession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``composition_receiver`` or
        ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_composition_notification()``
        or ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_notification()`` and
        ``supports_visible_federation()`` are ``true``.*

    """
    # Both arguments are required; raise an instance (was a bare class),
    # consistent with the supports_* methods elsewhere in this module.
    if composition_receiver is None or repository_id is None:
        raise NullArgument()
    raise Unimplemented()
def get_composition_repository_session(self):
    """Return the session for retrieving composition to repository mappings.

    return: (osid.repository.CompositionRepositorySession) - a
        ``CompositionRepositorySession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_composition_repository()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_repository()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
composition_repository_session = property(fget=get_composition_repository_session)
def get_composition_repository_assignment_session(self):
    """Return the session for assigning composition to repository mappings.

    return: (osid.repository.CompositionRepositoryAssignmentSession)
        - a ``CompositionRepositoryAssignmentSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
        ``supports_composition_repository_assignment()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_repository_assignment()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
composition_repository_assignment_session = property(fget=get_composition_repository_assignment_session)
def get_composition_smart_repository_session(self, repository_id=None):
    """Gets a composition smart repository session for the given repository.

    arg:    repository_id (osid.id.Id): the ``Id`` of the repository
    return: (osid.repository.CompositionSmartRepositorySession) - a
        ``CompositionSmartRepositorySession``
    raise:  NotFound - ``repository_id`` not found
    raise:  NullArgument - ``repository_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented -
        ``supports_composition_smart_repository()`` is ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_composition_smart_repository()`` is ``true``.*

    """
    if repository_id is None:
        # Raise an instance (was a bare class), consistent with the
        # supports_* methods elsewhere in this module.
        raise NullArgument()
    raise Unimplemented()
def get_repository_lookup_session(self):
    """Return the repository lookup session.

    return: (osid.repository.RepositoryLookupSession) - a
        ``RepositoryLookupSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_repository_lookup()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_repository_lookup()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
repository_lookup_session = property(fget=get_repository_lookup_session)
def get_repository_query_session(self):
    """Return the repository query session.

    return: (osid.repository.RepositoryQuerySession) - a
        ``RepositoryQuerySession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_repository_query()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_repository_query()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
repository_query_session = property(fget=get_repository_query_session)
def get_repository_search_session(self):
    """Return the repository search session.

    return: (osid.repository.RepositorySearchSession) - a
        ``RepositorySearchSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_repository_search()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_repository_search()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
repository_search_session = property(fget=get_repository_search_session)
def get_repository_admin_session(self):
    """Return the repository administrative session for creating, updating and deleting repositories.

    return: (osid.repository.RepositoryAdminSession) - a
        ``RepositoryAdminSession``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_repository_admin()`` is
        ``false``
    *compliance: optional -- This method must be implemented if
        ``supports_repository_admin()`` is ``true``.*

    """
    # This manager stub does not provide the session.
    raise Unimplemented()
repository_admin_session = property(fget=get_repository_admin_session)
def get_repository_notification_session(self, repository_receiver=None):
"""Gets the notification session for subscribing to changes to a repository.
arg: repository_receiver
(osid.repository.RepositoryReceiver): the notification
callback
return: (osid.repository.RepositoryNotificationSession) - a
``RepositoryNotificationSession``
raise: NullArgument - ``repository_receiver`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_notification()``
is ``false``
*compliance: optional -- This method must be implemented if
``supports_repository_notification()`` is ``true``.*
"""
raise Unimplemented()
def get_repository_hierarchy_session(self):
"""Gets the repository hierarchy traversal session.
return: (osid.repository.RepositoryHierarchySession) - ``a
RepositoryHierarchySession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_hierarchy()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_hierarchy()`` is ``true``.*
"""
raise Unimplemented()
repository_hierarchy_session = property(fget=get_repository_hierarchy_session)
def get_repository_hierarchy_design_session(self):
"""Gets the repository hierarchy design session.
return: (osid.repository.RepositoryHierarchyDesignSession) - a
``RepostoryHierarchyDesignSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented -
``supports_repository_hierarchy_design()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_repository_hierarchy_design()`` is ``true``.*
"""
raise Unimplemented()
repository_hierarchy_design_session = property(fget=get_repository_hierarchy_design_session)
def get_repository_batch_manager(self):
"""Gets a ``RepositoryBatchManager``.
return: (osid.repository.batch.RepositoryBatchManager) - a
``RepostoryBatchManager``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_batch()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_batch()`` is ``true``.*
"""
raise Unimplemented()
repository_batch_manager = property(fget=get_repository_batch_manager)
def get_repository_rules_manager(self):
"""Gets a ``RepositoryRulesManager``.
return: (osid.repository.rules.RepositoryRulesManager) - a
``RepostoryRulesManager``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_rules()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_rules()`` is ``true``.*
"""
raise Unimplemented()
repository_rules_manager = property(fget=get_repository_rules_manager)
class RepositoryProxyManager(abc_repository_managers.RepositoryProxyManager, osid_managers.OsidProxyManager, RepositoryProfile):
"""The repository manager provides access to asset lookup and creation session and provides interoperability tests for various aspects of this service.
Methods in this manager support the passing of a ``Proxy`` for the
purposes of passing information from a server environment. The
sessions included in this manager are:
* ``AssetLookupSession:`` a session to retrieve assets
* ``AssetQuerySession:`` a session to query assets
* ``AssetSearchSession:`` a session to search for assets
* ``AssetAdminSession:`` a session to create and delete assets
* ``AssetNotificationSession:`` a session to receive notifications
pertaining to asset changes
* ``AssetRepositorySession:`` a session to look up asset to
repository mappings
* ``AssetRepositoryAssignmentSession:`` a session to manage asset
to repository mappings
* ``AssetSmartRepositorySession:`` a session to manage dynamic
repositories of assets
* ``AssetTemporalSession:`` a session to access the temporal
coverage of an asset
* ``AssetTemporalAssignmentSession:`` a session to manage the
temporal coverage of an asset
* ``AssetSpatialSession:`` a session to access the spatial
coverage of an asset
* ``AssetSpatialAssignmentSession:`` a session to manage the
spatial coverage of an asset
* ``AssetCompositionSession:`` a session to look up asset
composition mappings
* ``AssetCompositionDesignSession:`` a session to map assets to
compositions
* ``CompositionLookupSession: a`` session to retrieve compositions
* ``CompositionQuerySession:`` a session to query compositions
* ``CompositionSearchSession:`` a session to search for
compositions
* ``CompositionAdminSession:`` a session to create, update and
delete compositions
* ``CompositionNotificationSession:`` a session to receive
notifications pertaining to changes in compositions
* ``CompositionRepositorySession:`` a session to retrieve
composition repository mappings
* ``CompositionRepositoryAssignmentSession:`` a session to manage
composition repository mappings
* ``CompositionSmartRepositorySession:`` a session to manage
dynamic repositories of compositions
* ``RepositoryLookupSession: a`` session to retrieve repositories
* ``RepositoryQuerySession:`` a session to query repositories
* ``RepositorySearchSession:`` a session to search for
repositories
* ``RepositoryAdminSession:`` a session to create, update and
delete repositories
* ``RepositoryNotificationSession:`` a session to receive
notifications pertaining to changes in repositories
* ``RepositoryHierarchySession:`` a session to traverse repository
hierarchies
* ``RepositoryHierarchyDesignSession:`` a session to manage
repository hierarchies
"""
def get_asset_lookup_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the asset lookup service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetLookupSession) - an
``AssetLookupSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_lookup()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_lookup_session_for_repository(self, repository_id=None, proxy=None):
"""Gets the ``OsidSession`` associated with the asset lookup service for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetLookupSession) - an
``AssetLookupSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_asset_lookup()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_lookup()`` and
``supports_visible_federation()`` are ``true``.*
"""
if repository_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_asset_query_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the asset query service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetQuerySession) - an
``AssetQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_query()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_query_session_for_repository(self, repository_id=None, proxy=None):
"""Gets the ``OsidSession`` associated with the asset query service for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetQuerySession) - an
``AssetQuerySession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_asset_query()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_query()`` and ``supports_visible_federation()``
are ``true``.*
"""
if repository_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_asset_search_session(self, proxy=None):
"""Gets an asset search session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetSearchSession) - an
``AssetSearchSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_search()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_search_session_for_repository(self, repository_id=None, proxy=None):
"""Gets an asset search session for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetSearchSession) - an
``AssetSearchSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_asset_search()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_search()`` and
``supports_visible_federation()`` are ``true``.*
"""
if repository_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_asset_admin_session(self, proxy=None):
"""Gets an asset administration session for creating, updating and deleting assets.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetAdminSession) - an
``AssetAdminSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_admin()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_admin_session_for_repository(self, repository_id=None, proxy=None):
"""Gets an asset administration session for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetAdminSession) - an
``AssetAdminSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_admin()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_admin()`` and ``supports_visible_federation()``
are ``true``.*
"""
if repository_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_asset_notification_session(self, asset_receiver=None, proxy=None):
"""Gets the notification session for notifications pertaining to asset changes.
arg: asset_receiver (osid.repository.AssetReceiver): the
notification callback
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetNotificationSession) - an
``AssetNotificationSession``
raise: NullArgument - ``asset_receiver`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_notification()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_asset_notification()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_notification_session_for_repository(self, asset_receiver=None, repository_id=None, proxy=None):
"""Gets the asset notification session for the given repository.
arg: asset_receiver (osid.repository.AssetReceiver): the
notification callback
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetNotificationSession) - an
``AssetNotificationSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``asset_receiver, repository_id`` or
``proxy`` is ``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_asset_notification()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_notfication()`` and
``supports_visible_federation()`` are ``true``.*
"""
if asset_receiver is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_asset_repository_session(self, proxy=None):
"""Gets the session for retrieving asset to repository mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetRepositorySession) - an
``AssetRepositorySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_repository()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_asset_repository()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_repository_assignment_session(self, proxy=None):
"""Gets the session for assigning asset to repository mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetRepositoryAssignmentSession) - an
``AssetRepositoryAsignmentSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented -
``supports_asset_repository_assignment()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_repository_assignment()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_smart_repository_session(self, repository_id=None, proxy=None):
"""Gets an asset smart repository session for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetSmartRepositorySession) - an
``AssetSmartRepositorySession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_asset_smart_repository()``
``false``
*compliance: optional -- This method must be implemented if
``supports_asset_smart_repository()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_temporal_session(self, proxy=None):
"""Gets the session for retrieving temporal coverage of an asset.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetTemporalSession) - an
``AssetTemporalSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_temporal()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_asset_temporal()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_temporal_session_for_repository(self, repository_id=None, proxy=None):
"""Gets the session for retrieving temporal coverage of an asset for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetTemporalSession) - an
``AssetTemporalSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_temporal()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_temporal()`` and
``supports_visible_federation()`` are ``true``.*
"""
if repository_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_asset_temporal_assignment_session(self, proxy=None):
"""Gets the session for assigning temporal coverage to an asset.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetTemporalAssignmentSession) - an
``AssetTemporalAssignmentSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_temporal_assignment()``
is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_temporal_assignment()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_temporal_assignment_session_for_repository(self, repository_id=None, proxy=None):
"""Gets the session for assigning temporal coverage of an asset for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetTemporalAssignmentSession) - an
``AssetTemporalAssignmentSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_temporal_assignment()``
or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_temporal_assignment()`` and
``supports_visible_federation()`` are ``true``.*
"""
if repository_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_asset_spatial_session(self, proxy=None):
"""Gets the session for retrieving spatial coverage of an asset.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetSpatialSession) - an
``AssetSpatialSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_spatial_assets()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_spatial_assets()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_spatial_session_for_repository(self, repository_id=None, proxy=None):
"""Gets the session for retrieving spatial coverage of an asset for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetSpatialSession) - an
``AssetSpatialSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_spatial()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_spatial()`` and
``supports_visible_federation()`` are ``true``.*
"""
if repository_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_asset_spatial_assignment_session(self, proxy=None):
"""Gets the session for assigning spatial coverage to an asset.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetSpatialAssignmentSession) - an
``AssetSpatialAssignmentSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_spatial_assignment()``
is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_spatial_assignment()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_spatial_assignment_session_for_repository(self, repository_id=None, proxy=None):
"""Gets the session for assigning spatial coverage of an asset for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetSpatialAssignmentSession) - an
``AssetSpatialAssignmentSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_spatial_assignment()``
or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_spatial_assignment()`` and
``supports_visible_federation()`` are ``true``.*
"""
if repository_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_asset_composition_session(self, proxy=None):
"""Gets the session for retrieving asset compositions.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetCompositionSession) - an
``AssetCompositionSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_composition()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_asset_composition()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_asset_composition_design_session(self, proxy=None):
"""Gets the session for creating asset compositions.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetCompositionDesignSession) - an
``AssetCompositionDesignSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_asset_composition_design()``
is ``false``
*compliance: optional -- This method must be implemented if
``supports_asset_composition_design()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_composition_lookup_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the composition lookup service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionLookupSession) - the new
``CompositionLookupSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_lookup()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_composition_lookup()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_composition_lookup_session_for_repository(self, repository_id=None, proxy=None):
"""Gets the ``OsidSession`` associated with the composition lookup service for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionLookupSession) - the new
``CompositionLookupSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_composition_lookup()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_composition_lookup()`` and
``supports_visible_federation()`` are ``true``.*
"""
if repository_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_composition_query_session(self, proxy=None):
"""Gets a composition query session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionSearchSession) - a
``CompositionQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_query()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_composition_query()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_composition_query_session_for_repository(self, repository_id=None, proxy=None):
"""Gets a composition query session for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionQuerySession) - a
``CompositionQuerySession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_composition_query()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_composition_query()`` and
``supports_visible_federation()`` are ``true``.*
"""
if repository_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_composition_search_session(self, proxy=None):
"""Gets a composition search session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionSearchSession) - a
``CompositionSearchSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_search()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_composition_search()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_composition_search_session_for_repository(self, repository_id=None, proxy=None):
"""Gets a composition search session for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionSearchSession) - a
``CompositionSearchSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_composition_search()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_composition_search()`` and
``supports_visible_federation()`` are ``true``.*
"""
if repository_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_composition_admin_session(self, proxy=None):
"""Gets a composition administration session for creating, updating and deleting compositions.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionAdminSession) - a
``CompositionAdminSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_admin()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_composition_admin()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_composition_admin_session_for_repository(self, repository_id=None, proxy=None):
"""Gets a composiiton administrative session for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionAdminSession) - a
``CompositionAdminSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_admin()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_composition_admin()`` and
``supports_visible_federation()`` are ``true``.*
"""
if repository_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_composition_notification_session(self, composition_receiver=None, proxy=None):
"""Gets the notification session for notifications pertaining to composition changes.
arg: composition_receiver
(osid.repository.CompositionReceiver): the notification
callback
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionNotificationSession) - a
``CompositionNotificationSession``
raise: NullArgument - ``composition_receiver`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_notification()``
is ``false``
*compliance: optional -- This method must be implemented if
``supports_composition_notification()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_composition_notification_session_for_repository(self, composition_receiver=None, repository_id=None, proxy=None):
"""Gets the composition notification session for the given repository.
arg: composition_receiver
(osid.repository.CompositionReceiver): the notification
callback
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionNotificationSession) - a
``CompositionNotificationSession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``composition_receiver, repository_id``
or ``proxy`` is ``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented - ``supports_composition_notification()``
or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_composition_notfication()`` and
``supports_visible_federation()`` are ``true``.*
"""
if composition_receiver is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_composition_repository_session(self, proxy=None):
"""Gets the session for retrieving composition to repository mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionRepositorySession) - a
``CompositionRepositorySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_composition_repository()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_composition_repository()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_composition_repository_assignment_session(self, proxy=None):
"""Gets the session for assigning composition to repository mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionRepositoryAssignmentSession)
- a ``CompositionRepositoryAssignmentSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented -
``supports_composition_repository_assignment()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_composition_repository_assignment()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_composition_smart_repository_session(self, repository_id=None, proxy=None):
"""Gets a composition smart repository session for the given repository.
arg: repository_id (osid.id.Id): the ``Id`` of the repository
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionSmartRepositorySession) - a
``CompositionSmartRepositorySession``
raise: NotFound - ``repository_id`` not found
raise: NullArgument - ``repository_id`` or ``proxy`` is
``null``
raise: OperationFailed - ``unable to complete request``
raise: Unimplemented -
``supports_composition_smart_repository()`` ``false``
*compliance: optional -- This method must be implemented if
``supports_composition_smart_repository()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_repository_lookup_session(self, proxy=None):
"""Gets the repository lookup session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryLookupSession) - a
``RepositoryLookupSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_lookup()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_lookup()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_repository_query_session(self, proxy=None):
"""Gets the repository query session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryQuerySession) - a
``RepositoryQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_query()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_query()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_repository_search_session(self, proxy=None):
"""Gets the repository search session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositorySearchSession) - a
``RepositorySearchSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_search()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_search()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_repository_admin_session(self, proxy=None):
"""Gets the repository administrative session for creating, updating and deleteing repositories.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryAdminSession) - a
``RepositoryAdminSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_admin()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_admin()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_repository_notification_session(self, repository_receiver=None, proxy=None):
"""Gets the notification session for subscribing to changes to a repository.
arg: repository_receiver
(osid.repository.RepositoryReceiver): the notification
callback
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryNotificationSession) - a
``RepositoryNotificationSession``
raise: NullArgument - ``repository_receiver`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_notification()``
is ``false``
*compliance: optional -- This method must be implemented if
``supports_repository_notification()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_repository_hierarchy_session(self, proxy=None):
"""Gets the repository hierarchy traversal session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryHierarchySession) - ``a
RepositoryHierarchySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_hierarchy()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_hierarchy()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_repository_hierarchy_design_session(self, proxy=None):
"""Gets the repository hierarchy design session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.RepositoryHierarchyDesignSession) - a
``RepostoryHierarchyDesignSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented -
``supports_repository_hierarchy_design()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_repository_hierarchy_design()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_repository_batch_proxy_manager(self):
"""Gets a ``RepositoryBatchProxyManager``.
return: (osid.repository.batch.RepositoryBatchProxyManager) - a
``RepostoryBatchProxyManager``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_batch()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_batch()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
repository_batch_proxy_manager = property(fget=get_repository_batch_proxy_manager)
def get_repository_rules_proxy_manager(self):
"""Gets a ``RepositoryRulesProxyManager``.
return: (osid.repository.rules.RepositoryRulesProxyManager) - a
``RepostoryRulesProxyManager``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_repository_rules()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_repository_rules()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
repository_rules_proxy_manager = property(fget=get_repository_rules_proxy_manager)
| 41.442541
| 155
| 0.640816
| 9,796
| 99,172
| 6.33432
| 0.023785
| 0.052215
| 0.031587
| 0.036099
| 0.956697
| 0.921967
| 0.889897
| 0.823387
| 0.803984
| 0.783259
| 0
| 0
| 0.265055
| 99,172
| 2,392
| 156
| 41.459866
| 0.851343
| 0.666065
| 0
| 0.577825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.298507
| false
| 0
| 0.010661
| 0
| 0.501066
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
70c3a4e66a458130d30abfeb7d2aff4b3c4d00a5
| 18,264
|
py
|
Python
|
varsom_avalanche_client/api/avalanche_warning_by_coordinates_api.py
|
NVE/python-varsom-avalanche-client
|
c7787bf070d8ea91efd3a2a9e7782eedd4961528
|
[
"MIT"
] | null | null | null |
varsom_avalanche_client/api/avalanche_warning_by_coordinates_api.py
|
NVE/python-varsom-avalanche-client
|
c7787bf070d8ea91efd3a2a9e7782eedd4961528
|
[
"MIT"
] | null | null | null |
varsom_avalanche_client/api/avalanche_warning_by_coordinates_api.py
|
NVE/python-varsom-avalanche-client
|
c7787bf070d8ea91efd3a2a9e7782eedd4961528
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Snøskredvarsel API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v5.0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from varsom_avalanche_client.api_client import ApiClient
class AvalancheWarningByCoordinatesApi(object):
    """Client for the AvalancheWarningByCoordinates endpoints.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    # Media types every endpoint in this group can produce.
    _ACCEPT_TYPES = ['application/json', 'text/json', 'application/xml', 'text/xml']
    # Framework keyword arguments accepted by every endpoint wrapper.
    _FRAMEWORK_PARAMS = ('async_req', '_return_http_data_only',
                         '_preload_content', '_request_timeout')
    # Required path parameters shared by all three endpoints, in path order.
    _PATH_PARAM_NAMES = ('x', 'y', 'langkey', 'startdate', 'enddate')

    def __init__(self, api_client=None):
        # Fall back to a default client so the API is usable out of the box.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def avalanche_warning_by_coordinates_detail(self, x, y, langkey, startdate, enddate, **kwargs):  # noqa: E501
        """avalanche_warning_by_coordinates_detail  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.avalanche_warning_by_coordinates_detail(x, y, langkey, startdate, enddate, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param float x: (required)
        :param float y: (required)
        :param int langkey: (required)
        :param datetime startdate: (required)
        :param datetime enddate: (required)
        :return: list[AvalancheWarningDetail]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.avalanche_warning_by_coordinates_detail_with_http_info(x, y, langkey, startdate, enddate, **kwargs)  # noqa: E501
        (data) = self.avalanche_warning_by_coordinates_detail_with_http_info(x, y, langkey, startdate, enddate, **kwargs)  # noqa: E501
        return data

    def avalanche_warning_by_coordinates_detail_with_http_info(self, x, y, langkey, startdate, enddate, **kwargs):  # noqa: E501
        """avalanche_warning_by_coordinates_detail  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.avalanche_warning_by_coordinates_detail_with_http_info(x, y, langkey, startdate, enddate, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param float x: (required)
        :param float y: (required)
        :param int langkey: (required)
        :param datetime startdate: (required)
        :param datetime enddate: (required)
        :return: list[AvalancheWarningDetail]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._call_coordinates_endpoint(
            'avalanche_warning_by_coordinates_detail',
            '/api/AvalancheWarningByCoordinates/Detail/{x}/{y}/{langkey}/{startdate}/{enddate}',
            'list[AvalancheWarningDetail]',
            x, y, langkey, startdate, enddate, kwargs)

    def avalanche_warning_by_coordinates_obs(self, x, y, langkey, startdate, enddate, **kwargs):  # noqa: E501
        """avalanche_warning_by_coordinates_obs  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.avalanche_warning_by_coordinates_obs(x, y, langkey, startdate, enddate, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param float x: (required)
        :param float y: (required)
        :param int langkey: (required)
        :param datetime startdate: (required)
        :param datetime enddate: (required)
        :return: list[ObsWarning]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.avalanche_warning_by_coordinates_obs_with_http_info(x, y, langkey, startdate, enddate, **kwargs)  # noqa: E501
        (data) = self.avalanche_warning_by_coordinates_obs_with_http_info(x, y, langkey, startdate, enddate, **kwargs)  # noqa: E501
        return data

    def avalanche_warning_by_coordinates_obs_with_http_info(self, x, y, langkey, startdate, enddate, **kwargs):  # noqa: E501
        """avalanche_warning_by_coordinates_obs  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.avalanche_warning_by_coordinates_obs_with_http_info(x, y, langkey, startdate, enddate, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param float x: (required)
        :param float y: (required)
        :param int langkey: (required)
        :param datetime startdate: (required)
        :param datetime enddate: (required)
        :return: list[ObsWarning]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._call_coordinates_endpoint(
            'avalanche_warning_by_coordinates_obs',
            '/api/AvalancheWarningByCoordinates/Obs/{x}/{y}/{langkey}/{startdate}/{enddate}',
            'list[ObsWarning]',
            x, y, langkey, startdate, enddate, kwargs)

    def avalanche_warning_by_coordinates_simple(self, x, y, langkey, startdate, enddate, **kwargs):  # noqa: E501
        """avalanche_warning_by_coordinates_simple  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.avalanche_warning_by_coordinates_simple(x, y, langkey, startdate, enddate, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param float x: (required)
        :param float y: (required)
        :param int langkey: (required)
        :param datetime startdate: (required)
        :param datetime enddate: (required)
        :return: list[AvalancheWarningSimple]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.avalanche_warning_by_coordinates_simple_with_http_info(x, y, langkey, startdate, enddate, **kwargs)  # noqa: E501
        (data) = self.avalanche_warning_by_coordinates_simple_with_http_info(x, y, langkey, startdate, enddate, **kwargs)  # noqa: E501
        return data

    def avalanche_warning_by_coordinates_simple_with_http_info(self, x, y, langkey, startdate, enddate, **kwargs):  # noqa: E501
        """avalanche_warning_by_coordinates_simple  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.avalanche_warning_by_coordinates_simple_with_http_info(x, y, langkey, startdate, enddate, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param float x: (required)
        :param float y: (required)
        :param int langkey: (required)
        :param datetime startdate: (required)
        :param datetime enddate: (required)
        :return: list[AvalancheWarningSimple]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._call_coordinates_endpoint(
            'avalanche_warning_by_coordinates_simple',
            '/api/AvalancheWarningByCoordinates/Simple/{x}/{y}/{langkey}/{startdate}/{enddate}',
            'list[AvalancheWarningSimple]',
            x, y, langkey, startdate, enddate, kwargs)

    def _call_coordinates_endpoint(self, method_name, resource_path, response_type,
                                   x, y, langkey, startdate, enddate, kwargs):
        """Validate arguments and issue the GET request shared by all endpoints.

        The three generated ``*_with_http_info`` methods differed only in the
        resource path and response type; this helper holds the common logic.

        :param str method_name: public method name, used in error messages
        :param str resource_path: templated endpoint path
        :param str response_type: swagger response type string
        :param dict kwargs: framework options (``async_req``,
            ``_return_http_data_only``, ``_preload_content``,
            ``_request_timeout``)
        :return: whatever ``api_client.call_api`` returns
        :raises TypeError: on an unexpected keyword argument
        :raises ValueError: when a required path parameter is ``None``
        """
        for key in kwargs:
            if key not in self._FRAMEWORK_PARAMS:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )

        path_params = {'x': x, 'y': y, 'langkey': langkey,
                       'startdate': startdate, 'enddate': enddate}
        # Every path parameter is required; reject missing values up front.
        for name in self._PATH_PARAM_NAMES:
            if path_params[name] is None:
                raise ValueError("Missing the required parameter `%s` when calling `%s`" % (name, method_name))  # noqa: E501

        # HTTP header `Accept`
        header_params = {
            'Accept': self.api_client.select_header_accept(self._ACCEPT_TYPES),
        }

        return self.api_client.call_api(
            resource_path, 'GET',
            path_params,
            [],                 # query_params
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type=response_type,  # noqa: E501
            auth_settings=[],   # these endpoints require no authentication
            async_req=kwargs.get('async_req'),
            _return_http_data_only=kwargs.get('_return_http_data_only'),
            _preload_content=kwargs.get('_preload_content', True),
            _request_timeout=kwargs.get('_request_timeout'),
            collection_formats={})
| 44.655257
| 143
| 0.625876
| 2,085
| 18,264
| 5.265707
| 0.078657
| 0.044448
| 0.068859
| 0.110939
| 0.952273
| 0.950906
| 0.945259
| 0.934876
| 0.933509
| 0.932143
| 0
| 0.014556
| 0.277814
| 18,264
| 408
| 144
| 44.764706
| 0.817817
| 0.305081
| 0
| 0.786667
| 1
| 0
| 0.260012
| 0.101846
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031111
| false
| 0
| 0.017778
| 0
| 0.093333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.