hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fea4bdd656610a513a9bc616fa608e87bd242fcb
| 23,000
|
py
|
Python
|
sdk/python/pulumi_azure/kusto/database.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/kusto/database.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/kusto/database.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['DatabaseArgs', 'Database']
@pulumi.input_type
class DatabaseArgs:
def __init__(__self__, *,
cluster_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
hot_cache_period: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
soft_delete_period: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Database resource.
:param pulumi.Input[str] cluster_name: Specifies the name of the Kusto Cluster this database will be added to. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: Specifies the Resource Group where the Kusto Database should exist. Changing this forces a new resource to be created.
:param pulumi.Input[str] hot_cache_period: The time the data that should be kept in cache for fast queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
:param pulumi.Input[str] location: The location where the Kusto Database should be created. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: The name of the Kusto Database to create. Changing this forces a new resource to be created.
:param pulumi.Input[str] soft_delete_period: The time the data should be kept before it stops being accessible to queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
"""
pulumi.set(__self__, "cluster_name", cluster_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if hot_cache_period is not None:
pulumi.set(__self__, "hot_cache_period", hot_cache_period)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if soft_delete_period is not None:
pulumi.set(__self__, "soft_delete_period", soft_delete_period)
@property
@pulumi.getter(name="clusterName")
def cluster_name(self) -> pulumi.Input[str]:
"""
Specifies the name of the Kusto Cluster this database will be added to. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "cluster_name")
@cluster_name.setter
def cluster_name(self, value: pulumi.Input[str]):
pulumi.set(self, "cluster_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
Specifies the Resource Group where the Kusto Database should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="hotCachePeriod")
def hot_cache_period(self) -> Optional[pulumi.Input[str]]:
"""
The time the data that should be kept in cache for fast queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
"""
return pulumi.get(self, "hot_cache_period")
@hot_cache_period.setter
def hot_cache_period(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "hot_cache_period", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The location where the Kusto Database should be created. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Kusto Database to create. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="softDeletePeriod")
def soft_delete_period(self) -> Optional[pulumi.Input[str]]:
"""
The time the data should be kept before it stops being accessible to queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
"""
return pulumi.get(self, "soft_delete_period")
@soft_delete_period.setter
def soft_delete_period(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "soft_delete_period", value)
@pulumi.input_type
class _DatabaseState:
def __init__(__self__, *,
cluster_name: Optional[pulumi.Input[str]] = None,
hot_cache_period: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
size: Optional[pulumi.Input[float]] = None,
soft_delete_period: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Database resources.
:param pulumi.Input[str] cluster_name: Specifies the name of the Kusto Cluster this database will be added to. Changing this forces a new resource to be created.
:param pulumi.Input[str] hot_cache_period: The time the data that should be kept in cache for fast queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
:param pulumi.Input[str] location: The location where the Kusto Database should be created. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: The name of the Kusto Database to create. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: Specifies the Resource Group where the Kusto Database should exist. Changing this forces a new resource to be created.
:param pulumi.Input[float] size: The size of the database in bytes.
:param pulumi.Input[str] soft_delete_period: The time the data should be kept before it stops being accessible to queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
"""
if cluster_name is not None:
pulumi.set(__self__, "cluster_name", cluster_name)
if hot_cache_period is not None:
pulumi.set(__self__, "hot_cache_period", hot_cache_period)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if size is not None:
pulumi.set(__self__, "size", size)
if soft_delete_period is not None:
pulumi.set(__self__, "soft_delete_period", soft_delete_period)
@property
@pulumi.getter(name="clusterName")
def cluster_name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the name of the Kusto Cluster this database will be added to. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "cluster_name")
@cluster_name.setter
def cluster_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_name", value)
@property
@pulumi.getter(name="hotCachePeriod")
def hot_cache_period(self) -> Optional[pulumi.Input[str]]:
"""
The time the data that should be kept in cache for fast queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
"""
return pulumi.get(self, "hot_cache_period")
@hot_cache_period.setter
def hot_cache_period(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "hot_cache_period", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The location where the Kusto Database should be created. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Kusto Database to create. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
Specifies the Resource Group where the Kusto Database should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def size(self) -> Optional[pulumi.Input[float]]:
"""
The size of the database in bytes.
"""
return pulumi.get(self, "size")
@size.setter
def size(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "size", value)
@property
@pulumi.getter(name="softDeletePeriod")
def soft_delete_period(self) -> Optional[pulumi.Input[str]]:
"""
The time the data should be kept before it stops being accessible to queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
"""
return pulumi.get(self, "soft_delete_period")
@soft_delete_period.setter
def soft_delete_period(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "soft_delete_period", value)
class Database(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
cluster_name: Optional[pulumi.Input[str]] = None,
hot_cache_period: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
soft_delete_period: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages a Kusto (also known as Azure Data Explorer) Database
## Example Usage
```python
import pulumi
import pulumi_azure as azure
rg = azure.core.ResourceGroup("rg", location="West Europe")
cluster = azure.kusto.Cluster("cluster",
location=rg.location,
resource_group_name=rg.name,
sku=azure.kusto.ClusterSkuArgs(
name="Standard_D13_v2",
capacity=2,
))
database = azure.kusto.Database("database",
resource_group_name=rg.name,
location=rg.location,
cluster_name=cluster.name,
hot_cache_period="P7D",
soft_delete_period="P31D")
```
## Import
Kusto Clusters can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:kusto/database:Database example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] cluster_name: Specifies the name of the Kusto Cluster this database will be added to. Changing this forces a new resource to be created.
:param pulumi.Input[str] hot_cache_period: The time the data that should be kept in cache for fast queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
:param pulumi.Input[str] location: The location where the Kusto Database should be created. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: The name of the Kusto Database to create. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: Specifies the Resource Group where the Kusto Database should exist. Changing this forces a new resource to be created.
:param pulumi.Input[str] soft_delete_period: The time the data should be kept before it stops being accessible to queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DatabaseArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a Kusto (also known as Azure Data Explorer) Database
## Example Usage
```python
import pulumi
import pulumi_azure as azure
rg = azure.core.ResourceGroup("rg", location="West Europe")
cluster = azure.kusto.Cluster("cluster",
location=rg.location,
resource_group_name=rg.name,
sku=azure.kusto.ClusterSkuArgs(
name="Standard_D13_v2",
capacity=2,
))
database = azure.kusto.Database("database",
resource_group_name=rg.name,
location=rg.location,
cluster_name=cluster.name,
hot_cache_period="P7D",
soft_delete_period="P31D")
```
## Import
Kusto Clusters can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:kusto/database:Database example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1/Databases/database1
```
:param str resource_name: The name of the resource.
:param DatabaseArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DatabaseArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
cluster_name: Optional[pulumi.Input[str]] = None,
hot_cache_period: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
soft_delete_period: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DatabaseArgs.__new__(DatabaseArgs)
if cluster_name is None and not opts.urn:
raise TypeError("Missing required property 'cluster_name'")
__props__.__dict__["cluster_name"] = cluster_name
__props__.__dict__["hot_cache_period"] = hot_cache_period
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["soft_delete_period"] = soft_delete_period
__props__.__dict__["size"] = None
super(Database, __self__).__init__(
'azure:kusto/database:Database',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
cluster_name: Optional[pulumi.Input[str]] = None,
hot_cache_period: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
size: Optional[pulumi.Input[float]] = None,
soft_delete_period: Optional[pulumi.Input[str]] = None) -> 'Database':
"""
Get an existing Database resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] cluster_name: Specifies the name of the Kusto Cluster this database will be added to. Changing this forces a new resource to be created.
:param pulumi.Input[str] hot_cache_period: The time the data that should be kept in cache for fast queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
:param pulumi.Input[str] location: The location where the Kusto Database should be created. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: The name of the Kusto Database to create. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: Specifies the Resource Group where the Kusto Database should exist. Changing this forces a new resource to be created.
:param pulumi.Input[float] size: The size of the database in bytes.
:param pulumi.Input[str] soft_delete_period: The time the data should be kept before it stops being accessible to queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DatabaseState.__new__(_DatabaseState)
__props__.__dict__["cluster_name"] = cluster_name
__props__.__dict__["hot_cache_period"] = hot_cache_period
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["size"] = size
__props__.__dict__["soft_delete_period"] = soft_delete_period
return Database(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="clusterName")
def cluster_name(self) -> pulumi.Output[str]:
"""
Specifies the name of the Kusto Cluster this database will be added to. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "cluster_name")
@property
@pulumi.getter(name="hotCachePeriod")
def hot_cache_period(self) -> pulumi.Output[Optional[str]]:
"""
The time the data that should be kept in cache for fast queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
"""
return pulumi.get(self, "hot_cache_period")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
The location where the Kusto Database should be created. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the Kusto Database to create. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
Specifies the Resource Group where the Kusto Database should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter
def size(self) -> pulumi.Output[float]:
"""
The size of the database in bytes.
"""
return pulumi.get(self, "size")
@property
@pulumi.getter(name="softDeletePeriod")
def soft_delete_period(self) -> pulumi.Output[Optional[str]]:
"""
The time the data should be kept before it stops being accessible to queries as ISO 8601 timespan. Default is unlimited. For more information see: [ISO 8601 Timespan](https://en.wikipedia.org/wiki/ISO_8601#Durations)
"""
return pulumi.get(self, "soft_delete_period")
| 48.625793
| 269
| 0.66587
| 2,925
| 23,000
| 5.042051
| 0.073162
| 0.065636
| 0.075943
| 0.071603
| 0.892867
| 0.874491
| 0.862625
| 0.848318
| 0.838826
| 0.838826
| 0
| 0.01449
| 0.24087
| 23,000
| 472
| 270
| 48.728814
| 0.830183
| 0.430304
| 0
| 0.716
| 1
| 0
| 0.100506
| 0.004216
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16
| false
| 0.004
| 0.02
| 0
| 0.276
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
22861b07e3cef96cb439ea58bef2e2188c540d64
| 7,748
|
py
|
Python
|
python/paddle/fluid/tests/unittests/test_complex_matmul.py
|
jiushinakecai/Paddle
|
40f9dbd26e6cda139795307c35e194270358d063
|
[
"Apache-2.0"
] | 1
|
2020-12-03T05:11:47.000Z
|
2020-12-03T05:11:47.000Z
|
python/paddle/fluid/tests/unittests/test_complex_matmul.py
|
jiushinakecai/Paddle
|
40f9dbd26e6cda139795307c35e194270358d063
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/fluid/tests/unittests/test_complex_matmul.py
|
jiushinakecai/Paddle
|
40f9dbd26e6cda139795307c35e194270358d063
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import paddle
import numpy as np
import paddle.fluid as fluid
import paddle.fluid.dygraph as dg
class TestComplexMatMulLayer(unittest.TestCase):
def setUp(self):
self._places = [fluid.CPUPlace()]
if fluid.core.is_compiled_with_cuda():
self._places.append(fluid.CUDAPlace(0))
def compare_by_complex_api(self, x, y):
np_result = np.matmul(x, y)
for place in self._places:
with dg.guard(place):
x_var = dg.to_variable(x)
y_var = dg.to_variable(y)
result = paddle.complex.matmul(x_var, y_var)
self.assertTrue(np.allclose(result.numpy(), np_result))
def compare_by_basic_api(self, x, y):
np_result = np.matmul(x, y)
for place in self._places:
with dg.guard(place):
x_var = fluid.core.VarBase(
value=x,
place=place,
persistable=False,
zero_copy=None,
name='')
y_var = fluid.core.VarBase(
value=y,
place=place,
persistable=False,
zero_copy=None,
name='')
result = paddle.matmul(x_var, y_var)
self.assertTrue(np.allclose(result.numpy(), np_result))
def compare_op_by_complex_api(self, x, y):
np_result = np.matmul(x, y)
for place in self._places:
with dg.guard(place):
x_var = dg.to_variable(x)
y_var = dg.to_variable(y)
result = x_var.matmul(y_var)
self.assertTrue(np.allclose(result.numpy(), np_result))
def compare_op_by_basic_api(self, x, y):
np_result = np.matmul(x, y)
for place in self._places:
with dg.guard(place):
x_var = fluid.core.VarBase(
value=x,
place=place,
persistable=False,
zero_copy=None,
name='')
y_var = fluid.core.VarBase(
value=y,
place=place,
persistable=False,
zero_copy=None,
name='')
result = x_var.matmul(y_var)
self.assertTrue(np.allclose(result.numpy(), np_result))
def test_complex_xy(self):
x = np.random.random(
(2, 3, 4, 5)).astype("float32") + 1J * np.random.random(
(2, 3, 4, 5)).astype("float32")
y = np.random.random(
(2, 3, 5, 4)).astype("float32") + 1J * np.random.random(
(2, 3, 5, 4)).astype("float32")
self.compare_by_complex_api(x, y)
self.compare_op_by_complex_api(x, y)
self.compare_by_basic_api(x, y)
self.compare_op_by_basic_api(x, y)
def test_complex_x(self):
x = np.random.random(
(2, 3, 4, 5)).astype("float32") + 1J * np.random.random(
(2, 3, 4, 5)).astype("float32")
y = np.random.random((2, 3, 5, 4)).astype("float32")
self.compare_by_complex_api(x, y)
self.compare_op_by_complex_api(x, y)
def test_complex_y(self):
x = np.random.random((2, 3, 4, 5)).astype("float32")
y = np.random.random(
(2, 3, 5, 4)).astype("float32") + 1J * np.random.random(
(2, 3, 5, 4)).astype("float32")
self.compare_by_complex_api(x, y)
def test_complex_xy_128(self):
x = np.random.random(
(2, 3, 4, 5)).astype("float64") + 1J * np.random.random(
(2, 3, 4, 5)).astype("float64")
y = np.random.random(
(2, 3, 5, 4)).astype("float64") + 1J * np.random.random(
(2, 3, 5, 4)).astype("float64")
self.compare_by_basic_api(x, y)
self.compare_op_by_basic_api(x, y)
def test_complex_xy_gemv(self):
x = np.random.random(
(2, 1, 100)).astype("float32") + 1J * np.random.random(
(2, 1, 100)).astype("float32")
y = np.random.random((100)).astype("float32") + 1J * np.random.random(
(100)).astype("float32")
self.compare_by_basic_api(x, y)
self.compare_op_by_basic_api(x, y)
x = np.random.random(
(2, 1, 100)).astype("float64") + 1J * np.random.random(
(2, 1, 100)).astype("float64")
y = np.random.random((100)).astype("float64") + 1J * np.random.random(
(100)).astype("float64")
self.compare_by_basic_api(x, y)
self.compare_op_by_basic_api(x, y)
def test_complex_xy_gemm_128(self):
x = np.random.random(
(1, 2, 50)).astype("float64") + 1J * np.random.random(
(1, 2, 50)).astype("float64")
y = np.random.random(
(1, 50, 2)).astype("float64") + 1J * np.random.random(
(1, 50, 2)).astype("float64")
self.compare_by_basic_api(x, y)
self.compare_op_by_basic_api(x, y)
class TestComplexMatMulLayerGEMM(unittest.TestCase):
    """Check paddle.matmul against np.matmul for complex GEMM-shaped inputs.

    Each comparison runs on every available place (CPU always, GPU when the
    build is compiled with CUDA), once through the functional API
    (paddle.matmul) and once through the VarBase method (x.matmul(y)).
    """

    def setUp(self):
        # CPU is always exercised; add the first CUDA device when available.
        self._places = [fluid.CPUPlace()]
        if fluid.core.is_compiled_with_cuda():
            self._places.append(fluid.CUDAPlace(0))

    def _to_variable(self, value, place):
        # Wrap a numpy array as an imperative VarBase on the given place.
        # Factored out: the original repeated this construction four times.
        return fluid.core.VarBase(
            value=value,
            place=place,
            persistable=False,
            zero_copy=None,
            name='')

    def compare_by_basic_api(self, x, y):
        """Assert paddle.matmul(x, y) matches np.matmul(x, y) on every place."""
        np_result = np.matmul(x, y)
        for place in self._places:
            with dg.guard(place):
                x_var = self._to_variable(x, place)
                y_var = self._to_variable(y, place)
                result = paddle.matmul(x_var, y_var)
                # assert_allclose reports mismatching elements on failure,
                # unlike assertTrue(np.allclose(...)) which only says "False".
                # Explicit rtol/atol keep np.allclose's default tolerances.
                np.testing.assert_allclose(
                    result.numpy(), np_result, rtol=1e-05, atol=1e-08)

    def compare_op_by_basic_api(self, x, y):
        """Assert x.matmul(y) (VarBase method) matches np.matmul on every place."""
        np_result = np.matmul(x, y)
        for place in self._places:
            with dg.guard(place):
                x_var = self._to_variable(x, place)
                y_var = self._to_variable(y, place)
                result = x_var.matmul(y_var)
                np.testing.assert_allclose(
                    result.numpy(), np_result, rtol=1e-05, atol=1e-08)

    def test_complex_xy_gemm_64(self):
        """Batched complex64 matrix-matrix (gemm) product, (1,2,50) x (1,50,2)."""
        x = np.random.random(
            (1, 2, 50)).astype("float32") + 1J * np.random.random(
                (1, 2, 50)).astype("float32")
        y = np.random.random(
            (1, 50, 2)).astype("float32") + 1J * np.random.random(
                (1, 50, 2)).astype("float32")
        self.compare_by_basic_api(x, y)
        self.compare_op_by_basic_api(x, y)
# Run the full test suite when this file is executed as a script.
if __name__ == '__main__':
    unittest.main()
| 37.61165
| 78
| 0.527491
| 983
| 7,748
| 3.982706
| 0.132248
| 0.015837
| 0.10728
| 0.068966
| 0.831418
| 0.829374
| 0.817625
| 0.800766
| 0.712388
| 0.674074
| 0
| 0.041997
| 0.348477
| 7,748
| 205
| 79
| 37.795122
| 0.733558
| 0.075245
| 0
| 0.760234
| 0
| 0
| 0.030494
| 0
| 0
| 0
| 0
| 0
| 0.035088
| 1
| 0.087719
| false
| 0
| 0.02924
| 0
| 0.128655
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22bed2fa5353759031a15b4af07e1efff1c13ec0
| 68,637
|
py
|
Python
|
benchmarks/SimResults/micro_pinned_train_combos/cmpB_bwavesgcccactusADMmilc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/micro_pinned_train_combos/cmpB_bwavesgcccactusADMmilc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/micro_pinned_train_combos/cmpB_bwavesgcccactusADMmilc/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.14084,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.31331,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.695547,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.226367,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.391986,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.224815,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.843169,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.117117,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.37916,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.131404,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.008206,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.114712,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0606884,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.246116,
'Execution Unit/Register Files/Runtime Dynamic': 0.0688944,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.315775,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.605311,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.23606,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000195614,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000195614,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000169046,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 6.47116e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000871793,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00143207,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00192316,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0583413,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.71101,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.11977,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.198153,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.11238,
'Instruction Fetch Unit/Runtime Dynamic': 0.37962,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.057901,
'L2/Runtime Dynamic': 0.0194135,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.82417,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.793134,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0513449,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0513449,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.06762,
'Load Store Unit/Runtime Dynamic': 1.09769,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.126608,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.253216,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0449335,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0458001,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.230737,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0196433,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.466917,
'Memory Management Unit/Runtime Dynamic': 0.0654435,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 20.6457,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.458438,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0170917,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.108124,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.583654,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 4.38189,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 1.88938e-06,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.20269,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.01201e-05,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.13022,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.21004,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.106021,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.446281,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.148932,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.11426,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 1.91191e-06,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00546201,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0394978,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0403949,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0394997,
'Execution Unit/Register Files/Runtime Dynamic': 0.0458569,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0832112,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.21385,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.31406,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00187773,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00187773,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00168987,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000683916,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000580276,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0060256,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0160608,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0388326,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.47009,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.146359,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.131893,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.80848,
'Instruction Fetch Unit/Runtime Dynamic': 0.339172,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0303831,
'L2/Runtime Dynamic': 0.00687843,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.41739,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.576575,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0381846,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0381845,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.59771,
'Load Store Unit/Runtime Dynamic': 0.803073,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0941567,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.188313,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0334165,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0337481,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.153581,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0243628,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.367094,
'Memory Management Unit/Runtime Dynamic': 0.0581109,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 15.5074,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 4.44203e-06,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00587522,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0660609,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0719406,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.59323,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0447235,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.237817,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.239554,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.10419,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.168054,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0848279,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.357071,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0824347,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.40728,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0452569,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00437018,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0484253,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0323201,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0936822,
'Execution Unit/Register Files/Runtime Dynamic': 0.0366903,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.113202,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.281312,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.31827,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000316505,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000316505,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00029046,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000120528,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000464281,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00138775,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00250638,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0310702,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.97633,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.071936,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.105528,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.29076,
'Instruction Fetch Unit/Runtime Dynamic': 0.212429,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0416814,
'L2/Runtime Dynamic': 0.00360731,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.4574,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.590503,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.039479,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0394791,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.64383,
'Load Store Unit/Runtime Dynamic': 0.82468,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0973486,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.194698,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0345493,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0351737,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.122881,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.011798,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.338339,
'Memory Management Unit/Runtime Dynamic': 0.0469717,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 15.3114,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.119051,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00614957,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0511568,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.176357,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.58231,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0888717,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.272492,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.562956,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.171594,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.276775,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.139706,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.588075,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.109944,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.98229,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.106354,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00719742,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0819427,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0532293,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.188297,
'Execution Unit/Register Files/Runtime Dynamic': 0.0604267,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.194853,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.509333,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.8357,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 1.93227e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 1.93227e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 1.68515e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 6.53527e-06,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000764643,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00082014,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000184496,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0511707,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.25489,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.125925,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.173799,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.63137,
'Instruction Fetch Unit/Runtime Dynamic': 0.3519,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.036246,
'L2/Runtime Dynamic': 0.0100269,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.39647,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.04918,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.06986,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0698601,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.72636,
'Load Store Unit/Runtime Dynamic': 1.46356,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.172263,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.344526,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0611367,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0616664,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.202377,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0206872,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.463508,
'Memory Management Unit/Runtime Dynamic': 0.0823535,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 18.4293,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.27977,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0111466,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.082187,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.373103,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.11665,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 6.331041068697784,
'Runtime Dynamic': 6.331041068697784,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.27929,
'Runtime Dynamic': 0.0828464,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 70.173,
'Peak Power': 103.285,
'Runtime Dynamic': 13.7569,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 69.8937,
'Total Cores/Runtime Dynamic': 13.6741,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.27929,
'Total L3s/Runtime Dynamic': 0.0828464,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.095186
| 124
| 0.682081
| 8,091
| 68,637
| 5.78025
| 0.067853
| 0.123503
| 0.112898
| 0.093397
| 0.938719
| 0.931427
| 0.918406
| 0.886867
| 0.862641
| 0.842307
| 0
| 0.132066
| 0.224238
| 68,637
| 914
| 125
| 75.095186
| 0.746272
| 0
| 0
| 0.642232
| 0
| 0
| 0.657143
| 0.048078
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
22c34e0898f94573fff407bbdbcdd9b484150aaa
| 30,135
|
py
|
Python
|
recipes/test_views.py
|
UW-GAC/pie
|
89ae277f5ba1357580d78c3527f26200686308a6
|
[
"MIT"
] | null | null | null |
recipes/test_views.py
|
UW-GAC/pie
|
89ae277f5ba1357580d78c3527f26200686308a6
|
[
"MIT"
] | 3
|
2020-01-02T20:17:06.000Z
|
2020-01-04T21:13:09.000Z
|
recipes/test_views.py
|
UW-GAC/pie
|
89ae277f5ba1357580d78c3527f26200686308a6
|
[
"MIT"
] | 1
|
2021-10-29T22:15:27.000Z
|
2021-10-29T22:15:27.000Z
|
"""Test the functions and classes for recipes.views."""
from django.contrib.auth.models import Group
from django.urls import reverse
from core.factories import UserFactory, SuperUserFactory, USER_FACTORY_PASSWORD
from core.utils import (DCCAnalystLoginTestCase, DCCDeveloperLoginTestCase, LoginRequiredTestCase,
RecipeSubmitterLoginTestCase, SuperuserLoginTestCase, UserLoginTestCase)
from trait_browser.factories import SourceTraitFactory
from . import factories
from . import models
class UnitRecipeViewsTest(RecipeSubmitterLoginTestCase):
def test_create_unit_recipe(self):
    """A GET request for the CreateUnitRecipe page returns a successful response."""
    response = self.client.get(reverse('recipes:unit:create'))
    self.assertEqual(response.status_code, 200)
def test_create_unit_recipe_creates_new_object(self):
    """The CreateUnitRecipe view creates a new UnitRecipe and redirects to its detail page.

    Posts a complete, valid form and checks that exactly one UnitRecipe with the
    submitted name exists afterwards, that it was saved (has an integer pk), and
    that the response redirects to the new object's detail page.
    """
    new_unit_name = 'Example study name, subcohort 5.'
    source_traits = SourceTraitFactory.create_batch(
        3, source_dataset__source_study_version__study__global_study__i_id=1)
    # Named 'form_data' rather than 'input' to avoid shadowing the builtin.
    form_data = {'name': new_unit_name,
                 'instructions': 'Do something to combine these variables',
                 'age_variables': [str(source_traits[0].pk), ],
                 'batch_variables': [str(source_traits[1].pk), ],
                 'phenotype_variables': [str(source_traits[2].pk), ],
                 'type': models.UnitRecipe.UNIT_RECODE
                 }
    url = reverse('recipes:unit:create')
    response = self.client.post(url, form_data)
    matching_units = models.UnitRecipe.objects.filter(name=new_unit_name)
    # assertEqual on count() gives a clearer failure message than assertTrue(len(...) == 1).
    self.assertEqual(matching_units.count(), 1)
    new_unit = matching_units[0]
    self.assertIsInstance(new_unit.pk, int)
    self.assertRedirects(response, new_unit.get_absolute_url())
def test_update_unit_recipe(self):
    """A GET request for the UpdateUnitRecipe page of an owned recipe succeeds."""
    recipe = factories.UnitRecipeFactory.create(creator=self.user)
    edit_url = reverse('recipes:unit:edit', kwargs={'pk': recipe.pk})
    response = self.client.get(edit_url)
    self.assertEqual(response.status_code, 200)
def test_update_unit_recipe_changes_object(self):
    """The UpdateUnitRecipe view updates the name field.

    Posts the existing recipe's data back with a changed name and verifies the
    change was persisted to the database.
    """
    source_traits = SourceTraitFactory.create_batch(
        3, source_dataset__source_study_version__study__global_study__i_id=1)
    new_unit = factories.UnitRecipeFactory.create(
        creator=self.user, age_variables=[source_traits[0]], batch_variables=[source_traits[1]],
        phenotype_variables=[source_traits[2]])
    url = reverse('recipes:unit:edit', kwargs={'pk': new_unit.pk})
    edited_name = 'Hi ho there, Kermit the frog here.'
    # Named 'form_data' rather than 'input' to avoid shadowing the builtin.
    form_data = {'name': edited_name,
                 'instructions': new_unit.instructions,
                 'age_variables': [str(v.pk) for v in new_unit.age_variables.all()],
                 'batch_variables': [str(v.pk) for v in new_unit.batch_variables.all()],
                 'phenotype_variables': [str(v.pk) for v in new_unit.phenotype_variables.all()],
                 'type': models.UnitRecipe.OTHER,
                 }
    response = self.client.post(url, form_data)
    # NOTE(review): the redirect assertion below was disabled in the original;
    # left disabled because it is unclear whether the view redirects here — confirm.
    # self.assertRedirects(response, new_unit.get_absolute_url())
    new_unit.refresh_from_db()
    self.assertEqual(new_unit.name, edited_name)
def test_update_unit_recipe_error_on_invalid_pk(self):
    """The UpdateUnitRecipe view returns 404 for a pk that does not exist."""
    missing_url = reverse('recipes:unit:edit', kwargs={'pk': 1})
    self.assertEqual(self.client.get(missing_url).status_code, 404)
def test_unit_recipe_detail(self):
    """The UnitRecipeDetail view is viewable for a valid pk."""
    recipe = factories.UnitRecipeFactory.create(creator=self.user)
    detail_url = reverse('recipes:unit:detail', kwargs={'pk': recipe.pk})
    self.assertEqual(self.client.get(detail_url).status_code, 200)
def test_unit_recipe_detail_error_for_invalid_pk(self):
    """The UnitRecipeDetail view returns 404 for a pk that does not exist."""
    missing_url = reverse('recipes:unit:detail', kwargs={'pk': 1})
    self.assertEqual(self.client.get(missing_url).status_code, 404)
def test_unit_recipe_detail_error_for_other_user_recipes(self):
    """A user cannot view detail pages for other users' UnitRecipes."""
    # The creator can view their own recipe's detail page.
    new_recipe = factories.UnitRecipeFactory.create(creator=self.user)
    url = reverse('recipes:unit:detail', kwargs={'pk': new_recipe.pk})
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
    # A different recipe_submitter cannot view the page.
    submitter = UserFactory.create()
    submitter.groups.add(Group.objects.get(name='recipe_submitters'))
    self.client.logout()
    self.client.login(username=submitter.email, password=USER_FACTORY_PASSWORD)
    response = self.client.get(url)
    self.assertEqual(response.status_code, 404)
    # A dcc_analyst can view the page.
    analyst = UserFactory.create(is_staff=True)
    analyst.groups.add(Group.objects.get(name='dcc_analysts'))
    self.client.logout()
    self.client.login(username=analyst.email, password=USER_FACTORY_PASSWORD)
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
    # A dcc_developer can view the page (comment previously said "cannot",
    # contradicting the 200 assertion below).
    developer = UserFactory.create(is_staff=True)
    developer.groups.add(Group.objects.get(name='dcc_developers'))
    self.client.logout()
    self.client.login(username=developer.email, password=USER_FACTORY_PASSWORD)
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
    # A superuser can view the page (comment previously said "cannot",
    # contradicting the 200 assertion below).
    superuser = SuperUserFactory.create(is_staff=True)
    self.client.logout()
    self.client.login(username=superuser.email, password=USER_FACTORY_PASSWORD)
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
def test_update_unit_recipe_cannot_edit_other_user_recipes(self):
    """A user cannot access UpdateUnitRecipe view for another user's saved unit recipe."""
    # The creator can open the edit page for their own recipe.
    new_recipe = factories.UnitRecipeFactory.create(creator=self.user)
    url = reverse('recipes:unit:edit', kwargs={'pk': new_recipe.pk})
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
    # A different recipe_submitter cannot view the page.
    submitter = UserFactory.create()
    submitter.groups.add(Group.objects.get(name='recipe_submitters'))
    self.client.logout()
    self.client.login(username=submitter.email, password=USER_FACTORY_PASSWORD)
    response = self.client.get(url)
    self.assertEqual(response.status_code, 404)
    # A dcc_analyst can view the page (comment previously said "cannot",
    # contradicting the 200 assertion below).
    analyst = UserFactory.create(is_staff=True)
    analyst.groups.add(Group.objects.get(name='dcc_analysts'))
    self.client.logout()
    self.client.login(username=analyst.email, password=USER_FACTORY_PASSWORD)
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
    # A dcc_developer can view the page (comment previously said "cannot").
    developer = UserFactory.create(is_staff=True)
    developer.groups.add(Group.objects.get(name='dcc_developers'))
    self.client.logout()
    self.client.login(username=developer.email, password=USER_FACTORY_PASSWORD)
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
    # A superuser can view the page (comment previously said "cannot").
    superuser = SuperUserFactory.create(is_staff=True)
    self.client.logout()
    self.client.login(username=superuser.email, password=USER_FACTORY_PASSWORD)
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
class HarmonizationRecipeViewsTest(RecipeSubmitterLoginTestCase):
    """View tests for HarmonizationRecipe create/edit/detail pages, run as a recipe submitter."""

    def test_create_harmonization_recipe(self):
        """The CreateHarmonizationRecipe view can be navigated to."""
        url = reverse('recipes:harmonization:create')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_create_harmonization_recipe_creates_new_object(self):
        """The CreateHarmonizationRecipe view creates a new HarmonizationRecipe and redirects to its detail page."""
        new_harmonization_name = 'Harmonization of BMI across all time points.'
        units = factories.UnitRecipeFactory.create_batch(3, creator=self.user)
        # Named 'form_data' (was 'input') so the builtin input() is not shadowed.
        form_data = {'name': new_harmonization_name,
                     'units': [str(u.pk) for u in units],
                     'target_name': 'test_variable_name',
                     'target_description': 'This is a test variable.',
                     'encoded_values': '1: blue\r\n2: red\r\n3: yellow',
                     'measurement_unit': 'kilograms',
                     }
        url = reverse('recipes:harmonization:create')
        response = self.client.post(url, form_data)
        new_harmonization = models.HarmonizationRecipe.objects.filter(name=new_harmonization_name)
        # Exactly one recipe with this name should exist after the POST.
        self.assertEqual(new_harmonization.count(), 1)
        new_harmonization = new_harmonization[0]
        self.assertIsInstance(new_harmonization.pk, int)
        self.assertRedirects(response, new_harmonization.get_absolute_url())

    def test_update_harmonization_recipe(self):
        """The UpdateHarmonizationRecipe view can be navigated to."""
        new_recipe = factories.HarmonizationRecipeFactory.create(creator=self.user)
        url = reverse('recipes:harmonization:edit', kwargs={'pk': new_recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_update_harmonization_recipe_changes_object(self):
        """The UpdateHarmonizationRecipe view updates the name field."""
        units = factories.UnitRecipeFactory.create_batch(3, creator=self.user)
        new_harmonization = factories.HarmonizationRecipeFactory.create(creator=self.user, units=units)
        url = reverse('recipes:harmonization:edit', kwargs={'pk': new_harmonization.pk})
        edited_name = 'Hi ho there, Kermit the frog here.'
        # Named 'form_data' (was 'input') so the builtin input() is not shadowed.
        form_data = {'name': edited_name,
                     'units': [str(u.pk) for u in new_harmonization.units.all()],
                     'target_name': new_harmonization.target_name,
                     'target_description': new_harmonization.target_description,
                     'encoded_values': new_harmonization.encoded_values,
                     'measurement_unit': new_harmonization.measurement_unit,
                     }
        response = self.client.post(url, form_data)
        # NOTE(review): the redirect assertion was already disabled; confirm whether
        # the view is expected to redirect to the detail page on a successful edit.
        # self.assertRedirects(response, new_harmonization.get_absolute_url())
        new_harmonization.refresh_from_db()
        self.assertEqual(new_harmonization.name, edited_name)

    def test_update_harmonization_recipe_error_on_invalid_pk(self):
        """The UpdateHarmonizationRecipe view gives an error when given an invalid pk."""
        url = reverse('recipes:harmonization:edit', kwargs={'pk': 1})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)

    def test_harmonization_recipe_detail(self):
        """The HarmonizationRecipeDetail view is viewable for a valid pk."""
        new_recipe = factories.HarmonizationRecipeFactory.create(creator=self.user)
        url = reverse('recipes:harmonization:detail', kwargs={'pk': new_recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_harmonization_recipe_detail_error_for_invalid_pk(self):
        """The HarmonizationRecipeDetail view gives an error for an invalid pk."""
        url = reverse('recipes:harmonization:detail', kwargs={'pk': 1})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)

    def test_harmonization_recipe_detail_error_for_other_user_recipes(self):
        """A user cannot view detail pages for other users' HarmonizationRecipes."""
        # The creator can view their own recipe's detail page.
        new_recipe = factories.HarmonizationRecipeFactory.create(creator=self.user)
        url = reverse('recipes:harmonization:detail', kwargs={'pk': new_recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        # A different recipe_submitter cannot view the page.
        submitter = UserFactory.create()
        submitter.groups.add(Group.objects.get(name='recipe_submitters'))
        self.client.logout()
        self.client.login(username=submitter.email, password=USER_FACTORY_PASSWORD)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)
        # A dcc_analyst can view the page.
        analyst = UserFactory.create(is_staff=True)
        analyst.groups.add(Group.objects.get(name='dcc_analysts'))
        self.client.logout()
        self.client.login(username=analyst.email, password=USER_FACTORY_PASSWORD)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        # A dcc_developer can view the page.
        developer = UserFactory.create(is_staff=True)
        developer.groups.add(Group.objects.get(name='dcc_developers'))
        self.client.logout()
        self.client.login(username=developer.email, password=USER_FACTORY_PASSWORD)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        # A superuser can view the page.
        superuser = SuperUserFactory.create(is_staff=True)
        self.client.logout()
        self.client.login(username=superuser.email, password=USER_FACTORY_PASSWORD)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_update_harmonization_recipe_cannot_edit_other_user_recipes(self):
        """A user cannot access UpdateHarmonizationRecipe view for another user's saved unit recipe."""
        # The creator can open the edit page for their own recipe.
        new_recipe = factories.HarmonizationRecipeFactory.create(creator=self.user)
        url = reverse('recipes:harmonization:edit', kwargs={'pk': new_recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        # A different recipe_submitter cannot view the page.
        submitter = UserFactory.create()
        submitter.groups.add(Group.objects.get(name='recipe_submitters'))
        self.client.logout()
        self.client.login(username=submitter.email, password=USER_FACTORY_PASSWORD)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)
        # A dcc_analyst can view the page.
        analyst = UserFactory.create(is_staff=True)
        analyst.groups.add(Group.objects.get(name='dcc_analysts'))
        self.client.logout()
        self.client.login(username=analyst.email, password=USER_FACTORY_PASSWORD)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        # A dcc_developer can view the page.
        developer = UserFactory.create(is_staff=True)
        developer.groups.add(Group.objects.get(name='dcc_developers'))
        self.client.logout()
        self.client.login(username=developer.email, password=USER_FACTORY_PASSWORD)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        # A superuser can view the page.
        superuser = SuperUserFactory.create(is_staff=True)
        self.client.logout()
        self.client.login(username=superuser.email, password=USER_FACTORY_PASSWORD)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
class GrouplessUserRecipeViewsTest(UserLoginTestCase):
    """Recipe views are forbidden, and recipe page elements hidden, for users in no group."""

    def _assert_get_forbidden(self, target_url):
        """GET *target_url* and check that the response is 403 Forbidden."""
        self.assertEqual(self.client.get(target_url).status_code, 403)

    def test_unit_create_forbidden_to_groupless(self):
        """The CreateUnitRecipe view can't be accessed by groupless users."""
        self._assert_get_forbidden(reverse('recipes:unit:create'))

    def test_harmonization_create_forbidden_to_groupless(self):
        """The CreateHarmonizationRecipe view can't be accessed by groupless users."""
        self._assert_get_forbidden(reverse('recipes:harmonization:create'))

    def test_unit_edit_forbidden_to_groupless(self):
        """The UpdateUnitRecipe view can't be accessed by groupless users."""
        recipe = factories.UnitRecipeFactory.create()
        self._assert_get_forbidden(reverse('recipes:unit:edit', kwargs={'pk': recipe.pk}))

    def test_harmonization_edit_forbidden_to_groupless(self):
        """The UpdateHarmonizationRecipe view can't be accessed by groupless users."""
        recipe = factories.HarmonizationRecipeFactory.create()
        self._assert_get_forbidden(reverse('recipes:harmonization:edit', kwargs={'pk': recipe.pk}))

    def test_unit_detail_forbidden_to_groupless(self):
        """The UnitRecipeDetail view can't be accessed by groupless users."""
        recipe = factories.UnitRecipeFactory.create()
        self._assert_get_forbidden(reverse('recipes:unit:detail', kwargs={'pk': recipe.pk}))

    def test_harmonization_detail_forbidden_to_groupless(self):
        """The HarmonizationRecipeDetail view can't be accessed by groupless users."""
        recipe = factories.HarmonizationRecipeFactory.create()
        self._assert_get_forbidden(reverse('recipes:harmonization:detail', kwargs={'pk': recipe.pk}))

    def test_recipes_not_on_homepage(self):
        """The harmonization recipe elements don't appear on the home page for groupless users."""
        response = self.client.get(reverse('home'))
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, '<h2>Submit a harmonization recipe</h2>', html=True)
        self.assertNotContains(
            response,
            """<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button"
            aria-haspopup="true" aria-expanded="false">Harmonization recipes
            <span class="caret"></span></a>""",
            html=True)

    def test_recipes_not_on_profile_page(self):
        """The harmonization recipe tabs don't appear on the profile page for groupless users."""
        response = self.client.get(reverse('profiles:profile'))
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(
            response,
            '<li role="presentation"><a href="#unitrecipes" role="tab" data-toggle="tab">Unit Recipes</a></li>',
            html=True)
        self.assertNotContains(
            response,
            '<li role="presentation"><a href="#harmonizationrecipes" role="tab" data-toggle="tab">Harmonization Recipes</a></li>',  # noqa: E501
            html=True)
class DCCAnalystRecipeViewsTest(DCCAnalystLoginTestCase):
    """Recipe views are accessible, and recipe page elements visible, for dcc_analyst users."""
    # NOTE(review): the method names say "groupless" but these tests run as a
    # dcc_analyst (via DCCAnalystLoginTestCase) — names look copy-pasted from
    # GrouplessUserRecipeViewsTest; confirm and consider renaming.

    def test_unit_create_viewable_to_groupless(self):
        """The CreateUnitRecipe view can be accessed by dcc_analysts users."""
        url = reverse('recipes:unit:create')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_harmonization_create_viewable_to_groupless(self):
        """The CreateHarmonizationRecipe view can be accessed by dcc_analysts users."""
        url = reverse('recipes:harmonization:create')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_unit_edit_viewable_to_groupless(self):
        """The UpdateUnitRecipe view can be accessed by dcc_analysts users."""
        recipe = factories.UnitRecipeFactory.create(creator=self.user)
        url = reverse('recipes:unit:edit', kwargs={'pk': recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_harmonization_edit_viewable_to_groupless(self):
        """The UpdateHarmonizationRecipe view can be accessed by dcc_analysts users."""
        recipe = factories.HarmonizationRecipeFactory.create(creator=self.user)
        url = reverse('recipes:harmonization:edit', kwargs={'pk': recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_unit_detail_viewable_to_groupless(self):
        """The UnitRecipeDetail view can be accessed by dcc_analysts users."""
        recipe = factories.UnitRecipeFactory.create(creator=self.user)
        url = reverse('recipes:unit:detail', kwargs={'pk': recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_harmonization_detail_viewable_to_groupless(self):
        """The HarmonizationRecipeDetail view can be accessed by dcc_analysts users."""
        recipe = factories.HarmonizationRecipeFactory.create(creator=self.user)
        url = reverse('recipes:harmonization:detail', kwargs={'pk': recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_recipes_on_homepage(self):
        """The harmonization recipe elements appear on the home page for DCC analyst users.

        (Docstring previously said "don't appear", contradicting the
        assertContains checks below.)
        """
        url = reverse('home')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<h2>Submit a harmonization recipe</h2>', html=True)
        self.assertContains(response,
                            """<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button"
                            aria-haspopup="true" aria-expanded="false">Harmonization recipes
                            <span class="caret"></span></a>""",
                            html=True)

    def test_recipes_on_profile_page(self):
        """The harmonization recipe tabs appear on the profile page for DCC analyst users.

        (Docstring previously said "don't appear", contradicting the
        assertContains checks below.)
        """
        url = reverse('profiles:profile')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(
            response,
            '<li role="presentation"><a href="#unitrecipes" role="tab" data-toggle="tab">My Unit Recipes</a></li>',
            html=True)
        self.assertContains(
            response,
            '<li role="presentation"><a href="#harmonizationrecipes" role="tab" data-toggle="tab">My Harmonization Recipes</a></li>',  # noqa: E501
            html=True)
class DCCDeveloperRecipeViewsTest(DCCDeveloperLoginTestCase):
    """Recipe views are accessible, and recipe page elements visible, for dcc_developer users."""
    # NOTE(review): the method names say "groupless" but these tests run as a
    # dcc_developer (via DCCDeveloperLoginTestCase); confirm and consider renaming.

    def test_unit_create_viewable_to_groupless(self):
        """The CreateUnitRecipe view can be accessed by dcc_developers users."""
        url = reverse('recipes:unit:create')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_harmonization_create_viewable_to_groupless(self):
        """The CreateHarmonizationRecipe view can be accessed by dcc_developers users."""
        url = reverse('recipes:harmonization:create')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_unit_edit_viewable_to_groupless(self):
        """The UpdateUnitRecipe view can be accessed by dcc_developers users."""
        recipe = factories.UnitRecipeFactory.create(creator=self.user)
        url = reverse('recipes:unit:edit', kwargs={'pk': recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_harmonization_edit_viewable_to_groupless(self):
        """The UpdateHarmonizationRecipe view can be accessed by dcc_developers users."""
        recipe = factories.HarmonizationRecipeFactory.create(creator=self.user)
        url = reverse('recipes:harmonization:edit', kwargs={'pk': recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_unit_detail_viewable_to_groupless(self):
        """The UnitRecipeDetail view can be accessed by dcc_developers users."""
        recipe = factories.UnitRecipeFactory.create(creator=self.user)
        url = reverse('recipes:unit:detail', kwargs={'pk': recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_harmonization_detail_viewable_to_groupless(self):
        """The HarmonizationRecipeDetail view can be accessed by dcc_developers users."""
        recipe = factories.HarmonizationRecipeFactory.create(creator=self.user)
        url = reverse('recipes:harmonization:detail', kwargs={'pk': recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_recipes_on_homepage(self):
        """The harmonization recipe elements appear on the home page for DCC developer users.

        (Docstring previously said "don't appear", contradicting the
        assertContains checks below.)
        """
        url = reverse('home')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<h2>Submit a harmonization recipe</h2>', html=True)
        self.assertContains(response,
                            """<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button"
                            aria-haspopup="true" aria-expanded="false">Harmonization recipes
                            <span class="caret"></span></a>""",
                            html=True)

    def test_recipes_on_profile_page(self):
        """The harmonization recipe tabs appear on the profile page for DCC developer users.

        (Docstring previously said "don't appear", contradicting the
        assertContains checks below.)
        """
        url = reverse('profiles:profile')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(
            response,
            '<li role="presentation"><a href="#unitrecipes" role="tab" data-toggle="tab">My Unit Recipes</a></li>',
            html=True)
        self.assertContains(
            response,
            '<li role="presentation"><a href="#harmonizationrecipes" role="tab" data-toggle="tab">My Harmonization Recipes</a></li>',  # noqa: E501
            html=True)
class SuperuserRecipeViewsTest(SuperuserLoginTestCase):
    """Recipe views are accessible, and recipe page elements visible, for superusers."""
    # NOTE(review): the method names say "groupless" but these tests run as a
    # superuser (via SuperuserLoginTestCase); confirm and consider renaming.

    def test_unit_create_viewable_to_groupless(self):
        """The CreateUnitRecipe view can be accessed by superusers."""
        url = reverse('recipes:unit:create')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_harmonization_create_viewable_to_groupless(self):
        """The CreateHarmonizationRecipe view can be accessed by superusers."""
        url = reverse('recipes:harmonization:create')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_unit_edit_viewable_to_groupless(self):
        """The UpdateUnitRecipe view can be accessed by superusers."""
        recipe = factories.UnitRecipeFactory.create(creator=self.user)
        url = reverse('recipes:unit:edit', kwargs={'pk': recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_harmonization_edit_viewable_to_groupless(self):
        """The UpdateHarmonizationRecipe view can be accessed by superusers."""
        recipe = factories.HarmonizationRecipeFactory.create(creator=self.user)
        url = reverse('recipes:harmonization:edit', kwargs={'pk': recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_unit_detail_viewable_to_groupless(self):
        """The UnitRecipeDetail view can be accessed by superusers."""
        recipe = factories.UnitRecipeFactory.create(creator=self.user)
        url = reverse('recipes:unit:detail', kwargs={'pk': recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_harmonization_detail_viewable_to_groupless(self):
        """The HarmonizationRecipeDetail view can be accessed by superusers."""
        recipe = factories.HarmonizationRecipeFactory.create(creator=self.user)
        url = reverse('recipes:harmonization:detail', kwargs={'pk': recipe.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_recipes_on_homepage(self):
        """The harmonization recipe elements appear on the home page for superusers.

        (Docstring previously said "don't appear", contradicting the
        assertContains checks below.)
        """
        url = reverse('home')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<h2>Submit a harmonization recipe</h2>', html=True)
        self.assertContains(response,
                            """<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button"
                            aria-haspopup="true" aria-expanded="false">Harmonization recipes
                            <span class="caret"></span></a>""",
                            html=True)

    def test_recipes_on_profile_page(self):
        """The harmonization recipe tabs appear on the profile page for superusers.

        (Docstring previously said "don't appear", contradicting the
        assertContains checks below.)
        """
        url = reverse('profiles:profile')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(
            response,
            '<li role="presentation"><a href="#unitrecipes" role="tab" data-toggle="tab">My Unit Recipes</a></li>',
            html=True)
        self.assertContains(
            response,
            '<li role="presentation"><a href="#harmonizationrecipes" role="tab" data-toggle="tab">My Harmonization Recipes</a></li>',  # noqa: E501
            html=True)
class RecipesLoginRequiredTest(LoginRequiredTestCase):
    """Checks that all URLs in the 'recipes' namespace require authentication."""

    def test_recipes_login_required(self):
        """All recipes urls redirect to login page if no user is logged in."""
        # Delegates the per-URL redirect checks to the base-class helper.
        self.assert_redirect_all_urls('recipes')
| 51.956897
| 147
| 0.679841
| 3,486
| 30,135
| 5.72117
| 0.069134
| 0.049138
| 0.059567
| 0.065283
| 0.881117
| 0.865975
| 0.839551
| 0.817639
| 0.792619
| 0.779382
| 0
| 0.009675
| 0.211117
| 30,135
| 579
| 148
| 52.046632
| 0.82926
| 0.146408
| 0
| 0.762125
| 0
| 0.018476
| 0.115295
| 0.037249
| 0
| 0
| 0
| 0
| 0.200924
| 1
| 0.117783
| false
| 0.039261
| 0.016166
| 0
| 0.150115
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fe1eb5b113fa7b6fdd1684a062b6b8779a35a5cb
| 42,352
|
py
|
Python
|
profile_page/api/tests_api.py
|
Plongesam/data-structures-game
|
a47c849ea97763eff1005273a58aa3d8ab663ff2
|
[
"Apache-2.0"
] | 2
|
2021-03-02T20:06:34.000Z
|
2021-03-31T02:51:35.000Z
|
profile_page/api/tests_api.py
|
Plongesam/data-structures-game
|
a47c849ea97763eff1005273a58aa3d8ab663ff2
|
[
"Apache-2.0"
] | 68
|
2021-03-02T20:20:21.000Z
|
2021-05-13T02:21:57.000Z
|
profile_page/api/tests_api.py
|
Plongesam/data-structures-game
|
a47c849ea97763eff1005273a58aa3d8ab663ff2
|
[
"Apache-2.0"
] | null | null | null |
"""
Run: python manage.py test profile_page.api.tests_api
Reference: https://www.django-rest-framework.org/api-guide/testing/
"""
from time import sleep
import uuid
from django.test import TestCase
from game_board.database import game_board_db as game_db
from profile_page.database import profile_page_db as profile_db
class BColors:
    """ANSI terminal escape codes for colored test output."""
    # (Original docstring contained a stray quote: """ "Colors for printing""".)
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'      # reset to the terminal's default style
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
class APIOverview(TestCase):
    """Tests calls related to the overview of the API."""

    def test_index_loads_properly(self):
        """The index page loads properly."""
        sleep(1)
        index_response = self.client.get('')
        self.assertEqual(
            index_response.status_code, 200,
            msg=f'{BColors.FAIL}\t[-]\tResponse was not 200!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass return code api_overview.{BColors.ENDC}")
class Profile(TestCase):
    """Tests the API calls related to retrieving a user's profile."""

    def setUp(self):
        """Create an account for the duration of the test."""
        sleep(1)
        # Random short string used as both the username and the email prefix.
        self.user_info = str(uuid.uuid1()).split('-')[0]
        post_data = {'user_name': self.user_info,
                     'password1': 'smith1smith1',
                     'password2': 'smith1smith1',
                     'email': self.user_info+"@gmail.com"}
        response = self.client.post('/profile_page/api/register', post_data)
        self.assertEqual(response.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed creating an account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass creating a user.{BColors.ENDC}")
        # Authentication token returned by the register endpoint.
        self.token = response.data['token']

    def tearDown(self):
        """Removes the testing user from the database."""
        profile_db.remove_user(self.user_info)

    def test_profile(self):
        """Tests user profile by checking if it contains all fields."""
        # Request this user's profile (original comment incorrectly said "logout").
        post_data = {'user_id': self.user_info,
                     'token': self.token}
        response = self.client.post('/profile_page/api/profile', post_data)
        self.assertEqual(response.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed getting profile information!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass getting profile information.{BColors.ENDC}")
        # check that user profile has all the information required
        profile_data_points = list(response.data['user_profile'].keys())
        expected_points = ['user_name', 'badges', 'current_story_level', 'friends',
                           'points','llistStandardPoints', 'llistSurvivalPoints', 'rank', 'saved_games']
        self.assertEqual(profile_data_points, expected_points,
                         msg=f'{BColors.FAIL}\t[-]\tUser profile missing information!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tUser profile contains all the information needed.{BColors.ENDC}")
        # check that user profile information is for the correct user
        self.assertEqual(response.data['user_profile']['user_name'], self.user_info,
                         msg=f'{BColors.FAIL}\t[-]\tUser profile is for the wrong user!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tUser profile information for the correct user.{BColors.ENDC}")
class Register(TestCase):
    """Tests the API calls related to user registration."""

    def setUp(self):
        """Create an account."""
        sleep(1)
        # Random short string used as both the username and the email prefix.
        self.user_info = str(uuid.uuid1()).split('-')[0]
        response = self.client.post('/profile_page/api/register',
                                    {'user_name': self.user_info,
                                     'password1': 'smith1smith1',
                                     'password2': 'smith1smith1',
                                     'email': self.user_info + "@gmail.com"})
        self.assertEqual(response.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed creating an account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass creating a user.{BColors.ENDC}")
        # Authentication token returned by the register endpoint.
        self.token = response.data['token']

    def tearDown(self):
        """Removes the testing user from the database."""
        profile_db.remove_user(self.user_info)

    def _post_registration(self, drop=(), **overrides):
        """POST a registration request built from valid defaults.

        Keys in *drop* are removed and *overrides* replace/add fields.
        Returns the parsed response data.
        """
        fields = {'user_name': self.user_info,
                  'password1': 'smith1smith1',
                  'password2': 'smith1smith1',
                  'email': self.user_info + "@gmail.com"}
        for key in drop:
            fields.pop(key)
        fields.update(overrides)
        return self.client.post('/profile_page/api/register', fields).data

    def test_invalid_api_request(self):
        """Invalid API request fields."""
        data = self._post_registration(drop=('email',),
                                       wrong_field=self.user_info + "@gmail.com")
        self.assertEqual(data['error'], 'Missing required fields!',
                         msg=f'{BColors.FAIL}\t[-]\tAccepted invalid POST request!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not allowing invalid POST request.{BColors.ENDC}")

    def test_non_matching_password(self):
        """Attempts to register with non-matching password."""
        data = self._post_registration(password2='smith1smith2')
        self.assertEqual(data['error'], 'Passwords do not match!',
                         msg=f'{BColors.FAIL}\t[-]\tAccepted non-matching password!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not allowing non-matching password.{BColors.ENDC}")

    def test_user_name_bot(self):
        """Attempts to register username that starts with bot."""
        data = self._post_registration(user_name="bot" + self.user_info)
        self.assertEqual(data['error'], 'Username can not start with "bot"!',
                         msg=f'{BColors.FAIL}\t[-]\tAccepted bot user name!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tUser name bot did not allowed.{BColors.ENDC}")

    def test_user_short_characters(self):
        """Attempts to register username that is too short."""
        data = self._post_registration(user_name='smi1')
        self.assertEqual(data['error'], 'Username must be longer than 5 characters!',
                         msg=f'{BColors.FAIL}\t[-]\tAccepted short user name!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tShort user name was not allowed.{BColors.ENDC}")

    def test_short_password_characters(self):
        """Attempts to register with short password."""
        data = self._post_registration(password1='smi1', password2='smi1')
        self.assertEqual(data['error'], 'Password has to be longer than 8 characters!',
                         msg=f'{BColors.FAIL}\t[-]\tAccepted short password!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tShort password was not allowed.{BColors.ENDC}")

    def test_duplicate_user(self):
        """Attempts to register an existing user."""
        data = self._post_registration(email=self.user_info + "a@gmail.com")
        self.assertEqual(data['error'], 'Error when creating the account!',
                         msg=f'{BColors.FAIL}\t[-]\tAccepted existing user!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not allowing existing username.{BColors.ENDC}")

    def test_duplicate_email(self):
        """Attempts to register an existing email."""
        data = self._post_registration(user_name=self.user_info + 'a')
        self.assertEqual(data['error'], 'Error when creating the account!',
                         msg=f'{BColors.FAIL}\t[-]\tAccepted existing email!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not allowing existing email.{BColors.ENDC}")
class Login(TestCase):
    """Tests the API calls related to user login."""

    def setUp(self):
        """Create an account."""
        sleep(1)
        # Random short string used as both the username and the email prefix.
        self.user_info = str(uuid.uuid1()).split('-')[0]
        registration = {'user_name': self.user_info,
                        'password1': 'pineapple',
                        'password2': 'pineapple',
                        'email': self.user_info + "@gmail.com"}
        response = self.client.post('/profile_page/api/register', registration)
        self.assertEqual(response.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed creating an account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass creating a user.{BColors.ENDC}")
        # Authentication token returned by the register endpoint.
        self.token = response.data['token']

    def tearDown(self):
        """Removes the testing user from the database."""
        profile_db.remove_user(self.user_info)

    def _attempt_login(self, user_id, password):
        """POST a login request and return the raw response."""
        return self.client.post('/profile_page/api/login',
                                {'user_id': user_id, 'password': password})

    def test_invalid_username(self):
        """Tests logging in with invalid username."""
        response = self._attempt_login(self.user_info + 'a', 'pineapple')
        self.assertEqual(response.status_code, 401,
                         msg=f'{BColors.FAIL}\t[-]\tLogged in with wrong username!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not allowing incorrect username.{BColors.ENDC}")

    def test_invalid_password(self):
        """Tests logging in with invalid password."""
        response = self._attempt_login(self.user_info, 'manymanypineapple')
        self.assertEqual(response.status_code, 401,
                         msg=f'{BColors.FAIL}\t[-]\tLogged in with wrong password!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not allowing incorrect password.{BColors.ENDC}")

    def test_login(self):
        """Tests user login by checking the token."""
        response = self._attempt_login(self.user_info, 'pineapple')
        self.assertEqual(response.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed to log-in!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass logging in.{BColors.ENDC}")
        # Token returned on successful login; verify it matches the database record.
        token = response.data['token']
        self.assertEqual(profile_db.check_user(self.user_info, token), True,
                         msg=f'{BColors.FAIL}\t[-]\tUser has wrong token after log-in!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tUser has the correct token after log-in.{BColors.ENDC}")
class Logout(TestCase):
    """Tests the API calls that is related to user logout."""

    def setUp(self):
        """Register a fresh throwaway account and keep its auth token."""
        sleep(1)
        # short uuid prefix makes the username unique across runs
        self.user_info = str(uuid.uuid1()).split('-')[0]
        payload = {
            'user_name': self.user_info,
            'password1': 'pineapple',
            'password2': 'pineapple',
            'email': f"{self.user_info}@gmail.com",
        }
        resp = self.client.post('/profile_page/api/register', payload)
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed creating an account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass creating a user.{BColors.ENDC}")
        # Authentication token issued at registration
        self.token = resp.data['token']

    def tearDown(self):
        """Removes the testing user from the database."""
        profile_db.remove_user(self.user_info)

    def test_logout(self):
        """Logging out must succeed and invalidate the stored token."""
        resp = self.client.post('/profile_page/api/logout',
                                {'user_id': self.user_info,
                                 'token': self.token})
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed logging out!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass logging out.{BColors.ENDC}")
        # the old token must no longer validate the user
        self.assertEqual(profile_db.check_user(self.user_info, self.token), False,
                         msg=f'{BColors.FAIL}\t[-]\tUser still have the token after log-out!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tUser does not have the token after log-out.{BColors.ENDC}")
class Delete(TestCase):
    """Tests the API calls that is related to deleting user account."""

    def setUp(self):
        """Register a fresh throwaway account and keep its auth token."""
        sleep(1)
        # short uuid prefix makes the username unique across runs
        self.user_info = str(uuid.uuid1()).split('-')[0]
        payload = {
            'user_name': self.user_info,
            'password1': 'pineapple',
            'password2': 'pineapple',
            'email': f"{self.user_info}@gmail.com",
        }
        resp = self.client.post('/profile_page/api/register', payload)
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed creating an account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass creating a user.{BColors.ENDC}")
        # Authentication token issued at registration
        self.token = resp.data['token']

    # no tearDown: the test itself removes the account from the database

    def test_delete(self):
        """Deleting the account must succeed and remove the user record."""
        resp = self.client.post('/profile_page/api/delete',
                                {'user_id': self.user_info,
                                 'token': self.token})
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed deleting the account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass deleting the account.{BColors.ENDC}")
        # the user record must be gone after deletion
        self.assertEqual(profile_db.read_one_user(self.user_info), False,
                         msg=f'{BColors.FAIL}\t[-]\tUser still exist after the account deleted!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tUser is no longer in the database after got removed.{BColors.ENDC}")
class SaveBoard(TestCase):
    """Tests the API calls that is related to saving a game board."""

    def setUp(self):
        """Register a throwaway account and start an active game."""
        sleep(1)
        # short uuid prefix makes the username unique across runs
        self.user_info = str(uuid.uuid1()).split('-')[0]
        payload = {
            'user_name': self.user_info,
            'password1': 'pineapple',
            'password2': 'pineapple',
            'email': f"{self.user_info}@gmail.com",
        }
        resp = self.client.post('/profile_page/api/register', payload)
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed creating an account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass creating a user.{BColors.ENDC}")
        # Authentication token issued at registration
        self.token = resp.data['token']
        # start a fresh game and fetch its board
        self.game_id = self.client.get('/game_board/api/start_game/Easy/ID1,ID2,ID3/AVL').data['game_id']
        self.board = self.client.get(f'/game_board/api/board/{self.game_id}').data

    def tearDown(self):
        """Removes the testing user and game board from the database."""
        profile_db.remove_user(self.user_info)
        game_db.remove_game(self.game_id)

    def test_unauthorized_save(self):
        """Saving with a corrupted token must be rejected."""
        resp = self.client.post('/profile_page/api/save_board',
                                {'user_id': self.user_info,
                                 'game_id': self.game_id,
                                 'token': self.token + 'a'})
        self.assertEqual(resp.status_code, 401,
                         msg=f'{BColors.FAIL}\t[-]\tSaved game from unauthorized account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not allowing unauthorized account saving game.{BColors.ENDC}")

    def test_nonexisting_save(self):
        """Saving an unknown game id must fail server-side."""
        resp = self.client.post('/profile_page/api/save_board',
                                {'user_id': self.user_info,
                                 'game_id': self.game_id + 'a',
                                 'token': self.token})
        self.assertEqual(resp.status_code, 500,
                         msg=f'{BColors.FAIL}\t[-]\tSaved a game that does not exist!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not saving a game that does not exist.{BColors.ENDC}")

    def test_board_save(self):
        """A valid save succeeds and shows up in the user's saved games."""
        resp = self.client.post('/profile_page/api/save_board',
                                {'user_id': self.user_info,
                                 'game_id': self.game_id,
                                 'token': self.token})
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed saving a game!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass saving a game.{BColors.ENDC}")
        # the saved game id must appear in the user's saved-games listing
        saved = self.client.get(
            f'/profile_page/api/saved_boards/{self.user_info}/{self.token}').data['saved_games']
        game_ids = [entry['game_id'] for entry in saved]
        self.assertIn(self.game_id, game_ids,
                      msg=f'{BColors.FAIL}\t[-]\tGame ID was not in the saved games!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tFound the saved game ID in the user's profile.{BColors.ENDC}")
class Share(TestCase):
    """Tests the API calls that is related to sharing the game board."""

    def setUp(self):
        """Register two accounts, start a game, and save it to the first account."""
        sleep(1)
        # full uuids serve as unique throwaway usernames
        self.user_1_info = str(uuid.uuid1())
        self.user_2_info = str(uuid.uuid1())
        tokens = []
        for name in (self.user_1_info, self.user_2_info):
            payload = {
                'user_name': name,
                'password1': 'pineapple',
                'password2': 'pineapple',
                'email': f"{name}@gmail.com",
            }
            resp = self.client.post('/profile_page/api/register', payload)
            self.assertEqual(resp.status_code, 200,
                             msg=f'{BColors.FAIL}\t[-]\tFailed creating an account!{BColors.ENDC}')
            print(f"{BColors.OKGREEN}\t[+]\tPass creating a user.{BColors.ENDC}")
            tokens.append(resp.data['token'])
        # Authentication tokens for both accounts
        self.token_user_1, self.token_user_2 = tokens
        # start a fresh game
        self.game_id = self.client.get('/game_board/api/start_game/Easy/ID1,ID2,ID3/AVL').data['game_id']
        # save the game under user 1's profile
        save_resp = self.client.post('/profile_page/api/save_board',
                                     {'user_id': self.user_1_info,
                                      'game_id': self.game_id,
                                      'token': self.token_user_1})
        self.assertEqual(save_resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed saving a game!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass saving a game.{BColors.ENDC}")

    def tearDown(self):
        """Removes the testing user and game board from the database."""
        profile_db.remove_user(self.user_1_info)
        profile_db.remove_user(self.user_2_info)
        game_db.remove_game(self.game_id)

    def test_unauthorized_share(self):
        """Sharing with a corrupted token must be rejected."""
        resp = self.client.post('/profile_page/api/share',
                                {'source_user_id': self.user_1_info,
                                 'dest_user_id': self.user_2_info,
                                 'game_id': self.game_id,
                                 'token': self.token_user_1 + 'a'})
        self.assertEqual(resp.status_code, 401,
                         msg=f'{BColors.FAIL}\t[-]\tShared game from unauthorized account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not allowing unauthorized account sharing game.{BColors.ENDC}")

    def test_nonexisting_board_share(self):
        """Sharing a game that does not exist must fail server-side."""
        resp = self.client.post('/profile_page/api/share',
                                {'source_user_id': self.user_1_info,
                                 'dest_user_id': self.user_2_info,
                                 'game_id': self.game_id + 'a',
                                 'token': self.token_user_1})
        self.assertEqual(resp.status_code, 500,
                         msg=f'{BColors.FAIL}\t[-]\tShared game from that does not exist!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not sharing a game that does not exist.{BColors.ENDC}")

    def test_nonexisting_destination_user_share(self):
        """Sharing with a destination user that does not exist must fail server-side."""
        resp = self.client.post('/profile_page/api/share',
                                {'source_user_id': self.user_1_info,
                                 'dest_user_id': self.user_2_info + 'a',
                                 'game_id': self.game_id,
                                 'token': self.token_user_1})
        self.assertEqual(resp.status_code, 500,
                         msg=f'{BColors.FAIL}\t[-]\tShared game with a user that does not exist!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not sharing a game with a user that does not exist.{BColors.ENDC}")

    def test_share(self):
        """A valid share succeeds and the game appears in the destination user's profile."""
        resp = self.client.post('/profile_page/api/share',
                                {'source_user_id': self.user_1_info,
                                 'dest_user_id': self.user_2_info,
                                 'game_id': self.game_id,
                                 'token': self.token_user_1})
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed to share a game with a user!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass sharing the game.{BColors.ENDC}")
        # the shared game id must appear in user 2's saved games
        saved = self.client.get(
            f'/profile_page/api/saved_boards/{self.user_2_info}/{self.token_user_2}').data['saved_games']
        game_ids = [entry['game_id'] for entry in saved]
        self.assertIn(self.game_id, game_ids,
                      msg=f'{BColors.FAIL}\t[-]\tGame ID was not in the saved games of destination user!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tFound the saved game ID in the destination user's profile.{BColors.ENDC}")
class DeleteBoard(TestCase):
    """Tests the API calls that is related to deleting the saved game board."""

    def setUp(self):
        """Register an account, start a game, and save it to the user's profile."""
        sleep(1)
        # short uuid prefix makes the username unique across runs
        self.user_info = str(uuid.uuid1()).split('-')[0]
        payload = {
            'user_name': self.user_info,
            'password1': 'pineapple',
            'password2': 'pineapple',
            'email': f"{self.user_info}@gmail.com",
        }
        resp = self.client.post('/profile_page/api/register', payload)
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed creating an account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass creating a user.{BColors.ENDC}")
        # Authentication token issued at registration
        self.token = resp.data['token']
        # start a fresh game and save it under the user's profile
        self.game_id = self.client.get('/game_board/api/start_game/Easy/ID1,ID2,ID3/AVL').data['game_id']
        save_resp = self.client.post('/profile_page/api/save_board',
                                     {'user_id': self.user_info,
                                      'game_id': self.game_id,
                                      'token': self.token})
        self.assertEqual(save_resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed saving a game!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass saving a game.{BColors.ENDC}")

    def tearDown(self):
        """Removes the testing user and game board from the database."""
        profile_db.remove_user(self.user_info)
        game_db.remove_game(self.game_id)

    def test_unauthorized_delete(self):
        """Deleting a saved game with a corrupted token must be rejected."""
        resp = self.client.post('/profile_page/api/delete_board',
                                {'user_id': self.user_info,
                                 'game_id': self.game_id,
                                 'token': self.token + 'a'})
        self.assertEqual(resp.status_code, 401,
                         msg=f'{BColors.FAIL}\t[-]\tDeleted game from unauthorized account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not allowing unauthorized account delete game.{BColors.ENDC}")

    def test_delete(self):
        """Deleting a saved game with valid credentials must succeed."""
        resp = self.client.post('/profile_page/api/delete_board',
                                {'user_id': self.user_info,
                                 'game_id': self.game_id,
                                 'token': self.token})
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed deleting the game from user profile!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass deleting the saved game.{BColors.ENDC}")
class SavedBoards(TestCase):
    """Tests the API calls that is related to listing the saved games."""

    def setUp(self):
        """Register an account, start two games, and save both to the user's profile."""
        sleep(1)
        # short uuid prefix makes the username unique across runs
        self.user_info = str(uuid.uuid1()).split('-')[0]
        payload = {
            'user_name': self.user_info,
            'password1': 'pineapple',
            'password2': 'pineapple',
            'email': f"{self.user_info}@gmail.com",
        }
        resp = self.client.post('/profile_page/api/register', payload)
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed creating an account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass creating a user.{BColors.ENDC}")
        # Authentication token issued at registration
        self.token = resp.data['token']
        # start one Easy and one Hard game
        self.game_id_1 = self.client.get('/game_board/api/start_game/Easy/ID1,ID2,ID3/AVL').data['game_id']
        self.game_id_2 = self.client.get('/game_board/api/start_game/Hard/ID1,ID2,ID3/AVL').data['game_id']
        # save both games under the user's profile
        for gid in (self.game_id_1, self.game_id_2):
            save_resp = self.client.post('/profile_page/api/save_board',
                                         {'user_id': self.user_info,
                                          'game_id': gid,
                                          'token': self.token})
            self.assertEqual(save_resp.status_code, 200,
                             msg=f'{BColors.FAIL}\t[-]\tFailed saving a game!{BColors.ENDC}')
            print(f"{BColors.OKGREEN}\t[+]\tPass saving a game.{BColors.ENDC}")

    def tearDown(self):
        """Removes the testing user and game board from the database."""
        profile_db.remove_user(self.user_info)
        game_db.remove_game(self.game_id_1)
        game_db.remove_game(self.game_id_2)

    def test_unauthorized_list(self):
        """Listing saved games with a corrupted token must be rejected."""
        resp = self.client.get(
            f"/profile_page/api/saved_boards/{self.user_info}/{self.token + 'a'}")
        self.assertEqual(resp.status_code, 401,
                         msg=f'{BColors.FAIL}\t[-]\tListed the saved games from unauthorized account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not allowing listing games from unauthorized account.{BColors.ENDC}")

    def test_list(self):
        """Listing saved games must return exactly the two saved game ids."""
        resp = self.client.get(
            f'/profile_page/api/saved_boards/{self.user_info}/{self.token}')
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed listing the saved games!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass listing the saved games.{BColors.ENDC}")
        # both saved game ids must be present (order-independent comparison)
        game_ids = [entry['game_id'] for entry in resp.data['saved_games']]
        self.assertEqual({self.game_id_1, self.game_id_2}, set(game_ids),
                         msg=f'{BColors.FAIL}\t[-]\tGame ID was not in the saved games of destination user!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tBoth game IDs were in the user's profile.{BColors.ENDC}")
class LoadBoard(TestCase):
    """Tests the API calls that is related to loading saved games."""

    def setUp(self):
        """Create an account and active game. Then save the game to the user's profile."""
        sleep(1)
        # temporary user name (uuid prefix keeps it unique across runs)
        self.user_info = str(uuid.uuid1()).split('-')[0]
        post_data = {'user_name': self.user_info,
                     'password1': 'pineapple',
                     'password2': 'pineapple',
                     'email': self.user_info + "@gmail.com"}
        response_1 = self.client.post('/profile_page/api/register', post_data)
        self.assertEqual(response_1.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed creating an account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass creating a user.{BColors.ENDC}")
        # Authentication token
        self.token = response_1.data['token']
        # create a new game
        self.game_id = self.client.get('/game_board/api/start_game/Easy/ID1,ID2,ID3/AVL').data['game_id']
        # Save the game to the user's profile
        post_data_save = {'user_id': self.user_info,
                          'game_id': self.game_id,
                          'token': self.token}
        response_save = self.client.post('/profile_page/api/save_board', post_data_save)
        self.assertEqual(response_save.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed saving a game!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass saving a game.{BColors.ENDC}")
        # remove the active game since we can't have duplicate active games
        game_db.remove_game(self.game_id)

    def tearDown(self):
        """Removes the testing user and game board from the database."""
        # remove the user
        profile_db.remove_user(self.user_info)
        # remove the created game (loaded back into the active games by the test)
        game_db.remove_game(self.game_id)

    def test_unauthorized_load(self):
        """Attempts to load a saved game from an unauthorized account."""
        post_data = {'user_id': self.user_info,
                     'game_id': self.game_id,
                     'token': self.token + 'a'}
        response = self.client.post('/profile_page/api/load_board', post_data)
        self.assertEqual(response.status_code, 401,
                         msg=f'{BColors.FAIL}\t[-]\tLoaded game from unauthorized account!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass not allowing unauthorized account loading game.{BColors.ENDC}")

    def test_load_board(self):
        """Loads a saved game and checks that it becomes an active game."""
        # load the game board
        post_data = {'user_id': self.user_info,
                     'game_id': self.game_id,
                     'token': self.token}
        response = self.client.post('/profile_page/api/load_board', post_data)
        # FIX: assert the status code BEFORE touching response.data — the
        # original read response.data['game_id'] first, so a failed load
        # surfaced as an opaque KeyError instead of this assertion message.
        self.assertEqual(response.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed loading a game!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass loading a saved game board.{BColors.ENDC}")
        # NOTE(review): the original also asserted
        # response.data['game_id'] == loaded_game_id where loaded_game_id was
        # read from the same field — a tautology that could never fail, so it
        # was removed. The meaningful verification is the active-game fetch
        # below. TODO: confirm whether the load endpoint is expected to issue
        # a new game id (the original comment and print contradicted each other).
        loaded_game_id = response.data['game_id']
        # check if the loaded game is now in the active games
        active_game = self.client.get('/game_board/api/board/' + str(loaded_game_id))
        self.assertEqual(active_game.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tLoaded game is not in the active games!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass loading the game in to the active games.{BColors.ENDC}")
class AddRemoveFriend(TestCase):
    """Tests the API calls that is related to adding friend."""

    def setUp(self):
        """Register two throwaway accounts and keep their auth tokens."""
        sleep(1)
        # short uuid prefixes make the usernames unique across runs
        self.user_1_info = str(uuid.uuid1()).split('-')[0]
        self.user_2_info = str(uuid.uuid1()).split('-')[0]
        tokens = []
        for name in (self.user_1_info, self.user_2_info):
            payload = {
                'user_name': name,
                'password1': 'pineapple',
                'password2': 'pineapple',
                'email': f"{name}@gmail.com",
            }
            resp = self.client.post('/profile_page/api/register', payload)
            self.assertEqual(resp.status_code, 200,
                             msg=f'{BColors.FAIL}\t[-]\tFailed creating an account!{BColors.ENDC}')
            print(f"{BColors.OKGREEN}\t[+]\tPass creating a user.{BColors.ENDC}")
            tokens.append(resp.data['token'])
        # Authentication tokens for both accounts
        self.token_user_1, self.token_user_2 = tokens

    def tearDown(self):
        """Removes the testing users."""
        profile_db.remove_user(self.user_1_info)
        profile_db.remove_user(self.user_2_info)

    def _send_friend_request(self):
        """User 1 sends a friend request to user 2 and the call must succeed."""
        resp = self.client.post('/profile_page/api/add_friend',
                                {'source_user_id': self.user_1_info,
                                 'dest_user_id': self.user_2_info,
                                 'token': self.token_user_1})
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed adding the friend!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass adding the friend.{BColors.ENDC}")

    def _answer_friend_request(self, accept):
        """User 2 answers user 1's pending request with 'yes' or 'no'."""
        return self.client.post('/profile_page/api/accept_decline_friend',
                                {'source_user_id': self.user_2_info,
                                 'dest_user_id': self.user_1_info,
                                 'accept': accept,
                                 'token': self.token_user_2})

    def test_decline_friend(self):
        """Attempts to decline the friend request after adding."""
        self._send_friend_request()
        # TODO: Check that destination user has the pending friend request in the list (pending database).
        resp = self._answer_friend_request('no')
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed declining the friend request!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass declining the friend request.{BColors.ENDC}")
        # TODO: Make sure the user does not have that friend in its profile (pending database).
        # TODO: Make sure the user add request is no longer in the user's profile (pending database).

    def test_accept_friend(self):
        """Attempts to accept the friend request after adding."""
        self._send_friend_request()
        resp = self._answer_friend_request('yes')
        self.assertEqual(resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed accepting the friend request!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass accepting the friend request.{BColors.ENDC}")
        # TODO: Make sure the added user is in both of the user's profile (pending database).
        # TODO: Make sure added user is not in the pending friend request list (pending database).

    def test_remove_friend(self):
        """Attempts to remove the friend after adding."""
        self._send_friend_request()
        accept_resp = self._answer_friend_request('yes')
        self.assertEqual(accept_resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed accepting the friend request!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass accepting the friend request.{BColors.ENDC}")
        # Remove the friend from the profile
        remove_resp = self.client.post('/profile_page/api/remove_friend',
                                       {'source_user_id': self.user_1_info,
                                        'dest_user_id': self.user_2_info,
                                        'token': self.token_user_1})
        self.assertEqual(remove_resp.status_code, 200,
                         msg=f'{BColors.FAIL}\t[-]\tFailed removing the friend!{BColors.ENDC}')
        print(f"{BColors.OKGREEN}\t[+]\tPass removing the friend from profile.{BColors.ENDC}")
        # TODO: Make sure both of the profiles no longer has each other under friends.
| 45.885157
| 120
| 0.601766
| 5,330
| 42,352
| 4.633959
| 0.055159
| 0.040164
| 0.034981
| 0.037653
| 0.852626
| 0.824284
| 0.785457
| 0.769869
| 0.731406
| 0.684036
| 0
| 0.014184
| 0.265891
| 42,352
| 922
| 121
| 45.934924
| 0.780226
| 0.13435
| 0
| 0.660211
| 0
| 0.022887
| 0.35429
| 0.183034
| 0
| 0
| 0
| 0.001085
| 0.109155
| 1
| 0.089789
| false
| 0.181338
| 0.008803
| 0
| 0.137324
| 0.109155
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
a3b23f360e0455db0e903b5fb0d64e33da2ba0a6
| 126
|
py
|
Python
|
ai-safety-debate/agent/__init__.py
|
david-lindner/ai-safety-debate
|
f291a0873548cade5b3f37ee7c57c2430f333e9c
|
[
"Apache-2.0"
] | 2
|
2019-10-09T12:14:50.000Z
|
2020-02-13T13:27:48.000Z
|
ai-safety-debate/agent/__init__.py
|
david-lindner/ai-safety-debate
|
f291a0873548cade5b3f37ee7c57c2430f333e9c
|
[
"Apache-2.0"
] | 17
|
2019-05-01T07:55:57.000Z
|
2019-06-07T15:11:50.000Z
|
ai-safety-debate/agent/__init__.py
|
david-lindner/ai-safety-debate
|
f291a0873548cade5b3f37ee7c57c2430f333e9c
|
[
"Apache-2.0"
] | null | null | null |
from .debate_agent import DebateAgent
from .debate_agent import DebatePlayers
from .debate_classifier import DebateClassifier
| 31.5
| 47
| 0.880952
| 15
| 126
| 7.2
| 0.533333
| 0.277778
| 0.277778
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 126
| 3
| 48
| 42
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a3cbd9c0842a8a1b97379ff21c62552a1d98710a
| 20,576
|
py
|
Python
|
tests/policy/test_offline_continuous.py
|
han20192019/newRL
|
53598edab284b4364d127ec5662137de3f9c1206
|
[
"Apache-2.0"
] | 387
|
2020-07-19T14:56:36.000Z
|
2022-03-29T15:25:21.000Z
|
tests/policy/test_offline_continuous.py
|
han20192019/newRL
|
53598edab284b4364d127ec5662137de3f9c1206
|
[
"Apache-2.0"
] | 89
|
2020-10-04T17:04:42.000Z
|
2022-03-27T10:43:15.000Z
|
tests/policy/test_offline_continuous.py
|
han20192019/newRL
|
53598edab284b4364d127ec5662137de3f9c1206
|
[
"Apache-2.0"
] | 53
|
2020-08-18T09:52:22.000Z
|
2022-03-30T23:16:13.000Z
|
import numpy as np
import pytest
import torch
from obp.policy.offline_continuous import ContinuousNNPolicyLearner
# dim_context, pg_method, bandwidth, output_space, hidden_layer_size, activation, solver, alpha,
# batch_size, learning_rate_init, max_iter, shuffle, random_state, tol, momentum, nesterovs_momentum,
# early_stopping, validation_fraction, beta_1, beta_2, epsilon, n_iter_no_change, q_func_estimator_hyperparams, description
# Field order of the parametrize tuples (description appended last); matches
# the comment header above this block.
_NN_POLICY_LEARNER_FIELDS = (
    "dim_context",
    "pg_method",
    "bandwidth",
    "output_space",
    "hidden_layer_size",
    "activation",
    "solver",
    "alpha",
    "batch_size",
    "learning_rate_init",
    "max_iter",
    "shuffle",
    "random_state",
    "tol",
    "momentum",
    "nesterovs_momentum",
    "early_stopping",
    "validation_fraction",
    "beta_1",
    "beta_2",
    "epsilon",
    "n_iter_no_change",
    "q_func_estimator_hyperparams",
)

# Known-good baseline arguments; every invalid case below perturbs one
# (occasionally two) of these fields, eliminating ~600 lines of duplicated
# tuples with a single source of truth.
_NN_POLICY_LEARNER_DEFAULTS = dict(
    zip(
        _NN_POLICY_LEARNER_FIELDS,
        (
            10,
            "ipw",
            0.1,
            (-10, 10),
            (100, 50, 100),
            "relu",
            "adam",
            0.001,
            "auto",
            0.0001,
            200,
            True,
            123,
            1e-4,
            0.9,
            True,
            True,
            0.1,
            0.9,
            0.999,
            1e-8,
            10,
            None,
        ),
    )
)


def _invalid_init_case(description, **overrides):
    """Return one parametrize tuple: the defaults with *overrides* applied,
    followed by the expected error description."""
    args = dict(_NN_POLICY_LEARNER_DEFAULTS, **overrides)
    return tuple(args[name] for name in _NN_POLICY_LEARNER_FIELDS) + (description,)


invalid_input_of_nn_policy_learner_init = [
    _invalid_init_case("`dim_context`= 0, must be >= 1", dim_context=0),
    # NOTE: the original case also set bandwidth=2 alongside the bad pg_method
    _invalid_init_case(
        "pg_method must be one of 'dgp', 'ipw', or 'dr'", pg_method="None", bandwidth=2
    ),
    _invalid_init_case("`bandwidth`= -0.1, must be > 0", bandwidth=-0.1),
    _invalid_init_case(
        "output_space must be tuple of integers or floats", output_space=("", "")
    ),
    _invalid_init_case(
        "hidden_layer_size must be tuple of positive integers",
        hidden_layer_size=(100, ""),
    ),
    _invalid_init_case(
        "activation must be one of 'identity', 'logistic', 'tanh', 'relu', or 'elu'",
        activation="None",
    ),
    _invalid_init_case(
        "solver must be one of 'adam', 'adagrad', or 'sgd'", solver="None"
    ),
    _invalid_init_case("`alpha`= -1.0, must be >= 0.0", alpha=-1.0),
    _invalid_init_case(
        "batch_size must be a positive integer or 'auto'", batch_size=0
    ),
    _invalid_init_case(
        "`learning_rate_init`= 0.0, must be > 0.0", learning_rate_init=0.0
    ),
    _invalid_init_case("`max_iter`= 0, must be >= 1", max_iter=0),
    _invalid_init_case("shuffle must be a bool", shuffle=None),
    _invalid_init_case("'' cannot be used to seed", random_state=""),
    _invalid_init_case("`tol`= -1.0, must be > 0.0", tol=-1.0),
    _invalid_init_case("`momentum`= 2.0, must be <= 1.0", momentum=2.0),
    _invalid_init_case("nesterovs_momentum must be a bool", nesterovs_momentum=""),
    _invalid_init_case("early_stopping must be a bool", early_stopping=None),
    # lbfgs with early stopping; description carries the original trailing comma
    _invalid_init_case(
        "solver must be one of 'adam', 'adagrad', or 'sgd',", solver="lbfgs"
    ),
    _invalid_init_case(
        "`validation_fraction`= 2.0, must be <= 1.0", validation_fraction=2.0
    ),
    _invalid_init_case("`beta_1`= 2.0, must be <= 1.0", beta_1=2.0),
    _invalid_init_case("`beta_2`= 2.0, must be <= 1.0", beta_2=2.0),
    _invalid_init_case("`epsilon`= -1.0, must be >= 0.0", epsilon=-1.0),
    _invalid_init_case("`n_iter_no_change`= 0, must be >= 1", n_iter_no_change=0),
    # description carries the original trailing comma
    _invalid_init_case(
        "q_func_estimator_hyperparams must be a dict,", q_func_estimator_hyperparams=""
    ),
]
# Hyperparameter tuples accepted by ContinuousNNPolicyLearner.__init__.
# Field order matches the parametrize signature used below:
# (dim_context, pg_method, bandwidth, output_space, hidden_layer_size,
#  activation, solver, alpha, batch_size, learning_rate_init, max_iter,
#  shuffle, random_state, tol, momentum, nesterovs_momentum, early_stopping,
#  validation_fraction, beta_1, beta_2, epsilon, n_iter_no_change,
#  q_func_estimator_hyperparams, description)
valid_input_of_nn_policy_learner_init = [
    (
        10,
        "ipw",
        0.1,
        (-10, 10),
        (100, 50, 100),
        "relu",
        "adam",
        0.001,
        "auto",
        0.0001,
        200,
        True,
        123,
        1e-4,
        0.9,
        True,
        True,
        0.1,
        0.9,
        0.999,
        1e-8,
        10,
        None,
        "valid input",
    ),
    (
        10,
        "dpg",
        None,  # bandwidth is unused by the "dpg" policy-gradient method
        (-10, 10),
        (100, 50, 100),
        "relu",
        "adam",
        0.001,
        "auto",
        0.0001,
        200,
        True,
        123,
        1e-4,
        0.9,
        True,
        True,
        0.1,
        0.9,
        0.999,
        1e-8,
        10,
        None,
        "valid input",
    ),
    (
        10,
        "ipw",
        0.1,
        (-10, 10),
        (100, 50, 100),
        "logistic",
        "sgd",
        0.001,
        50,  # an explicit integer batch size is also valid
        0.0001,
        200,
        True,
        None,  # random_state may be omitted
        1e-4,
        0.9,
        True,
        True,
        0.1,
        0.9,
        0.999,
        1e-8,
        10,
        {},
        "valid input",
    ),
]
@pytest.mark.parametrize(
    "dim_context, pg_method, bandwidth, output_space, hidden_layer_size, activation, solver, alpha, batch_size, learning_rate_init, max_iter, shuffle, random_state, tol, momentum, nesterovs_momentum, early_stopping, validation_fraction, beta_1, beta_2, epsilon, n_iter_no_change, q_func_estimator_hyperparams, description",
    invalid_input_of_nn_policy_learner_init,
)
def test_nn_policy_learner_init_using_invalid_inputs(
    dim_context,
    pg_method,
    bandwidth,
    output_space,
    hidden_layer_size,
    activation,
    solver,
    alpha,
    batch_size,
    learning_rate_init,
    max_iter,
    shuffle,
    random_state,
    tol,
    momentum,
    nesterovs_momentum,
    early_stopping,
    validation_fraction,
    beta_1,
    beta_2,
    epsilon,
    n_iter_no_change,
    q_func_estimator_hyperparams,
    description,
):
    """Each invalid hyperparameter combination must raise ValueError."""
    init_kwargs = dict(
        dim_context=dim_context,
        pg_method=pg_method,
        bandwidth=bandwidth,
        output_space=output_space,
        hidden_layer_size=hidden_layer_size,
        activation=activation,
        solver=solver,
        alpha=alpha,
        batch_size=batch_size,
        learning_rate_init=learning_rate_init,
        max_iter=max_iter,
        shuffle=shuffle,
        random_state=random_state,
        tol=tol,
        momentum=momentum,
        nesterovs_momentum=nesterovs_momentum,
        early_stopping=early_stopping,
        validation_fraction=validation_fraction,
        beta_1=beta_1,
        beta_2=beta_2,
        epsilon=epsilon,
        n_iter_no_change=n_iter_no_change,
        q_func_estimator_hyperparams=q_func_estimator_hyperparams,
    )
    # ``description`` doubles as a regex prefix of the expected error message.
    with pytest.raises(ValueError, match=f"{description}*"):
        ContinuousNNPolicyLearner(**init_kwargs)
@pytest.mark.parametrize(
    "dim_context, pg_method, bandwidth, output_space, hidden_layer_size, activation, solver, alpha, batch_size, learning_rate_init, max_iter, shuffle, random_state, tol, momentum, nesterovs_momentum, early_stopping, validation_fraction, beta_1, beta_2, epsilon, n_iter_no_change, q_func_estimator_hyperparams, description",
    valid_input_of_nn_policy_learner_init,
)
def test_nn_policy_learner_init_using_valid_inputs(
    dim_context,
    pg_method,
    bandwidth,
    output_space,
    hidden_layer_size,
    activation,
    solver,
    alpha,
    batch_size,
    learning_rate_init,
    max_iter,
    shuffle,
    random_state,
    tol,
    momentum,
    nesterovs_momentum,
    early_stopping,
    validation_fraction,
    beta_1,
    beta_2,
    epsilon,
    n_iter_no_change,
    q_func_estimator_hyperparams,
    description,
):
    """Every valid hyperparameter combination must construct a learner."""
    init_kwargs = dict(
        dim_context=dim_context,
        pg_method=pg_method,
        bandwidth=bandwidth,
        output_space=output_space,
        hidden_layer_size=hidden_layer_size,
        activation=activation,
        solver=solver,
        alpha=alpha,
        batch_size=batch_size,
        learning_rate_init=learning_rate_init,
        max_iter=max_iter,
        shuffle=shuffle,
        random_state=random_state,
        tol=tol,
        momentum=momentum,
        nesterovs_momentum=nesterovs_momentum,
        early_stopping=early_stopping,
        validation_fraction=validation_fraction,
        beta_1=beta_1,
        beta_2=beta_2,
        epsilon=epsilon,
        n_iter_no_change=n_iter_no_change,
        q_func_estimator_hyperparams=q_func_estimator_hyperparams,
    )
    learner = ContinuousNNPolicyLearner(**init_kwargs)
    assert isinstance(learner, ContinuousNNPolicyLearner)
def test_nn_policy_learner_create_train_data_for_opl():
    """_create_train_data_for_opl returns a validation loader only when
    early stopping is enabled; otherwise the second element is None."""
    n_rounds = 100
    contexts = np.ones((n_rounds, 2), dtype=np.int32)
    actions = np.zeros(n_rounds, dtype=np.int32)
    rewards = np.ones((n_rounds,), dtype=np.float32)
    pscores = np.full(n_rounds, 0.5, dtype=np.float32)
    # Default learner: no early stopping, so no validation split is made.
    plain_learner = ContinuousNNPolicyLearner(dim_context=2, pg_method="dpg")
    train_loader, val_loader = plain_learner._create_train_data_for_opl(
        context=contexts,
        action=actions,
        reward=rewards,
        pscore=pscores,
    )
    assert isinstance(train_loader, torch.utils.data.DataLoader)
    assert val_loader is None
    # With early stopping a held-out validation loader must also be built.
    es_learner = ContinuousNNPolicyLearner(
        dim_context=2,
        pg_method="dpg",
        early_stopping=True,
    )
    train_loader, val_loader = es_learner._create_train_data_for_opl(
        context=contexts,
        action=actions,
        reward=rewards,
        pscore=pscores,
    )
    assert isinstance(train_loader, torch.utils.data.DataLoader)
    assert isinstance(val_loader, torch.utils.data.DataLoader)
# Invalid inputs for ContinuousNNPolicyLearner.fit; tuple order is
# (context, action, reward, pscore, description).  The trailing comment on
# an element marks the offending argument for that case, and ``description``
# is a regex prefix of the expected ValueError message.
invalid_input_of_nn_policy_learner_fit = [
    (
        5,  # invalid: not an ndarray
        np.ones(5),
        np.ones(5),
        np.ones(5) * 0.5,
        "context must be 2D array",
    ),
    (
        np.ones(5),  # invalid: 1D, not 2D
        np.ones(5),
        np.ones(5),
        np.ones(5) * 0.5,
        "context must be 2D array",
    ),
    (
        np.ones((5, 2)),
        5,  # invalid: not an ndarray
        np.ones(5),
        np.ones(5) * 0.5,
        "action_by_behavior_policy must be 1D array",
    ),
    (
        np.ones((5, 2)),
        np.ones((5, 2)),  # invalid: 2D, not 1D
        np.ones(5),
        np.ones(5) * 0.5,
        "action_by_behavior_policy must be 1D array",
    ),
    (
        np.ones((5, 2)),
        np.ones(5),
        5,  # invalid: not an ndarray
        np.ones(5) * 0.5,
        "reward must be 1D array",
    ),
    (
        np.ones((5, 2)),
        np.ones(5),
        np.ones((5, 2)),  # invalid: 2D, not 1D
        np.ones(5) * 0.5,
        "reward must be 1D array",
    ),
    (
        np.ones((5, 2)),
        np.ones(5),
        np.ones(5),
        0.5,  # invalid: scalar, not an ndarray
        "pscore must be 1D array",
    ),
    (
        np.ones((5, 2)),
        np.ones(5),
        np.ones(5),
        np.ones((5, 2)) * 0.5,  # invalid: 2D, not 1D
        "pscore must be 1D array",
    ),
    (
        np.ones((4, 2)),  # invalid: length differs from the other arrays
        np.ones(5),
        np.ones(5),
        np.ones(5) * 0.5,
        "Expected `context.shape[0]",
    ),
    (
        np.ones((5, 2)),
        np.ones(4),  # invalid: length differs from the other arrays
        np.ones(5),
        np.ones(5) * 0.5,
        "Expected `context.shape[0]",
    ),
    (
        np.ones((5, 2)),
        np.ones(5),
        np.ones(4),  # invalid: length differs from the other arrays
        np.ones(5) * 0.5,
        "Expected `context.shape[0]",
    ),
    (
        np.ones((5, 2)),
        np.ones(5),
        np.ones(5),
        np.arange(5) * 0.1,  # invalid: contains a zero propensity score
        "pscore must be positive",
    ),
    (
        np.ones((5, 3)),  # invalid: second dim must equal dim_context (= 2)
        np.ones(5),
        np.ones(5),
        np.ones(5) * 0.5,
        "Expected `context.shape[1]",
    ),
]
# Well-formed fit() inputs: (context, action, reward, pscore, description).
valid_input_of_nn_policy_learner_fit = [
    (
        np.ones((5, 2)),
        np.ones(5),
        np.ones(5),
        np.ones(5) * 0.5,
        "valid input (pscore is given)",
    ),
    (
        np.ones((5, 2)),
        np.ones(5),
        np.ones(5),
        None,  # pscore is optional and may be omitted
        "valid input (pscore is not given)",
    ),
]
@pytest.mark.parametrize(
    "context, action, reward, pscore, description",
    invalid_input_of_nn_policy_learner_fit,
)
def test_nn_policy_learner_fit_using_invalid_inputs(
    context, action, reward, pscore, description
):
    """fit() must reject malformed logged bandit feedback with ValueError."""
    with pytest.raises(ValueError, match=f"{description}*"):
        # Construction uses valid defaults; the error comes from fit().
        learner = ContinuousNNPolicyLearner(dim_context=2, pg_method="dpg")
        learner.fit(
            context=context,
            action=action,
            reward=reward,
            pscore=pscore,
        )
@pytest.mark.parametrize(
    "context, action, reward, pscore, description",
    valid_input_of_nn_policy_learner_fit,
)
def test_nn_policy_learner_fit_using_valid_inputs(
    context, action, reward, pscore, description
):
    """fit() must complete without raising on well-formed inputs."""
    learner = ContinuousNNPolicyLearner(dim_context=2, pg_method="dpg")
    learner.fit(
        context=context,
        action=action,
        reward=reward,
        pscore=pscore,
    )
def test_nn_policy_learner_predict():
    """Check input validation and the output contract of predict().

    Predicted actions must be a 1D array with one entry per context row,
    and every value must lie inside the configured ``output_space``.
    """
    # synthetic data
    context = np.ones((5, 2))
    action = np.ones(5)
    reward = np.ones(5)
    # set parameters
    dim_context = 2
    pg_method = "dpg"
    output_space = (-10, 10)
    learner = ContinuousNNPolicyLearner(
        dim_context=dim_context, pg_method=pg_method, output_space=output_space
    )
    learner.fit(
        context=context,
        action=action,
        reward=reward,
    )
    # shape error
    with pytest.raises(ValueError, match="context must be 2D array"):
        learner.predict(context=np.ones(5))
    with pytest.raises(ValueError, match="context must be 2D array"):
        learner.predict(context="np.ones(5)")
    # inconsistency between dim_context and context
    with pytest.raises(ValueError, match="Expected `context.shape[1]*"):
        learner.predict(context=np.ones((5, 3)))
    # check output shape
    predicted_actions = learner.predict(context=context)
    assert predicted_actions.shape[0] == context.shape[0]
    assert predicted_actions.ndim == 1
    # Bug fix: the original joined the two bound checks with ``or``, which is
    # satisfied whenever either bound holds and therefore never failed.  Both
    # bounds must hold for the predictions to be inside output_space.
    assert np.all(output_space[0] <= predicted_actions) and np.all(
        predicted_actions <= output_space[1]
    )
| 19.247895
| 323
| 0.431425
| 2,314
| 20,576
| 3.680207
| 0.070441
| 0.043682
| 0.046853
| 0.026421
| 0.833725
| 0.817168
| 0.801433
| 0.780531
| 0.749765
| 0.741545
| 0
| 0.127416
| 0.439298
| 20,576
| 1,068
| 324
| 19.265918
| 0.610731
| 0.024203
| 0
| 0.849222
| 0
| 0.002918
| 0.129519
| 0.008937
| 0
| 0
| 0
| 0
| 0.007782
| 1
| 0.005837
| false
| 0
| 0.003891
| 0
| 0.009728
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3d0185d6569ae6a559ba067452ab1da9218887e
| 13,235
|
py
|
Python
|
tests/test_docstring.py
|
RomaKoks/collie_recs
|
bc8979c8dbf68deefb030336d50f07f788cf1667
|
[
"BSD-3-Clause"
] | 70
|
2021-04-13T20:13:35.000Z
|
2021-07-08T03:01:29.000Z
|
tests/test_docstring.py
|
RomaKoks/collie_recs
|
bc8979c8dbf68deefb030336d50f07f788cf1667
|
[
"BSD-3-Clause"
] | 18
|
2021-07-13T22:06:11.000Z
|
2022-01-27T16:27:37.000Z
|
tests/test_docstring.py
|
RomaKoks/collie_recs
|
bc8979c8dbf68deefb030336d50f07f788cf1667
|
[
"BSD-3-Clause"
] | 11
|
2021-07-14T04:58:49.000Z
|
2022-03-05T00:19:22.000Z
|
from collie.utils import merge_docstrings
# START: test class definitions
# NOTE: we include classes here since pytest does not accept classes as fixtures
class BaseClass:
    # Fixture: this docstring is DATA asserted on by the merge_docstrings
    # tests below — do not reword, reflow, or re-indent it.
    """
    This is the short description.
    This is a longer description. It contains many lines.
    With line breaks, like this.
    You can also have new paragraphs!
    NOTE: This is an important note!
    Parameters
    ----------
    arg1: str
        The first argument
    arg2: int
        This argument's description is longer.
        See how it is on a new line:
        * Even with a bullet list now!
    arg3: np.array
    **kwargs
    References
    ----------
    arg8: list
    arg9: int
        No description above, and that is okay!
    """
    def __init__(self, arg1, arg2, arg3, **kwargs):
        # Only the signature matters; merge_docstrings introspects it.
        pass
class ChildClass(BaseClass):
    # Fixture: docstring text is test data — do not edit it.
    """
    This is the short description for the child.
    This is a longer description for the child. It also contains many lines.
    With line breaks, like this.
    You can also have new paragraphs!
    NOTE: This is an important note!
    Look, a new line of documentation after the note!
    Parameters
    ----------
    arg1: str
        The first argument
    arg4: int
        An important argument!
    References
    ----------
    arg8: list
    arg9: int
        No description above, and that is okay!
    arg10: str
        This one is new.
    """
    def __init__(self, arg1, arg2, arg3, arg4):
        # Signature only; merge_docstrings reads it via introspection.
        pass
class ChildClassWithArgs(BaseClass):
    # Fixture: like ChildClass but with a documented *args entry.
    # Docstring text is test data — do not edit it.
    """
    This is the short description for the child.
    This is a longer description for the child. It also contains many lines.
    With line breaks, like this.
    You can also have new paragraphs!
    NOTE: This is an important note!
    Look, a new line of documentation after the note!
    Parameters
    ----------
    arg1: str
        The first argument
    arg4: int
        An important argument!
    *args: arguments
        A description for these args here.
    References
    ----------
    arg8: list
    arg9: int
        No description above, and that is okay!
    arg10: str
        This one is new.
    """
    def __init__(self, arg1, arg2, arg3, arg4, *args):
        pass
class ChildClassWithKwargs(BaseClass):
    # Fixture: like ChildClass but with a documented **kwargs entry.
    # Docstring text is test data — do not edit it.
    """
    This is the short description for the child.
    This is a longer description for the child. It also contains many lines.
    With line breaks, like this.
    You can also have new paragraphs!
    NOTE: This is an important note!
    Look, a new line of documentation after the note!
    Parameters
    ----------
    arg1: str
        The first argument
    arg4: int
        An important argument!
    **kwargs: keyword argument
        Additional keyword arguments to pass into ``BaseClass``
    References
    ----------
    arg8: list
    arg9: int
        No description above, and that is okay!
    arg10: str
        This one is new.
    """
    def __init__(self, arg1, arg2, arg3, arg4, **kwargs):
        pass
class ChildClassWithArgsAndKwargs(BaseClass):
    # Fixture: *args undocumented, **kwargs documented.
    # Docstring text is test data — do not edit it.
    """
    This is the short description for the child.
    This is a longer description for the child. It also contains many lines.
    With line breaks, like this.
    You can also have new paragraphs!
    NOTE: This is an important note!
    Look, a new line of documentation after the note!
    Parameters
    ----------
    arg1: str
        The first argument
    arg4: int
        An important argument!
    *args: arguments
    **kwargs: keyword argument
        Additional keyword arguments to pass into ``BaseClass``
    References
    ----------
    arg8: list
    arg9: int
        No description above, and that is okay!
    arg10: str
        This one is new.
    """
    def __init__(self, arg1, arg2, arg3, arg4, *args, **kwargs):
        pass
class ChildClassNoParamaters(BaseClass):
    # Fixture: exercises merging when the child has no Parameters section.
    # Docstring text is test data — do not edit it.
    """
    No ``Parameters`` section at all here!
    References
    ----------
    arg8
    """
    def __init__(self):
        pass
class ChildClassParamatersOnly(BaseClass):
    # Fixture: docstring ends right after the Parameters section.
    # Docstring text is test data — do not edit it.
    """
    Note that nothing is after the ``Parameters`` section here.
    Parameters
    ----------
    arg1: str
        The first argument
    arg4: int
        An important argument!
    *args: arguments
    **kwargs: keyword argument
        Additional keyword arguments to pass into ``BaseClass``
    """
    def __init__(self, arg1, arg2, arg3, arg4, *args, **kwargs):
        pass
class ChildClassExtraParamatersNoDoc(BaseClass):
    # Fixture: __init__ takes an ``extra`` parameter that the docstring
    # deliberately does not document.  Docstring text is test data.
    """
    Note that nothing is after the ``Parameters`` section here.
    Parameters
    ----------
    arg1: str
        The first argument
    arg4: int
        An important argument!
    *args: arguments
    **kwargs: keyword argument
        Additional keyword arguments to pass into ``BaseClass``
    """
    def __init__(self, arg1, arg2, arg3, arg4, extra, *args, **kwargs):
        pass
class ChildClassWithTwoExtraSections(BaseClass):
    # Fixture: has both References and Notes sections after Parameters.
    # Docstring text is test data — do not edit it.
    """
    This is the short description for the child.
    This is a longer description for the child. It also contains many lines.
    With line breaks, like this.
    You can also have new paragraphs!
    NOTE: This is an important note!
    Look, a new line of documentation after the note!
    Parameters
    ----------
    arg1: str
        The first argument
    arg4: int
        An important argument!
    *args: arguments
    **kwargs: keyword argument
        Additional keyword arguments to pass into ``BaseClass``
    References
    ----------
    arg8: list
    arg9: int
        No description above, and that is okay!
    arg10: str
        This one is new.
    Notes
    -----
    This is a note. The above ``References`` section used to say ``Returns``, but classes do not
    return anything and I did not feel inclined to change the description.
    """
    def __init__(self, arg1, arg2, arg3, arg4, *args, **kwargs):
        pass
# START: tests
def test_merge_docstrings():
    """Child docstring gains the parent's ``arg2``/``arg3`` parameter docs."""
    expected = """
    This is the short description for the child.
    This is a longer description for the child. It also contains many lines.
    With line breaks, like this.
    You can also have new paragraphs!
    NOTE: This is an important note!
    Look, a new line of documentation after the note!
    Parameters
    ----------
    arg1: str
        The first argument
    arg2: int
        This argument's description is longer.
        See how it is on a new line:
        * Even with a bullet list now!
    arg3: np.array
    arg4: int
        An important argument!
    References
    ----------
    arg8: list
    arg9: int
        No description above, and that is okay!
    arg10: str
        This one is new.
    """
    actual = merge_docstrings(BaseClass, ChildClass.__doc__, ChildClass.__init__)
    # Bug fix: removed leftover debug ``print(expected)`` / ``print(actual)``
    # calls — on failure pytest already renders a full diff of the strings,
    # so the unconditional output only polluted the test log.
    assert actual == expected
def test_merge_docstrings_with_args():
    # Parent params arg2/arg3 are merged in; the child's documented *args
    # entry must be preserved at the end of the Parameters section.
    expected = """
    This is the short description for the child.
    This is a longer description for the child. It also contains many lines.
    With line breaks, like this.
    You can also have new paragraphs!
    NOTE: This is an important note!
    Look, a new line of documentation after the note!
    Parameters
    ----------
    arg1: str
        The first argument
    arg2: int
        This argument's description is longer.
        See how it is on a new line:
        * Even with a bullet list now!
    arg3: np.array
    arg4: int
        An important argument!
    *args: arguments
        A description for these args here.
    References
    ----------
    arg8: list
    arg9: int
        No description above, and that is okay!
    arg10: str
        This one is new.
    """
    actual = merge_docstrings(BaseClass, ChildClassWithArgs.__doc__, ChildClassWithArgs.__init__)
    assert actual == expected
def test_merge_docstrings_with_kwargs():
    # Parent params arg2/arg3 are merged in; the child's documented **kwargs
    # entry must be preserved at the end of the Parameters section.
    expected = """
    This is the short description for the child.
    This is a longer description for the child. It also contains many lines.
    With line breaks, like this.
    You can also have new paragraphs!
    NOTE: This is an important note!
    Look, a new line of documentation after the note!
    Parameters
    ----------
    arg1: str
        The first argument
    arg2: int
        This argument's description is longer.
        See how it is on a new line:
        * Even with a bullet list now!
    arg3: np.array
    arg4: int
        An important argument!
    **kwargs: keyword argument
        Additional keyword arguments to pass into ``BaseClass``
    References
    ----------
    arg8: list
    arg9: int
        No description above, and that is okay!
    arg10: str
        This one is new.
    """
    actual = merge_docstrings(BaseClass,
                              ChildClassWithKwargs.__doc__,
                              ChildClassWithKwargs.__init__)
    assert actual == expected
def test_merge_docstrings_with_args_and_kwargs():
    # Both an undocumented *args entry and a documented **kwargs entry must
    # survive the merge, after the parent's arg2/arg3 docs are inserted.
    expected = """
    This is the short description for the child.
    This is a longer description for the child. It also contains many lines.
    With line breaks, like this.
    You can also have new paragraphs!
    NOTE: This is an important note!
    Look, a new line of documentation after the note!
    Parameters
    ----------
    arg1: str
        The first argument
    arg2: int
        This argument's description is longer.
        See how it is on a new line:
        * Even with a bullet list now!
    arg3: np.array
    arg4: int
        An important argument!
    *args: arguments
    **kwargs: keyword argument
        Additional keyword arguments to pass into ``BaseClass``
    References
    ----------
    arg8: list
    arg9: int
        No description above, and that is okay!
    arg10: str
        This one is new.
    """
    actual = merge_docstrings(BaseClass,
                              ChildClassWithArgsAndKwargs.__doc__,
                              ChildClassWithArgsAndKwargs.__init__)
    assert actual == expected
def test_merge_docstrings_no_paramaters_section():
    # A child without a Parameters section must pass through unchanged.
    expected = """
    No ``Parameters`` section at all here!
    References
    ----------
    arg8
    """
    actual = merge_docstrings(BaseClass,
                              ChildClassNoParamaters.__doc__,
                              ChildClassNoParamaters.__init__)
    assert actual == expected
def test_merge_docstrings_parameters_section_nothing_after():
    # The merge must work when the Parameters section ends the docstring.
    expected = """
    Note that nothing is after the ``Parameters`` section here.
    Parameters
    ----------
    arg1: str
        The first argument
    arg2: int
        This argument's description is longer.
        See how it is on a new line:
        * Even with a bullet list now!
    arg3: np.array
    arg4: int
        An important argument!
    *args: arguments
    **kwargs: keyword argument
        Additional keyword arguments to pass into ``BaseClass``
    """
    actual = merge_docstrings(BaseClass,
                              ChildClassParamatersOnly.__doc__,
                              ChildClassParamatersOnly.__init__)
    assert actual == expected
def test_merge_docstrings_extra_parameter_included_with_no_documentation():
    # The undocumented ``extra`` __init__ parameter must NOT be invented in
    # the merged output — expected matches ChildClassParamatersOnly's result.
    expected = """
    Note that nothing is after the ``Parameters`` section here.
    Parameters
    ----------
    arg1: str
        The first argument
    arg2: int
        This argument's description is longer.
        See how it is on a new line:
        * Even with a bullet list now!
    arg3: np.array
    arg4: int
        An important argument!
    *args: arguments
    **kwargs: keyword argument
        Additional keyword arguments to pass into ``BaseClass``
    """
    actual = merge_docstrings(BaseClass,
                              ChildClassExtraParamatersNoDoc.__doc__,
                              ChildClassExtraParamatersNoDoc.__init__)
    assert actual == expected
def test_merge_docstrings_with_two_extra_sections():
    # Sections following Parameters (References, Notes) must be preserved
    # verbatim after the parent's parameter docs are merged in.
    expected = """
    This is the short description for the child.
    This is a longer description for the child. It also contains many lines.
    With line breaks, like this.
    You can also have new paragraphs!
    NOTE: This is an important note!
    Look, a new line of documentation after the note!
    Parameters
    ----------
    arg1: str
        The first argument
    arg2: int
        This argument's description is longer.
        See how it is on a new line:
        * Even with a bullet list now!
    arg3: np.array
    arg4: int
        An important argument!
    *args: arguments
    **kwargs: keyword argument
        Additional keyword arguments to pass into ``BaseClass``
    References
    ----------
    arg8: list
    arg9: int
        No description above, and that is okay!
    arg10: str
        This one is new.
    Notes
    -----
    This is a note. The above ``References`` section used to say ``Returns``, but classes do not
    return anything and I did not feel inclined to change the description.
    """
    actual = merge_docstrings(BaseClass,
                              ChildClassWithTwoExtraSections.__doc__,
                              ChildClassWithTwoExtraSections.__init__)
    assert actual == expected
| 23.383392
| 97
| 0.615187
| 1,583
| 13,235
| 5.048642
| 0.082123
| 0.026276
| 0.042543
| 0.055055
| 0.866491
| 0.863488
| 0.856356
| 0.856356
| 0.828579
| 0.803053
| 0
| 0.013063
| 0.305931
| 13,235
| 565
| 98
| 23.424779
| 0.856956
| 0.310767
| 0
| 0.812
| 0
| 0.004
| 0.620788
| 0
| 0
| 0
| 0
| 0
| 0.032
| 1
| 0.068
| false
| 0.056
| 0.052
| 0
| 0.16
| 0.008
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
a3e2aee168ff884132dbef5714b54d1017e49e7d
| 268
|
py
|
Python
|
nanome/_internal/_network/_commands/_callbacks/_simple_callback.py
|
rramji/nanome-lib
|
2806598af31cfb4bb6e16366f0b300d2ddcc9c13
|
[
"MIT"
] | null | null | null |
nanome/_internal/_network/_commands/_callbacks/_simple_callback.py
|
rramji/nanome-lib
|
2806598af31cfb4bb6e16366f0b300d2ddcc9c13
|
[
"MIT"
] | null | null | null |
nanome/_internal/_network/_commands/_callbacks/_simple_callback.py
|
rramji/nanome-lib
|
2806598af31cfb4bb6e16366f0b300d2ddcc9c13
|
[
"MIT"
] | null | null | null |
def _simple_callback_arg_unpack(network, arg, request_id):
    """Answer ``request_id`` on ``network``, unpacking ``arg`` (a sequence)
    into separate positional arguments of ``_call``."""
    network._call(request_id, *arg)
def _simple_callback_arg(network, arg, request_id):
    """Answer ``request_id`` on ``network``, passing ``arg`` through as a
    single positional argument of ``_call``."""
    network._call(request_id, arg)
def _simple_callback_no_arg(network, arg, request_id):
    """Answer ``request_id`` on ``network``; ``arg`` is accepted for a
    uniform callback signature but deliberately ignored."""
    network._call(request_id)
| 33.5
| 58
| 0.787313
| 40
| 268
| 4.775
| 0.25
| 0.282723
| 0.267016
| 0.298429
| 0.853403
| 0.853403
| 0.853403
| 0.853403
| 0.853403
| 0.617801
| 0
| 0
| 0.11194
| 268
| 8
| 59
| 33.5
| 0.802521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
a3f883934d1affaa6a95f341b12e60432f4d7002
| 6,693
|
py
|
Python
|
CustomEarlyStopping.py
|
lauromoraes/promoter_paper
|
62aea776cb318a13e142f84dd84bb0a29fb0e83f
|
[
"Apache-2.0"
] | null | null | null |
CustomEarlyStopping.py
|
lauromoraes/promoter_paper
|
62aea776cb318a13e142f84dd84bb0a29fb0e83f
|
[
"Apache-2.0"
] | 9
|
2020-08-12T18:08:46.000Z
|
2022-03-12T00:46:11.000Z
|
CustomEarlyStopping.py
|
lauromoraes/promoter_paper
|
62aea776cb318a13e142f84dd84bb0a29fb0e83f
|
[
"Apache-2.0"
] | null | null | null |
from tensorflow import keras
import numpy as np
class CustomEarlyStopping(keras.callbacks.Callback):
    """Stop training when the loss is at its min, i.e. the loss stops decreasing.

    Also stops training when accuracy has stayed at or above ``threshold``
    for more than ``patience_acc`` epochs (a guard against over-fitting).

    Arguments:
        patience_loss: Number of epochs to wait after min has been hit. After this
          number of no improvement, training stops.
        patience_acc: Number of epochs tolerated with accuracy at or above
          ``threshold`` before training stops.
        threshold: Accuracy level above which the over-fitting counter grows.
    """

    def __init__(self, patience_loss=0, patience_acc=1.0, threshold=0):
        super(CustomEarlyStopping, self).__init__()
        self.patience = patience_loss
        self.patience_acc = patience_acc
        self.threshold = threshold
        # best_weights to store the weights at which the minimum loss occurs.
        self.best_weights = None

    def on_train_begin(self, logs=None):
        # The number of epoch it has waited when loss is no longer minimum.
        self.wait_loss = 0
        # The number of epoch it has waited when accuracy is higher than threshold.
        self.wait_acc = 0
        # The epoch the training stops at.
        self.stopped_epoch = 0
        # Initialize the best as infinity (np.Inf was removed in NumPy 2.0).
        self.best = np.inf

    def on_epoch_end(self, epoch, logs=None):
        current = logs.get("loss")
        if np.less(current, self.best):
            self.best = current
            self.wait_loss = 0
            # Record the best weights if current results is better (less).
            self.best_weights = self.model.get_weights()
        else:
            self.wait_loss += 1
            if self.wait_loss >= self.patience:
                self.stopped_epoch = epoch
                self.model.stop_training = True
                print("Restoring model weights from the end of the best epoch.")
                self.model.set_weights(self.best_weights)
        # Monitor - ACC to avoid over fitting
        current = logs.get("acc")
        if not np.less(current, self.threshold):
            self.wait_acc += 1
            # Bug fix: compare against patience_acc — the original used
            # self.patience (the loss patience) here, leaving patience_acc
            # stored but never read.
            if self.wait_acc > self.patience_acc:
                self.stopped_epoch = epoch
                self.model.stop_training = True
                print("Restoring model weights from the end of the best epoch.")
                self.model.set_weights(self.best_weights)

    def on_train_end(self, logs=None):
        if self.stopped_epoch > 0:
            print("Epoch %05d: early stopping" % (self.stopped_epoch + 1))
class EarlyStoppingAtMinLoss(keras.callbacks.Callback):
    """Stop training when the loss is at its min, i.e. the loss stops decreasing.

    Arguments:
        patience: Number of epochs to wait after min has been hit. After this
          number of no improvement, training stops.
    """

    def __init__(self, patience=0):
        super(EarlyStoppingAtMinLoss, self).__init__()
        self.patience = patience
        # Snapshot of the weights from the epoch with the lowest loss so far.
        self.best_weights = None

    def on_train_begin(self, logs=None):
        # Epochs elapsed since the last improvement in loss.
        self.wait = 0
        # Epoch index at which training halted (0 means it never stopped).
        self.stopped_epoch = 0
        # Lowest loss seen so far; start at infinity so the first epoch wins.
        self.best = np.Inf

    def on_epoch_end(self, epoch, logs=None):
        latest_loss = logs.get("loss")
        if not np.less(latest_loss, self.best):
            # No improvement: count the epoch and stop once patience runs out,
            # rolling the model back to its best recorded weights.
            self.wait += 1
            if self.wait >= self.patience:
                self.stopped_epoch = epoch
                self.model.stop_training = True
                print("Restoring model weights from the end of the best epoch.")
                self.model.set_weights(self.best_weights)
            return
        # New minimum: remember it and snapshot the corresponding weights.
        self.best = latest_loss
        self.wait = 0
        self.best_weights = self.model.get_weights()

    def on_train_end(self, logs=None):
        if self.stopped_epoch > 0:
            print("Epoch %05d: early stopping" % (self.stopped_epoch + 1))
class CustomCallback(keras.callbacks.Callback):
    """Print every Keras lifecycle hook along with the log keys it received."""

    def on_train_begin(self, logs=None):
        print("Starting training; got log keys: {}".format(list(logs.keys())))

    def on_train_end(self, logs=None):
        print("Stop training; got log keys: {}".format(list(logs.keys())))

    def on_epoch_begin(self, epoch, logs=None):
        print("Start epoch {} of training; got log keys: {}".format(epoch, list(logs.keys())))

    def on_epoch_end(self, epoch, logs=None):
        print("End epoch {} of training; got log keys: {}".format(epoch, list(logs.keys())))

    def on_test_begin(self, logs=None):
        print("Start testing; got log keys: {}".format(list(logs.keys())))

    def on_test_end(self, logs=None):
        print("Stop testing; got log keys: {}".format(list(logs.keys())))

    def on_predict_begin(self, logs=None):
        print("Start predicting; got log keys: {}".format(list(logs.keys())))

    def on_predict_end(self, logs=None):
        print("Stop predicting; got log keys: {}".format(list(logs.keys())))

    def on_train_batch_begin(self, batch, logs=None):
        print("...Training: start of batch {}; got log keys: {}".format(batch, list(logs.keys())))

    def on_train_batch_end(self, batch, logs=None):
        print("...Training: end of batch {}; got log keys: {}".format(batch, list(logs.keys())))

    def on_test_batch_begin(self, batch, logs=None):
        print("...Evaluating: start of batch {}; got log keys: {}".format(batch, list(logs.keys())))

    def on_test_batch_end(self, batch, logs=None):
        print("...Evaluating: end of batch {}; got log keys: {}".format(batch, list(logs.keys())))

    def on_predict_batch_begin(self, batch, logs=None):
        print("...Predicting: start of batch {}; got log keys: {}".format(batch, list(logs.keys())))

    def on_predict_batch_end(self, batch, logs=None):
        print("...Predicting: end of batch {}; got log keys: {}".format(batch, list(logs.keys())))
class LossAndErrorPrintingCallback(keras.callbacks.Callback):
    """Print the loss after each train/test batch and a per-epoch summary."""

    def on_train_batch_end(self, batch, logs=None):
        batch_loss = logs["loss"]
        print("For batch {}, loss is {:7.2f}.".format(batch, batch_loss))

    def on_test_batch_end(self, batch, logs=None):
        batch_loss = logs["loss"]
        print("For batch {}, loss is {:7.2f}.".format(batch, batch_loss))

    def on_epoch_end(self, epoch, logs=None):
        template = (
            "The average loss for epoch {} is {:7.2f} "
            "and mean absolute error is {:7.2f}."
        )
        print(template.format(epoch, logs["loss"], logs["mean_absolute_error"]))
| 39.140351
| 87
| 0.617212
| 902
| 6,693
| 4.456763
| 0.126386
| 0.028607
| 0.041791
| 0.055721
| 0.856965
| 0.843035
| 0.83209
| 0.829602
| 0.756716
| 0.714925
| 0
| 0.006324
| 0.267593
| 6,693
| 170
| 88
| 39.370588
| 0.813749
| 0.15703
| 0
| 0.559322
| 0
| 0
| 0.17226
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.211864
| false
| 0
| 0.016949
| 0
| 0.262712
| 0.186441
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
432425697d797ad6d8025e1b01e2952a45cdf24f
| 1,211
|
py
|
Python
|
lib/systems/naphthalene.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/naphthalene.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/naphthalene.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
import pulsar as psr
def load_ref_system():
    """ Returns naphthalene as found in the IQMol fragment library.
        All credit to https://github.com/nutjunkie/IQmol

        The multi-line string is XYZ-style geometry data (element symbol
        followed by Cartesian coordinates) parsed by psr.make_system;
        its content must not be reformatted.
    """
    return psr.make_system("""
        C 0.00000 0.70829 -0.00000
        C -0.00000 -0.70829 0.00000
        C -0.00000 -1.40318 1.21938
        C -0.00000 -0.69899 2.42721
        C -0.00000 0.69899 2.42721
        C 0.00000 1.40318 1.21938
        C 0.00000 1.40318 -1.21938
        C -0.00000 -1.40318 -1.21938
        H -0.00000 -2.48661 1.23562
        H -0.00000 -1.23809 3.36576
        H 0.00000 1.23809 3.36576
        H 0.00000 2.48661 1.23562
        C 0.00000 -0.69899 -2.42721
        C 0.00000 0.69899 -2.42721
        H 0.00000 2.48661 -1.23562
        H -0.00000 -2.48661 -1.23562
        H -0.00000 -1.23809 -3.36576
        H 0.00000 1.23809 -3.36576
        """)
| 46.576923
| 67
| 0.401321
| 155
| 1,211
| 3.116129
| 0.264516
| 0.248447
| 0.144928
| 0.099379
| 0.708075
| 0.708075
| 0.708075
| 0.68323
| 0.68323
| 0.68323
| 0
| 0.547297
| 0.511148
| 1,211
| 25
| 68
| 48.44
| 0.268581
| 0.089182
| 0
| 0
| 0
| 0
| 0.922438
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| true
| 0
| 0.045455
| 0
| 0.136364
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
433e3355650b1daff14d600d317ff1d62757ef39
| 63,959
|
py
|
Python
|
mindspore/ops/_op_impl/_custom_op/img2col_impl.py
|
GuoSuiming/mindspore
|
48afc4cfa53d970c0b20eedfb46e039db2a133d5
|
[
"Apache-2.0"
] | 55
|
2020-12-17T10:26:06.000Z
|
2022-03-28T07:18:26.000Z
|
mindspore/ops/_op_impl/_custom_op/img2col_impl.py
|
forwhat461/mindspore
|
59a277756eb4faad9ac9afcc7fd526e8277d4994
|
[
"Apache-2.0"
] | 1
|
2020-12-29T06:46:38.000Z
|
2020-12-29T06:46:38.000Z
|
mindspore/ops/_op_impl/_custom_op/img2col_impl.py
|
forwhat461/mindspore
|
59a277756eb4faad9ac9afcc7fd526e8277d4994
|
[
"Apache-2.0"
] | 14
|
2021-01-29T02:39:47.000Z
|
2022-03-23T05:00:26.000Z
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""CusImg2ColNC1HWC0"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
from te import tik
from topi.cce import util
# TBE registration record for the CusImg2Col custom op.
# Built step-by-step instead of one chained expression; each builder call
# returns the registration object, and the declaration order of the attrs
# and IO slots is kept identical to how the op is registered/matched.
_img2col_reg = TBERegOp("CusImg2Col")
_img2col_reg = _img2col_reg.fusion_type("OPAQUE")
_img2col_reg = _img2col_reg.async_flag(False)
_img2col_reg = _img2col_reg.binfile_name("img2col.so")
_img2col_reg = _img2col_reg.compute_cost(10)
_img2col_reg = _img2col_reg.kernel_name("CusImg2Col")
_img2col_reg = _img2col_reg.partial_flag(True)
_img2col_reg = _img2col_reg.attr("ksizes", "required", "listInt", "all")
_img2col_reg = _img2col_reg.attr("strides", "required", "listInt", "all")
_img2col_reg = _img2col_reg.attr("dilates", "required", "listInt", "all")
_img2col_reg = _img2col_reg.attr("mode", "required", "str", "all")
_img2col_reg = _img2col_reg.input(0, "x1", False, "required", "all")
_img2col_reg = _img2col_reg.output(0, "y", False, "required", "all")
_img2col_reg = _img2col_reg.dtype_format(DataType.F16_5HD, DataType.F16_FracNZ)
cus_img2col_info = _img2col_reg.get_op_info()
@op_info_register(cus_img2col_info)
def CusImg2Col(input_x, output, ksizes, strides, dilates, mode, kernel_name="img2col"):
    """Img2col (image-to-column) TBE custom kernel for float16 NC1HWC0 input.

    Rearranges convolution patches of `input_x` into a fractal matrix `res` of
    shape [filter_h*filter_w*C // 16, N*out_h*out_w // 16, 16, 16] using the
    TIK `load3dv1` hardware instruction.  Only the exact
    (shape, dtype, ksize, stride) combinations listed in `supported_shape`
    are implemented; each has its own hand-tuned branch with hard-coded
    tiling constants (L1 slab sizes, repeat counts, buffer offsets).

    Args:
        input_x (dict): input tensor description; only "shape" (N, C1, H, W, C0)
            and "dtype" are read here.
        output: output tensor description (never referenced in this body).
        ksizes: (_, filter_h, filter_w, _) kernel sizes.
        strides: (_, stride_h, stride_w, _) strides.
        dilates: (_, dilation_filter_h, dilation_filter_w, _) dilations.
        mode (str): registered op attr (never referenced in this body).
        kernel_name (str): kernel name passed to BuildCCE.

    Returns:
        The built `tik.Tik` instance.

    Raises:
        RuntimeError: when the input configuration is not in `supported_shape`.
    """
    input_x_shape = input_x.get("shape")
    input_x_dtype = input_x.get("dtype")
    N, C1, H, W, C0 = input_x_shape
    C = C1 * C0
    # Only 'SAME' padding is implemented; the value is hard-coded, not an arg.
    padding = 'SAME'
    _, filter_h, filter_w, _ = ksizes
    _, stride_h, stride_w, _ = strides
    _, dilation_filter_h, dilation_filter_w, _ = dilates
    # Dispatch key: every supported configuration gets a dedicated branch below.
    input_shape = (tuple(input_x_shape), input_x_dtype, (filter_h, filter_w), (stride_h, stride_w))
    supported_shape = [((32, 32, 14, 14, 16), 'float16', (3, 3), (2, 2)),
                       ((32, 1, 224, 224, 16), 'float16', (7, 7), (2, 2)),
                       ((32, 4, 56, 56, 16), 'float16', (3, 3), (1, 1)),
                       ((32, 8, 56, 56, 16), 'float16', (3, 3), (2, 2)),
                       ((32, 8, 28, 28, 16), 'float16', (3, 3), (1, 1)),
                       ((32, 16, 28, 28, 16), 'float16', (3, 3), (2, 2)),
                       ((32, 16, 14, 14, 16), 'float16', (3, 3), (1, 1)),
                       ((32, 32, 7, 7, 16), 'float16', (3, 3), (1, 1)),
                       ((32, 64, 14, 14, 16), 'float16', (1, 1), (1, 1)),
                       ((32, 32, 7, 7, 16), 'float16', (1, 1), (1, 1)),
                       ((32, 4, 56, 56, 16), 'float16', (1, 1), (1, 1)),
                       ((32, 64, 14, 14, 16), 'float16', (1, 1), (2, 2)),
                       ((32, 128, 7, 7, 16), 'float16', (1, 1), (1, 1)),
                       ((32, 32, 28, 28, 16), 'float16', (1, 1), (2, 2)),
                       ((32, 16, 56, 56, 16), 'float16', (1, 1), (2, 2)),
                       ((32, 8, 28, 28, 16), 'float16', (1, 1), (1, 1)),
                       ((32, 32, 28, 28, 16), 'float16', (1, 1), (1, 1)),
                       ((32, 16, 14, 14, 16), 'float16', (1, 1), (1, 1)),
                       ((32, 16, 56, 56, 16), 'float16', (1, 1), (1, 1)),]
    if input_shape not in supported_shape:
        raise RuntimeError("input_shape %s is not supported" % str(input_shape))
    # Output matrix: rows = N*out_h*out_w, cols = filter_h*filter_w*C, stored
    # as [cols//16, rows//16, 16, 16] fractal tiles.  NOTE(review): out_h/out_w
    # are computed as H//stride, W//stride — matches 'SAME' padding only for
    # the shapes whitelisted above; verify before adding new shapes.
    output_tmp = [N * int(H // stride_h) * int(W // stride_w), filter_h * filter_w * C]
    output_shape = [output_tmp[1] // 16, output_tmp[0] // 16, 16, 16]
    if util.get_product_version() == util.VERSION_MINI:
        tik_instance = tik.Tik(tik.Dprofile("v100", "mini"))
    else:
        tik_instance = tik.Tik(tik.Dprofile("v100", "cloud"))
    # Rebind input_x from its dict descriptor to the actual GM tensor handle.
    input_x = tik_instance.Tensor("float16", input_x_shape, name="input_x", scope=tik.scope_gm)
    res = tik_instance.Tensor("float16", output_shape, name="res", scope=tik.scope_gm)
    # --- (32, 1, 224, 224, 16), 7x7 kernel, stride 2 (e.g. ResNet stem conv).
    # The 224-row image is streamed through L1 in four 56-row slabs; each
    # (eeb, cc0) pair is one of the 49 filter-tap offsets.  One AI core per
    # batch element (block_num=32).
    if input_shape == ((32, 1, 224, 224, 16), 'float16', (7, 7), (2, 2)):
        pad = [3, 3, 3, 3]
        l1_h = 56
        l1_w = 224
        c1_index = 0
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (200704,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (53760,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            # First 56-row slab of the image (slab 0).
            tik_instance.data_move(input_1_1_local_L1, input_x[block_index, 0, 0, 0, 0], 0, 1, 12544, 0, 0)
            with tik_instance.for_range(0, 7) as eeb:
                with tik_instance.for_range(0, 7) as cc0:
                    temp = eeb % 2
                    # rep = number of 16x16 fractal tiles this slab contributes
                    # for filter row `eeb`.  NOTE(review): formula appears
                    # hand-derived for pad=3 / stride=2 / 7 tiles per row;
                    # verify before reusing elsewhere.
                    rep = ((55 - temp - (-3 + eeb)) // 2 + 1) * 7
                    fetch_filter_w = cc0
                    fetch_filter_h = eeb
                    left_top_w = -3
                    left_top_h = -3
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB, input_1_1_local_L1,
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          rep)
                    with tik_instance.for_range(0, rep) as cc1:
                        tik_instance.data_move(res[cc0 + eeb * 7, cc1 + 784 * block_index, 0, 0],
                                               input_1_1_fractal_L1_local_UB[cc1 * 256], 0, 1, 16, 0, 0)
            # Middle slabs 1 and 2: a fixed 196 tiles each; left_top_h realigns
            # the window to where the previous slab left off.
            with tik_instance.for_range(1, 3) as eeb0:
                tik_instance.data_move(input_1_1_local_L1, input_x[block_index, 0, 56 * eeb0, 0, 0], 0, 1, 12544, 0, 0)
                with tik_instance.for_range(0, 7) as eeb:
                    with tik_instance.for_range(0, 7) as cc0:
                        temp = eeb % 2
                        # Tiles already produced for this filter row by slab 0.
                        rep_prefix = ((55 - temp - (-3 + eeb)) // 2 + 1) * 7
                        rep = 196
                        fetch_filter_w = cc0
                        fetch_filter_h = eeb
                        left_top_w = -3
                        left_top_h = 1 + ((55 - temp - (-3 + eeb)) // 2 - 29) * 2
                        tik_instance.load3dv1(input_1_1_fractal_L1_local_UB, input_1_1_local_L1,
                                              pad,
                                              l1_h,
                                              l1_w,
                                              c1_index,
                                              fetch_filter_w,
                                              fetch_filter_h,
                                              left_top_w,
                                              left_top_h,
                                              stride_w,
                                              stride_h,
                                              filter_w,
                                              filter_h,
                                              dilation_filter_w,
                                              dilation_filter_h,
                                              jump_stride,
                                              repeat_mode,
                                              rep)
                        with tik_instance.for_range(0, rep) as cc1:
                            tik_instance.data_move(
                                res[cc0 + eeb * 7, cc1 + rep_prefix + (eeb0 - 1) * rep + 784 * block_index, 0, 0],
                                input_1_1_fractal_L1_local_UB[cc1 * 256], 0, 1, 16, 0, 0)
            # Last slab (rows 168..223): emit whatever remains of the 784 tiles.
            tik_instance.data_move(input_1_1_local_L1, input_x[block_index, 0, 56 * 3, 0, 0], 0, 1, 12544, 0, 0)
            with tik_instance.for_range(0, 7) as eeb:
                with tik_instance.for_range(0, 7) as cc0:
                    temp = eeb % 2
                    rep_prefix = ((55 - temp - (-3 + eeb)) // 2 + 1) * 7 + 196 * 2
                    rep = 784 - rep_prefix
                    fetch_filter_w = cc0
                    fetch_filter_h = eeb
                    left_top_w = -3
                    left_top_h = 1 + ((55 - temp - (-3 + eeb)) // 2 - 29) * 2
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB, input_1_1_local_L1,
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          rep)
                    with tik_instance.for_range(0, rep) as cc1:
                        tik_instance.data_move(res[cc0 + eeb * 7, cc1 + rep_prefix + 784 * block_index, 0, 0],
                                               input_1_1_fractal_L1_local_UB[cc1 * 256], 0, 1, 16, 0, 0)
    # --- (32, 4, 56, 56, 16), 3x3 kernel, stride 1.  Whole image fits in L1;
    # loop over the 9 filter taps (eeb0) and the 4 C1 channels (eeb1).
    if input_shape == ((32, 4, 56, 56, 16), 'float16', (3, 3), (1, 1)):
        pad = [1, 1, 1, 1]
        l1_h = 56
        l1_w = 56
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (200704,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (50176,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            tik_instance.data_move(input_1_1_local_L1, input_x[block_index, 0, 0, 0, 0], 0, 1, 12544, 0, 0)
            with tik_instance.for_range(0, 9) as eeb0:
                rep = 196
                fetch_filter_w = eeb0 % 3
                fetch_filter_h = eeb0 // 3
                left_top_w = -1
                left_top_h = -1
                with tik_instance.for_range(0, 4) as eeb1:
                    c1_index = eeb1
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB, input_1_1_local_L1,
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          rep)
                    with tik_instance.for_range(0, rep) as i:
                        tik_instance.data_move(res[eeb1 * 9 + eeb0, i + 196 * block_index, 0, 0],
                                               input_1_1_fractal_L1_local_UB[i * 256], 0, 1, 16, 0, 0)
    # --- (32, 8, 56, 56, 16), 3x3 kernel, stride 2.  All 9 taps for one
    # channel are buffered in UB (49*256 halves each) before a bulk write-out.
    if input_shape == ((32, 8, 56, 56, 16), 'float16', (3, 3), (2, 2)):
        pad = [1, 1, 1, 1]
        l1_h = 56
        l1_w = 56
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (401408,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (112896,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            tik_instance.data_move(input_1_1_local_L1, input_x[block_index, 0, 0, 0, 0], 0, 1, 25088, 0, 0)
            with tik_instance.for_range(0, 8) as eeb0:
                with tik_instance.for_range(0, 9) as eeb1:
                    rep = 49
                    fetch_filter_w = eeb1 % 3
                    fetch_filter_h = eeb1 // 3
                    left_top_w = -1
                    left_top_h = -1
                    c1_index = eeb0
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[49 * 256 * eeb1], input_1_1_local_L1,
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          rep)
                with tik_instance.for_range(0, 9) as eeb1:
                    with tik_instance.for_range(0, 49) as i:
                        tik_instance.data_move(res[eeb1 + eeb0 * 9, 49 * block_index + i, 0, 0],
                                               input_1_1_fractal_L1_local_UB[i * 256 + eeb1 * 49 * 256], 0, 1, 16, 0, 0)
    # --- (32, 8, 28, 28, 16), 3x3 kernel, stride 1.  Same buffering scheme
    # as the previous branch, smaller image.
    if input_shape == ((32, 8, 28, 28, 16), 'float16', (3, 3), (1, 1)):
        pad = [1, 1, 1, 1]
        l1_h = 28
        l1_w = 28
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (100352,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (112896,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            tik_instance.data_move(input_1_1_local_L1, input_x[block_index, 0, 0, 0, 0], 0, 1, 6272, 0, 0)
            with tik_instance.for_range(0, 8) as eeb0:
                with tik_instance.for_range(0, 9) as eeb1:
                    rep = 49
                    fetch_filter_w = eeb1 % 3
                    fetch_filter_h = eeb1 // 3
                    left_top_w = -1
                    left_top_h = -1
                    c1_index = eeb0
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[49 * 256 * eeb1], input_1_1_local_L1,
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          rep)
                with tik_instance.for_range(0, 9) as eeb1:
                    with tik_instance.for_range(0, 49) as i:
                        tik_instance.data_move(res[eeb1 + eeb0 * 9, 49 * block_index + i, 0, 0],
                                               input_1_1_fractal_L1_local_UB[i * 256 + eeb1 * 49 * 256], 0, 1, 16, 0, 0)
    # --- (32, 16, 28, 28, 16), 3x3 kernel, stride 2.  Cores are split
    # (batch half eeb0, channel eeb1); 16 batch images are packed into L1 and
    # re-compacted in UB (stride 3328 -> 3136) before write-out.
    if input_shape == ((32, 16, 28, 28, 16), 'float16', (3, 3), (2, 2)):
        pad = [1, 1, 1, 1]
        l1_h = 28
        l1_w = 28
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            eeb0 = block_index % 2
            eeb1 = block_index // 2
            input_1_1_local_L1 = tik_instance.Tensor("float16", (200704,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (53248,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            input_1_2_fractal_L1_local_UB = tik_instance.Tensor("float16", (50176,), scope=tik.scope_ubuf,
                                                                name="input_1_2_fractal_L1_local_UB")
            with tik_instance.for_range(0, 16) as i:
                tik_instance.data_move(input_1_1_local_L1[i * 12544], input_x[i + 16 * eeb0, eeb1, 0, 0, 0], 0, 1, 784,
                                       0, 0)
            with tik_instance.for_range(0, 9) as eeb3:
                # rep=13 tiles per image (13*16=208 >= 196 output positions;
                # padded to a whole repeat).
                rep = 13
                fetch_filter_w = eeb3 % 3
                fetch_filter_h = eeb3 // 3
                left_top_w = -1
                left_top_h = -1
                c1_index = 0
                with tik_instance.for_range(0, 16) as i:
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[3328 * i], input_1_1_local_L1[12544 * i],
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          rep)
                # Compact: drop the 3328-3136 per-image padding.
                with tik_instance.for_range(0, 16) as i:
                    tik_instance.data_move(input_1_2_fractal_L1_local_UB[i * 196 * 16],
                                           input_1_1_fractal_L1_local_UB[i * 3328], 0, 1, 196, 0, 0)
                with tik_instance.for_range(196 * eeb0, 196 * (eeb0 + 1)) as i:
                    tik_instance.data_move(res[eeb1 * 9 + eeb3, i, 0, 0],
                                           input_1_2_fractal_L1_local_UB[256 * (i - 196 * eeb0)], 0, 1, 16, 0, 0)
    # --- (32, 16, 14, 14, 16), 3x3 kernel, stride 1.  Same split/compact
    # scheme as the previous branch, 14x14 image.
    if input_shape == ((32, 16, 14, 14, 16), 'float16', (3, 3), (1, 1)):
        pad = [1, 1, 1, 1]
        l1_h = 14
        l1_w = 14
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            eeb0 = block_index % 2
            eeb1 = block_index // 2
            input_1_1_local_L1 = tik_instance.Tensor("float16", (50176,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (53248,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            input_1_2_fractal_L1_local_UB = tik_instance.Tensor("float16", (50176,), scope=tik.scope_ubuf,
                                                                name="input_1_2_fractal_L1_local_UB")
            with tik_instance.for_range(0, 16) as i:
                tik_instance.data_move(input_1_1_local_L1[i * 3136], input_x[i + 16 * eeb0, eeb1, 0, 0, 0], 0, 1, 196,
                                       0, 0)
            with tik_instance.for_range(0, 9) as eeb3:
                rep = 13
                fetch_filter_w = eeb3 % 3
                fetch_filter_h = eeb3 // 3
                left_top_w = -1
                left_top_h = -1
                c1_index = 0
                with tik_instance.for_range(0, 16) as i:
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[3328 * i], input_1_1_local_L1[3136 * i],
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          rep)
                with tik_instance.for_range(0, 16) as i:
                    tik_instance.data_move(input_1_2_fractal_L1_local_UB[i * 196 * 16],
                                           input_1_1_fractal_L1_local_UB[i * 3328], 0, 1, 196, 0, 0)
                with tik_instance.for_range(196 * eeb0, 196 * (eeb0 + 1)) as i:
                    tik_instance.data_move(res[eeb1 * 9 + eeb3, i, 0, 0],
                                           input_1_2_fractal_L1_local_UB[256 * (i - 196 * eeb0)], 0, 1, 16, 0, 0)
    # --- (32, 32, 14, 14, 16), 3x3 kernel, stride 2.  One core per channel;
    # all 32 batch images for that channel are processed together.
    if input_shape == ((32, 32, 14, 14, 16), 'float16', (3, 3), (2, 2)):
        pad = [1, 1, 1, 1]
        l1_h = 14
        l1_w = 14
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (100352,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (32768,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            input_1_2_fractal_L1_local_UB = tik_instance.Tensor("float16", (25088,), scope=tik.scope_ubuf,
                                                                name="input_1_2_fractal_L1_local_UB")
            with tik_instance.for_range(0, 32) as i:
                tik_instance.data_move(input_1_1_local_L1[i * 3136], input_x[i, block_index, 0, 0, 0], 0, 1, 196, 0, 0)
            with tik_instance.for_range(0, 9) as eeb:
                # rep=4 tiles per image (4*16=64 >= 49 output positions).
                rep = 4
                fetch_filter_w = eeb % 3
                fetch_filter_h = eeb // 3
                left_top_w = -1
                left_top_h = -1
                c1_index = 0
                with tik_instance.for_range(0, 32) as i:
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[1024 * i], input_1_1_local_L1[3136 * i],
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          rep)
                with tik_instance.for_range(0, 32) as i:
                    tik_instance.data_move(input_1_2_fractal_L1_local_UB[i * 49 * 16],
                                           input_1_1_fractal_L1_local_UB[i * 1024], 0, 1, 49, 0, 0)
                with tik_instance.for_range(0, 98) as i:
                    tik_instance.data_move(res[eeb + block_index * 9, i, 0, 0], input_1_2_fractal_L1_local_UB[256 * i],
                                           0, 1, 16, 0, 0)
    # --- (32, 64, 14, 14, 16), 1x1 kernel, stride 2.  Two channels per core.
    if input_shape == ((32, 64, 14, 14, 16), 'float16', (1, 1), (2, 2)):
        pad = [0, 0, 0, 0]
        l1_h = 14
        l1_w = 14
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (100352,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (32768,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            input_1_2_fractal_L1_local_UB = tik_instance.Tensor("float16", (25088,), scope=tik.scope_ubuf,
                                                                name="input_1_2_fractal_L1_local_UB")
            with tik_instance.for_range(0, 2) as eeb0:
                with tik_instance.for_range(0, 32) as i:
                    tik_instance.data_move(input_1_1_local_L1[i * 3136], input_x[i, block_index * 2 + eeb0, 0, 0, 0], 0,
                                           1, 196, 0, 0)
                with tik_instance.for_range(0, 32) as i:
                    rep = 4
                    fetch_filter_w = 0
                    fetch_filter_h = 0
                    left_top_w = 0
                    left_top_h = 0
                    c1_index = 0
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[1024 * i], input_1_1_local_L1[3136 * i],
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          rep)
                with tik_instance.for_range(0, 32) as i:
                    tik_instance.data_move(input_1_2_fractal_L1_local_UB[i * 49 * 16],
                                           input_1_1_fractal_L1_local_UB[i * 1024], 0, 1, 49, 0, 0)
                with tik_instance.for_range(0, 98) as i:
                    tik_instance.data_move(res[eeb0 + block_index * 2, i, 0, 0], input_1_2_fractal_L1_local_UB[256 * i],
                                           0, 1, 16, 0, 0)
    # --- (32, 32, 7, 7, 16), 3x3 kernel, stride 1.  One core per channel.
    if input_shape == ((32, 32, 7, 7, 16), 'float16', (3, 3), (1, 1)):
        pad = [1, 1, 1, 1]
        l1_h = 7
        l1_w = 7
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (25088,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (32768,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            input_1_2_fractal_L1_local_UB = tik_instance.Tensor("float16", (25088,), scope=tik.scope_ubuf,
                                                                name="input_1_2_fractal_L1_local_UB")
            with tik_instance.for_range(0, 32) as i:
                tik_instance.data_move(input_1_1_local_L1[i * 784], input_x[i, block_index, 0, 0, 0], 0, 1, 49, 0, 0)
            with tik_instance.for_range(0, 9) as eeb:
                rep = 4
                fetch_filter_w = eeb % 3
                fetch_filter_h = eeb // 3
                left_top_w = -1
                left_top_h = -1
                c1_index = 0
                with tik_instance.for_range(0, 32) as i:
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[1024 * i], input_1_1_local_L1[784 * i],
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          rep)
                with tik_instance.for_range(0, 32) as i:
                    tik_instance.data_move(input_1_2_fractal_L1_local_UB[i * 49 * 16],
                                           input_1_1_fractal_L1_local_UB[i * 1024], 0, 1, 49, 0, 0)
                with tik_instance.for_range(0, 98) as i:
                    tik_instance.data_move(res[eeb + block_index * 9, i, 0, 0], input_1_2_fractal_L1_local_UB[256 * i],
                                           0, 1, 16, 0, 0)
    # --- (32, 128, 7, 7, 16), 1x1 kernel, stride 1.  Four channels per core.
    if input_shape == ((32, 128, 7, 7, 16), 'float16', (1, 1), (1, 1)):
        pad = [0, 0, 0, 0]
        l1_h = 7
        l1_w = 7
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (25088,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (32768,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            input_1_2_fractal_L1_local_UB = tik_instance.Tensor("float16", (25088,), scope=tik.scope_ubuf,
                                                                name="input_1_2_fractal_L1_local_UB")
            with tik_instance.for_range(0, 4) as eeb0:
                with tik_instance.for_range(0, 32) as i:
                    tik_instance.data_move(input_1_1_local_L1[i * 784], input_x[i, eeb0 + block_index * 4, 0, 0, 0], 0,
                                           1, 49, 0, 0)
                # NOTE(review): the outer `i` loop below is shadowed by the
                # inner `i` loop, so the load3dv1 sweep runs 32x redundantly
                # (results are overwritten with identical data).  Compare the
                # (32, 64, 14, 14, 16) stride-2 branch above, which has a
                # single loop.  Wasteful but apparently harmless — verify.
                with tik_instance.for_range(0, 32) as i:
                    rep = 4
                    fetch_filter_w = 0
                    fetch_filter_h = 0
                    left_top_w = 0
                    left_top_h = 0
                    c1_index = 0
                    with tik_instance.for_range(0, 32) as i:
                        tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[1024 * i], input_1_1_local_L1[784 * i],
                                              pad,
                                              l1_h,
                                              l1_w,
                                              c1_index,
                                              fetch_filter_w,
                                              fetch_filter_h,
                                              left_top_w,
                                              left_top_h,
                                              stride_w,
                                              stride_h,
                                              filter_w,
                                              filter_h,
                                              dilation_filter_w,
                                              dilation_filter_h,
                                              jump_stride,
                                              repeat_mode,
                                              rep)
                with tik_instance.for_range(0, 32) as i:
                    tik_instance.data_move(input_1_2_fractal_L1_local_UB[i * 49 * 16],
                                           input_1_1_fractal_L1_local_UB[i * 1024], 0, 1, 49, 0, 0)
                with tik_instance.for_range(0, 98) as i:
                    tik_instance.data_move(res[eeb0 + block_index * 4, i, 0, 0], input_1_2_fractal_L1_local_UB[256 * i],
                                           0, 1, 16, 0, 0)
    # --- (32, 64, 14, 14, 16), 1x1 kernel, stride 1.  Two channels per core,
    # double-buffered in L1; the two identical passes below handle channel
    # block_index*2 (input_1_1) and block_index*2 + 1 (input_1_2).
    if input_shape == ((32, 64, 14, 14, 16), 'float16', (1, 1), (1, 1)):
        pad = [0, 0, 0, 0]
        l1_h = 14
        l1_w = 14
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (100352,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_2_local_L1 = tik_instance.Tensor("float16", (100352,), scope=tik.scope_cbuf,
                                                     name="input_1_2_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (53248,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            input_1_2_fractal_L1_local_UB = tik_instance.Tensor("float16", (50176,), scope=tik.scope_ubuf,
                                                                name="input_1_2_fractal_L1_local_UB")
            with tik_instance.for_range(0, 32) as i:
                tik_instance.data_move(input_1_1_local_L1[i * 3136], input_x[i, block_index * 2, 0, 0, 0], 0, 1, 196, 0,
                                       0)
                tik_instance.data_move(input_1_2_local_L1[i * 3136], input_x[i, block_index * 2 + 1, 0, 0, 0], 0, 1,
                                       196, 0, 0)
            with tik_instance.for_range(0, 2) as eeb1:
                with tik_instance.for_range(eeb1 * 16, (eeb1 + 1) * 16) as i:
                    rep = 13
                    fetch_filter_w = 0
                    fetch_filter_h = 0
                    left_top_w = 0
                    left_top_h = 0
                    c1_index = 0
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[3328 * (i - eeb1 * 16)],
                                          input_1_1_local_L1[3136 * i],
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          rep)
                with tik_instance.for_range(0, 16) as i:
                    tik_instance.data_move(input_1_2_fractal_L1_local_UB[i * 196 * 16],
                                           input_1_1_fractal_L1_local_UB[i * 3328], 0, 1, 196, 0, 0)
                with tik_instance.for_range(eeb1 * 196, (eeb1 + 1) * 196) as i:
                    tik_instance.data_move(res[block_index * 2, i, 0, 0],
                                           input_1_2_fractal_L1_local_UB[256 * (i - eeb1 * 196)], 0, 1, 16, 0, 0)
            # Second channel: identical pass reading input_1_2_local_L1.
            with tik_instance.for_range(0, 2) as eeb1:
                with tik_instance.for_range(eeb1 * 16, (eeb1 + 1) * 16) as i:
                    rep = 13
                    fetch_filter_w = 0
                    fetch_filter_h = 0
                    left_top_w = 0
                    left_top_h = 0
                    c1_index = 0
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[3328 * (i - eeb1 * 16)],
                                          input_1_2_local_L1[3136 * i],
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          rep)
                with tik_instance.for_range(0, 16) as i:
                    tik_instance.data_move(input_1_2_fractal_L1_local_UB[i * 196 * 16],
                                           input_1_1_fractal_L1_local_UB[i * 3328], 0, 1, 196, 0, 0)
                with tik_instance.for_range(eeb1 * 196, (eeb1 + 1) * 196) as i:
                    tik_instance.data_move(res[block_index * 2 + 1, i, 0, 0],
                                           input_1_2_fractal_L1_local_UB[256 * (i - eeb1 * 196)], 0, 1, 16, 0, 0)
    # --- (32, 32, 28, 28, 16), 1x1 kernel, stride 2.  One core per channel;
    # the 32 batch images are processed in two halves of 16 (loops below).
    if input_shape == ((32, 32, 28, 28, 16), 'float16', (1, 1), (2, 2)):
        pad = [0, 0, 0, 0]
        l1_h = 28
        l1_w = 28
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (401408,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (53248,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            input_1_2_fractal_L1_local_UB = tik_instance.Tensor("float16", (50176,), scope=tik.scope_ubuf,
                                                                name="input_1_2_fractal_L1_local_UB")
            with tik_instance.for_range(0, 32) as i:
                tik_instance.data_move(input_1_1_local_L1[i * 12544], input_x[i, block_index, 0, 0, 0], 0, 1, 784, 0, 0)
            # First half: images 0..15.
            with tik_instance.for_range(0, 16) as i:
                rep = 13
                fetch_filter_w = 0
                fetch_filter_h = 0
                left_top_w = 0
                left_top_h = 0
                c1_index = 0
                tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[3328 * i], input_1_1_local_L1[12544 * i],
                                      pad,
                                      l1_h,
                                      l1_w,
                                      c1_index,
                                      fetch_filter_w,
                                      fetch_filter_h,
                                      left_top_w,
                                      left_top_h,
                                      stride_w,
                                      stride_h,
                                      filter_w,
                                      filter_h,
                                      dilation_filter_w,
                                      dilation_filter_h,
                                      jump_stride,
                                      repeat_mode,
                                      rep)
            with tik_instance.for_range(0, 16) as i:
                tik_instance.data_move(input_1_2_fractal_L1_local_UB[i * 196 * 16],
                                       input_1_1_fractal_L1_local_UB[i * 3328], 0, 1, 196, 0, 0)
            with tik_instance.for_range(0, 196) as i:
                tik_instance.data_move(res[block_index, i, 0, 0], input_1_2_fractal_L1_local_UB[256 * i], 0, 1, 16, 0,
                                       0)
            # Second half: images 16..31.
            with tik_instance.for_range(16, 32) as i:
                rep = 13
                fetch_filter_w = 0
                fetch_filter_h = 0
                left_top_w = 0
                left_top_h = 0
                c1_index = 0
                tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[3328 * (i - 16)], input_1_1_local_L1[12544 * i],
                                      pad,
                                      l1_h,
                                      l1_w,
                                      c1_index,
                                      fetch_filter_w,
                                      fetch_filter_h,
                                      left_top_w,
                                      left_top_h,
                                      stride_w,
                                      stride_h,
                                      filter_w,
                                      filter_h,
                                      dilation_filter_w,
                                      dilation_filter_h,
                                      jump_stride,
                                      repeat_mode,
                                      rep)
            with tik_instance.for_range(0, 16) as i:
                tik_instance.data_move(input_1_2_fractal_L1_local_UB[i * 196 * 16],
                                       input_1_1_fractal_L1_local_UB[i * 3328], 0, 1, 196, 0, 0)
            with tik_instance.for_range(196, 392) as i:
                tik_instance.data_move(res[block_index, i, 0, 0], input_1_2_fractal_L1_local_UB[256 * (i - 196)], 0, 1,
                                       16, 0, 0)
    # --- (32, 32, 7, 7, 16), 1x1 kernel, stride 1.  (`padding` is always
    # 'SAME' here, so the inner `if` always runs.)
    if input_shape == ((32, 32, 7, 7, 16), 'float16', (1, 1), (1, 1)):
        if padding == 'SAME':
            padding_left = 0
            padding_right = 0
            padding_top = 0
            padding_bottom = 0
        pad = [padding_left, padding_right, padding_top, padding_bottom]
        l1_h = 7
        l1_w = 7
        c1_index = 0
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (25088,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (32768,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            input_1_2_fractal_L1_local_UB = tik_instance.Tensor("float16", (25088,), scope=tik.scope_ubuf,
                                                                name="input_1_2_fractal_L1_local_UB")
            with tik_instance.for_range(0, 32) as i:
                tik_instance.data_move(input_1_1_local_L1[i * 784], input_x[i, block_index, 0, 0, 0], 0, 1, 49, 0, 0)
            with tik_instance.for_range(0, 32) as i:
                fetch_filter_w = 0
                fetch_filter_h = 0
                left_top_h = 0
                left_top_w = 0
                tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[1024 * i], input_1_1_local_L1[784 * i],
                                      pad,
                                      l1_h,
                                      l1_w,
                                      c1_index,
                                      fetch_filter_w,
                                      fetch_filter_h,
                                      left_top_w,
                                      left_top_h,
                                      stride_w,
                                      stride_h,
                                      filter_w,
                                      filter_h,
                                      dilation_filter_w,
                                      dilation_filter_h,
                                      jump_stride,
                                      repeat_mode,
                                      4)
            with tik_instance.for_range(0, 32) as i:
                tik_instance.data_move(input_1_2_fractal_L1_local_UB[i * 49 * 16],
                                       input_1_1_fractal_L1_local_UB[i * 1024], 0, 1, 49, 0, 0)
            with tik_instance.for_range(0, 98) as i:
                tik_instance.data_move(res[block_index, i, 0, 0], input_1_2_fractal_L1_local_UB[i * 256], 0, 1, 16, 0,
                                       0)
    # --- (32, 4, 56, 56, 16), 1x1 kernel, stride 1.  One core per batch image;
    # loop over the 4 channels within the image.
    if input_shape == ((32, 4, 56, 56, 16), 'float16', (1, 1), (1, 1)):
        if padding == 'SAME':
            padding_left = 0
            padding_right = 0
            padding_top = 0
            padding_bottom = 0
        pad = [padding_left, padding_right, padding_top, padding_bottom]
        l1_h = 56
        l1_w = 56
        c1_index = 0
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (12544 * 32 // 2,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (100352 // 2,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            tik_instance.data_move(input_1_1_local_L1, input_x[block_index, 0, 0, 0, 0], 0, 1, 12544, 0, 0)
            with tik_instance.for_range(0, 4) as eeb:
                fetch_filter_w = 0
                fetch_filter_h = 0
                left_top_h = 0
                left_top_w = 0
                tik_instance.load3dv1(input_1_1_fractal_L1_local_UB, input_1_1_local_L1[eeb * 56 * 56 * 16],
                                      pad,
                                      l1_h,
                                      l1_w,
                                      c1_index,
                                      fetch_filter_w,
                                      fetch_filter_h,
                                      left_top_w,
                                      left_top_h,
                                      stride_w,
                                      stride_h,
                                      filter_w,
                                      filter_h,
                                      dilation_filter_w,
                                      dilation_filter_h,
                                      jump_stride,
                                      repeat_mode,
                                      196)
                with tik_instance.for_range(0, 196) as rep:
                    tik_instance.data_move(res[eeb, rep + block_index * 196, 0, 0],
                                           input_1_1_fractal_L1_local_UB[rep * 256], 0, 1, 16, 0, 0)
    # --- (32, 8, 28, 28, 16), 1x1 kernel, stride 1.  One core per batch
    # image; all 8 channels buffered then written out in one sweep.
    if input_shape == ((32, 8, 28, 28, 16), 'float16', (1, 1), (1, 1)):
        if padding == 'SAME':
            padding_left = 0
            padding_right = 0
            padding_top = 0
            padding_bottom = 0
        pad = [padding_left, padding_right, padding_top, padding_bottom]
        l1_h = 28
        l1_w = 28
        c1_index = 0
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (6272 * 32 // 2,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (49 * 256 * 8,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            tik_instance.data_move(input_1_1_local_L1, input_x[block_index, 0, 0, 0, 0], 0, 1, 6272, 0, 0)
            with tik_instance.for_range(0, 1) as eeb0:
                with tik_instance.for_range(0, 8) as eeb1:
                    fetch_filter_w = 0
                    fetch_filter_h = 0
                    left_top_h = 0
                    left_top_w = 0
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[eeb1 * 49 * 256],
                                          input_1_1_local_L1[(eeb1 + eeb0 * 8) * 28 * 28 * 16],
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          49)
                with tik_instance.for_range(0, 8) as eeb1:
                    with tik_instance.for_range(0, 49) as i:
                        tik_instance.data_move(res[eeb0 * 8 + eeb1, i + block_index * 49, 0, 0],
                                               input_1_1_fractal_L1_local_UB[i * 256 + eeb1 * 49 * 256], 0, 1, 16, 0, 0)
    # --- (32, 32, 28, 28, 16), 1x1 kernel, stride 1.  Like the previous
    # branch but four groups of 8 channels (eeb0 in 0..3).
    if input_shape == ((32, 32, 28, 28, 16), 'float16', (1, 1), (1, 1)):
        if padding == 'SAME':
            padding_left = 0
            padding_right = 0
            padding_top = 0
            padding_bottom = 0
        pad = [padding_left, padding_right, padding_top, padding_bottom]
        l1_h = 28
        l1_w = 28
        c1_index = 0
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (25088 * 32 // 2,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (49 * 256 * 8,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            tik_instance.data_move(input_1_1_local_L1, input_x[block_index, 0, 0, 0, 0], 0, 1, 25088, 0, 0)
            with tik_instance.for_range(0, 4) as eeb0:
                with tik_instance.for_range(0, 8) as eeb1:
                    fetch_filter_w = 0
                    fetch_filter_h = 0
                    left_top_h = 0
                    left_top_w = 0
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[eeb1 * 49 * 256],
                                          input_1_1_local_L1[(eeb1 + eeb0 * 8) * 28 * 28 * 16],
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          49)
                with tik_instance.for_range(0, 8) as eeb1:
                    with tik_instance.for_range(0, 49) as i:
                        tik_instance.data_move(res[eeb0 * 8 + eeb1, i + block_index * 49, 0, 0],
                                               input_1_1_fractal_L1_local_UB[i * 256 + eeb1 * 49 * 256], 0, 1, 16, 0, 0)
    # --- (32, 16, 14, 14, 16), 1x1 kernel, stride 1.  Cores split as
    # (batch half eeb0, channel eeb1); same compact scheme as the 3x3 branch.
    if input_shape == ((32, 16, 14, 14, 16), 'float16', (1, 1), (1, 1)):
        if padding == 'SAME':
            padding_left = 0
            padding_right = 0
            padding_top = 0
            padding_bottom = 0
        pad = [padding_left, padding_right, padding_top, padding_bottom]
        l1_h = 14
        l1_w = 14
        c1_index = 0
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            eeb0 = block_index % 2
            eeb1 = block_index // 2
            input_1_1_local_L1 = tik_instance.Tensor("float16", (196 * 32 * 16,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (106496 // 2,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            input_1_2_fractal_L1_local_UB = tik_instance.Tensor("float16", (196 * 16 * 16,), scope=tik.scope_ubuf,
                                                                name="input_1_2_fractal_L1_local_UB")
            with tik_instance.for_range(0, 32) as i:
                tik_instance.data_move(input_1_1_local_L1[i * 3136], input_x[i, eeb1, 0, 0, 0], 0, 1, 196, 0, 0)
            with tik_instance.for_range(0, 16) as i:
                fetch_filter_w = 0
                fetch_filter_h = 0
                left_top_h = 0
                left_top_w = 0
                tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[i * 3328],
                                      input_1_1_local_L1[i * 3136 + eeb0 * 16 * 3136],
                                      pad,
                                      l1_h,
                                      l1_w,
                                      c1_index,
                                      fetch_filter_w,
                                      fetch_filter_h,
                                      left_top_w,
                                      left_top_h,
                                      stride_w,
                                      stride_h,
                                      filter_w,
                                      filter_h,
                                      dilation_filter_w,
                                      dilation_filter_h,
                                      jump_stride,
                                      repeat_mode,
                                      13)
            with tik_instance.for_range(0, 16) as i:
                tik_instance.data_move(input_1_2_fractal_L1_local_UB[i * 196 * 16],
                                       input_1_1_fractal_L1_local_UB[i * 3328], 0, 1, 196, 0, 0)
            with tik_instance.for_range(0, 196) as i:
                tik_instance.data_move(res[eeb1, i + 196 * eeb0, 0, 0], input_1_2_fractal_L1_local_UB[256 * i], 0, 1,
                                       16, 0, 0)
    # --- (32, 16, 56, 56, 16), 1x1 kernel, stride 1.  One core per batch
    # image; channels processed in 2 groups of 8, UB holds 2 channels at once.
    if input_shape == ((32, 16, 56, 56, 16), 'float16', (1, 1), (1, 1)):
        if padding == 'SAME':
            padding_left = 0
            padding_right = 0
            padding_top = 0
            padding_bottom = 0
        pad = [padding_left, padding_right, padding_top, padding_bottom]
        l1_h = 56
        l1_w = 56
        c1_index = 0
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (25088 * 32 // 2,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (196 * 256 * 2,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            with tik_instance.for_range(0, 2) as eeb0:
                tik_instance.data_move(input_1_1_local_L1, input_x[block_index, eeb0 * 8, 0, 0, 0], 0, 1, 25088, 0, 0)
                with tik_instance.for_range(0, 4) as eeb1:
                    with tik_instance.for_range(0, 2) as eeb2:
                        fetch_filter_w = 0
                        fetch_filter_h = 0
                        left_top_h = 0
                        left_top_w = 0
                        tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[eeb2 * 196 * 256],
                                              input_1_1_local_L1[(eeb2 + eeb1 * 2) * 56 * 56 * 16],
                                              pad,
                                              l1_h,
                                              l1_w,
                                              c1_index,
                                              fetch_filter_w,
                                              fetch_filter_h,
                                              left_top_w,
                                              left_top_h,
                                              stride_w,
                                              stride_h,
                                              filter_w,
                                              filter_h,
                                              dilation_filter_w,
                                              dilation_filter_h,
                                              jump_stride,
                                              repeat_mode,
                                              196)
                    with tik_instance.for_range(0, 2) as eeb2:
                        with tik_instance.for_range(0, 196) as i:
                            tik_instance.data_move(res[eeb0 * 8 + eeb1 * 2 + eeb2, i + block_index * 196, 0, 0],
                                                   input_1_1_fractal_L1_local_UB[256 * i + eeb2 * 196 * 256], 0, 1, 16,
                                                   0, 0)
    # --- (32, 16, 56, 56, 16), 1x1 kernel, stride 2.  One core per batch
    # image; channels in 2 groups of 8, 49 tiles per channel.
    if input_shape == ((32, 16, 56, 56, 16), 'float16', (1, 1), (2, 2)):
        if padding == 'SAME':
            padding_left = 0
            padding_right = 0
            padding_top = 0
            padding_bottom = 0
        pad = [padding_left, padding_right, padding_top, padding_bottom]
        l1_h = 56
        l1_w = 56
        c1_index = 0
        jump_stride = 1
        repeat_mode = 1
        with tik_instance.for_range(0, 32, block_num=32) as block_index:
            input_1_1_local_L1 = tik_instance.Tensor("float16", (25088 * 32 // 2,), scope=tik.scope_cbuf,
                                                     name="input_1_1_local_L1")
            input_1_1_fractal_L1_local_UB = tik_instance.Tensor("float16", (49 * 256 * 8,), scope=tik.scope_ubuf,
                                                                name="input_1_1_fractal_L1_local_UB")
            with tik_instance.for_range(0, 2) as eeb0:
                tik_instance.data_move(input_1_1_local_L1, input_x[block_index, eeb0 * 8, 0, 0, 0], 0, 1, 25088, 0, 0)
                with tik_instance.for_range(0, 8) as eeb1:
                    fetch_filter_w = 0
                    fetch_filter_h = 0
                    left_top_h = 0
                    left_top_w = 0
                    tik_instance.load3dv1(input_1_1_fractal_L1_local_UB[eeb1 * 49 * 256],
                                          input_1_1_local_L1[eeb1 * 56 * 56 * 16],
                                          pad,
                                          l1_h,
                                          l1_w,
                                          c1_index,
                                          fetch_filter_w,
                                          fetch_filter_h,
                                          left_top_w,
                                          left_top_h,
                                          stride_w,
                                          stride_h,
                                          filter_w,
                                          filter_h,
                                          dilation_filter_w,
                                          dilation_filter_h,
                                          jump_stride,
                                          repeat_mode,
                                          49)
                with tik_instance.for_range(0, 8) as eeb1:
                    with tik_instance.for_range(0, 49) as i:
                        tik_instance.data_move(res[eeb0 * 8 + eeb1, i + block_index * 49, 0, 0],
                                               input_1_1_fractal_L1_local_UB[256 * i + eeb1 * 49 * 256], 0, 1, 16, 0, 0)
    tik_instance.BuildCCE(kernel_name=kernel_name, inputs=[input_x], outputs=[res])
    return tik_instance
| 55.519965
| 120
| 0.398709
| 6,880
| 63,959
| 3.327762
| 0.032558
| 0.119633
| 0.050448
| 0.089452
| 0.919939
| 0.915571
| 0.913125
| 0.908888
| 0.905613
| 0.899891
| 0
| 0.122255
| 0.523742
| 63,959
| 1,151
| 121
| 55.568202
| 0.629362
| 0.010444
| 0
| 0.857671
| 0
| 0
| 0.03294
| 0.013293
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000924
| false
| 0
| 0.002773
| 0
| 0.004621
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4a982c893b4e8606d5192c07f55dcdedb65316c4
| 28,383
|
py
|
Python
|
tests/test_operation.py
|
Neufund/connexion
|
19daa6c6292d0dd4c380d0dc9473c4df1bada234
|
[
"Apache-2.0"
] | 1
|
2020-11-14T11:22:11.000Z
|
2020-11-14T11:22:11.000Z
|
tests/test_operation.py
|
Neufund/connexion
|
19daa6c6292d0dd4c380d0dc9473c4df1bada234
|
[
"Apache-2.0"
] | 2
|
2020-10-26T10:07:35.000Z
|
2020-11-09T17:21:36.000Z
|
tests/test_operation.py
|
Neufund/connexion
|
19daa6c6292d0dd4c380d0dc9473c4df1bada234
|
[
"Apache-2.0"
] | 2
|
2020-10-26T05:54:29.000Z
|
2020-11-14T11:22:14.000Z
|
import math
import pathlib
import types
import mock
import pytest
from connexion.apis.flask_api import Jsonifier
from connexion.decorators.security import (security_passthrough,
verify_oauth_local,
verify_oauth_remote)
from connexion.exceptions import InvalidSpecification
from connexion.operation import Operation
from connexion.resolver import Resolver
# Directory containing this test module (used to locate test fixtures).
TEST_FOLDER = pathlib.Path(__file__).parent
DEFINITIONS = {'new_stack': {'required': ['image_version', 'keep_stacks', 'new_traffic', 'senza_yaml'],
'type': 'object',
'properties': {'keep_stacks': {'type': 'integer',
'description':
'Number of older stacks to keep'},
'image_version': {'type': 'string',
'description':
'Docker image version to deploy'},
'senza_yaml': {'type': 'string',
'description': 'YAML to provide to senza'},
'new_traffic': {'type': 'integer',
'description':
'Percentage of the traffic'}}},
'composed': {'required': ['test'],
'type': 'object',
'properties': {'test': {'schema': {'$ref': '#/definitions/new_stack'}}}}}
# Shared parameter definitions, referenced from operations via '#/parameters/...' $refs.
PARAMETER_DEFINITIONS = {'myparam': {'in': 'path', 'type': 'integer'}}
OPERATION1 = {'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [{'in': 'body',
'name': 'new_stack',
'required': True,
'schema': {'$ref': '#/definitions/new_stack'}}],
'responses': {201: {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/stack'}},
400: {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
401: {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'}
OPERATION2 = {'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [{'in': 'body',
'name': 'new_stack',
'required': True,
'schema': {'$ref': '#/definitions/new_stack'}},
{'in': 'body',
'name': 'new_stack',
'required': True,
'schema': {'$ref': '#/definitions/new_stack'}}],
'responses': {201: {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/stack'}},
400: {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
401: {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'}
OPERATION3 = {'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [{'in': 'body',
'name': 'new_stack',
'required': True,
'schema': {'$ref': '#/notdefinitions/new_stack'}}],
'responses': {201: {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/stack'}},
400: {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
401: {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'}
# Operation whose single parameter is a valid local $ref into #/parameters/.
OPERATION4 = {'operationId': 'fakeapi.hello.post_greeting',
              'parameters': [{'$ref': '#/parameters/myparam'}]}
# Operation with a malformed $ref (does not start with '#/') to exercise error handling.
OPERATION5 = {'operationId': 'fakeapi.hello.post_greeting',
              'parameters': [{'$ref': '/parameters/fail'}]}
OPERATION6 = {'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [
{
'in': 'body',
'name': 'new_stack',
'required': True,
'schema': {'$ref': '#/definitions/new_stack'}
},
{
'in': 'query',
'name': 'stack_version',
'default': 'one',
'type': 'number'
}
],
'responses': {201: {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/stack'}},
400: {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
401: {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'summary': 'Create new stack'}
OPERATION7 = {
'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [
{
'in': 'body',
'name': 'new_stack',
'required': True,
'type': 'integer',
'default': 'stack'
}
],
'responses': {'201': {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/stack'}},
'400': {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
'401': {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'
}
OPERATION8 = {
'operationId': 'fakeapi.hello.schema',
'parameters': [
{
'type': 'object',
'in': 'body',
'name': 'new_stack',
'default': {'keep_stack': 1, 'image_version': 1, 'senza_yaml': 'senza.yaml',
'new_traffic': 100},
'schema': {'$ref': '#/definitions/new_stack'}
}
],
'responses': {},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'
}
OPERATION9 = {'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [{'in': 'body',
'name': 'new_stack',
'required': True,
'schema': {'type': 'array', 'items': {'$ref': '#/definitions/new_stack'}}}],
'responses': {'201': {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/stack'}},
'400': {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
'401': {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'}
OPERATION10 = {'description': 'Adds a new stack to be created by lizzy and returns the '
'information needed to keep track of deployment',
'operationId': 'fakeapi.hello.post_greeting',
'parameters': [{'in': 'body',
'name': 'test',
'required': True,
'schema': {'$ref': '#/definitions/composed'}}],
'responses': {'201': {'description': 'Stack to be created. The '
'CloudFormation Stack creation can '
"still fail if it's rejected by senza "
'or AWS CF.',
'schema': {'$ref': '#/definitions/stack'}},
'400': {'description': 'Stack was not created because request '
'was invalid',
'schema': {'$ref': '#/definitions/problem'}},
'401': {'description': 'Stack was not created because the '
'access token was not provided or was '
'not valid for this operation',
'schema': {'$ref': '#/definitions/problem'}}},
'security': [{'oauth': ['uid']}],
'summary': 'Create new stack'}
# OAuth2 password-flow definition with only a remote token-info URL;
# the security decorator resolves to verify_oauth_remote (see test_operation).
SECURITY_DEFINITIONS_REMOTE = {'oauth': {'type': 'oauth2',
                                         'flow': 'password',
                                         'x-tokenInfoUrl': 'https://oauth.example/token_info',
                                         'scopes': {'myscope': 'can do stuff'}}}
# Same flow but with a local token-info function (x-tokenInfoFunc);
# resolves to verify_oauth_local (see test_operation_local_security_oauth2).
SECURITY_DEFINITIONS_LOCAL = {'oauth': {'type': 'oauth2',
                                        'flow': 'password',
                                        'x-tokenInfoFunc': 'math.ceil',
                                        'scopes': {'myscope': 'can do stuff'}}}
# Both a local function and a remote URL; the local function wins
# (see test_operation_local_security_duplicate_token_info).
SECURITY_DEFINITIONS_BOTH = {'oauth': {'type': 'oauth2',
                                       'flow': 'password',
                                       'x-tokenInfoFunc': 'math.ceil',
                                       'x-tokenInfoUrl': 'https://oauth.example/token_info',
                                       'scopes': {'myscope': 'can do stuff'}}}
# No token-info source at all; security falls back to security_passthrough
# (see test_no_token_info).
SECURITY_DEFINITIONS_WO_INFO = {'oauth': {'type': 'oauth2',
                                          'flow': 'password',
                                          'scopes': {'myscope': 'can do stuff'}}}
@pytest.fixture
def api():
    """Minimal mock of a connexion API object; Operation only reads .jsonifier."""
    return mock.MagicMock(jsonifier=Jsonifier)
def test_operation(api):
    """A body-parameter operation with remote OAuth2 security resolves to a
    function and exposes the expected metadata and body schema."""
    operation = Operation(
        api=api, method='GET', path='endpoint', path_parameters=[],
        operation=OPERATION1,
        app_produces=['application/json'], app_consumes=['application/json'],
        app_security=[], security_definitions=SECURITY_DEFINITIONS_REMOTE,
        definitions=DEFINITIONS, parameter_definitions=PARAMETER_DEFINITIONS,
        resolver=Resolver())
    assert isinstance(operation.function, types.FunctionType)
    # The security decorator is a functools.partial of verify_oauth_remote,
    # carrying the token-info URL and the required scopes as positional args.
    # See https://docs.python.org/2/library/functools.html#partial-objects
    assert operation.security_decorator.func is verify_oauth_remote
    assert operation.security_decorator.args == ('https://oauth.example/token_info', set(['uid']))
    assert operation.method == 'GET'
    assert operation.produces == ['application/json']
    assert operation.consumes == ['application/json']
    assert operation.security == [{'oauth': ['uid']}]
    assert operation.body_schema == {'$ref': '#/definitions/new_stack',
                                     'definitions': DEFINITIONS}
def test_operation_array(api):
    """An array-typed body parameter yields an array body schema with the
    definitions attached."""
    operation = Operation(
        api=api, method='GET', path='endpoint', path_parameters=[],
        operation=OPERATION9,
        app_produces=['application/json'], app_consumes=['application/json'],
        app_security=[], security_definitions=SECURITY_DEFINITIONS_REMOTE,
        definitions=DEFINITIONS, parameter_definitions=PARAMETER_DEFINITIONS,
        resolver=Resolver())
    assert isinstance(operation.function, types.FunctionType)
    # Security resolves to a functools.partial of verify_oauth_remote with the
    # token-info URL and required scopes bound.
    # See https://docs.python.org/2/library/functools.html#partial-objects
    assert operation.security_decorator.func is verify_oauth_remote
    assert operation.security_decorator.args == ('https://oauth.example/token_info', set(['uid']))
    assert operation.method == 'GET'
    assert operation.produces == ['application/json']
    assert operation.consumes == ['application/json']
    assert operation.security == [{'oauth': ['uid']}]
    assert operation.body_schema == {'type': 'array',
                                     'items': {'$ref': '#/definitions/new_stack'},
                                     'definitions': DEFINITIONS}
def test_operation_composed_definition(api):
    """A body schema built from nested $refs keeps the definitions attached."""
    operation = Operation(
        api=api, method='GET', path='endpoint', path_parameters=[],
        operation=OPERATION10,
        app_produces=['application/json'], app_consumes=['application/json'],
        app_security=[], security_definitions=SECURITY_DEFINITIONS_REMOTE,
        definitions=DEFINITIONS, parameter_definitions=PARAMETER_DEFINITIONS,
        resolver=Resolver())
    assert isinstance(operation.function, types.FunctionType)
    # Remote OAuth: a functools.partial of verify_oauth_remote with the
    # token-info URL and required scopes bound.
    # See https://docs.python.org/2/library/functools.html#partial-objects
    assert operation.security_decorator.func is verify_oauth_remote
    assert operation.security_decorator.args == ('https://oauth.example/token_info', set(['uid']))
    assert operation.method == 'GET'
    assert operation.produces == ['application/json']
    assert operation.consumes == ['application/json']
    assert operation.security == [{'oauth': ['uid']}]
    assert operation.body_schema == {'$ref': '#/definitions/composed',
                                     'definitions': DEFINITIONS}
def test_operation_local_security_oauth2(api):
    """With a local x-tokenInfoFunc, the security decorator is a partial of
    verify_oauth_local carrying the resolved function and required scopes."""
    operation = Operation(
        api=api, method='GET', path='endpoint', path_parameters=[],
        operation=OPERATION10,
        app_produces=['application/json'], app_consumes=['application/json'],
        app_security=[], security_definitions=SECURITY_DEFINITIONS_LOCAL,
        definitions=DEFINITIONS, parameter_definitions=PARAMETER_DEFINITIONS,
        resolver=Resolver())
    assert isinstance(operation.function, types.FunctionType)
    # See https://docs.python.org/2/library/functools.html#partial-objects
    assert operation.security_decorator.func is verify_oauth_local
    assert operation.security_decorator.args == (math.ceil, set(['uid']))
    assert operation.method == 'GET'
    assert operation.produces == ['application/json']
    assert operation.consumes == ['application/json']
    assert operation.security == [{'oauth': ['uid']}]
    assert operation.body_schema == {'$ref': '#/definitions/composed',
                                     'definitions': DEFINITIONS}
def test_operation_local_security_duplicate_token_info(api):
    """When both x-tokenInfoFunc and x-tokenInfoUrl are defined, the local
    token-info function takes precedence over the remote URL."""
    operation = Operation(
        api=api, method='GET', path='endpoint', path_parameters=[],
        operation=OPERATION10,
        app_produces=['application/json'], app_consumes=['application/json'],
        app_security=[], security_definitions=SECURITY_DEFINITIONS_BOTH,
        definitions=DEFINITIONS, parameter_definitions=PARAMETER_DEFINITIONS,
        resolver=Resolver())
    assert isinstance(operation.function, types.FunctionType)
    # See https://docs.python.org/2/library/functools.html#partial-objects
    assert operation.security_decorator.func is verify_oauth_local
    assert operation.security_decorator.args == (math.ceil, set(['uid']))
    assert operation.method == 'GET'
    assert operation.produces == ['application/json']
    assert operation.consumes == ['application/json']
    assert operation.security == [{'oauth': ['uid']}]
    assert operation.body_schema == {'$ref': '#/definitions/composed',
                                     'definitions': DEFINITIONS}
def test_non_existent_reference(api):
    """Resolving a $ref to an undefined definition raises InvalidSpecification."""
    with pytest.raises(InvalidSpecification) as exc_info:  # type: py.code.ExceptionInfo
        operation = Operation(api=api, method='GET', path='endpoint',
                              path_parameters=[], operation=OPERATION1,
                              app_produces=['application/json'],
                              app_consumes=['application/json'], app_security=[],
                              security_definitions={}, definitions={},
                              parameter_definitions={}, resolver=Resolver())
        operation.body_schema
    exception = exc_info.value
    expected = "<InvalidSpecification: GET endpoint Definition 'new_stack' not found>"
    assert str(exception) == expected
    assert repr(exception) == expected
def test_multi_body(api):
    """Declaring more than one 'body' parameter is an invalid specification."""
    with pytest.raises(InvalidSpecification) as exc_info:  # type: py.code.ExceptionInfo
        operation = Operation(api=api, method='GET', path='endpoint',
                              path_parameters=[], operation=OPERATION2,
                              app_produces=['application/json'],
                              app_consumes=['application/json'], app_security=[],
                              security_definitions={}, definitions=DEFINITIONS,
                              parameter_definitions=PARAMETER_DEFINITIONS,
                              resolver=Resolver())
        operation.body_schema
    exception = exc_info.value
    expected = "<InvalidSpecification: GET endpoint There can be one 'body' parameter at most>"
    assert str(exception) == expected
    assert repr(exception) == expected
def test_invalid_reference(api):
    """A $ref pointing outside #/definitions raises InvalidSpecification."""
    with pytest.raises(InvalidSpecification) as exc_info:  # type: py.code.ExceptionInfo
        operation = Operation(api=api, method='GET', path='endpoint',
                              path_parameters=[], operation=OPERATION3,
                              app_produces=['application/json'],
                              app_consumes=['application/json'], app_security=[],
                              security_definitions={}, definitions=DEFINITIONS,
                              parameter_definitions=PARAMETER_DEFINITIONS,
                              resolver=Resolver())
        operation.body_schema
    exception = exc_info.value
    prefix = "<InvalidSpecification: GET endpoint $ref"
    assert str(exception).startswith(prefix)
    assert repr(exception).startswith(prefix)
def test_no_token_info(api):
    """Without x-tokenInfoUrl/x-tokenInfoFunc the security check degrades to
    security_passthrough (no token verification)."""
    operation = Operation(
        api=api, method='GET', path='endpoint', path_parameters=[],
        operation=OPERATION1,
        app_produces=['application/json'], app_consumes=['application/json'],
        app_security=SECURITY_DEFINITIONS_WO_INFO,
        security_definitions=SECURITY_DEFINITIONS_WO_INFO,
        definitions=DEFINITIONS, parameter_definitions=PARAMETER_DEFINITIONS,
        resolver=Resolver())
    assert isinstance(operation.function, types.FunctionType)
    assert operation.security_decorator is security_passthrough
    assert operation.method == 'GET'
    assert operation.produces == ['application/json']
    assert operation.consumes == ['application/json']
    assert operation.security == [{'oauth': ['uid']}]
    assert operation.body_schema == {'$ref': '#/definitions/new_stack',
                                     'definitions': DEFINITIONS}
def test_parameter_reference(api):
    """A '#/parameters/...' $ref is resolved against the shared parameter
    definitions."""
    operation = Operation(
        api=api, method='GET', path='endpoint', path_parameters=[],
        operation=OPERATION4,
        app_produces=['application/json'], app_consumes=['application/json'],
        app_security=[], security_definitions={}, definitions={},
        parameter_definitions=PARAMETER_DEFINITIONS, resolver=Resolver())
    assert operation.parameters == [{'in': 'path', 'type': 'integer'}]
def test_resolve_invalid_reference(api):
    """A $ref that does not start with '#/' is rejected at construction time."""
    with pytest.raises(InvalidSpecification) as exc_info:
        Operation(api=api, method='GET', path='endpoint', path_parameters=[],
                  operation=OPERATION5,
                  app_produces=['application/json'], app_consumes=['application/json'],
                  app_security=[], security_definitions={}, definitions={},
                  parameter_definitions=PARAMETER_DEFINITIONS, resolver=Resolver())
    exception = exc_info.value  # type: InvalidSpecification
    assert exception.reason == "GET endpoint '$ref' needs to start with '#/'"
def test_default(api):
    """Operation accepts parameter defaults that match the declared type.

    Bug fix: the original used dict.copy(), which is shallow — assigning
    op['parameters'][i]['default'] mutated the nested dicts shared with the
    module-level OPERATION6/OPERATION8 fixtures, leaking the new defaults into
    every other test that uses them. Deep-copy the fixtures instead.
    """
    import copy
    op = copy.deepcopy(OPERATION6)
    op['parameters'][1]['default'] = 1
    Operation(api=api, method='GET', path='endpoint', path_parameters=[], operation=op,
              app_produces=['application/json'], app_consumes=['application/json'],
              app_security=[], security_definitions={}, definitions=DEFINITIONS,
              parameter_definitions=PARAMETER_DEFINITIONS,
              resolver=Resolver())
    op = copy.deepcopy(OPERATION8)
    op['parameters'][0]['default'] = {
        'keep_stacks': 1, 'image_version': 'one', 'senza_yaml': 'senza.yaml', 'new_traffic': 100
    }
    Operation(api=api, method='POST', path='endpoint', path_parameters=[], operation=op,
              app_produces=['application/json'], app_consumes=['application/json'], app_security=[],
              security_definitions={}, definitions=DEFINITIONS, parameter_definitions={}, resolver=Resolver())
def test_get_path_parameter_types(api):
    """get_path_parameter_types maps each path parameter name to its effective
    converter type ('string' with format 'path' becomes 'path')."""
    op = OPERATION1.copy()
    # Rebinding the 'parameters' key on the shallow copy leaves OPERATION1 untouched.
    op['parameters'] = [{'in': 'path', 'type': 'int', 'name': 'int_path'},
                        {'in': 'path', 'type': 'string', 'name': 'string_path'},
                        {'in': 'path', 'type': 'string', 'format': 'path', 'name': 'path_path'}]
    operation = Operation(api=api, method='GET', path='endpoint', path_parameters=[],
                          operation=op, app_produces=['application/json'],
                          app_consumes=['application/json'], resolver=Resolver())
    expected = {'int_path': 'int', 'string_path': 'string', 'path_path': 'path'}
    assert operation.get_path_parameter_types() == expected
| 50.593583
| 124
| 0.498256
| 2,318
| 28,383
| 5.971527
| 0.09189
| 0.045514
| 0.047681
| 0.016183
| 0.862664
| 0.850527
| 0.836295
| 0.826109
| 0.815706
| 0.801546
| 0
| 0.006488
| 0.386358
| 28,383
| 560
| 125
| 50.683929
| 0.788253
| 0.036853
| 0
| 0.72428
| 0
| 0
| 0.28016
| 0.037089
| 0
| 0
| 0
| 0
| 0.115226
| 1
| 0.028807
| false
| 0.012346
| 0.020576
| 0.002058
| 0.05144
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43557bac4859486beaaf73a0d6b8489008c62548
| 42,186
|
py
|
Python
|
a3cosmos_gas_evolution/Common_Python_Code/calc_galaxy_stellar_mass_function.py
|
1054/a3cosmos-gas-evolution
|
66027338602ed2830e289cfbb4db6200739b39d6
|
[
"0BSD"
] | null | null | null |
a3cosmos_gas_evolution/Common_Python_Code/calc_galaxy_stellar_mass_function.py
|
1054/a3cosmos-gas-evolution
|
66027338602ed2830e289cfbb4db6200739b39d6
|
[
"0BSD"
] | 1
|
2021-09-16T11:12:10.000Z
|
2021-09-16T11:12:52.000Z
|
build/lib/a3cosmos_gas_evolution/Common_Python_Code/calc_galaxy_stellar_mass_function.py
|
1054/a3cosmos-gas-evolution
|
66027338602ed2830e289cfbb4db6200739b39d6
|
[
"0BSD"
] | null | null | null |
#!/usr/bin/env python
#
# 20190222
# copied from "calc_stellar_mass_function.py", this code will superceed "calc_stellar_mass_function.py".
#
# Module setup: imports, numpy math shorthands, and the local cosmology helper.
from __future__ import print_function
import os, sys, re, json, time, astropy
import numpy as np
from astropy.table import Table, Column, hstack
from copy import copy
from numpy import log, log10, power, sum, sqrt, pi, exp
# NOTE(review): these aliases shadow the builtins pow/sum; kept because the
# module uses them as math shorthand throughout.
pow = power
lg = log10
ln = log
from scipy.interpolate import InterpolatedUnivariateSpline, interp1d
# Make sibling modules in this directory importable regardless of the CWD.
if not (os.path.dirname(os.path.abspath(__file__)) in sys.path): sys.path.append(os.path.dirname(os.path.abspath(__file__)))
import apply_cosmology
cosmo = apply_cosmology.cosmo
# Python 2/3 compatibility shim: 'long' does not exist on Python 3.
if sys.version_info.major >= 3:
    long = int
else:
    pass
#
# def
#
def Schechter_Function(lgM, phi, lg_M0, alpha):
    """Schechter (1976) function evaluated per dex of log10 mass.

    Starting from Phi(M) dM = Phi_* (M/M_*)**alpha exp(-M/M_*) d(M/M_*) and
    substituting lgx = lgM - lg_M0 gives, per dlgM,

        Phi(lgM) = phi * 10**(lgx*(alpha+1)) * exp(-10**lgx) * ln(10)

    The ln(10) factor is already folded in, so integrating the returned value
    over lgM directly equals \int Phi(M) / M dM.
    """
    lgx = lgM - lg_M0
    x = 10.0 ** lgx  # M / M_*
    return phi * x ** (alpha + 1.0) * np.exp(-x) * np.log(10.0)
#
# def
#
def calc_SMF_Davidzon2017(z, lgMstar=None, galaxy_type = 'SFG'):
    """Stellar mass function of Davidzon et al. 2017 (arXiv:1701.02734).

    IMF: Chabrier 2003.

    Args:
        z: redshift (scalar).
        lgMstar: optional grid of log10(Mstar) values; if None, a default
            grid spanning 6..13 dex with 1000 points is used.
        galaxy_type: one of 'ALL', 'SFG' or 'QG'.

    Returns:
        (lgMstar_grid, lgPhiMstar_grid) when lgMstar is None, otherwise only
        lgPhiMstar_grid — log10 of the SMF per dex on the mass grid.

    Raises:
        ValueError: for a non-scalar z, an invalid galaxy_type, or a z outside
            the tabulated redshift range.
    """
    if not np.isscalar(z):
        raise ValueError('Please input a float number as the redshift!')
    # isinstance (instead of the original exact type check) also accepts str subclasses.
    if not isinstance(galaxy_type, str) or galaxy_type not in ('ALL', 'SFG', 'QG'):
        raise ValueError('Please input either "ALL", "SFG" or "QG" as the galaxy_type!')
    # Stellar mass grid: caller-supplied or the default 6..13 dex grid.
    if lgMstar is None:
        lgMstar_grid = np.linspace(6.0, 13.0, num=1000, endpoint=True)
    else:
        lgMstar_grid = lgMstar
    # Tabulated double-Schechter fit parameters per redshift bin.
    # Columns: zLo zHi lgMchar Phi_1 alpha_1 Phi_2 alpha_2
    tb_SMF = Table.read(os.path.dirname(os.path.dirname(__file__))+os.sep+'Data_Tables/datatables_SMF/datatable_Davidzon2017_SMF_'+galaxy_type+'.txt', format='ascii')
    SMF_zmin = np.min(tb_SMF['zLo'])
    SMF_zmax = np.max(tb_SMF['zHi'])
    if z < SMF_zmin or z > SMF_zmax:
        raise ValueError('calc_SMF_Davidzon2017: The input redshift is out of the allowed range of %s -- %s!'%(SMF_zmin, SMF_zmax))
    # Evaluate the two-component Schechter function on the grid for every
    # redshift bin, then interpolate log10(Phi) linearly in redshift.
    lgPhiMstar_matrix = []
    for k in range(len(tb_SMF)):
        SMF_PhiMstar = Schechter_Function(lgMstar_grid, tb_SMF['Phi_1'][k], tb_SMF['lgMchar'][k], tb_SMF['alpha_1'][k]) + \
                       Schechter_Function(lgMstar_grid, tb_SMF['Phi_2'][k], tb_SMF['lgMchar'][k], tb_SMF['alpha_2'][k])
        lgPhiMstar_matrix.append(np.log10(SMF_PhiMstar))
    SMF_z = (tb_SMF['zLo'].data + tb_SMF['zHi'].data) / 2.0  # bin centers
    lgPhiMstar_matrix = np.array(lgPhiMstar_matrix)  # shape == (N_SMF_z, N_SMF_lgMstar, )
    if z <= np.min(SMF_z):
        lgPhiMstar_grid = lgPhiMstar_matrix[0]
    elif z >= np.max(SMF_z):
        lgPhiMstar_grid = lgPhiMstar_matrix[-1]
    else:
        lgPhiMstar_grid = interp1d(SMF_z, lgPhiMstar_matrix, axis=0, kind='linear')(z)
    # Floor NaNs and extremely small number densities at 10^-100 per dex.
    lgPhiMstar_grid[np.isnan(lgPhiMstar_grid)] = -100
    lgPhiMstar_grid[(lgPhiMstar_grid < -100)] = -100
    if lgMstar is None:
        return lgMstar_grid, lgPhiMstar_grid
    else:
        return lgPhiMstar_grid
def calc_SMF_Moutard2016(z, lgMstar=None, galaxy_type = 'SFG'):
    """Stellar mass function of Moutard et al. 2016 (A&A 590, A103).

    https://ui.adsabs.harvard.edu/abs/2016A%26A...590A.103M/abstract
    IMF: Chabrier 2003.

    Args:
        z: redshift (scalar).
        lgMstar: optional grid of log10(Mstar) values; if None, a default
            grid spanning 6..13 dex with 1000 points is used.
        galaxy_type: one of 'ALL', 'SFG' or 'QG'.

    Returns:
        (lgMstar_grid, lgPhiMstar_grid) when lgMstar is None, otherwise only
        lgPhiMstar_grid — log10 of the SMF per dex on the mass grid.

    Raises:
        ValueError: for a non-scalar z, an invalid galaxy_type, or a z outside
            the tabulated redshift range.
    """
    if not np.isscalar(z):
        raise ValueError('Please input a float number as the redshift!')
    # isinstance (instead of the original exact type check) also accepts str subclasses.
    if not isinstance(galaxy_type, str) or galaxy_type not in ('ALL', 'SFG', 'QG'):
        raise ValueError('Please input either "ALL", "SFG" or "QG" as the galaxy_type!')
    # Stellar mass grid: caller-supplied or the default 6..13 dex grid.
    if lgMstar is None:
        lgMstar_grid = np.linspace(6.0, 13.0, num=1000, endpoint=True)
    else:
        lgMstar_grid = lgMstar
    # Tabulated double-Schechter fit parameters per redshift bin.
    # Columns: zLo zHi lgMchar Phi_1 alpha_1 Phi_2 alpha_2
    tb_SMF = Table.read(os.path.dirname(os.path.dirname(__file__))+os.sep+'Data_Tables/datatables_SMF/datatable_Moutard2016_SMF_'+galaxy_type+'.txt', format='ascii')
    SMF_zmin = np.min(tb_SMF['zLo'])
    SMF_zmax = np.max(tb_SMF['zHi'])
    if z < SMF_zmin or z > SMF_zmax:
        raise ValueError('calc_SMF_Moutard2016: The input redshift is out of the allowed range of %s -- %s!'%(SMF_zmin, SMF_zmax))
    # Evaluate the two-component Schechter function on the grid for every
    # redshift bin, then interpolate log10(Phi) linearly in redshift.
    lgPhiMstar_matrix = []
    for k in range(len(tb_SMF)):
        SMF_PhiMstar = Schechter_Function(lgMstar_grid, tb_SMF['Phi_1'][k], tb_SMF['lgMchar'][k], tb_SMF['alpha_1'][k]) + \
                       Schechter_Function(lgMstar_grid, tb_SMF['Phi_2'][k], tb_SMF['lgMchar'][k], tb_SMF['alpha_2'][k])
        lgPhiMstar_matrix.append(np.log10(SMF_PhiMstar))
    SMF_z = (tb_SMF['zLo'].data + tb_SMF['zHi'].data) / 2.0  # bin centers
    lgPhiMstar_matrix = np.array(lgPhiMstar_matrix)  # shape == (N_SMF_z, N_SMF_lgMstar, )
    if z <= np.min(SMF_z):
        lgPhiMstar_grid = lgPhiMstar_matrix[0]
    elif z >= np.max(SMF_z):
        lgPhiMstar_grid = lgPhiMstar_matrix[-1]
    else:
        lgPhiMstar_grid = interp1d(SMF_z, lgPhiMstar_matrix, axis=0, kind='linear')(z)
    # Floor NaNs and extremely small number densities at 10^-100 per dex.
    lgPhiMstar_grid[np.isnan(lgPhiMstar_grid)] = -100
    lgPhiMstar_grid[(lgPhiMstar_grid < -100)] = -100
    if lgMstar is None:
        return lgMstar_grid, lgPhiMstar_grid
    else:
        return lgPhiMstar_grid
def calc_SMF_Ilbert2013(z, lgMstar=None, galaxy_type = 'SFG'):
    """Stellar mass function of Ilbert et al. 2013.

    IMF: Chabrier 2003.

    Args:
        z: redshift (scalar).
        lgMstar: optional grid of log10(Mstar) values; if None, a default
            grid spanning 6..13 dex with 1000 points is used.
        galaxy_type: one of 'ALL', 'SFG' or 'QG'.

    Returns:
        (lgMstar_grid, lgPhiMstar_grid) when lgMstar is None, otherwise only
        lgPhiMstar_grid — log10 of the SMF per dex on the mass grid.

    Raises:
        ValueError: for a non-scalar z, an invalid galaxy_type, or a z outside
            the tabulated redshift range.
    """
    if not np.isscalar(z):
        raise ValueError('Please input a float number as the redshift!')
    # isinstance (instead of the original exact type check) also accepts str subclasses.
    # NOTE(review): the original error message omits "QG" although 'QG' is accepted;
    # message kept verbatim for backward compatibility.
    if not isinstance(galaxy_type, str) or galaxy_type not in ('ALL', 'SFG', 'QG'):
        raise ValueError('Please input either "ALL" or "SFG" as the galaxy_type!')
    # Stellar mass grid: caller-supplied or the default 6..13 dex grid.
    if lgMstar is None:
        lgMstar_grid = np.linspace(6.0, 13.0, num=1000, endpoint=True)
    else:
        lgMstar_grid = lgMstar
    # Tabulated double-Schechter fit parameters per redshift bin.
    # Columns: zLo zHi lgMchar Phi_1 alpha_1 Phi_2 alpha_2
    tb_SMF = Table.read(os.path.dirname(os.path.dirname(__file__))+os.sep+'Data_Tables/datatables_SMF/datatable_Ilbert2013_SMF_'+galaxy_type+'.txt', format='ascii')
    SMF_zmin = np.min(tb_SMF['zLo'])
    SMF_zmax = np.max(tb_SMF['zHi'])
    if z < SMF_zmin or z > SMF_zmax:
        raise ValueError('calc_SMF_Ilbert2013: The input redshift is out of the allowed range of %s -- %s!'%(SMF_zmin, SMF_zmax))
    # Evaluate the two-component Schechter function on the grid for every
    # redshift bin, then interpolate log10(Phi) linearly in redshift.
    lgPhiMstar_matrix = []
    for k in range(len(tb_SMF)):
        SMF_PhiMstar = Schechter_Function(lgMstar_grid, tb_SMF['Phi_1'][k], tb_SMF['lgMchar'][k], tb_SMF['alpha_1'][k]) + \
                       Schechter_Function(lgMstar_grid, tb_SMF['Phi_2'][k], tb_SMF['lgMchar'][k], tb_SMF['alpha_2'][k])
        lgPhiMstar_matrix.append(np.log10(SMF_PhiMstar))
    SMF_z = (tb_SMF['zLo'].data + tb_SMF['zHi'].data) / 2.0  # bin centers
    lgPhiMstar_matrix = np.array(lgPhiMstar_matrix)  # shape == (N_SMF_z, N_SMF_lgMstar, )
    if z <= np.min(SMF_z):
        lgPhiMstar_grid = lgPhiMstar_matrix[0]
    elif z >= np.max(SMF_z):
        lgPhiMstar_grid = lgPhiMstar_matrix[-1]
    else:
        lgPhiMstar_grid = interp1d(SMF_z, lgPhiMstar_matrix, axis=0, kind='linear')(z)
    # Floor NaNs and extremely small number densities at 10^-100 per dex.
    lgPhiMstar_grid[np.isnan(lgPhiMstar_grid)] = -100
    lgPhiMstar_grid[(lgPhiMstar_grid < -100)] = -100
    if lgMstar is None:
        return lgMstar_grid, lgPhiMstar_grid
    else:
        return lgPhiMstar_grid
def calc_SMF_Peng2010(z, lgMstar=None, galaxy_type='SFG'):
    """Compute the stellar mass function (SMF) of Peng et al. (2010).

    IMF: Chabrier 2003. The QG SMF is a double-Schechter function; the
    SFG SMF is single-Schechter. 'ALL' is the sum of 'SFG' and 'QG'.

    Parameters
    ----------
    z : float
        Redshift (scalar); must lie inside the (single) table bin.
    lgMstar : array-like, optional
        log10(Mstar/Msun) grid; defaults to 1000 points over 6.0--13.0.
    galaxy_type : str
        One of 'ALL', 'SFG' or 'QG'.

    Returns
    -------
    (lgMstar_grid, lgPhiMstar_grid) when `lgMstar` is None, otherwise
    only lgPhiMstar_grid (log10 of Phi per dex).
    """
    # check z
    if not np.isscalar(z):
        raise ValueError('Please input a float number as the redshift!')
    #
    # check galaxy_type
    # (bug fix: 'ALL' was rejected here although it is explicitly handled below)
    if not (type(galaxy_type) is str):
        raise ValueError('Please input either "ALL", "SFG" or "QG" as the galaxy_type!')
    else:
        if not (galaxy_type in ['ALL', 'SFG', 'QG']):
            raise ValueError('Please input either "ALL", "SFG" or "QG" as the galaxy_type!')
    #
    # make lgMstar
    if lgMstar is None:
        lgMstar_grid = np.linspace(6.0, 13.0, num=1000, endpoint=True)
    else:
        lgMstar_grid = lgMstar
    #
    # galaxy_type: 'ALL' needs both the SFG and the QG tables
    if galaxy_type == 'ALL':
        galaxy_types = ['SFG', 'QG']
    else:
        galaxy_types = [galaxy_type]
    #
    # read SMF table(s); columns: zLo zHi lgMchar Phi_1 alpha_1 Phi_2 alpha_2
    for t_galaxy_type in galaxy_types:
        # (bug fix: the file name must use the per-iteration t_galaxy_type,
        # not the user-supplied galaxy_type, otherwise 'ALL' reads a
        # nonexistent table twice)
        tb_SMF = Table.read(os.path.dirname(os.path.dirname(__file__))+os.sep+'Data_Tables/datatables_SMF/datatable_PengYingjie2010_SMF_'+t_galaxy_type+'.txt', format='ascii')
        SMF_zmin = np.min(tb_SMF['zLo'])
        SMF_zmax = np.max(tb_SMF['zHi'])
        #
        # there is only one redshift bin, but we still check the input z range
        if z < SMF_zmin or z > SMF_zmax:
            raise ValueError('calc_SMF_Peng2010: The input redshift is out of the allowed range of %s -- %s!'%(SMF_zmin, SMF_zmax))
        #
        # just calculate SMF without interpolation
        SMF_z = (tb_SMF['zLo'].data + tb_SMF['zHi'].data) / 2.0
        SMF_phi_1 = tb_SMF['Phi_1'].data
        SMF_alpha_1 = tb_SMF['alpha_1'].data
        SMF_lgMchar = tb_SMF['lgMchar'].data
        SMF_PhiMstar = Schechter_Function(lgMstar_grid, SMF_phi_1, SMF_lgMchar, SMF_alpha_1) # one component
        if t_galaxy_type == 'SFG':
            SMF_PhiMstar_SFG = copy(SMF_PhiMstar)
        elif t_galaxy_type == 'QG':
            # QG SMF has a second Schechter component
            SMF_phi_2 = tb_SMF['Phi_2'].data
            SMF_alpha_2 = tb_SMF['alpha_2'].data
            SMF_PhiMstar_QG = SMF_PhiMstar + Schechter_Function(lgMstar_grid, SMF_phi_2, SMF_lgMchar, SMF_alpha_2) # two component QG SMF
    #
    if galaxy_type == 'SFG':
        lgPhiMstar_grid = np.log10(SMF_PhiMstar_SFG)
    elif galaxy_type == 'QG':
        lgPhiMstar_grid = np.log10(SMF_PhiMstar_QG)
    elif galaxy_type == 'ALL':
        lgPhiMstar_grid = np.log10(SMF_PhiMstar_SFG+SMF_PhiMstar_QG)
    #
    if lgMstar is None:
        return lgMstar_grid, lgPhiMstar_grid
    else:
        return lgPhiMstar_grid
def calc_SMF_Kelvin2014(z, lgMstar=None, galaxy_type='SFG'):
    """Compute the stellar mass function (SMF) of Kelvin et al. (2014).

    Reference: 2014MNRAS.444.1647K (GAMA survey), Table 3 — spheroid
    dominated (used here as 'QG') and disc dominated ('SFG').
    IMF: Chabrier 2003. 'ALL' is the sum of 'SFG' and 'QG'.

    Parameters
    ----------
    z : float
        Redshift (scalar); must lie inside the (single) table bin.
    lgMstar : array-like, optional
        log10(Mstar/Msun) grid; defaults to 1000 points over 6.0--13.0.
    galaxy_type : str
        One of 'ALL', 'SFG' or 'QG'.

    Returns
    -------
    (lgMstar_grid, lgPhiMstar_grid) when `lgMstar` is None, otherwise
    only lgPhiMstar_grid (log10 of Phi per dex).
    """
    # check z
    if not np.isscalar(z):
        raise ValueError('Please input a float number as the redshift!')
    #
    # check galaxy_type
    # (bug fix: 'ALL' was rejected here although it is explicitly handled below)
    if not (type(galaxy_type) is str):
        raise ValueError('Please input either "ALL", "SFG" or "QG" as the galaxy_type!')
    else:
        if not (galaxy_type in ['ALL', 'SFG', 'QG']):
            raise ValueError('Please input either "ALL", "SFG" or "QG" as the galaxy_type!')
    #
    # make lgMstar
    if lgMstar is None:
        lgMstar_grid = np.linspace(6.0, 13.0, num=1000, endpoint=True)
    else:
        lgMstar_grid = lgMstar
    #
    # galaxy_type: 'ALL' needs both the SFG and the QG tables
    if galaxy_type == 'ALL':
        galaxy_types = ['SFG', 'QG']
    else:
        galaxy_types = [galaxy_type]
    #
    # read SMF table(s); columns: zLo zHi lgMchar Phi_1 alpha_1
    for t_galaxy_type in galaxy_types:
        # (bug fix: the file name must use the per-iteration t_galaxy_type,
        # not the user-supplied galaxy_type, otherwise 'ALL' reads a
        # nonexistent table twice)
        tb_SMF = Table.read(os.path.dirname(os.path.dirname(__file__))+os.sep+'Data_Tables/datatables_SMF/datatable_Kelvin2014_SMF_'+t_galaxy_type+'.txt', format='ascii')
        SMF_zmin = np.min(tb_SMF['zLo'])
        SMF_zmax = np.max(tb_SMF['zHi'])
        #
        # there is only one redshift bin, but we still check the input z range
        if z < SMF_zmin or z > SMF_zmax:
            raise ValueError('calc_SMF_Kelvin2014: The input redshift is out of the allowed range of %s -- %s!'%(SMF_zmin, SMF_zmax))
        #
        # just calculate SMF without interpolation
        SMF_z = (tb_SMF['zLo'].data + tb_SMF['zHi'].data) / 2.0
        SMF_phi_1 = tb_SMF['Phi_1'].data
        SMF_alpha_1 = tb_SMF['alpha_1'].data
        SMF_lgMchar = tb_SMF['lgMchar'].data
        SMF_PhiMstar = Schechter_Function(lgMstar_grid, SMF_phi_1, SMF_lgMchar, SMF_alpha_1) # one component
        if t_galaxy_type == 'SFG':
            SMF_PhiMstar_SFG = copy(SMF_PhiMstar)
        elif t_galaxy_type == 'QG':
            SMF_PhiMstar_QG = copy(SMF_PhiMstar)
    #
    if galaxy_type == 'SFG':
        lgPhiMstar_grid = np.log10(SMF_PhiMstar_SFG)
    elif galaxy_type == 'QG':
        lgPhiMstar_grid = np.log10(SMF_PhiMstar_QG)
    elif galaxy_type == 'ALL':
        lgPhiMstar_grid = np.log10(SMF_PhiMstar_SFG+SMF_PhiMstar_QG)
    #
    if lgMstar is None:
        return lgMstar_grid, lgPhiMstar_grid
    else:
        return lgPhiMstar_grid
def calc_SMF_Wright2018_single_component(z, lgMstar=None):
    """Single-Schechter SMF of Wright et al. (2018).

    Reference: arXiv:1808.01754, 2018MNRAS.480.3491W. IMF: Chabrier 2003.
    The Schechter parameters are quadratic polynomials in redshift.

    Returns (lgMstar_grid, lgPhiMstar_grid) when `lgMstar` is None,
    otherwise only lgPhiMstar_grid (log10 of Phi per dex).
    """
    if not np.isscalar(z):
        raise ValueError('Please input a float number as the redshift!')
    # stellar mass grid (log10 Msun)
    if lgMstar is None:
        lgMstar_grid = np.linspace(6.0, 13.0, num=1000, endpoint=True)
    else:
        lgMstar_grid = lgMstar
    # redshift-dependent Schechter parameters (quadratic fits from the paper)
    lgMchar = 10.791 + 0.558 * z + (-0.102) * z**2
    alpha = -1.160 + (-0.274) * z + 0.028 * z**2
    phi = 10**(-2.455 + (-0.883) * z + 0.093 * z**2)
    # evaluate the Schechter function and take log10
    lgPhiMstar_grid = np.log10(Schechter_Function(lgMstar_grid, phi, lgMchar, alpha))
    return (lgMstar_grid, lgPhiMstar_grid) if lgMstar is None else lgPhiMstar_grid
def calc_SMF_Wright2018_double_component(z, lgMstar=None):
    """Double-Schechter SMF of Wright et al. (2018).

    Reference: arXiv:1808.01754, 2018MNRAS.480.3491W. IMF: Chabrier 2003.
    Both Schechter components share one characteristic mass; all
    parameters are quadratic polynomials in redshift.

    Returns (lgMstar_grid, lgPhiMstar_grid) when `lgMstar` is None,
    otherwise only lgPhiMstar_grid (log10 of Phi per dex).
    """
    if not np.isscalar(z):
        raise ValueError('Please input a float number as the redshift!')
    # stellar mass grid (log10 Msun)
    if lgMstar is None:
        lgMstar_grid = np.linspace(6.0, 13.0, num=1000, endpoint=True)
    else:
        lgMstar_grid = lgMstar
    # redshift-dependent Schechter parameters (quadratic fits from the paper)
    lgMchar = 10.831 + 0.153 * z + (-0.033) * z**2
    alpha_1 = -0.579 + 0.048 * z + 0.022 * z**2
    alpha_2 = -1.489 + (-0.087) * z + 0.016 * z**2
    phi_1 = 10**(-2.312 + (-0.658) * z + 0.016 * z**2)
    phi_2 = 10**(-3.326 + (-0.158) * z + (-0.002) * z**2)
    # sum of the two components, then log10
    total_phi = Schechter_Function(lgMstar_grid, phi_1, lgMchar, alpha_1) \
              + Schechter_Function(lgMstar_grid, phi_2, lgMchar, alpha_2)
    lgPhiMstar_grid = np.log10(total_phi)
    return (lgMstar_grid, lgPhiMstar_grid) if lgMstar is None else lgPhiMstar_grid
def calc_Gladders2013_CSFRD(z, t0, tau):
    """Log-normal cosmic SFR density of Gladders et al. (2013).

    The CSFRD is a log-normal function of cosmic age t (Gyr) with peak
    location `t0` and width `tau` (in ln-time), divided by 1.64 to
    convert to a Chabrier IMF.
    """
    t_age = cosmo.age(z).value  # cosmic age in Gyr at redshift z
    peak_time = float(t0)
    width = float(tau)
    # log-normal: normalization term times the Gaussian-in-ln(t) term
    normalization = 1.0 / (t_age * sqrt(2 * pi * width**2))
    lognormal_term = exp(-(ln(t_age) - peak_time)**2 / (2 * width**2))
    # convert to Chabrier IMF
    return normalization * lognormal_term / 1.64
def calc_MadauDickinson2014_CSFRD(z):
    """Cosmic star formation rate density of Madau & Dickinson (2014).

    The original fit assumes a Salpeter IMF; the result is divided by
    1.64 to convert to a Chabrier IMF. Accepts a scalar, list or numpy
    array of redshifts (lists are converted to arrays).
    """
    if isinstance(z, list):
        z = np.array(z)
    opz = 1.0 + z
    rho_SFR = 0.015 * opz**2.7 / (1.0 + (opz / 2.9)**5.6)
    # convert from Salpeter to Chabrier IMF
    return rho_SFR / 1.64
def calc_Mstar_integrating_CSFRD_dzliu2018(z):
    """Cumulative stellar mass density formed by redshift(s) `z`.

    Integrates the Madau & Dickinson (2014) CSFRD over cosmic time from
    z ~ 10.75 down to z = 0 on a log(1+z)-uniform grid, subtracting a
    logarithmic stellar mass-loss fraction (Conroy & Wechsler 2009),
    then interpolates the cumulative mass at the requested `z`.
    `z` may be a scalar or array. Units follow the CSFRD (Msun per
    comoving volume) times Gyr*1e9, i.e. Msun per comoving volume.
    """
    # make z_list: 200 points log-uniform in (1+z), reversed so it runs
    # from high redshift (early times) to z=0 (descending z order)
    #z_list = np.arange(9.75, 0.75, -0.5).tolist()
    #z_list.extend([0.75, 0.50, 0.25, 0.125, 0.0625, 0.03125, 0.00000])
    opz_list = np.logspace(np.log10(1.0+0.0), np.log10(1.0+10.75), num=200, endpoint=True)
    opz_list = opz_list[::-1]
    z_list = opz_list - 1.0
    t_list = cosmo.age(z_list).value
    #
    # integrate dzliu2018 CSFRD
    #CSFRD = calc_Gladders2013_CSFRD(z_list, 1.57, 0.67) # Gladder2013 form of CSFRD with Liu et al. (2018) parameters
    CSFRD = calc_MadauDickinson2014_CSFRD(z_list)
    Mstar_cumulated = 0.0
    CSFRD_z_list = [z_list[0]]
    CSFRD_Mstar_list = [1e-30] #<TODO># initial mass (tiny nonzero seed so log10 below is finite)
    for i in range(len(z_list)-1):
        #z_bin = (z_list[i+1] + z_list[i]) / 2.0
        t_bin = t_list[i+1] # Gyr, time since the Big Bang
        time_bin = t_list[i+1] - t_list[i] # Gyr, time interval within the redshift bin
        #mass_loss_time_scale = 1.4 # Myr, BEHROOZI & Silk arXiv:1404.5299
        #mass_loss_time_scale = 3.0 # Myr, Ilbert et al. (2013) PDF page 11 left middle
        mass_loss_time_scale = 0.3 # Myr, Conroy & Wechsler (2009, bibcode 2009ApJ...696..620C) arxiv PDF page 5 Eq (11).
        # trapezoidal step: mean CSFRD over the bin times the bin width in yr
        Mstar_formed = (CSFRD[i]+CSFRD[i+1])/2.0 * time_bin * 1e9 # Msun/yr * Gyr * 1e9 => Msun
        # fractional mass returned to the ISM, logarithmic in elapsed time
        # NOTE(review): uses total age t_bin, not time since each stellar
        # population formed — an approximation inherited from the references.
        Mstar_loss_frac = 0.05 * ln(1.0+(t_bin)/(mass_loss_time_scale*1e-3))
        # see Ilbert et al. (2013) PDF page 11 left middle; Conroy & Wechsler (2009) arxiv PDF page 5 Eq (11).
        # see https://arxiv.org/pdf/1404.5299.pdf PDF page 3 Eq (6); Conroy & Wechsler (2009) arxiv PDF page 5 Eq (11).
        #Mstar_loss_frac = 0.0 # no mass loss at all <TODO>
        Mstar_cumulated += Mstar_formed * (1.0 - Mstar_loss_frac) # total new star created, locked in stellar mass
        #CSFRD_z_list.append(z_bin)
        CSFRD_z_list.append(z_list[i+1])
        CSFRD_Mstar_list.append(Mstar_cumulated)
    CSFRD_z_list = np.array(CSFRD_z_list)[::-1] # sort so that z increases
    CSFRD_Mstar_list = np.array(CSFRD_Mstar_list)[::-1]
    # interpolate log10(Mstar) linearly in z (k=1 spline) at the output z
    Mstar_cumulated_at_z = 10**(InterpolatedUnivariateSpline(CSFRD_z_list, np.log10(CSFRD_Mstar_list), k=1)(z))
    return Mstar_cumulated_at_z
def calc_SMF_dzliu2018(z=None, lgMstar=None, galaxy_type='SFG', z_list=None, tuning_params='', verbose=True):
    """Composite stellar mass function ("dzliu 2018" compilation).

    IMF: Chabrier 2003. Builds per-redshift-bin SMFs from literature
    fits — Peng et al. (2010) at z < 0.085, a linear blend of Peng
    (2010) and Davidzon et al. (2017) at 0.085 <= z < 0.2, Davidzon
    (2017) at 0.2 <= z < 4, and Davidzon's z=4 values beyond — then
    (by default) rescales each bin so the mass-integrated SMF matches
    the CSFRD-cumulated stellar mass.

    Parameters
    ----------
    z : float, list or array, optional
        Output redshift(s). If None, the full grid is returned.
    lgMstar : array-like, optional
        log10(Mstar/Msun) grid; defaults to 1000 points over 6.0--13.0.
    galaxy_type : str
        'SFG', 'QG' or 'ALL'.
    z_list : list, optional
        Redshift bin edges in descending order; a default 40-point
        list is built when None.
    tuning_params : str
        e.g. 'D17-no-renorm;' disables the CSFRD renormalization in
        the Davidzon (2017) redshift range (0.2 <= z < 4 only).
    verbose : bool
        Print per-bin renormalization diagnostics.

    Returns
    -------
    If z is None: (SMF_z_list, lgMstar_grid, lgPhiMstar_matrix).
    Else if lgMstar is None: (lgMstar_grid, lgPhiMstar_grid).
    Else: lgPhiMstar_grid.
    """
    # check z: a scalar, or a list/array (lists are converted to arrays)
    if z is not None:
        if not np.isscalar(z):
            #raise ValueError('Please input a float number as the redshift!')
            if type(z) is list:
                z = np.array(z)
            z_is_vector = True
        else:
            z_is_vector = False
    #
    # check galaxy_type
    if not (type(galaxy_type) is str):
        raise ValueError('Please input either "ALL", "SFG" or "QG" as the galaxy_type!')
    else:
        if not (galaxy_type in ['SFG', 'QG', 'ALL']):
            raise ValueError('Please input either "ALL", "SFG" or "QG" as the galaxy_type!')
    #
    # make lgMstar
    if lgMstar is None:
        lgMstar_grid = np.linspace(6.0, 13.0, num=1000, endpoint=True)
    else:
        lgMstar_grid = lgMstar
    #
    # make z_list: descending redshift bin edges, denser at low z
    if z_list is None:
        # <20200105> precision issue
        # <20200105> now we increase the sampling of z_list from 29 to 40, so that the SMF are better interpolated.
        z_list = []
        z_list.extend(np.arange(10.75, 5.00, -0.5).tolist())
        z_list.extend(np.arange(5.00, 1.00, -0.25).tolist())
        z_list.extend(np.arange(1.00, 0.125, -0.125).tolist())
        z_list.extend([0.125, 0.0625, 0.03125, 0.015625, 0.00000])
        # <20200105> precision issue (end)
    SMF_z_list = []
    SMF_Phi_list = []
    # cumulative stellar mass (from CSFRD integration) at every bin edge
    Mstar_cumulated_list = calc_Mstar_integrating_CSFRD_dzliu2018(z_list)
    for i in range(len(z_list)-1):
        z_bin = (z_list[i] + z_list[i+1]) / 2.0
        t_bin = cosmo.age(z_bin).value # Gyr since the Big Bang
        Schechter_M = 10**lgMstar_grid
        Schechter_Mstep = lgMstar_grid[1] - lgMstar_grid[0]
        do_renorm_by_CSFRD_cumulated_Mstar = True
        #
        # first construct the SFG and QG SMFs at this redshift bin center
        if z_bin < 0.02:
            # below the Peng (2010) coverage: clamp at z = 0.02
            Schechter_P_QG = 10**(calc_SMF_Peng2010(0.02, lgMstar=lgMstar_grid, galaxy_type='QG'))
            Schechter_P_SFG = 10**(calc_SMF_Peng2010(0.02, lgMstar=lgMstar_grid, galaxy_type='SFG'))
        elif z_bin < 0.085:
            Schechter_P_QG = 10**(calc_SMF_Peng2010(z_bin, lgMstar=lgMstar_grid, galaxy_type='QG'))
            Schechter_P_SFG = 10**(calc_SMF_Peng2010(z_bin, lgMstar=lgMstar_grid, galaxy_type='SFG'))
        elif z_bin < 0.2:
            # linear blend in z between Peng2010 (at z=0.085) and Davidzon2017 (at z=0.2)
            Schechter_P_QG = 10**(calc_SMF_Peng2010(0.085, lgMstar=lgMstar_grid, galaxy_type='QG')) * (1.0-((0.2-0.085)-(0.2-z_bin))/(0.2-0.085)) + 10**(calc_SMF_Davidzon2017(0.2, lgMstar=lgMstar_grid, galaxy_type='QG')) * (0.0+((0.2-0.085)-(0.2-z_bin))/(0.2-0.085))
            Schechter_P_SFG = 10**(calc_SMF_Peng2010(0.085, lgMstar=lgMstar_grid, galaxy_type='SFG')) * (1.0-((0.2-0.085)-(0.2-z_bin))/(0.2-0.085)) + 10**(calc_SMF_Davidzon2017(0.2, lgMstar=lgMstar_grid, galaxy_type='SFG')) * (0.0+((0.2-0.085)-(0.2-z_bin))/(0.2-0.085))
        elif z_bin < 4.0:
            Schechter_P_QG = 10**(calc_SMF_Davidzon2017(z_bin, lgMstar=lgMstar_grid, galaxy_type='QG'))
            Schechter_P_SFG = 10**(calc_SMF_Davidzon2017(z_bin, lgMstar=lgMstar_grid, galaxy_type='SFG'))
            #<TODO># QG/SFG fraction seems lower at z~1-3?
            #if z_bin > 1.0 and z_bin < 3.0:
            #    Schechter_P_QG = Schechter_P_QG * np.interp(z_bin, [1.0, 1.5, 2.0, 2.5, 3.0], [1.0, 1.2, 1.2, 1.2, 1.0])
            # optionally keep the Davidzon2017 SMF as published (no renorm)
            if tuning_params is not None:
                if tuning_params.find('D17-no-renorm')>=0:
                    do_renorm_by_CSFRD_cumulated_Mstar = False
        else:
            # beyond z = 4: hold the Davidzon2017 z=4 SMF fixed
            Schechter_P_QG = 10**(calc_SMF_Davidzon2017(4.0, lgMstar=lgMstar_grid, galaxy_type='QG'))
            Schechter_P_SFG = 10**(calc_SMF_Davidzon2017(4.0, lgMstar=lgMstar_grid, galaxy_type='SFG'))
        #
        # Note that my Schechter_Function already contains * ln(10), i.e., it is per dex!
        #
        # then re-normalize SMF to total stellar mass integrated from CSFRD
        # (assumed some mass loss, see the called function);
        # z_list[i+1] is the lower-z edge of the bin (z_list is descending)
        Mstar_cumulated = Mstar_cumulated_list[i+1]
        if do_renorm_by_CSFRD_cumulated_Mstar:
            Schechter_M_total = sum((Schechter_P_QG+Schechter_P_SFG)*Schechter_M*Schechter_Mstep) # P is per dex, but our Schechter_Function already contains ln(10), hence we do not need to multiply ln(10) here. \int P(M) dM = \int P(M) M dln(M) = \int P(M)*ln(10) M dlg(M)
            renorm_factor = Mstar_cumulated / Schechter_M_total
            Schechter_P_SFG = Schechter_P_SFG * renorm_factor # per dex but already contains ln(10)
            Schechter_P_QG = Schechter_P_QG * renorm_factor # per dex but already contains ln(10)
            # NOTE(review): Schechter_P_ALL is only assigned inside this
            # branch; with 'D17-no-renorm' and galaxy_type='ALL' the append
            # below would hit an unassigned name — confirm intended usage.
            Schechter_P_ALL = Schechter_P_SFG + Schechter_P_QG
            if verbose:
                print('z = %.04f, lgMstar_CSFRD = %0.2f, lgMstar_SMF = %0.2f, renorm = %s'%(z_bin, np.log10(Mstar_cumulated), np.log10(Schechter_M_total), renorm_factor))
        #
        # --> checked OK
        # z = 0.0156, lgCSFRD = -2.04, lgMstar_CSFRD = 8.40, lgMstar_SMF = 8.35
        #
        SMF_z_list.append(z_list[i+1]) # append the lower redshift end point of each bin
        if galaxy_type == 'SFG':
            SMF_Phi_list.append(Schechter_P_SFG)
        elif galaxy_type == 'QG':
            SMF_Phi_list.append(Schechter_P_QG)
        elif galaxy_type == 'ALL':
            SMF_Phi_list.append(Schechter_P_ALL)
    #
    # spline at z for the output
    SMF_z_list = np.array(SMF_z_list)[::-1] # make z increasing order
    SMF_Phi_list = np.array(SMF_Phi_list)[::-1].T # make z increasing order
    #
    if z is None:
        # return the whole (z, lgMstar) grid
        lgPhiMstar_matrix = np.log10(SMF_Phi_list.T)
        return SMF_z_list, lgMstar_grid, lgPhiMstar_matrix
    elif z_is_vector:
        lgPhiMstar_grid = interp1d(SMF_z_list, np.log10(SMF_Phi_list), kind='cubic')(z) # kind='nearest' 'linear' 'quadratic' 'cubic'
    else:
        lgPhiMstar_grid = interp1d(SMF_z_list, np.log10(SMF_Phi_list), kind='cubic')(z) # kind='nearest' 'linear' 'quadratic' 'cubic'
    #
    if lgMstar is None:
        return lgMstar_grid, lgPhiMstar_grid
    else:
        return lgPhiMstar_grid
def calc_SMF_dzliu2020(z=None, lgMstar=None, galaxy_type='SFG', z_list=None, tuning_params='', verbose=True):
    """Composite stellar mass function ("dzliu 2020" compilation).

    IMF: Chabrier 2003. Like calc_SMF_dzliu2018 but with an updated
    low/intermediate-redshift choice of literature SMFs: Kelvin et al.
    (2014) at z < 0.06, a log(1+z)-weighted blend of Kelvin (2014) and
    Moutard et al. (2016) at 0.06 <= z < 0.2, Moutard (2016) at
    0.2 <= z < 1.5, Davidzon et al. (2017) at 1.5 <= z < 4, and
    Davidzon's z=4 values beyond. By default each redshift bin is
    rescaled so the mass-integrated SMF matches the CSFRD-cumulated
    stellar mass.

    Parameters
    ----------
    z : float, list or array, optional
        Output redshift(s). If None, the full grid is returned.
    lgMstar : array-like, optional
        log10(Mstar/Msun) grid; defaults to 1000 points over 6.0--13.0.
    galaxy_type : str
        'SFG', 'QG' or 'ALL'.
    z_list : list, optional
        Redshift bin edges in descending order; a default 40-point
        list is built when None.
    tuning_params : str
        e.g. 'D17-no-renorm;' disables the CSFRD renormalization in
        the Davidzon (2017) redshift range (1.5 <= z < 4 only).
    verbose : bool
        Print per-bin renormalization diagnostics.

    Returns
    -------
    If z is None: (SMF_z_list, lgMstar_grid, lgPhiMstar_matrix).
    Else if lgMstar is None: (lgMstar_grid, lgPhiMstar_grid).
    Else: lgPhiMstar_grid.
    """
    # check z: a scalar, or a list/array (lists are converted to arrays)
    if z is not None:
        if not np.isscalar(z):
            #raise ValueError('Please input a float number as the redshift!')
            if type(z) is list:
                z = np.array(z)
            z_is_vector = True
        else:
            z_is_vector = False
    #
    # check galaxy_type
    if not (type(galaxy_type) is str):
        raise ValueError('Please input either "ALL", "SFG" or "QG" as the galaxy_type!')
    else:
        if not (galaxy_type in ['SFG', 'QG', 'ALL']):
            raise ValueError('Please input either "ALL", "SFG" or "QG" as the galaxy_type!')
    #
    # make lgMstar
    if lgMstar is None:
        lgMstar_grid = np.linspace(6.0, 13.0, num=1000, endpoint=True)
    else:
        lgMstar_grid = lgMstar
    #
    # make z_list: descending redshift bin edges, denser at low z
    if z_list is None:
        # <20200105> precision issue
        # <20200105> now we increase the sampling of z_list from 29 to 40, so that the SMF are better interpolated.
        z_list = []
        z_list.extend(np.arange(10.75, 5.00, -0.5).tolist())
        z_list.extend(np.arange(5.00, 1.00, -0.25).tolist())
        z_list.extend(np.arange(1.00, 0.125, -0.125).tolist())
        z_list.extend([0.125, 0.0625, 0.03125, 0.015625, 0.00000])
        # <20200105> precision issue (end)
    SMF_z_list = []
    SMF_Phi_list = []
    # cumulative stellar mass (from CSFRD integration) at every bin edge
    Mstar_cumulated_list = calc_Mstar_integrating_CSFRD_dzliu2018(z_list)
    for i in range(len(z_list)-1):
        z_bin = (z_list[i] + z_list[i+1]) / 2.0
        t_bin = cosmo.age(z_bin).value # Gyr since the Big Bang
        Schechter_M = 10**lgMstar_grid
        Schechter_Mstep = lgMstar_grid[1] - lgMstar_grid[0]
        do_renorm_by_CSFRD_cumulated_Mstar = True
        #
        # first construct the SFG and QG SMFs at this redshift bin center
        if z_bin < 0.025:
            # below the Kelvin (2014) coverage: clamp at z = 0.025
            Schechter_P_QG = 10**(calc_SMF_Kelvin2014(0.025, lgMstar=lgMstar_grid, galaxy_type='QG'))
            Schechter_P_SFG = 10**(calc_SMF_Kelvin2014(0.025, lgMstar=lgMstar_grid, galaxy_type='SFG'))
        elif z_bin < 0.06:
            Schechter_P_QG = 10**(calc_SMF_Kelvin2014(z_bin, lgMstar=lgMstar_grid, galaxy_type='QG'))
            Schechter_P_SFG = 10**(calc_SMF_Kelvin2014(z_bin, lgMstar=lgMstar_grid, galaxy_type='SFG'))
        elif z_bin < 0.2:
            # interpolate Kelvin2014 local SMF and Moutard2016 SMF,
            # weighting linearly in log10(1+z)
            t_lower_opz = np.log10(1.0+0.06)
            t_upper_opz = np.log10(1.0+0.2)
            t_opz = np.log10(1.0+z_bin)
            t_lower_factor = (t_upper_opz-t_opz)/(t_upper_opz-t_lower_opz)
            t_upper_factor = 1.0-t_lower_factor
            Schechter_P_QG = 10**(calc_SMF_Kelvin2014(0.06, lgMstar=lgMstar_grid, galaxy_type='QG')) * t_lower_factor + 10**(calc_SMF_Moutard2016(0.2, lgMstar=lgMstar_grid, galaxy_type='QG')) * t_upper_factor
            Schechter_P_SFG = 10**(calc_SMF_Kelvin2014(0.06, lgMstar=lgMstar_grid, galaxy_type='SFG')) * t_lower_factor + 10**(calc_SMF_Moutard2016(0.2, lgMstar=lgMstar_grid, galaxy_type='SFG')) * t_upper_factor
        elif z_bin < 1.5:
            Schechter_P_QG = 10**(calc_SMF_Moutard2016(z_bin, lgMstar=lgMstar_grid, galaxy_type='QG'))
            Schechter_P_SFG = 10**(calc_SMF_Moutard2016(z_bin, lgMstar=lgMstar_grid, galaxy_type='SFG'))
        elif z_bin < 4.0:
            Schechter_P_QG = 10**(calc_SMF_Davidzon2017(z_bin, lgMstar=lgMstar_grid, galaxy_type='QG'))
            Schechter_P_SFG = 10**(calc_SMF_Davidzon2017(z_bin, lgMstar=lgMstar_grid, galaxy_type='SFG'))
            #<TODO># QG/SFG fraction seems lower at z~1-3?
            #if z_bin > 1.0 and z_bin < 3.0:
            #    Schechter_P_QG = Schechter_P_QG * np.interp(z_bin, [1.0, 1.5, 2.0, 2.5, 3.0], [1.0, 1.2, 1.2, 1.2, 1.0])
            # optionally keep the Davidzon2017 SMF as published (no renorm)
            if tuning_params is not None:
                if tuning_params.find('D17-no-renorm')>=0:
                    do_renorm_by_CSFRD_cumulated_Mstar = False
        else:
            # beyond z = 4: hold the Davidzon2017 z=4 SMF fixed
            Schechter_P_QG = 10**(calc_SMF_Davidzon2017(4.0, lgMstar=lgMstar_grid, galaxy_type='QG'))
            Schechter_P_SFG = 10**(calc_SMF_Davidzon2017(4.0, lgMstar=lgMstar_grid, galaxy_type='SFG'))
        #
        # Note that my Schechter_Function already contains * ln(10), i.e., it is per dex!
        #
        # then re-normalize SMF to total stellar mass integrated from CSFRD
        # (assumed some mass loss, see the called function);
        # z_list[i+1] is the lower-z edge of the bin (z_list is descending)
        Mstar_cumulated = Mstar_cumulated_list[i+1]
        if do_renorm_by_CSFRD_cumulated_Mstar:
            Schechter_M_total = sum((Schechter_P_QG+Schechter_P_SFG)*Schechter_M*Schechter_Mstep) # P is per dex, but our Schechter_Function already contains ln(10), hence we do not need to multiply ln(10) here. \int P(M) dM = \int P(M) M dln(M) = \int P(M)*ln(10) M dlg(M)
            renorm_factor = Mstar_cumulated / Schechter_M_total
            Schechter_P_SFG = Schechter_P_SFG * renorm_factor # per dex but already contains ln(10)
            Schechter_P_QG = Schechter_P_QG * renorm_factor # per dex but already contains ln(10)
            # NOTE(review): Schechter_P_ALL is only assigned inside this
            # branch; with 'D17-no-renorm' and galaxy_type='ALL' the append
            # below would hit an unassigned name — confirm intended usage.
            Schechter_P_ALL = Schechter_P_SFG + Schechter_P_QG
            if verbose:
                print('z = %.04f, lgMstar_CSFRD = %0.2f, lgMstar_SMF = %0.2f, renorm = %s'%(z_bin, np.log10(Mstar_cumulated), np.log10(Schechter_M_total), renorm_factor))
        #
        # --> checked OK
        # z = 0.0156, lgCSFRD = -2.04, lgMstar_CSFRD = 8.40, lgMstar_SMF = 8.35
        #
        SMF_z_list.append(z_list[i+1]) # append the lower redshift end point of each bin
        if galaxy_type == 'SFG':
            SMF_Phi_list.append(Schechter_P_SFG)
        elif galaxy_type == 'QG':
            SMF_Phi_list.append(Schechter_P_QG)
        elif galaxy_type == 'ALL':
            SMF_Phi_list.append(Schechter_P_ALL)
    #
    # spline at z for the output
    SMF_z_list = np.array(SMF_z_list)[::-1] # make z increasing order
    SMF_Phi_list = np.array(SMF_Phi_list)[::-1].T # make z increasing order
    #
    if z is None:
        # return the whole (z, lgMstar) grid
        lgPhiMstar_matrix = np.log10(SMF_Phi_list.T)
        return SMF_z_list, lgMstar_grid, lgPhiMstar_matrix
    elif z_is_vector:
        lgPhiMstar_grid = interp1d(SMF_z_list, np.log10(SMF_Phi_list), kind='cubic')(z) # kind='nearest' 'linear' 'quadratic' 'cubic'
    else:
        lgPhiMstar_grid = interp1d(SMF_z_list, np.log10(SMF_Phi_list), kind='cubic')(z) # kind='nearest' 'linear' 'quadratic' 'cubic'
    #
    if lgMstar is None:
        return lgMstar_grid, lgPhiMstar_grid
    else:
        return lgPhiMstar_grid
| 47.506757
| 306
| 0.634452
| 6,532
| 42,186
| 3.837569
| 0.076393
| 0.023138
| 0.010213
| 0.024893
| 0.883033
| 0.871026
| 0.8508
| 0.836279
| 0.828859
| 0.821359
| 0
| 0.074829
| 0.232115
| 42,186
| 887
| 307
| 47.560316
| 0.698987
| 0.358199
| 0
| 0.768577
| 0
| 0.004246
| 0.092467
| 0.011685
| 0
| 0
| 0
| 0.001127
| 0
| 1
| 0.027601
| false
| 0.002123
| 0.016985
| 0
| 0.095541
| 0.006369
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
437c2acb4306746fef185af50f3c9f569c09afda
| 198
|
py
|
Python
|
libreco/utils/__init__.py
|
massquantity/Reco_Project
|
6e577d6c3a5f7372d88c25dd9f540a1b7651180f
|
[
"MIT"
] | null | null | null |
libreco/utils/__init__.py
|
massquantity/Reco_Project
|
6e577d6c3a5f7372d88c25dd9f540a1b7651180f
|
[
"MIT"
] | null | null | null |
libreco/utils/__init__.py
|
massquantity/Reco_Project
|
6e577d6c3a5f7372d88c25dd9f540a1b7651180f
|
[
"MIT"
] | null | null | null |
from .serialization import (
save_knn,
save_vector,
save_info,
save_model_tf_serving
)
__all__ = [
"save_knn",
"save_vector",
"save_info",
"save_model_tf_serving"
]
| 14.142857
| 28
| 0.646465
| 24
| 198
| 4.666667
| 0.458333
| 0.125
| 0.196429
| 0.303571
| 0.767857
| 0.767857
| 0.767857
| 0.767857
| 0.767857
| 0.767857
| 0
| 0
| 0.252525
| 198
| 13
| 29
| 15.230769
| 0.756757
| 0
| 0
| 0
| 0
| 0
| 0.247475
| 0.106061
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.083333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
439766a626021362b7b43330b2a5b6edbf49acaf
| 32,156
|
py
|
Python
|
tests/rst/test_transform_collect_fields.py
|
LudditeLabs/autodoc-tool
|
b4ae7e3b61907e7e9c3a1b534fce055e5860ffab
|
[
"Apache-2.0"
] | null | null | null |
tests/rst/test_transform_collect_fields.py
|
LudditeLabs/autodoc-tool
|
b4ae7e3b61907e7e9c3a1b534fce055e5860ffab
|
[
"Apache-2.0"
] | null | null | null |
tests/rst/test_transform_collect_fields.py
|
LudditeLabs/autodoc-tool
|
b4ae7e3b61907e7e9c3a1b534fce055e5860ffab
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Luddite Labs Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from autodoc.python.rst.transforms.collect_fields import CollectInfoFields
from autodoc.report import Codes
# These param will be loaded by the fixtures (assert_py_doc, parse_py_doc).
docstring_transforms = [CollectInfoFields]
# Test: convert :param: and :type: fields to <param_field> nodes and move them
# to the doc parts.
class TestParamField:
    # Test: parameters and their types are collected into the 'params'
    # field section in source order.
    def test_fields(self, parse_py_doc):
        # NOTE(review): the RST fixture below is reproduced verbatim; its
        # exact whitespace is significant to the docutils parser.
        env = parse_py_doc(
            text="""
This is an ordinary paragraph.
:parameter:
:type type_wrong_place: xxx
:param no_type: Lorem ipsum dolor sit amet.
:param type_wrong_place: 123
:param str with_type: Do eiusmod tempor incididunt ut labore
et dolore magna aliqua.
:parameter with_separate_type: Ut enim ad minim veniam,
quis nostrud exercitation.
Paragraph 2.
:type with_separate_type: integer or None
:type with_separate_type: string
:type non_exist: str
:param underscore_name_: Such name parsed as ``<reference>_`` RST
construction. But we should handle it as a plain text.
This is a paragraph after the field list.
.. seealso:: Another function.
.. note:: Lorem ipsum dolor sit amet, consectetur adipiscing elit.
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        section = doc.field_sections.get('params')
        assert section is not None
        # Five valid parameters survive collection (invalid ones are dropped).
        assert len(section) == 5
        # Parameter no_type: description only, no type info.
        param = section[0]
        assert param.get('name') == 'no_type'
        assert param.get('type') is None
        assert param.get('orig_field_tag') == 'param'
        assert len(param) == 1
        assert len(param[0]) == 1
        assert param[0][0].astext() == 'Lorem ipsum dolor sit amet.'
        # Parameter type_wrong_place: the :type: declared *before* the
        # :param: field is still attached to it.
        param = section[1]
        assert param.get('name') == 'type_wrong_place'
        assert param.get('type') == ['xxx']
        assert param.get('orig_field_tag') == 'param'
        assert len(param) == 1
        assert len(param[0]) == 1
        # Parameter with_type: inline type in the field signature.
        param = section[2]
        assert param.get('name') == 'with_type'
        assert param.get('type') == ['str']
        assert param.get('orig_field_tag') == 'param'
        assert len(param) == 1
        assert len(param[0]) == 1
        # Parameter with_separate_type: two separate :type: fields merge
        # into a list of types; the :parameter: alias is remembered.
        param = section[3]
        assert param.get('name') == 'with_separate_type'
        assert param.get('type') == ['integer or None', 'string']
        assert param.get('orig_field_tag') == 'parameter'
        assert len(param) == 1
        assert len(param[0]) == 2
        # Parameter underscore_name_: trailing underscore handled as plain
        # text, not an RST reference.
        param = section[4]
        assert param.get('name') == 'underscore_name_'
        assert param.get('type') is None
        assert param.get('orig_field_tag') == 'param'
        assert len(param) == 1
        assert len(param[0]) == 1
    # Test: report messages emitted for malformed/duplicate/unknown fields.
    def test_report(self, parse_py_doc):
        env = parse_py_doc(
            add_report=True,
            text="""
This is an ordinary paragraph.
:parameter:
:type type_wrong_place: xxx
:param no_type: Lorem ipsum dolor sit amet.
:param no_type: Lorem ipsum dolor sit amet.
:param type_wrong_place: 123
:param str with_type: Do eiusmod tempor incididunt ut labore
et dolore magna aliqua.
:parameter with_separate_type: Ut enim ad minim veniam,
quis nostrud exercitation.
Paragraph 2.
:type with_separate_type: integer or None
:type with_separate_type: string
:type non_exist: str
:type: str
:type with_separate_type: yyy
:param xxx: 123
:type xxx: Do eiusmod tempor incididunt ut labore
et dolore magna aliqua.
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        report = env.get('reporter').report
        assert isinstance(report, list)
        assert len(report) == 5
        # Each report entry is an 8-tuple:
        # (path, domain, line, col, ref_name, level, code, msg).
        for i, item in enumerate(report):
            assert len(item) == 8, 'Report at %d.' % i
        # Sorting makes the order deterministic for the index checks below.
        report.sort()
        path, domain, line, col, func_name, level, code, msg = report[0]
        assert path == '<string>'  # conftest sets this up.
        assert domain == 'python'
        # NOTE: currently we drop position in the docstring
        # and use position of the ref (function, class).
        assert line == 0
        assert col == 0
        assert func_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.COMPLEX
        assert msg == 'Type specification is too complex [:type xxx:]'
        path, domain, line, col, func_name, level, code, msg = report[1]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert func_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.DUPLICATE
        assert msg == 'Duplicate field [:param no_type:]'
        # This check happens before line 16 checks.
        path, domain, line, col, func_name, level, code, msg = report[2]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert func_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.INCORRECT
        assert msg == 'Incorrect signature [:parameter:]'
        path, domain, line, col, func_name, level, code, msg = report[3]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert func_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.INCORRECT
        assert msg == 'Incorrect signature [:type:]'
        path, domain, line, col, func_name, level, code, msg = report[4]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert func_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.UNKNOWN
        assert msg == 'Type for unknown parameter [non_exist]'
    # Test: all detected and invalid fields are removed from the docstring;
    # only the ordinary paragraph remains.
    def test_remove(self, assert_py_doc):
        assert_py_doc(
            text="""
This is an ordinary paragraph.
:parameter:
:type type_wrong_place: xxx
:param no_type: Lorem ipsum dolor sit amet.
:param no_type: Lorem ipsum dolor sit amet.
:param type_wrong_place: 123
:param str with_type: Do eiusmod tempor incididunt ut labore
et dolore magna aliqua.
:parameter with_separate_type: Ut enim ad minim veniam,
quis nostrud exercitation.
Paragraph 2.
:type with_separate_type: integer or None
:type non_exist: str
:type: str
:type with_separate_type: yyy
:param xxx: 123
:type xxx: Do eiusmod tempor incididunt ut labore
et dolore magna aliqua.
""",
            expected="""This is an ordinary paragraph."""
        )
    # Test: if only :type: fields are given without matching param fields,
    # then no 'params' field section is created.
    # See CollectInfoFields.after_process().
    def test_wrong_section(self, parse_py_doc):
        env = parse_py_doc(
            text="""
This is an ordinary paragraph.
:type noparam: xxx
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        report = env.get('reporter').report
        assert isinstance(report, list)
        assert len(report) == 1
        for i, item in enumerate(report):
            assert len(item) == 8, 'Report at %d.' % i
        report.sort()
        path, domain, line, col, func_name, level, code, msg = report[0]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert func_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.UNKNOWN
        assert msg == 'Type for unknown parameter [noparam]'
# Test: :ivar: and :vartype: fields are collected into the 'attributes'
# field section.
class TestVarField:
    # Test: instance variables and their types.
    def test_fields(self, parse_py_doc):
        env = parse_py_doc(
            text="""
This is an ordinary paragraph.
:ivar:
:vartype type_wrong_place: xxx
:ivar no_type: Lorem ipsum dolor sit amet.
:ivar type_wrong_place: 123
:ivar str with_type: 321
:ivar with_separate_type: Ut enim ad minim veniam,
:vartype with_separate_type: integer or None
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        section = doc.field_sections.get('attributes')
        assert section is not None
        # Four valid attributes (the bare :ivar: is dropped).
        assert len(section) == 4
        # Attribute no_type: description only.
        param = section[0]
        assert param.get('name') == 'no_type'
        assert param.get('type') is None
        assert param.get('orig_field_tag') == 'ivar'
        assert len(param) == 1
        assert len(param[0]) == 1
        assert param[0][0].astext() == 'Lorem ipsum dolor sit amet.'
        # Attribute type_wrong_place: :vartype: declared before the :ivar:
        # is still attached to it.
        param = section[1]
        assert param.get('name') == 'type_wrong_place'
        assert param.get('type') == ['xxx']
        assert param.get('orig_field_tag') == 'ivar'
        assert len(param) == 1
        assert len(param[0]) == 1
        # Attribute with_type: inline type in the field signature.
        param = section[2]
        assert param.get('name') == 'with_type'
        assert param.get('type') == ['str']
        assert param.get('orig_field_tag') == 'ivar'
        assert len(param) == 1
        assert len(param[0]) == 1
        assert param[0][0].astext() == '321'
        # Attribute with_separate_type: type supplied via :vartype:.
        param = section[3]
        assert param.get('name') == 'with_separate_type'
        assert param.get('type') == ['integer or None']
        assert param.get('orig_field_tag') == 'ivar'
        assert len(param) == 1
        assert len(param[0]) == 1
# Test: convert :keyword: and :kwtype: fields to <keyword_field> nodes and
# move them to the doc parts.
class TestKwField:
    # Test: keyword parameters and their types are collected into the
    # 'keyword' field section in source order.
    def test_fields(self, parse_py_doc):
        env = parse_py_doc(
            text="""
This is an ordinary paragraph.
:keyword:
:kwtype type_wrong_place: xxx
:keyword no_type: Lorem ipsum dolor sit amet.
:keyword type_wrong_place: 123
:keyword str with_type: Do eiusmod tempor incididunt ut labore
et dolore magna aliqua.
:keyword with_separate_type: Ut enim ad minim veniam,
quis nostrud exercitation.
Paragraph 2.
:kwtype with_separate_type: integer or None
:kwtype with_separate_type: string
:kwtype non_exist: str
This is a paragraph after the field list.
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        section = doc.field_sections.get('keyword')
        assert section is not None
        # Four valid keywords survive collection (invalid ones are dropped).
        assert len(section) == 4
        # Keyword no_type: description only, no type info.
        param = section[0]
        assert param.get('name') == 'no_type'
        assert param.get('type') is None
        assert param.get('orig_field_tag') == 'keyword'
        assert len(param) == 1
        assert len(param[0]) == 1
        assert param[0][0].astext() == 'Lorem ipsum dolor sit amet.'
        # Keyword type_wrong_place: the :kwtype: declared before the
        # :keyword: field is still attached to it.
        param = section[1]
        assert param.get('name') == 'type_wrong_place'
        assert param.get('type') == ['xxx']
        assert param.get('orig_field_tag') == 'keyword'
        assert len(param) == 1
        assert len(param[0]) == 1
        # Keyword with_type: inline type in the field signature.
        param = section[2]
        assert param.get('name') == 'with_type'
        assert param.get('type') == ['str']
        assert param.get('orig_field_tag') == 'keyword'
        assert len(param) == 1
        assert len(param[0]) == 1
        # Keyword with_separate_type: two :kwtype: fields merge into a list.
        param = section[3]
        assert param.get('name') == 'with_separate_type'
        assert param.get('type') == ['integer or None', 'string']
        assert param.get('orig_field_tag') == 'keyword'
        assert len(param) == 1
        assert len(param[0]) == 2
    # Test: report messages emitted for malformed/duplicate/unknown fields.
    def test_report(self, parse_py_doc):
        env = parse_py_doc(
            add_report=True,
            text="""
This is an ordinary paragraph.
:keyword:
:kwtype type_wrong_place: xxx
:keyword no_type: Lorem ipsum dolor sit amet.
:keyword no_type: Lorem ipsum dolor sit amet.
:keyword type_wrong_place: 123
:keyword str with_type: Do eiusmod tempor incididunt ut labore
et dolore magna aliqua.
:keyword with_separate_type: Ut enim ad minim veniam,
quis nostrud exercitation.
Paragraph 2.
:kwtype with_separate_type: integer or None
:kwtype with_separate_type: string
:kwtype non_exist: str
:kwtype: str
:kwtype with_separate_type: yyy
:keyword xxx: 123
:kwtype xxx: Do eiusmod tempor incididunt ut labore
et dolore magna aliqua.
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        report = env.get('reporter').report
        assert isinstance(report, list)
        assert len(report) == 5
        # Each report entry is an 8-tuple:
        # (path, domain, line, col, ref_name, level, code, msg).
        for i, item in enumerate(report):
            assert len(item) == 8, 'Report at %d.' % i
        # Sorting makes the order deterministic for the index checks below.
        report.sort()
        path, domain, line, col, ref_name, level, code, msg = report[0]
        assert path == '<string>'  # conftest sets this up.
        assert domain == 'python'
        # NOTE: currently we drop position in the docstring
        # and use position of the ref (function, class).
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.COMPLEX
        assert msg == 'Type specification is too complex [:kwtype xxx:]'
        path, domain, line, col, ref_name, level, code, msg = report[1]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.DUPLICATE
        assert msg == 'Duplicate field [:keyword no_type:]'
        # This check happens before line 16 checks.
        path, domain, line, col, ref_name, level, code, msg = report[2]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.INCORRECT
        assert msg == 'Incorrect signature [:keyword:]'
        path, domain, line, col, ref_name, level, code, msg = report[3]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.INCORRECT
        assert msg == 'Incorrect signature [:kwtype:]'
        path, domain, line, col, ref_name, level, code, msg = report[4]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.UNKNOWN
        assert msg == 'Type for unknown parameter [non_exist]'
    # Test: all detected and invalid fields are removed from the docstring;
    # only the ordinary paragraph remains.
    def test_remove(self, assert_py_doc):
        assert_py_doc(
            text="""
This is an ordinary paragraph.
:keyword:
:kwtype type_wrong_place: xxx
:keyword no_type: Lorem ipsum dolor sit amet.
:keyword no_type: Lorem ipsum dolor sit amet.
:keyword type_wrong_place: 123
:keyword str with_type: Do eiusmod tempor incididunt ut labore
et dolore magna aliqua.
:keyword with_separate_type: Ut enim ad minim veniam,
quis nostrud exercitation.
Paragraph 2.
:kwtype with_separate_type: integer or None
:kwtype with_separate_type: string
:kwtype non_exist: str
:kwtype: str
:kwtype with_separate_type: yyy
:keyword xxx: 123
:kwtype xxx: Do eiusmod tempor incididunt ut labore
et dolore magna aliqua.
""",
            expected="""This is an ordinary paragraph."""
        )
# Test: convert :return: and :rtype: fields to <returns_field> nodes and
# move them to the doc parts.
class TestReturnField:
    # Test: return descriptions and types are collected into the 'returns'
    # field section.
    def test_fields(self, parse_py_doc):
        env = parse_py_doc(
            text="""
This is an ordinary paragraph.
:return bla bla: Hz
:returns: the message id 1
:return: the message id 2
:rtype: int
:rtype: char
Ut enim ad minim veniam, quis nostrud.
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        section = doc.field_sections.get('returns')
        assert section is not None
        # Three entries: the malformed ':return bla bla:' is dropped.
        assert len(section) == 3
        # First return: description only.
        ret = section[0]
        assert ret.get('type') is None
        assert ret.get('orig_field_tag') == 'returns'
        assert len(ret) == 1
        assert len(ret[0]) == 1
        assert ret[0][0].astext() == 'the message id 1'
        # Second return: the first :rtype: attaches to it.
        ret = section[1]
        assert ret.get('type') == ['int']
        assert ret.get('orig_field_tag') == 'return'
        assert len(ret) == 1
        assert len(ret[0]) == 1
        assert ret[0][0].astext() == 'the message id 2'
        # Third entry: an extra :rtype: without a matching :return:
        # produces a type-only, empty-content entry.
        ret = section[2]
        assert ret.get('type') == ['char']
        assert ret.get('orig_field_tag') == 'returns'
        assert len(ret) == 1
        assert len(ret[0]) == 0
    # Test: report messages for malformed/empty/too-complex return fields.
    def test_report(self, parse_py_doc):
        env = parse_py_doc(
            add_report=True,
            text="""
This is an ordinary paragraph.
:return bla bla: Hz
:return: the message id 2
:return:
:rtype: int, :obj:`my`
:rtype: char
:rtype bla: char
:rtype: Do eiusmod tempor incididunt ut labore
et dolore magna aliqua.
:rtype: Line 1
Line 2.
Ut enim ad minim veniam, quis nostrud.
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        report = env.get('reporter').report
        assert isinstance(report, list)
        assert len(report) == 5
        # Each report entry is an 8-tuple:
        # (path, domain, line, col, ref_name, level, code, msg).
        for i, item in enumerate(report):
            assert len(item) == 8, 'Report at %d.' % i
        path, domain, line, col, ref_name, level, code, msg = report[0]
        assert path == '<string>'  # conftest sets this up.
        assert domain == 'python'
        # NOTE: currently we drop position in the docstring
        # and use position of the ref (function, class).
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.INCORRECT
        assert msg == 'Incorrect signature [:return:]'
        path, domain, line, col, ref_name, level, code, msg = report[1]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.EMPTY
        assert msg == 'Empty content [:return:]'
        path, domain, line, col, ref_name, level, code, msg = report[2]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.INCORRECT
        assert msg == 'Incorrect signature [:rtype:]'
        path, domain, line, col, ref_name, level, code, msg = report[3]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.COMPLEX
        assert msg == 'Type specification is too complex [:rtype:]'
        path, domain, line, col, ref_name, level, code, msg = report[4]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.COMPLEX
        assert msg == 'Type specification is too complex [:rtype:]'
    # Test: all detected and invalid return fields are removed; the two
    # ordinary paragraphs remain.
    def test_remove(self, assert_py_doc):
        assert_py_doc(
            text="""
This is an ordinary paragraph.
:return bla bla: Hz
:return: the message id 2
:return:
:rtype: int
:rtype: char
:rtype bla: char
:rtype: Do eiusmod tempor incididunt ut labore
et dolore magna aliqua.
Ut enim ad minim veniam, quis nostrud.
""",
            expected="""
This is an ordinary paragraph.
Ut enim ad minim veniam, quis nostrud.
"""
        )
# Test: convert :raises: (and aliases :raise:, :except:, :exception:) to
# <raises_field> nodes and move them to the doc parts.
class TestRaisesField:
    # Test: all four raise-field aliases are collected into the 'raises'
    # field section with their exception type and original tag.
    def test_fields(self, parse_py_doc):
        env = parse_py_doc(
            text="""
This is an ordinary paragraph.
:raises:
:raises ValueError: if the message_body exceeds 160
:raise TypeError: if the message_body is not a basestring
:except RuntimeError:
:exception RuntimeError2:
Ut enim ad minim veniam, quis nostrud.
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        section = doc.field_sections.get('raises')
        assert section is not None
        # The bare ':raises:' (no type) is dropped; four entries remain.
        assert len(section) == 4
        node = section[0]
        assert node.get('type') == ['ValueError']
        assert node.get('orig_field_tag') == 'raises'
        assert len(node) == 1
        assert len(node[0]) == 1
        assert node[0][0].astext() == 'if the message_body exceeds 160'
        node = section[1]
        assert node.get('type') == ['TypeError']
        assert node.get('orig_field_tag') == 'raise'
        assert len(node) == 1
        assert len(node[0]) == 1
        assert node[0][0].astext() == 'if the message_body is not a basestring'
        # ':except:' / ':exception:' entries without descriptions keep an
        # empty content node.
        node = section[2]
        assert node.get('type') == ['RuntimeError']
        assert node.get('orig_field_tag') == 'except'
        assert len(node) == 1
        assert len(node[0]) == 0
        node = section[3]
        assert node.get('type') == ['RuntimeError2']
        assert node.get('orig_field_tag') == 'exception'
        assert len(node) == 1
        assert len(node[0]) == 0
    # Test: if :raises: contains multiple types (:raises Type1 Type2:) then
    # generate multiple :raises: fields with a single type each, sharing
    # the same description.
    def test_multiple(self, parse_py_doc):
        env = parse_py_doc(
            text="""
This is an ordinary paragraph.
:raises RuntimeError1 RuntimeError2: Generate multiple fields.
Ut enim ad minim veniam, quis nostrud.
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        section = doc.field_sections.get('raises')
        assert section is not None
        assert len(section) == 2
        node = section[0]
        assert node.get('type') == ['RuntimeError1']
        assert node.get('orig_field_tag') == 'raises'
        assert len(node) == 1
        assert len(node[0]) == 1
        assert node[0][0].astext() == 'Generate multiple fields.'
        node = section[1]
        assert node.get('type') == ['RuntimeError2']
        assert node.get('orig_field_tag') == 'raises'
        assert len(node) == 1
        assert len(node[0]) == 1
        assert node[0][0].astext() == 'Generate multiple fields.'
    # Test: report messages for missing types/descriptions and multi-type
    # signatures.
    def test_report(self, parse_py_doc):
        env = parse_py_doc(
            add_report=True,
            text="""
This is an ordinary paragraph.
:raises:
:raises ValueError: if the message_body exceeds 160
:raise TypeError: if the message_body is not a basestring
:except RuntimeError:
:exception RuntimeError2:
:raises RuntimeError1 RuntimeError2: this is incorrect!
Ut enim ad minim veniam, quis nostrud.
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        report = env.get('reporter').report
        assert isinstance(report, list)
        assert len(report) == 3
        # Report entries are 8-tuples:
        # (path, domain, line, col, ref_name, level, code, msg).
        assert len(report[0]) == 8
        path, domain, line, col, ref_name, level, code, msg = report[0]
        assert path == '<string>'  # conftest sets this up.
        assert domain == 'python'
        # NOTE: currently we drop position in the docstring
        # and use position of the ref (function, class).
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.MISSING
        assert msg == 'Type is missing [:raises:]'
        path, domain, line, col, ref_name, level, code, msg = report[1]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.MISSING
        assert msg == 'Description is missing [:raises:]'
        path, domain, line, col, ref_name, level, code, msg = report[2]
        assert path == '<string>'
        assert domain == 'python'
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.INCORRECT
        assert msg == 'Incorrect signature [:raises:]'
    # Test: detected and invalid raise fields are removed; unrelated fields
    # (':one:') and paragraphs are kept.
    def test_remove(self, assert_py_doc):
        assert_py_doc(
            text="""
This is an ordinary paragraph.
:raises:
:raises ValueError: if the message_body exceeds 160
:raise TypeError: if the message_body is not a basestring
:except RuntimeError:
:exception RuntimeError2:
:one:
Ut enim ad minim veniam, quis nostrud.
""",
            expected="""
This is an ordinary paragraph.
:one:
Ut enim ad minim veniam, quis nostrud.
"""
        )
# Test: convert :Yields: fields to <yields_field> nodes and move them to
# the doc parts.
class TestYieldsField:
    # Test: yield descriptions are collected into the 'yields' section.
    def test_fields(self, parse_py_doc):
        env = parse_py_doc(
            text="""
This is an ordinary paragraph.
:Yields: Quis nostrud exercitation ullamco. In voluptate velit esse
cillum dolore eu fugiat nulla.
Ut enim ad minim veniam.
:Yields: 123
:Yields:
Ut enim ad minim veniam, quis nostrud.
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        section = doc.field_sections.get('yields')
        assert section is not None
        # NOTE: empty :Yields: will be dropped.
        assert len(section) == 2
        # First yield: multi-paragraph description is preserved.
        node = section[0]
        assert node.get('orig_field_tag') == 'Yields'
        assert len(node) == 1
        assert len(node[0]) == 2
        assert node[0].astext() == ('Quis nostrud exercitation ullamco. '
                                    'In voluptate velit esse\ncillum '
                                    'dolore eu fugiat nulla.\n\nUt enim '
                                    'ad minim veniam.')
        # Second yield: single-paragraph description.
        node = section[1]
        assert node.get('orig_field_tag') == 'Yields'
        assert len(node) == 1
        assert len(node[0]) == 1
        assert node[0].astext() == '123'
    # Test: detected yield fields are removed from the docstring.
    def test_remove(self, assert_py_doc):
        assert_py_doc(
            text="""
This is an ordinary paragraph.
:Yields: Quis nostrud exercitation ullamco. In voluptate velit esse
cillum dolore eu fugiat nulla.
Ut enim ad minim veniam.
:Yields: 123
:Yields:
Ut enim ad minim veniam, quis nostrud.
""",
            expected="""
This is an ordinary paragraph.
Ut enim ad minim veniam, quis nostrud.
"""
        )
    # Test: report messages for empty content and malformed signature.
    def test_report(self, parse_py_doc):
        env = parse_py_doc(
            add_report=True,
            text="""
This is an ordinary paragraph.
:Yields: 123
:Yields:
:Yields 23: sds
Ut enim ad minim veniam, quis nostrud.
"""
        )
        doc = env['definition'].doc_block.document
        assert hasattr(doc, 'field_sections')
        report = env.get('reporter').report
        assert isinstance(report, list)
        assert len(report) == 2
        # Report entries are 8-tuples:
        # (path, domain, line, col, ref_name, level, code, msg).
        assert len(report[0]) == 8
        path, domain, line, col, ref_name, level, code, msg = report[0]
        assert path == '<string>'  # conftest sets this up.
        assert domain == 'python'
        # NOTE: currently we drop position in the docstring
        # and use position of the ref (function, class).
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.MISSING
        assert msg == 'Content is missing [:Yields:]'
        path, domain, line, col, ref_name, level, code, msg = report[1]
        assert path == '<string>'
        assert domain == 'python'
        # NOTE: currently we drop position in the docstring
        # and use position of the ref (function, class).
        assert line == 0
        assert col == 0
        assert ref_name == 'test_func'
        assert level == logging.INFO
        assert code == Codes.INCORRECT
        assert msg == 'Incorrect signature [:Yields:]'
| 33.391485
| 79
| 0.562228
| 3,792
| 32,156
| 4.660865
| 0.076213
| 0.034118
| 0.030893
| 0.020369
| 0.882539
| 0.8779
| 0.870827
| 0.864717
| 0.850628
| 0.832918
| 0
| 0.014321
| 0.337666
| 32,156
| 962
| 80
| 33.426195
| 0.815523
| 0.089315
| 0
| 0.843923
| 0
| 0
| 0.415825
| 0
| 0
| 0
| 0
| 0
| 0.472376
| 1
| 0.024862
| false
| 0
| 0.004144
| 0
| 0.037293
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
78d7d286a22c131aab1d87f651bec50ae314224b
| 3,950
|
py
|
Python
|
tests/changes/api/test_jobstep_log_append.py
|
bowlofstew/changes
|
ebd393520e0fdb07c240a8d4e8747281b6186e28
|
[
"Apache-2.0"
] | null | null | null |
tests/changes/api/test_jobstep_log_append.py
|
bowlofstew/changes
|
ebd393520e0fdb07c240a8d4e8747281b6186e28
|
[
"Apache-2.0"
] | null | null | null |
tests/changes/api/test_jobstep_log_append.py
|
bowlofstew/changes
|
ebd393520e0fdb07c240a8d4e8747281b6186e28
|
[
"Apache-2.0"
] | null | null | null |
from changes.models import LogSource, LogChunk
from changes.testutils import APITestCase
class JobStepLogAppendTest(APITestCase):
    """Tests for the jobstep log-append API endpoint."""

    def _append(self, path, payload):
        # POST one chunk of log data and return the unserialized response,
        # asserting the request succeeded.
        resp = self.client.post(path, data=payload)
        assert resp.status_code == 200, resp.data
        return self.unserialize(resp)

    def _verify_chunk(self, data, source_name, offset, size):
        # Verify the persisted log source name and the single stored chunk.
        logsource = LogSource.query.get(data['source']['id'])
        assert logsource.name == source_name
        assert len(data['chunks']) == 1
        logchunk = LogChunk.query.get(data['chunks'][0]['id'])
        assert logchunk.offset == offset
        assert logchunk.size == size

    def test_simple(self):
        project = self.create_project()
        build = self.create_build(project)
        job = self.create_job(build)
        jobphase = self.create_jobphase(job)
        jobstep = self.create_jobstep(jobphase)

        path = '/api/0/jobsteps/{0}/logappend/'.format(jobstep.id.hex)

        # First chunk creates the 'stderr' log source.
        data = self._append(path, {
            'source': 'stderr',
            'offset': 0,
            'text': 'hello world!\n',
        })
        self._verify_chunk(data, 'stderr', 0, 13)

        # TODO(dcramer): there's an issue in flask/somewhere that a 204 causes
        # an error in the test runner, so the soft check for duplicate logs
        # (re-posting offset 0 -> 200, posting overlapping offset 12 -> 204)
        # is not exercised here.

        # Append to the existing 'stderr' log.
        data = self._append(path, {
            'source': 'stderr',
            'offset': 13,
            'text': 'foo bar?\n',
        })
        self._verify_chunk(data, 'stderr', 13, 9)

        # Create a second, independent 'stdout' log source.
        data = self._append(path, {
            'source': 'stdout',
            'offset': 0,
            'text': 'zoom zoom\n',
        })
        self._verify_chunk(data, 'stdout', 0, 10)

    def test_without_offsets(self):
        # When no offset is supplied the server tracks it implicitly,
        # appending each chunk after the previous one.
        project = self.create_project()
        build = self.create_build(project)
        job = self.create_job(build)
        jobphase = self.create_jobphase(job)
        jobstep = self.create_jobstep(jobphase)

        path = '/api/0/jobsteps/{0}/logappend/'.format(jobstep.id.hex)

        data = self._append(path, {
            'source': 'stderr',
            'text': 'hello world!\n',
        })
        self._verify_chunk(data, 'stderr', 0, 13)

        data = self._append(path, {
            'source': 'stderr',
            'text': 'foo bar?\n',
        })
        self._verify_chunk(data, 'stderr', 13, 9)
| 34.649123
| 78
| 0.558228
| 444
| 3,950
| 4.921171
| 0.202703
| 0.05492
| 0.05492
| 0.057666
| 0.842563
| 0.842563
| 0.842563
| 0.827918
| 0.811899
| 0.756064
| 0
| 0.021606
| 0.296962
| 3,950
| 113
| 79
| 34.955752
| 0.765214
| 0.120759
| 0
| 0.875
| 0
| 0
| 0.103329
| 0.017366
| 0
| 0
| 0
| 0.00885
| 0.3125
| 1
| 0.025
| false
| 0
| 0.025
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60843fff854d592d84126a5457d81fedd1e723df
| 12,948
|
py
|
Python
|
tests/contrib/flask/test_errorhandler.py
|
melancholy/dd-trace-py
|
32d463e5465466bc876c85a45880a84824d9b47c
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 308
|
2016-12-07T16:49:27.000Z
|
2022-03-15T10:06:45.000Z
|
tests/contrib/flask/test_errorhandler.py
|
melancholy/dd-trace-py
|
32d463e5465466bc876c85a45880a84824d9b47c
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 1,928
|
2016-11-28T17:13:18.000Z
|
2022-03-31T21:43:19.000Z
|
tests/contrib/flask/test_errorhandler.py
|
melancholy/dd-trace-py
|
32d463e5465466bc876c85a45880a84824d9b47c
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 311
|
2016-11-27T03:01:49.000Z
|
2022-03-18T21:34:03.000Z
|
import flask
from tests.utils import assert_span_http_status_code
from . import BaseFlaskTestCase
class FlaskErrorhandlerTestCase(BaseFlaskTestCase):
def test_default_404_handler(self):
"""
When making a 404 request
And no user defined error handler is defined
We create the expected spans
"""
# Make our 404 request
res = self.client.get("/unknown")
self.assertEqual(res.status_code, 404)
spans = self.get_spans()
req_span = self.find_span_by_name(spans, "flask.request")
dispatch_span = self.find_span_by_name(spans, "flask.dispatch_request")
user_ex_span = self.find_span_by_name(spans, "flask.handle_user_exception")
http_ex_span = self.find_span_by_name(spans, "flask.handle_http_exception")
# flask.request span
self.assertEqual(req_span.error, 0)
assert_span_http_status_code(req_span, 404)
self.assertIsNone(req_span.get_tag("flask.endpoint"))
self.assertIsNone(req_span.get_tag("flask.url_rule"))
# flask.dispatch_request span
self.assertEqual(dispatch_span.error, 0)
self.assertIsNone(dispatch_span.get_tag("error.msg"))
self.assertIsNone(dispatch_span.get_tag("error.stack"))
self.assertIsNone(dispatch_span.get_tag("error.type"))
# flask.handle_user_exception span
self.assertEqual(user_ex_span.meta, dict())
self.assertEqual(user_ex_span.error, 0)
# flask.handle_http_exception span
self.assertEqual(http_ex_span.meta, dict())
self.assertEqual(http_ex_span.error, 0)
def test_abort_500(self):
"""
When making a 500 request
And no user defined error handler is defined
We create the expected spans
"""
@self.app.route("/500")
def endpoint_500():
flask.abort(500)
# Make our 500 request
res = self.client.get("/500")
self.assertEqual(res.status_code, 500)
spans = self.get_spans()
req_span = self.find_span_by_name(spans, "flask.request")
dispatch_span = self.find_span_by_name(spans, "flask.dispatch_request")
endpoint_span = self.find_span_by_name(spans, "tests.contrib.flask.test_errorhandler.endpoint_500")
user_ex_span = self.find_span_by_name(spans, "flask.handle_user_exception")
http_ex_span = self.find_span_by_name(spans, "flask.handle_http_exception")
# flask.request span
self.assertEqual(req_span.error, 1)
assert_span_http_status_code(req_span, 500)
self.assertEqual(req_span.get_tag("flask.endpoint"), "endpoint_500")
self.assertEqual(req_span.get_tag("flask.url_rule"), "/500")
# flask.dispatch_request span
self.assertEqual(dispatch_span.error, 1)
error_msg = dispatch_span.get_tag("error.msg")
self.assertTrue(error_msg.startswith("500 Internal Server Error"))
error_stack = dispatch_span.get_tag("error.stack")
self.assertTrue(error_stack.startswith("Traceback (most recent call last):"))
error_type = dispatch_span.get_tag("error.type")
self.assertEqual(error_type, "werkzeug.exceptions.InternalServerError")
# tests.contrib.flask.test_errorhandler.endpoint_500 span
self.assertEqual(endpoint_span.error, 1)
error_msg = endpoint_span.get_tag("error.msg")
self.assertTrue(error_msg.startswith("500 Internal Server Error"))
error_stack = endpoint_span.get_tag("error.stack")
self.assertTrue(error_stack.startswith("Traceback (most recent call last):"))
error_type = endpoint_span.get_tag("error.type")
self.assertEqual(error_type, "werkzeug.exceptions.InternalServerError")
# flask.handle_user_exception span
self.assertEqual(user_ex_span.meta, dict())
self.assertEqual(user_ex_span.error, 0)
# flask.handle_http_exception span
self.assertEqual(http_ex_span.meta, dict())
self.assertEqual(http_ex_span.error, 0)
def test_abort_500_custom_handler(self):
    """
    When making a 500 request
    And a user defined error handler is defined
    We create the expected spans
    """
    @self.app.errorhandler(500)
    def handle_500(e):
        return "whoops", 200

    @self.app.route("/500")
    def endpoint_500():
        flask.abort(500)

    # Issue the request; the custom handler converts the abort into a 200.
    res = self.client.get("/500")
    self.assertEqual(res.status_code, 200)
    self.assertEqual(res.data, b"whoops")

    spans = self.get_spans()
    req_span = self.find_span_by_name(spans, "flask.request")
    handler_span = self.find_span_by_name(spans, "tests.contrib.flask.test_errorhandler.handle_500")
    user_ex_span = self.find_span_by_name(spans, "flask.handle_user_exception")
    http_ex_span = self.find_span_by_name(spans, "flask.handle_http_exception")

    # flask.request span reflects the handler's 200 response, not the abort.
    self.assertEqual(req_span.error, 0)
    assert_span_http_status_code(req_span, 200)
    self.assertEqual(req_span.get_tag("flask.endpoint"), "endpoint_500")
    self.assertEqual(req_span.get_tag("flask.url_rule"), "/500")

    # Both the dispatch span and the endpoint span record the original abort
    # with identical error metadata, so check them in one pass.
    erroring = ("flask.dispatch_request",
                "tests.contrib.flask.test_errorhandler.endpoint_500")
    for span_name in erroring:
        span = self.find_span_by_name(spans, span_name)
        self.assertEqual(span.error, 1)
        self.assertTrue(span.get_tag("error.msg").startswith("500 Internal Server Error"))
        self.assertTrue(span.get_tag("error.stack").startswith("Traceback (most recent call last):"))
        self.assertEqual(span.get_tag("error.type"), "werkzeug.exceptions.InternalServerError")

    # The user-defined handler span itself ran cleanly.
    self.assertEqual(handler_span.error, 0)
    self.assertIsNone(handler_span.get_tag("error.msg"))
    self.assertIsNone(handler_span.get_tag("error.stack"))
    self.assertIsNone(handler_span.get_tag("error.type"))

    # The exception-handling spans carry no error metadata at all.
    for span in (user_ex_span, http_ex_span):
        self.assertEqual(span.meta, dict())
        self.assertEqual(span.error, 0)
def test_raise_user_exception(self):
    """
    When raising a custom user exception
    And no user defined error handler is defined
    We create the expected spans
    """
    class FlaskTestException(Exception):
        pass

    @self.app.route("/error")
    def endpoint_error():
        raise FlaskTestException("custom error message")

    # Make our 500 request; the unhandled exception surfaces as a 500.
    res = self.client.get("/error")
    self.assertEqual(res.status_code, 500)

    spans = self.get_spans()
    req_span = self.find_span_by_name(spans, "flask.request")
    dispatch_span = self.find_span_by_name(spans, "flask.dispatch_request")
    endpoint_span = self.find_span_by_name(spans, "tests.contrib.flask.test_errorhandler.endpoint_error")
    user_ex_span = self.find_span_by_name(spans, "flask.handle_user_exception")
    # For a non-HTTP exception no http-exception handler span is expected;
    # required=False makes the lookup return None instead of failing.
    http_ex_span = self.find_span_by_name(spans, "flask.handle_http_exception", required=False)

    # flask.request span
    self.assertEqual(req_span.error, 1)
    assert_span_http_status_code(req_span, 500)
    self.assertEqual(req_span.get_tag("flask.endpoint"), "endpoint_error")
    self.assertEqual(req_span.get_tag("flask.url_rule"), "/error")

    # flask.dispatch_request span records the raised exception.
    self.assertEqual(dispatch_span.error, 1)
    error_msg = dispatch_span.get_tag("error.msg")
    self.assertTrue(error_msg.startswith("custom error message"))
    error_stack = dispatch_span.get_tag("error.stack")
    self.assertTrue(error_stack.startswith("Traceback (most recent call last):"))
    error_type = dispatch_span.get_tag("error.type")
    self.assertEqual(error_type, "tests.contrib.flask.test_errorhandler.FlaskTestException")

    # tests.contrib.flask.test_errorhandler.endpoint_error span
    self.assertEqual(endpoint_span.error, 1)
    error_msg = endpoint_span.get_tag("error.msg")
    self.assertTrue(error_msg.startswith("custom error message"))
    error_stack = endpoint_span.get_tag("error.stack")
    self.assertTrue(error_stack.startswith("Traceback (most recent call last):"))
    error_type = endpoint_span.get_tag("error.type")
    self.assertEqual(error_type, "tests.contrib.flask.test_errorhandler.FlaskTestException")

    # flask.handle_user_exception span: with no user handler registered,
    # this span itself is marked as errored.
    self.assertEqual(user_ex_span.error, 1)
    error_msg = user_ex_span.get_tag("error.msg")
    self.assertTrue(error_msg.startswith("custom error message"))
    error_stack = user_ex_span.get_tag("error.stack")
    self.assertTrue(error_stack.startswith("Traceback (most recent call last):"))
    error_type = user_ex_span.get_tag("error.type")
    self.assertEqual(error_type, "tests.contrib.flask.test_errorhandler.FlaskTestException")

    # flask.handle_http_exception span: never created for a non-HTTP exception.
    self.assertIsNone(http_ex_span)
def test_raise_user_exception_handler(self):
    """
    When raising a custom user exception
    And a user defined error handler is defined
    We create the expected spans
    """
    class FlaskTestException(Exception):
        pass

    @self.app.errorhandler(FlaskTestException)
    def handle_error(e):
        return "whoops", 200

    @self.app.route("/error")
    def endpoint_error():
        raise FlaskTestException("custom error message")

    # Make our 500 request; the user handler converts it into a 200.
    res = self.client.get("/error")
    self.assertEqual(res.status_code, 200)
    self.assertEqual(res.data, b"whoops")

    spans = self.get_spans()
    req_span = self.find_span_by_name(spans, "flask.request")
    dispatch_span = self.find_span_by_name(spans, "flask.dispatch_request")
    endpoint_span = self.find_span_by_name(spans, "tests.contrib.flask.test_errorhandler.endpoint_error")
    handler_span = self.find_span_by_name(spans, "tests.contrib.flask.test_errorhandler.handle_error")
    user_ex_span = self.find_span_by_name(spans, "flask.handle_user_exception")
    # For a non-HTTP exception no http-exception handler span is expected;
    # required=False makes the lookup return None instead of failing.
    http_ex_span = self.find_span_by_name(spans, "flask.handle_http_exception", required=False)

    # flask.request span reflects the handler's 200 response.
    self.assertEqual(req_span.error, 0)
    assert_span_http_status_code(req_span, 200)
    self.assertEqual(req_span.get_tag("flask.endpoint"), "endpoint_error")
    self.assertEqual(req_span.get_tag("flask.url_rule"), "/error")

    # flask.dispatch_request span still records the raised exception.
    self.assertEqual(dispatch_span.error, 1)
    error_msg = dispatch_span.get_tag("error.msg")
    self.assertTrue(error_msg.startswith("custom error message"))
    error_stack = dispatch_span.get_tag("error.stack")
    self.assertTrue(error_stack.startswith("Traceback (most recent call last):"))
    error_type = dispatch_span.get_tag("error.type")
    self.assertEqual(error_type, "tests.contrib.flask.test_errorhandler.FlaskTestException")

    # tests.contrib.flask.test_errorhandler.endpoint_error span
    self.assertEqual(endpoint_span.error, 1)
    error_msg = endpoint_span.get_tag("error.msg")
    self.assertTrue(error_msg.startswith("custom error message"))
    error_stack = endpoint_span.get_tag("error.stack")
    self.assertTrue(error_stack.startswith("Traceback (most recent call last):"))
    error_type = endpoint_span.get_tag("error.type")
    self.assertEqual(error_type, "tests.contrib.flask.test_errorhandler.FlaskTestException")

    # tests.contrib.flask.test_errorhandler.handle_error span ran cleanly.
    self.assertEqual(handler_span.error, 0)

    # flask.handle_user_exception span: handled, so no error metadata.
    self.assertEqual(user_ex_span.error, 0)
    self.assertEqual(user_ex_span.meta, dict())

    # flask.handle_http_exception span: never created for a non-HTTP exception.
    self.assertIsNone(http_ex_span)
| 43.743243
| 109
| 0.688369
| 1,642
| 12,948
| 5.150426
| 0.056638
| 0.097552
| 0.050845
| 0.058531
| 0.972567
| 0.954003
| 0.950928
| 0.892633
| 0.884356
| 0.877734
| 0
| 0.016672
| 0.207831
| 12,948
| 295
| 110
| 43.891525
| 0.807839
| 0.122336
| 0
| 0.82967
| 0
| 0
| 0.210361
| 0.100377
| 0
| 0
| 0
| 0
| 0.489011
| 1
| 0.06044
| false
| 0.010989
| 0.016484
| 0.010989
| 0.104396
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
60d1d542a2a628a3c40aaa6a5faee01e5c072300
| 11,531
|
py
|
Python
|
coremltools/test/neural_network/test_graph_passes.py
|
drush-apple/coremltools
|
624092fb7144945bd6bec6f72b41cc630f44611b
|
[
"BSD-3-Clause"
] | 2
|
2021-03-20T17:53:52.000Z
|
2021-09-17T13:42:36.000Z
|
coremltools/test/neural_network/test_graph_passes.py
|
drush-apple/coremltools
|
624092fb7144945bd6bec6f72b41cc630f44611b
|
[
"BSD-3-Clause"
] | null | null | null |
coremltools/test/neural_network/test_graph_passes.py
|
drush-apple/coremltools
|
624092fb7144945bd6bec6f72b41cc630f44611b
|
[
"BSD-3-Clause"
] | 1
|
2021-05-07T22:23:59.000Z
|
2021-05-07T22:23:59.000Z
|
import numpy as np
import unittest
import coremltools.models.datatypes as datatypes
from coremltools.models import neural_network as neural_network
from coremltools.models import MLModel
from coremltools.models.neural_network.printer import print_network_spec
from coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes import \
remove_disconnected_layers, transform_conv_crop
import copy
import pytest
DEBUG = False
np.random.seed(100)
class MLModelPassesTest(unittest.TestCase):
    """Unit tests for the NNSSA mlmodel graph passes:
    ``remove_disconnected_layers`` and ``transform_conv_crop``.
    """

    def test_load_constant_remove(self):
        # Net: data -> relu1 -> relu2 -> out, plus three dangling
        # load-constant layers that nothing consumes.
        input_features = [('data', datatypes.Array(*(3, 4)))]
        output_features = [('out', None)]
        builder = neural_network.NeuralNetworkBuilder(input_features, output_features, disable_rank5_shape_mapping=True)
        builder.add_activation('relu1', 'RELU', 'data', 'relu1')
        builder.add_load_constant_nd('const1', 'c1', constant_value=np.ones((5,)), shape=(5,))
        builder.add_activation('relu2', 'RELU', 'relu1', 'out')
        builder.add_load_constant_nd('const2', 'c2', constant_value=np.ones((5,)), shape=(5,))
        builder.add_load_constant_nd('const3', 'c3', constant_value=np.ones((5,)), shape=(5,))
        spec = builder.spec
        np.testing.assert_equal(5, len(spec.neuralNetwork.layers))
        remove_disconnected_layers(spec)
        # Only the two relu layers on the data -> out path should survive.
        np.testing.assert_equal(2, len(spec.neuralNetwork.layers))

    def test_dead_layer_remove(self):
        # A larger dead sub-graph hangs off const2 (split -> squeeze/relus);
        # all of it plus the other dangling constants should be removed.
        input_features = [('data', datatypes.Array(*(3, 4)))]
        output_features = [('out', None)]
        builder = neural_network.NeuralNetworkBuilder(input_features, output_features, disable_rank5_shape_mapping=True)
        builder.add_activation('relu1', 'RELU', 'data', 'relu1')
        builder.add_load_constant_nd('const1', 'c1', constant_value=np.ones((5,)), shape=(5,))
        builder.add_load_constant_nd('const2', 'c2', constant_value=np.ones((5,)), shape=(5,))
        builder.add_split_nd('splitnd1', 'const2', ['s1', 's2', 's3'], axis=0, num_splits=3)
        builder.add_squeeze('squeeze', 's1', 'squeeze_out')
        builder.add_activation('relu4', 'RELU', 's2', 'relu4')
        builder.add_activation('relu5', 'RELU', 'relu4', 'relu5')
        builder.add_load_constant_nd('const3', 'c3', constant_value=np.ones((5,)), shape=(5,))
        builder.add_activation('relu2', 'RELU', 'relu1', 'out')
        spec = builder.spec
        np.testing.assert_equal(9, len(spec.neuralNetwork.layers))
        remove_disconnected_layers(spec)
        np.testing.assert_equal(2, len(spec.neuralNetwork.layers))

    @pytest.mark.xfail
    def test_dead_layer_remove_branch(self):
        # The entire cond/branch pair is dead: the final squeeze reads
        # 'input' directly, so the branch outputs feed nothing.
        convergence_tolerance = 1e-8
        input_features = [('input', datatypes.Array(*(2,)))]
        output_features = [('out', None)]
        builder = neural_network.NeuralNetworkBuilder(input_features, output_features, disable_rank5_shape_mapping=True)
        # add condition to break from the loop, if convergence criterion is met
        builder.add_less_than('cond', ['input'], 'cond', alpha=convergence_tolerance)
        branch_layer = builder.add_branch('branch_layer', 'cond')
        builder_ifbranch = neural_network.NeuralNetworkBuilder(nn_spec=branch_layer.branch.ifBranch)
        builder_ifbranch.add_activation('relu1', 'RELU', 'input', 'relu1_out')
        builder_ifbranch.add_activation('relu2_out', 'RELU', 'relu1_out', 'relu2_out')
        builder_elsebranch = neural_network.NeuralNetworkBuilder(nn_spec=branch_layer.branch.elseBranch)
        builder_elsebranch.add_activation('linear1', 'LINEAR', 'input', 'linear1_out')
        builder_elsebranch.add_activation('linear2', 'LINEAR', 'linear1_out', 'relu2_out')
        builder.add_squeeze('out', 'input', 'out', squeeze_all=True)
        mlmodel = MLModel(builder.spec)
        data = np.random.rand(2,)
        data_dict = {'input': data}
        # Predictions before and after the pass must agree.
        before_pass_out = mlmodel.predict(data_dict)['out']
        if DEBUG:
            print('\n mlmodel description before remove disconnected layers pass: \n')
            print_network_spec(builder.spec, style='coding')
        remove_disconnected_layers(builder.spec)
        if DEBUG:
            print('\n mlmodel description after remove disconnected layers pass: \n')
            print_network_spec(builder.spec, style='coding')
        mlmodel = MLModel(builder.spec)
        after_pass_out = mlmodel.predict(data_dict)['out']
        np.testing.assert_almost_equal(before_pass_out, after_pass_out, decimal=2)
        # Only the squeeze layer should remain.
        np.testing.assert_equal(len(builder.spec.neuralNetwork.layers), 1)

    @pytest.mark.xfail
    def test_dead_layer_partial_branch(self):
        # Here the branch output IS consumed; only the two extra 'linear_red'
        # layers inside the else-branch are dead and should be removed.
        convergence_tolerance = 1e-8
        input_features = [('input', datatypes.Array(*(2,)))]
        output_features = [('out', None)]
        builder = neural_network.NeuralNetworkBuilder(input_features, output_features, disable_rank5_shape_mapping=True)
        # add condition to break from the loop, if convergence criterion is met
        builder.add_less_than('cond', ['input'], 'cond', alpha=convergence_tolerance)
        branch_layer = builder.add_branch('branch_layer', 'cond')
        builder_ifbranch = neural_network.NeuralNetworkBuilder(nn_spec=branch_layer.branch.ifBranch)
        builder_ifbranch.add_activation('relu1', 'RELU', 'input', 'relu1_out')
        builder_ifbranch.add_activation('relu2_out', 'RELU', 'relu1_out', 'relu2_out')
        builder_elsebranch = neural_network.NeuralNetworkBuilder(nn_spec=branch_layer.branch.elseBranch)
        builder_elsebranch.add_activation('linear1', 'LINEAR', 'input', 'linear1_out')
        builder_elsebranch.add_activation('linear_red_1', 'LINEAR', 'input', 'linear_red1_out')
        builder_elsebranch.add_activation('linear_red_2', 'LINEAR', 'linear_red1_out', 'linear_red2_out')
        builder_elsebranch.add_activation('linear2', 'LINEAR', 'linear1_out', 'relu2_out')
        builder.add_squeeze('out', 'relu2_out', 'out', squeeze_all=True)
        mlmodel = MLModel(builder.spec)
        data = np.random.rand(2,)
        data_dict = {'input': data}
        before_pass_out = mlmodel.predict(data_dict)['out']
        if DEBUG:
            print('\n mlmodel description before remove disconnected layers pass: \n')
            print_network_spec(builder.spec, style='coding')
        # Keep a shallow copy so we can compare the untouched if-branch.
        old_spec = copy.copy(builder.spec)
        remove_disconnected_layers(builder.spec)
        if DEBUG:
            print('\n mlmodel description after remove disconnected layers pass: \n')
            print_network_spec(builder.spec, style='coding')
        mlmodel = MLModel(builder.spec)
        after_pass_out = mlmodel.predict(data_dict)['out']
        np.testing.assert_almost_equal(before_pass_out, after_pass_out, decimal=2)
        # if-branch is untouched; else-branch shrinks from 4 to 2 layers.
        np.testing.assert_equal(len(old_spec.neuralNetwork.layers[1].branch.ifBranch.layers),
                                len(builder.spec.neuralNetwork.layers[1].branch.ifBranch.layers))
        np.testing.assert_equal(len(builder.spec.neuralNetwork.layers[1].branch.elseBranch.layers), 2)

    def test_conv_crop_bn_to_conv_bn_crop(self):
        input_features = [('data', datatypes.Array(1, 10, 10))]
        output_features = [('out', None)]
        builder = neural_network.NeuralNetworkBuilder(input_features, output_features)
        W = np.ones((2,10,1,10), dtype=np.float32)
        builder.add_convolution(name='conv',
                                kernel_channels=1,
                                output_channels=2,
                                height=2, width=2,
                                stride_height=1, stride_width=1,
                                border_mode='valid', groups=1,
                                W=W,
                                b=None, has_bias=False,
                                input_name='data', output_name='conv_out')
        builder.add_crop(name='crop',
                         left=1, right=1, top=1, bottom=1, offset=0,
                         input_names=['conv_out'],
                         output_name='crop_out')
        builder.add_batchnorm(name='bn',
                              channels=2,
                              gamma=np.ones(2,).astype(np.float32),
                              beta=np.ones(2,).astype(np.float32),
                              mean=np.ones(2,).astype(np.float32),
                              variance=np.ones(2,).astype(np.float32),
                              input_name='crop_out',
                              output_name='out')
        # Conv -> Crop -> BN
        spec = builder.spec.neuralNetwork
        np.testing.assert_equal('crop', spec.layers[1].WhichOneof('layer'))
        np.testing.assert_equal('batchnorm', spec.layers[2].WhichOneof('layer'))
        # transform the pattern
        transform_conv_crop(builder.spec)
        # Conv -> BN -> Crop
        np.testing.assert_equal('batchnorm', spec.layers[1].WhichOneof('layer'))
        np.testing.assert_equal('crop', spec.layers[2].WhichOneof('layer'))

    def test_conv_crop_bn_relu_to_conv_bn_relu_crop(self):
        # Same as above but with a trailing activation: the crop must be
        # pushed past both the batchnorm and the relu.
        input_features = [('data', datatypes.Array(1, 10, 10))]
        output_features = [('out', None)]
        builder = neural_network.NeuralNetworkBuilder(input_features, output_features)
        W = np.ones((2,10,1,10), dtype=np.float32)
        builder.add_convolution(name='conv',
                                kernel_channels=1,
                                output_channels=2,
                                height=2, width=2,
                                stride_height=1, stride_width=1,
                                border_mode='valid', groups=1,
                                W=W,
                                b=None, has_bias=False,
                                input_name='data', output_name='conv_out')
        builder.add_crop(name='crop',
                         left=1, right=1, top=1, bottom=1, offset=0,
                         input_names=['conv_out'],
                         output_name='crop_out')
        builder.add_batchnorm(name='bn',
                              channels=2,
                              gamma=np.ones(2,).astype(np.float32),
                              beta=np.ones(2,).astype(np.float32),
                              mean=np.ones(2,).astype(np.float32),
                              variance=np.ones(2,).astype(np.float32),
                              input_name='crop_out',
                              output_name='bn_out')
        builder.add_activation(name='relu',
                               non_linearity='RELU',
                               input_name='bn_out',
                               output_name='out')
        # Conv -> Crop -> BN -> ReLU
        spec = builder.spec.neuralNetwork
        np.testing.assert_equal('crop', spec.layers[1].WhichOneof('layer'))
        np.testing.assert_equal('batchnorm', spec.layers[2].WhichOneof('layer'))
        np.testing.assert_equal('activation', spec.layers[3].WhichOneof('layer'))
        # transform the pattern
        transform_conv_crop(builder.spec)
        # Conv -> BN -> ReLU -> Crop
        np.testing.assert_equal('batchnorm', spec.layers[1].WhichOneof('layer'))
        np.testing.assert_equal('activation', spec.layers[2].WhichOneof('layer'))
        np.testing.assert_equal('crop', spec.layers[3].WhichOneof('layer'))
if __name__ == '__main__':
    # Flip to False to run only the single test named below instead of the
    # whole suite.
    RUN_ALL_TESTS = True
    if not RUN_ALL_TESTS:
        single = unittest.TestSuite()
        single.addTest(MLModelPassesTest('test_load_constant_remove'))
        unittest.TextTestRunner().run(single)
    else:
        unittest.main()
| 53.632558
| 120
| 0.620588
| 1,328
| 11,531
| 5.146837
| 0.136295
| 0.039503
| 0.041697
| 0.049744
| 0.860278
| 0.847257
| 0.847257
| 0.799561
| 0.799561
| 0.781419
| 0
| 0.021817
| 0.25271
| 11,531
| 214
| 121
| 53.883178
| 0.771382
| 0.023849
| 0
| 0.731183
| 0
| 0
| 0.109007
| 0.002223
| 0
| 0
| 0
| 0
| 0.102151
| 1
| 0.032258
| false
| 0.069892
| 0.048387
| 0
| 0.086022
| 0.048387
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
7191384967d4855be9ef86405316317f8b78e9b2
| 586
|
py
|
Python
|
intro/Hello World.py
|
Alekss101/python-school
|
6bde1676451f13677b7ce1f9c391017a9e40c37b
|
[
"MIT"
] | null | null | null |
intro/Hello World.py
|
Alekss101/python-school
|
6bde1676451f13677b7ce1f9c391017a9e40c37b
|
[
"MIT"
] | null | null | null |
intro/Hello World.py
|
Alekss101/python-school
|
6bde1676451f13677b7ce1f9c391017a9e40c37b
|
[
"MIT"
] | null | null | null |
# This Python program displays ASCII art
#
# by Aleksander Rohozinski
# Febuary 14, 2022
# ICS 2O - Ashbury College
# Mr. Giansante
# -------------------------------------------------------
# Prints "HelloWorld" rendered as ASCII block letters, one row per print().
# NOTE(review): sequences like "\ ", "\_" and "\/" are not recognized string
# escapes — CPython keeps them literal, but newer versions warn; raw strings
# (r"...") would make this explicit.
print(" _ _ _ _ _ _ _ _ ")
print("| | | | | | | | | | | | | | |")
print("| |_| | ___| | | ___ | | | | ___ _ __| | __| |")
print("| _ |/ _ \ | |/ _ \ | |/\| |/ _ \| '__| |/ _` |")
print("| | | | __/ | | (_) | \ /\ / (_) | | | | (_| |")
print("\_| |_/\___|_|_|\___/ \/ \/ \___/|_| |_|\__,_|")
| 39.066667
| 62
| 0.308874
| 24
| 586
| 5.333333
| 0.791667
| 0.390625
| 0.46875
| 0.46875
| 0.234375
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01897
| 0.370307
| 586
| 14
| 63
| 41.857143
| 0.327913
| 0.298635
| 0
| 0
| 0
| 0.666667
| 0.744417
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
71db867a445b3d446936480b83073f3fe1c2f5be
| 12,866
|
py
|
Python
|
tests/unit/records/targets/test_fileobj.py
|
cwegrzyn/records-mover
|
e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2
|
[
"Apache-2.0"
] | 36
|
2020-03-17T11:56:51.000Z
|
2022-01-19T16:03:32.000Z
|
tests/unit/records/targets/test_fileobj.py
|
cwegrzyn/records-mover
|
e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2
|
[
"Apache-2.0"
] | 60
|
2020-03-02T23:13:29.000Z
|
2021-05-19T15:05:42.000Z
|
tests/unit/records/targets/test_fileobj.py
|
cwegrzyn/records-mover
|
e3b71d6c09d99d0bcd6a956b9d09d20f8abe98d2
|
[
"Apache-2.0"
] | 4
|
2020-08-11T13:17:37.000Z
|
2021-11-05T21:11:52.000Z
|
import unittest
from records_mover.records.targets.fileobj import FileobjTarget
from records_mover.records.results import MoveResult
from records_mover.records.records_format import DelimitedRecordsFormat
from mock import patch, Mock, ANY
class TestFileobjTarget(unittest.TestCase):
    """Tests for FileobjTarget.move_from_dataframes_source().

    Each test feeds the target two mock dataframes and verifies the keyword
    arguments passed to pandas' ``to_csv`` for every combination of the
    'compression' (None / 'GZIP') and 'header-row' (False / True) hints.
    The four scenarios previously duplicated ~50 lines each; the shared flow
    now lives in :meth:`_check_move_from_dataframes`.
    """

    def _check_move_from_dataframes(self, mock_io, mock_prep_df_for_csv_output,
                                    compression, header_row):
        """Drive move_from_dataframes_source and verify its to_csv calls.

        :param mock_io: the patched ``records_mover.records.targets.fileobj.io``
        :param mock_prep_df_for_csv_output: the patched prep helper; its
            side_effect is set to pass each mock dataframe straight through
        :param compression: value for the 'compression' hint (None or 'GZIP')
        :param header_row: value for the 'header-row' hint
        """
        mock_fileobj = Mock(name='fileobj')
        mock_records_format = DelimitedRecordsFormat(hints={
            'encoding': 'UTF8',
            'compression': compression,
            'header-row': header_row,
            'quoting': 'all'
        })
        fileobj_target = FileobjTarget(fileobj=mock_fileobj,
                                       records_format=mock_records_format)
        mock_df_1 = Mock(name='df_1')
        mock_df_1.index = ['a']
        mock_df_2 = Mock(name='df_2')
        mock_df_2.index = ['a']
        mock_processing_instructions = Mock(name='processing_instructions')
        mock_dfs_source = Mock(name='dfs_source')
        mock_dfs_source.dfs = [mock_df_1, mock_df_2]
        mock_prep_df_for_csv_output.side_effect = [mock_df_1, mock_df_2]

        out = fileobj_target.move_from_dataframes_source(mock_dfs_source,
                                                         mock_processing_instructions)

        # Keyword arguments common to every to_csv call in all scenarios.
        expected_kwargs = dict(index=mock_dfs_source.include_index,
                               mode="a",
                               date_format='%Y-%m-%d %H:%M:%S.%f%z',
                               doublequote=False,
                               encoding='UTF8',
                               escapechar='\\',
                               line_terminator='\n',
                               quotechar='"',
                               quoting=1,
                               sep=',')
        if compression is None:
            # Uncompressed output goes through the TextIOWrapper built by the
            # (patched) io module.
            expected_kwargs['path_or_buf'] = mock_io.TextIOWrapper.return_value
        else:
            # Compressed output: pandas is told to gzip; the exact buffer is
            # created internally, so only its presence is asserted.
            expected_kwargs['path_or_buf'] = ANY
            expected_kwargs['compression'] = 'gzip'

        # Only the first chunk may carry the header row; subsequent chunks
        # must never repeat it.
        mock_df_1.to_csv.assert_called_with(header=header_row, **expected_kwargs)
        mock_df_2.to_csv.assert_called_with(header=False, **expected_kwargs)
        self.assertEqual(out, MoveResult(move_count=2, output_urls=None))

    @patch('records_mover.records.pandas.prep_df_for_csv_output')
    @patch('records_mover.records.targets.fileobj.io')
    @patch('records_mover.records.targets.fileobj.complain_on_unhandled_hints')
    def test_move_from_dataframe_uncompressed_no_header_row(self,
                                                            mock_complain_on_unhandled_hints,
                                                            mock_io,
                                                            mock_prep_df_for_csv_output):
        self._check_move_from_dataframes(mock_io, mock_prep_df_for_csv_output,
                                         compression=None, header_row=False)

    @patch('records_mover.records.pandas.prep_df_for_csv_output')
    @patch('records_mover.records.targets.fileobj.io')
    @patch('records_mover.records.targets.fileobj.complain_on_unhandled_hints')
    def test_move_from_dataframe_uncompressed_with_header_row(self,
                                                              mock_complain_on_unhandled_hints,
                                                              mock_io,
                                                              mock_prep_df_for_csv_output):
        self._check_move_from_dataframes(mock_io, mock_prep_df_for_csv_output,
                                         compression=None, header_row=True)

    @patch('records_mover.records.pandas.prep_df_for_csv_output')
    @patch('records_mover.records.targets.fileobj.io')
    @patch('records_mover.records.targets.fileobj.complain_on_unhandled_hints')
    def test_move_from_dataframe_compressed_no_header_row(self,
                                                          mock_complain_on_unhandled_hints,
                                                          mock_io,
                                                          mock_prep_df_for_csv_output):
        self._check_move_from_dataframes(mock_io, mock_prep_df_for_csv_output,
                                         compression='GZIP', header_row=False)

    @patch('records_mover.records.pandas.prep_df_for_csv_output')
    @patch('records_mover.records.targets.fileobj.io')
    @patch('records_mover.records.targets.fileobj.complain_on_unhandled_hints')
    def test_move_from_dataframe_compressed_with_header_row(self,
                                                            mock_complain_on_unhandled_hints,
                                                            mock_io,
                                                            mock_prep_df_for_csv_output):
        self._check_move_from_dataframes(mock_io, mock_prep_df_for_csv_output,
                                         compression='GZIP', header_row=True)
| 56.929204
| 95
| 0.42795
| 1,079
| 12,866
| 4.709917
| 0.086191
| 0.047226
| 0.027548
| 0.056671
| 0.958481
| 0.951987
| 0.951987
| 0.951987
| 0.951987
| 0.951987
| 0
| 0.011001
| 0.491295
| 12,866
| 225
| 96
| 57.182222
| 0.76547
| 0
| 0
| 0.953704
| 0
| 0
| 0.099798
| 0.055651
| 0
| 0
| 0
| 0
| 0.055556
| 1
| 0.018519
| false
| 0
| 0.023148
| 0
| 0.046296
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71e1debda0f899f23cc0fa4a37f38df7a6b422e5
| 113
|
py
|
Python
|
cycat/__init__.py
|
CyCat-project/cycat
|
6dbad12737496a4a5a81abf7814a6c5d4724dacc
|
[
"Unlicense"
] | 4
|
2021-02-27T21:49:31.000Z
|
2021-06-13T22:18:00.000Z
|
cycat/__init__.py
|
CyCat-project/cycat
|
6dbad12737496a4a5a81abf7814a6c5d4724dacc
|
[
"Unlicense"
] | null | null | null |
cycat/__init__.py
|
CyCat-project/cycat
|
6dbad12737496a4a5a81abf7814a6c5d4724dacc
|
[
"Unlicense"
] | null | null | null |
"""cycat package: re-exports the URL helpers at package level."""
__version__ = '0.1.0'

# One combined import instead of three single-name imports; cycat.url is
# still imported exactly once and the same three names are re-exported.
from cycat.url import build, generate, validate
| 18.833333
| 30
| 0.787611
| 19
| 113
| 4.473684
| 0.526316
| 0.317647
| 0.423529
| 0.635294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030928
| 0.141593
| 113
| 5
| 31
| 22.6
| 0.845361
| 0
| 0
| 0
| 1
| 0
| 0.044248
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e0f83124c6b777e653f35e7ac484af1e0154165a
| 11,259
|
py
|
Python
|
Market/migrations/0001_initial.py
|
Fowerus/drf-crm
|
be5420f1942886b685214c33537cf4b3759704a1
|
[
"Apache-2.0"
] | 3
|
2021-09-20T09:21:46.000Z
|
2021-09-21T08:51:14.000Z
|
Market/migrations/0001_initial.py
|
Fowerus/drf-crm
|
be5420f1942886b685214c33537cf4b3759704a1
|
[
"Apache-2.0"
] | null | null | null |
Market/migrations/0001_initial.py
|
Fowerus/drf-crm
|
be5420f1942886b685214c33537cf4b3759704a1
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 4.0.1 on 2022-02-04 13:28
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the Market app.

    Creates the cashbox, product, category, order, purchase, sale,
    transaction and work-done tables.  Every model shares the same base
    columns: a BigAutoField ``id``, ``created_at``/``updated_at``
    timestamps, and a free-form ``data`` JSON column; all tables are
    ordered newest-first by ``updated_at``.
    """

    # First migration of the app: no prior migration state exists.
    initial = True

    # No cross-app dependencies — all models here are self-contained
    # (no ForeignKey fields are created in this migration).
    dependencies = [
    ]

    operations = [
        # Cash register: tracks physical cash and money held on account.
        # All monetary columns use DecimalField(decimal_places=2) with a
        # non-negative constraint via MinValueValidator(0.0).
        migrations.CreateModel(
            name='Cashbox',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created_at')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated_at')),
                ('data', models.JSONField(blank=True, null=True)),
                ('name', models.CharField(max_length=150, verbose_name='Name')),
                ('cash', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Cash')),
                ('account_money', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Account money')),
            ],
            options={
                'verbose_name': 'Cashbox',
                'verbose_name_plural': 'Cashboxes',
                'db_table': 'cashbox',
                'ordering': ['-updated_at'],
            },
        ),
        # Inventory item: identification (code/barcode are nullable),
        # pricing, stock quantity, supplier, and the minimum stock value
        # (irreducible_balance) below which the balance must not fall.
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created_at')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated_at')),
                ('data', models.JSONField(blank=True, null=True)),
                ('name', models.CharField(max_length=150, verbose_name='Name')),
                ('code', models.CharField(max_length=150, null=True, verbose_name='Code')),
                ('barcode', models.CharField(max_length=150, null=True, verbose_name='Barcode')),
                ('purchase_price', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Purchase price')),
                ('sale_price', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Sale price')),
                ('count', models.IntegerField(verbose_name='Quantity')),
                ('supplier', models.CharField(max_length=150, verbose_name='Supplier')),
                ('irreducible_balance', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Irreducible balance')),
            ],
            options={
                'verbose_name': 'Product',
                'verbose_name_plural': 'Products',
                'db_table': 'product',
                'ordering': ['-updated_at'],
            },
        ),
        # Product category: name only (relation to Product is not created
        # in this migration — presumably added later; verify in 0002+).
        migrations.CreateModel(
            name='ProductCategory',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created_at')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated_at')),
                ('data', models.JSONField(blank=True, null=True)),
                ('name', models.CharField(max_length=150, verbose_name='Name')),
            ],
            options={
                'verbose_name': 'Category',
                'verbose_name_plural': 'Categories',
                'db_table': 'category',
                'ordering': ['-updated_at'],
            },
        ),
        # Order line for a product: named entry with a non-negative price.
        migrations.CreateModel(
            name='ProductOrder',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created_at')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated_at')),
                ('data', models.JSONField(blank=True, null=True)),
                ('name', models.CharField(max_length=150, verbose_name='Name')),
                ('price', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Price')),
            ],
            options={
                'verbose_name': 'ProductOrder',
                'verbose_name_plural': 'ProductOrders',
                'db_table': 'productorder',
                'ordering': ['-updated_at'],
            },
        ),
        # Approval record for a purchase: both flags default to False
        # (not cash, not yet accepted).
        migrations.CreateModel(
            name='PurchaseAccept',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created_at')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated_at')),
                ('data', models.JSONField(blank=True, null=True)),
                ('is_cash', models.BooleanField(default=False)),
                ('accept', models.BooleanField(default=False)),
            ],
            options={
                'verbose_name': 'PurchaseAccept',
                'verbose_name_plural': 'PurchaseAccepts',
                'db_table': 'purchaseaccept',
                'ordering': ['-updated_at'],
            },
        ),
        # Purchase request: price/quantity plus deferral flag; note that
        # unlike PurchaseAccept, ``is_cash`` here has NO default and must
        # be supplied explicitly.
        migrations.CreateModel(
            name='PurchaseRequest',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created_at')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated_at')),
                ('data', models.JSONField(blank=True, null=True)),
                ('price', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Price')),
                ('count', models.IntegerField(verbose_name='Quantity')),
                ('is_deferred', models.BooleanField(default=False, verbose_name='Deferred')),
                ('is_cash', models.BooleanField(verbose_name='Is cash')),
            ],
            options={
                'verbose_name': 'PurchaseRequest',
                'verbose_name_plural': 'PurchaseRequests',
                'db_table': 'purchaserequest',
                'ordering': ['-updated_at'],
            },
        ),
        # Sale of an order: payment split across cash / card / bank
        # transfer, plus an applied discount.
        migrations.CreateModel(
            name='SaleOrder',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created_at')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated_at')),
                ('data', models.JSONField(blank=True, null=True)),
                ('cash', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Cash')),
                ('card', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Card')),
                ('bank_transfer', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Bank transfer')),
                ('discount', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Discount')),
            ],
            options={
                'verbose_name': 'SaleOrder',
                'verbose_name_plural': 'SaleOrders',
                'db_table': 'saleorder',
                'ordering': ['-updated_at'],
            },
        ),
        # Sale of a product: identical column layout to SaleOrder but in
        # its own table ('saleproduct').
        migrations.CreateModel(
            name='SaleProduct',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created_at')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated_at')),
                ('data', models.JSONField(blank=True, null=True)),
                ('cash', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Cash')),
                ('card', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Card')),
                ('bank_transfer', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Bank transfer')),
                ('discount', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Discount')),
            ],
            options={
                'verbose_name': 'SaleProduct',
                'verbose_name_plural': 'SaleProducts',
                'db_table': 'saleproduct',
                'ordering': ['-updated_at'],
            },
        ),
        # Generic transaction record: only the shared base columns; the
        # payload lives entirely in the ``data`` JSON field.
        migrations.CreateModel(
            name='Transaction',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created_at')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated_at')),
                ('data', models.JSONField(blank=True, null=True)),
            ],
            options={
                'verbose_name': 'Transaction',
                'verbose_name_plural': 'Transactions',
                'db_table': 'transaction',
                'ordering': ['-updated_at'],
            },
        ),
        # Completed work/service: name plus a non-negative price.
        migrations.CreateModel(
            name='WorkDone',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created_at')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Updated_at')),
                ('data', models.JSONField(blank=True, null=True)),
                ('name', models.CharField(max_length=150, verbose_name='Name')),
                ('price', models.DecimalField(decimal_places=2, max_digits=100, validators=[django.core.validators.MinValueValidator(0.0)], verbose_name='Price')),
            ],
            options={
                'verbose_name': 'WorkDone',
                'verbose_name_plural': 'WorkDones',
                'db_table': 'workdone',
                'ordering': ['-updated_at'],
            },
        ),
    ]
| 57.443878
| 191
| 0.589395
| 1,092
| 11,259
| 5.850733
| 0.096154
| 0.134293
| 0.051651
| 0.07826
| 0.800282
| 0.800282
| 0.741118
| 0.72124
| 0.72124
| 0.70684
| 0
| 0.016326
| 0.265565
| 11,259
| 195
| 192
| 57.738462
| 0.756319
| 0.003997
| 0
| 0.632979
| 1
| 0
| 0.169729
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010638
| 0
| 0.031915
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
46024d8c32774efb47cb45c407e3430fc00bb0f9
| 134,747
|
py
|
Python
|
pkgs/ops-pkg/src/genie/libs/ops/bgp/iosxe/tests/bgp_output.py
|
jbronikowski/genielibs
|
200a34e5fe4838a27b5a80d5973651b2e34ccafb
|
[
"Apache-2.0"
] | 94
|
2018-04-30T20:29:15.000Z
|
2022-03-29T13:40:31.000Z
|
pkgs/ops-pkg/src/genie/libs/ops/bgp/iosxe/tests/bgp_output.py
|
jbronikowski/genielibs
|
200a34e5fe4838a27b5a80d5973651b2e34ccafb
|
[
"Apache-2.0"
] | 67
|
2018-12-06T21:08:09.000Z
|
2022-03-29T18:00:46.000Z
|
pkgs/ops-pkg/src/genie/libs/ops/bgp/iosxe/tests/bgp_output.py
|
jbronikowski/genielibs
|
200a34e5fe4838a27b5a80d5973651b2e34ccafb
|
[
"Apache-2.0"
] | 49
|
2018-06-29T18:59:03.000Z
|
2022-03-10T02:07:59.000Z
|
'''
BGP Genie Ops Object Outputs for IOSXE.
'''
class BgpOutput(object):
show_bgp_all_summary = '''\
show bgp all summary
For address family: IPv4 Unicast
BGP router identifier 1.1.1.1, local AS number 65000
BGP table version is 4, main routing table version 4
3 network entries using 744 bytes of memory
3 path entries using 408 bytes of memory
3/3 BGP path/bestpath attribute entries using 840 bytes of memory
2 BGP extended community entries using 500 bytes of memory
0 BGP route-map cache entries using 0 bytes of memory
0 BGP filter-list cache entries using 0 bytes of memory
BGP using 2492 total bytes of memory
BGP activity 12/0 prefixes, 12/0 paths, scan interval 60 secs
Neighbor V AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down State/PfxRcd
2.2.2.2 4 65000 27420 30189 4 0 0 2w5d 1
3.3.3.3 4 65000 27430 30165 4 0 0 2w5d 1
For address family: IPv6 Unicast
BGP router identifier 1.1.1.1, local AS number 65000
BGP table version is 5, main routing table version 5
3 network entries using 816 bytes of memory
3 path entries using 456 bytes of memory
3/3 BGP path/bestpath attribute entries using 840 bytes of memory
2 BGP extended community entries using 500 bytes of memory
0 BGP route-map cache entries using 0 bytes of memory
0 BGP filter-list cache entries using 0 bytes of memory
BGP using 2612 total bytes of memory
BGP activity 12/0 prefixes, 12/0 paths, scan interval 60 secs
Neighbor V AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down State/PfxRcd
2001:2:2:2::2 4 65000 27420 30190 5 0 0 2w5d 1
2001:3:3:3::3 4 65000 27430 30181 5 0 0 2w5d 1
For address family: VPNv4 Unicast
BGP router identifier 1.1.1.1, local AS number 65000
BGP table version is 4, main routing table version 4
3 network entries using 768 bytes of memory
3 path entries using 408 bytes of memory
3/3 BGP path/bestpath attribute entries using 888 bytes of memory
2 BGP extended community entries using 500 bytes of memory
0 BGP route-map cache entries using 0 bytes of memory
0 BGP filter-list cache entries using 0 bytes of memory
BGP using 2564 total bytes of memory
BGP activity 12/0 prefixes, 12/0 paths, scan interval 60 secs
Neighbor V AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down State/PfxRcd
2.2.2.2 4 65000 27420 30177 4 0 0 2w5d 1
3.3.3.3 4 65000 27431 30188 4 0 0 2w5d 1
For address family: VPNv6 Unicast
BGP router identifier 1.1.1.1, local AS number 65000
BGP table version is 5, main routing table version 5
3 network entries using 840 bytes of memory
3 path entries using 468 bytes of memory
3/3 BGP path/bestpath attribute entries using 888 bytes of memory
2 BGP extended community entries using 500 bytes of memory
0 BGP route-map cache entries using 0 bytes of memory
0 BGP filter-list cache entries using 0 bytes of memory
BGP using 2696 total bytes of memory
BGP activity 12/0 prefixes, 12/0 paths, scan interval 60 secs
Neighbor V AS MsgRcvd MsgSent TblVer InQ OutQ Up/Down State/PfxRcd
2001:2:2:2::2 4 65000 27420 30187 5 0 0 2w5d 1
2001:3:3:3::3 4 65000 27430 30169 5 0 0 2w5d 1
R1_xe#
'''
show_vrf_detail = '''\
show vrf detail | inc \(VRF
VRF Mgmt-intf (VRF Id = 1); default RD <not set>; default VPNID <not set>
VRF VRF1 (VRF Id = 2); default RD 65000:1; default VPNID <not set>
'''
show_bgp_all_cluster_ids = '''\
show bgp all cluster-ids
Global cluster-id: 1.1.1.1 (configured: 0.0.0.0)
BGP client-to-client reflection: Configured Used
all (inter-cluster and intra-cluster): ENABLED
intra-cluster: ENABLED ENABLED
List of cluster-ids:
Cluster-id #-neighbors C2C-rfl-CFG C2C-rfl-USE
'''
show_bgp_all_neighbors = '''\
show bgp all neighbors
For address family: IPv4 Unicast
BGP neighbor is 2.2.2.2, remote AS 65000, internal link
BGP version 4, remote router ID 2.2.2.2
BGP state = Established, up for 2w5d
Last read 00:00:17, last write 00:00:39, hold time is 180, keepalive interval is 60 seconds
Neighbor sessions:
1 active, is not multisession capable (disabled)
Neighbor capabilities:
Route refresh: advertised and received(new)
Four-octets ASN Capability: advertised and received
Address family IPv4 Unicast: advertised and received
Enhanced Refresh Capability: advertised
Multisession Capability:
Stateful switchover support enabled: NO for session 1
Message statistics:
InQ depth is 0
OutQ depth is 0
Sent Rcvd
Opens: 1 1
Notifications: 0 0
Updates: 2 2
Keepalives: 30186 27417
Route Refresh: 0 0
Total: 30189 27420
Do log neighbor state changes (via global configuration)
Default minimum time between advertisement runs is 0 seconds
Address tracking is enabled, the RIB does have a route to 2.2.2.2
Route to peer address reachability Up: 1; Down: 0
Last notification 2w5d
Connections established 1; dropped 0
Last reset never
Interface associated: (none) (peering address NOT in same link)
Transport(tcp) path-mtu-discovery is enabled
Graceful-Restart is disabled
SSO is disabled
Connection state is ESTAB, I/O status: 1, unread input bytes: 0
Connection is ECN Disabled, Mininum incoming TTL 0, Outgoing TTL 255
Local host: 1.1.1.1, Local port: 179
Foreign host: 2.2.2.2, Foreign port: 25026
Connection tableid (VRF): 0
Maximum output segment queue size: 50
Enqueued packets for retransmit: 0, input: 0 mis-ordered: 0 (0 bytes)
Event Timers (current time is 0x621EDEF8):
Timer Starts Wakeups Next
Retrans 30188 0 0x0
TimeWait 0 0 0x0
AckHold 27419 26858 0x0
SendWnd 0 0 0x0
KeepAlive 0 0 0x0
GiveUp 0 0 0x0
PmtuAger 0 0 0x0
DeadWait 0 0 0x0
Linger 0 0 0x0
ProcessQ 0 0 0x0
iss: 402897367 snduna: 403471038 sndnxt: 403471038
irs: 3455343999 rcvnxt: 3455865077
sndwnd: 32236 scale: 0 maxrcvwnd: 16384
rcvwnd: 15719 scale: 0 delrcvwnd: 665
SRTT: 1000 ms, RTTO: 1003 ms, RTV: 3 ms, KRTT: 0 ms
minRTT: 1 ms, maxRTT: 1000 ms, ACK hold: 200 ms
uptime: 1645068668 ms, Sent idletime: 17155 ms, Receive idletime: 17356 ms
Status Flags: passive open, gen tcbs
Option Flags: nagle, path mtu capable
IP Precedence value : 6
Datagrams (max data segment is 1240 bytes):
Rcvd: 57653 (out of order: 0), with data: 27419, total data bytes: 521077
Sent: 57603 (retransmit: 0, fastretransmit: 0, partialack: 0, Second Congestion: 0), with data: 30188, total data bytes: 573670
Packets received in fast path: 0, fast processed: 0, slow path: 0
fast lock acquisition failures: 0, slow path: 0
TCP Semaphore 0x7F23C7640538 FREE
BGP neighbor is 3.3.3.3, remote AS 65000, internal link
BGP version 4, remote router ID 3.3.3.3
BGP state = Established, up for 2w5d
Last read 00:00:02, last write 00:00:22, hold time is 180, keepalive interval is 60 seconds
Neighbor sessions:
1 active, is not multisession capable (disabled)
Neighbor capabilities:
Route refresh: advertised and received(new)
Four-octets ASN Capability: advertised and received
Address family IPv4 Unicast: advertised and received
Graceful Restart Capability: received
Remote Restart timer is 120 seconds
Address families advertised by peer:
IPv4 Unicast (was not preserved
Enhanced Refresh Capability: advertised
Multisession Capability:
Stateful switchover support enabled: NO for session 1
Message statistics:
InQ depth is 0
OutQ depth is 0
Sent Rcvd
Opens: 1 1
Notifications: 0 0
Updates: 2 2
Keepalives: 30162 27427
Route Refresh: 0 0
Total: 30165 27430
Do log neighbor state changes (via global configuration)
Default minimum time between advertisement runs is 0 seconds
Address tracking is enabled, the RIB does have a route to 3.3.3.3
Route to peer address reachability Up: 1; Down: 0
Last notification 2w5d
Connections established 1; dropped 0
Last reset never
Interface associated: (none) (peering address NOT in same link)
Transport(tcp) path-mtu-discovery is enabled
Graceful-Restart is disabled
SSO is disabled
Connection state is ESTAB, I/O status: 1, unread input bytes: 0
Connection is ECN Disabled, Mininum incoming TTL 0, Outgoing TTL 255
Local host: 1.1.1.1, Local port: 11084
Foreign host: 3.3.3.3, Foreign port: 179
Connection tableid (VRF): 0
Maximum output segment queue size: 50
Enqueued packets for retransmit: 0, input: 0 mis-ordered: 0 (0 bytes)
Event Timers (current time is 0x621EDEFB):
Timer Starts Wakeups Next
Retrans 30165 0 0x0
TimeWait 0 0 0x0
AckHold 27429 26944 0x0
SendWnd 0 0 0x0
KeepAlive 0 0 0x0
GiveUp 0 0 0x0
PmtuAger 1636295 1636294 0x621EE25B
DeadWait 0 0 0x0
Linger 0 0 0x0
ProcessQ 0 0 0x0
iss: 4151727173 snduna: 4152300388 sndnxt: 4152300388
irs: 3770309714 rcvnxt: 3770830970
sndwnd: 29200 scale: 0 maxrcvwnd: 16384
rcvwnd: 15966 scale: 0 delrcvwnd: 418
SRTT: 1000 ms, RTTO: 1003 ms, RTV: 3 ms, KRTT: 0 ms
minRTT: 1 ms, maxRTT: 1000 ms, ACK hold: 200 ms
uptime: 1645729685 ms, Sent idletime: 2470 ms, Receive idletime: 2670 ms
Status Flags: active open
Option Flags: nagle, path mtu capable
IP Precedence value : 6
Datagrams (max data segment is 1460 bytes):
Rcvd: 57650 (out of order: 0), with data: 27428, total data bytes: 521255
Sent: 57523 (retransmit: 0, fastretransmit: 0, partialack: 0, Second Congestion: 0), with data: 30164, total data bytes: 573214
Packets received in fast path: 0, fast processed: 0, slow path: 0
fast lock acquisition failures: 0, slow path: 0
TCP Semaphore 0x7F23C7494520 FREE
For address family: IPv6 Unicast
BGP neighbor is 2001:2:2:2::2, remote AS 65000, internal link
BGP version 4, remote router ID 2.2.2.2
BGP state = Established, up for 2w5d
Last read 00:00:17, last write 00:00:00, hold time is 180, keepalive interval is 60 seconds
Neighbor sessions:
1 active, is not multisession capable (disabled)
Neighbor capabilities:
Route refresh: advertised and received(new)
Four-octets ASN Capability: advertised and received
Address family IPv6 Unicast: advertised and received
Enhanced Refresh Capability: advertised
Multisession Capability:
Stateful switchover support enabled: NO for session 1
Message statistics:
InQ depth is 0
OutQ depth is 0
Sent Rcvd
Opens: 1 1
Notifications: 0 0
Updates: 2 2
Keepalives: 30188 27417
Route Refresh: 0 0
Total: 30191 27420
Do log neighbor state changes (via global configuration)
Default minimum time between advertisement runs is 0 seconds
Address tracking is enabled, the RIB does have a route to 2001:2:2:2::2
Route to peer address reachability Up: 2; Down: 0
Last notification 2w5d
Connections established 1; dropped 0
Last reset never
Interface associated: (none) (peering address NOT in same link)
Transport(tcp) path-mtu-discovery is enabled
Graceful-Restart is disabled
SSO is disabled
Connection state is ESTAB, I/O status: 1, unread input bytes: 0
Connection is ECN Disabled, Mininum incoming TTL 0, Outgoing TTL 255
Local host: 2001:1:1:1::1, Local port: 179
Foreign host: 2001:2:2:2::2, Foreign port: 52223
Connection tableid (VRF): 0
Maximum output segment queue size: 50
Enqueued packets for retransmit: 0, input: 0 mis-ordered: 0 (0 bytes)
Event Timers (current time is 0x621EDEFE):
Timer Starts Wakeups Next
Retrans 30190 0 0x0
TimeWait 0 0 0x0
AckHold 27419 26861 0x0
SendWnd 0 0 0x0
KeepAlive 0 0 0x0
GiveUp 0 0 0x0
PmtuAger 0 0 0x0
DeadWait 0 0 0x0
Linger 0 0 0x0
ProcessQ 0 0 0x0
iss: 723831891 snduna: 724405635 sndnxt: 724405635
irs: 2052291678 rcvnxt: 2052812792
sndwnd: 32141 scale: 0 maxrcvwnd: 16384
rcvwnd: 15206 scale: 0 delrcvwnd: 1178
SRTT: 1000 ms, RTTO: 1003 ms, RTV: 3 ms, KRTT: 0 ms
minRTT: 1 ms, maxRTT: 1000 ms, ACK hold: 200 ms
uptime: 1645067186 ms, Sent idletime: 315 ms, Receive idletime: 106 ms
Status Flags: passive open, gen tcbs
Option Flags: nagle, path mtu capable
IP Precedence value : 6
Datagrams (max data segment is 1220 bytes):
Rcvd: 57654 (out of order: 0), with data: 27419, total data bytes: 521113
Sent: 57613 (retransmit: 0, fastretransmit: 0, partialack: 0, Second Congestion: 0), with data: 57613, total data bytes: 2878271
Packets received in fast path: 0, fast processed: 0, slow path: 0
fast lock acquisition failures: 0, slow path: 0
TCP Semaphore 0x7F23C76402C8 FREE
BGP neighbor is 2001:3:3:3::3, remote AS 65000, internal link
BGP version 4, remote router ID 3.3.3.3
BGP state = Established, up for 2w5d
Last read 00:00:02, last write 00:00:10, hold time is 180, keepalive interval is 60 seconds
Neighbor sessions:
1 active, is not multisession capable (disabled)
Neighbor capabilities:
Route refresh: advertised and received(new)
Four-octets ASN Capability: advertised and received
Address family IPv6 Unicast: advertised and received
Graceful Restart Capability: received
Remote Restart timer is 120 seconds
Address families advertised by peer:
IPv6 Unicast (was not preserved
Enhanced Refresh Capability: advertised
Multisession Capability:
Stateful switchover support enabled: NO for session 1
Message statistics:
InQ depth is 0
OutQ depth is 0
Sent Rcvd
Opens: 1 1
Notifications: 0 0
Updates: 2 2
Keepalives: 30178 27427
Route Refresh: 0 0
Total: 30181 27430
Do log neighbor state changes (via global configuration)
Default minimum time between advertisement runs is 0 seconds
Address tracking is enabled, the RIB does have a route to 2001:3:3:3::3
Route to peer address reachability Up: 1; Down: 0
Last notification 2w5d
Connections established 1; dropped 0
Last reset never
Interface associated: (none) (peering address NOT in same link)
Transport(tcp) path-mtu-discovery is enabled
Graceful-Restart is disabled
SSO is disabled
Connection state is ESTAB, I/O status: 1, unread input bytes: 0
Connection is ECN Disabled, Mininum incoming TTL 0, Outgoing TTL 255
Local host: 2001:1:1:1::1, Local port: 179
Foreign host: 2001:3:3:3::3, Foreign port: 47133
Connection tableid (VRF): 0
Maximum output segment queue size: 50
Enqueued packets for retransmit: 0, input: 0 mis-ordered: 0 (0 bytes)
Event Timers (current time is 0x621EDF01):
Timer Starts Wakeups Next
Retrans 30180 0 0x0
TimeWait 0 0 0x0
AckHold 27429 26963 0x0
SendWnd 0 0 0x0
KeepAlive 0 0 0x0
GiveUp 0 0 0x0
PmtuAger 0 0 0x0
DeadWait 0 0 0x0
Linger 0 0 0x0
ProcessQ 0 0 0x0
iss: 1399948803 snduna: 1400522357 sndnxt: 1400522357
irs: 1512650626 rcvnxt: 1513171918
sndwnd: 28800 scale: 0 maxrcvwnd: 16384
rcvwnd: 16384 scale: 0 delrcvwnd: 0
SRTT: 1000 ms, RTTO: 1003 ms, RTV: 3 ms, KRTT: 0 ms
minRTT: 1 ms, maxRTT: 1000 ms, ACK hold: 200 ms
uptime: 1645725071 ms, Sent idletime: 2676 ms, Receive idletime: 2676 ms
Status Flags: passive open, gen tcbs
Option Flags: nagle, path mtu capable
IP Precedence value : 6
Datagrams (max data segment is 1440 bytes):
Rcvd: 57664 (out of order: 0), with data: 27428, total data bytes: 521291
Sent: 57560 (retransmit: 0, fastretransmit: 0, partialack: 0, Second Congestion: 0), with data: 57560, total data bytes: 2875961
Packets received in fast path: 0, fast processed: 0, slow path: 0
fast lock acquisition failures: 0, slow path: 0
TCP Semaphore 0x7F23C7494380 FREE
For address family: VPNv4 Unicast
BGP neighbor is 2.2.2.2, vrf VRF1, remote AS 65000, internal link
BGP version 4, remote router ID 2.2.2.2
BGP state = Established, up for 2w5d
Last read 00:00:13, last write 00:00:44, hold time is 180, keepalive interval is 60 seconds
Neighbor sessions:
1 active, is not multisession capable (disabled)
Neighbor capabilities:
Route refresh: advertised and received(new)
Four-octets ASN Capability: advertised and received
Address family IPv4 Unicast: advertised and received
Enhanced Refresh Capability: advertised
Multisession Capability:
Stateful switchover support enabled: NO for session 1
Message statistics:
InQ depth is 0
OutQ depth is 0
Sent Rcvd
Opens: 1 1
Notifications: 0 0
Updates: 2 2
Keepalives: 30174 27417
Route Refresh: 0 0
Total: 30177 27420
Do log neighbor state changes (via global configuration)
Default minimum time between advertisement runs is 0 seconds
Address tracking is enabled, the RIB does have a route to 2.2.2.2
Route to peer address reachability Up: 2; Down: 0
Last notification 2w5d
Connections established 1; dropped 0
Last reset never
Interface associated: (none) (peering address NOT in same link)
Transport(tcp) path-mtu-discovery is enabled
Graceful-Restart is disabled
SSO is disabled
Connection state is ESTAB, I/O status: 1, unread input bytes: 0
Connection is ECN Disabled, Mininum incoming TTL 0, Outgoing TTL 255
Local host: 1.1.1.1, Local port: 179
Foreign host: 2.2.2.2, Foreign port: 50426
Connection tableid (VRF): 2
Maximum output segment queue size: 50
Enqueued packets for retransmit: 0, input: 0 mis-ordered: 0 (0 bytes)
Event Timers (current time is 0x621EDF03):
Timer Starts Wakeups Next
Retrans 30180 4 0x0
TimeWait 0 0 0x0
AckHold 27420 26856 0x0
SendWnd 0 0 0x0
KeepAlive 0 0 0x0
GiveUp 0 0 0x0
PmtuAger 0 0 0x0
DeadWait 0 0 0x0
Linger 0 0 0x0
ProcessQ 0 0 0x0
iss: 3028094276 snduna: 3028667719 sndnxt: 3028667719
irs: 568735500 rcvnxt: 569256556
sndwnd: 32483 scale: 0 maxrcvwnd: 16384
rcvwnd: 15738 scale: 0 delrcvwnd: 646
SRTT: 1000 ms, RTTO: 1003 ms, RTV: 3 ms, KRTT: 0 ms
minRTT: 1 ms, maxRTT: 1000 ms, ACK hold: 200 ms
uptime: 1645068163 ms, Sent idletime: 13180 ms, Receive idletime: 13380 ms
Status Flags: passive open, gen tcbs
Option Flags: VRF id set, nagle, path mtu capable
IP Precedence value : 6
Datagrams (max data segment is 1240 bytes):
Rcvd: 57669 (out of order: 0), with data: 27420, total data bytes: 521055
Sent: 57610 (retransmit: 4, fastretransmit: 0, partialack: 0, Second Congestion: 0), with data: 30176, total data bytes: 573442
Packets received in fast path: 0, fast processed: 0, slow path: 0
fast lock acquisition failures: 0, slow path: 0
TCP Semaphore 0x7F23C7640468 FREE
BGP neighbor is 3.3.3.3, vrf VRF1, remote AS 65000, internal link
BGP version 4, remote router ID 3.3.3.3
BGP state = Established, up for 2w5d
Last read 00:00:02, last write 00:00:40, hold time is 180, keepalive interval is 60 seconds
Neighbor sessions:
1 active, is not multisession capable (disabled)
Neighbor capabilities:
Route refresh: advertised and received(new)
Four-octets ASN Capability: advertised and received
Address family IPv4 Unicast: advertised and received
Graceful Restart Capability: received
Remote Restart timer is 120 seconds
Address families advertised by peer:
IPv4 Unicast (was not preserved
Enhanced Refresh Capability: advertised
Multisession Capability:
Stateful switchover support enabled: NO for session 1
Message statistics:
InQ depth is 0
OutQ depth is 0
Sent Rcvd
Opens: 1 1
Notifications: 0 0
Updates: 2 3
Keepalives: 30185 27427
Route Refresh: 0 0
Total: 30188 27431
Do log neighbor state changes (via global configuration)
Default minimum time between advertisement runs is 0 seconds
Address tracking is enabled, the RIB does have a route to 3.3.3.3
Route to peer address reachability Up: 1; Down: 0
Last notification 2w5d
Connections established 1; dropped 0
Last reset never
Interface associated: (none) (peering address NOT in same link)
Transport(tcp) path-mtu-discovery is enabled
Graceful-Restart is disabled
SSO is disabled
Connection state is ESTAB, I/O status: 1, unread input bytes: 0
Connection is ECN Disabled, Mininum incoming TTL 0, Outgoing TTL 255
Local host: 1.1.1.1, Local port: 43893
Foreign host: 3.3.3.3, Foreign port: 179
Connection tableid (VRF): 2
Maximum output segment queue size: 50
Enqueued packets for retransmit: 0, input: 0 mis-ordered: 0 (0 bytes)
Event Timers (current time is 0x621EDF06):
Timer Starts Wakeups Next
Retrans 30188 0 0x0
TimeWait 0 0 0x0
AckHold 27429 26958 0x0
SendWnd 0 0 0x0
KeepAlive 0 0 0x0
GiveUp 0 0 0x0
PmtuAger 1636291 1636290 0x621EE25B
DeadWait 0 0 0x0
Linger 0 0 0x0
ProcessQ 0 0 0x0
iss: 1210418011 snduna: 1210991663 sndnxt: 1210991663
irs: 1252136999 rcvnxt: 1252658304
sndwnd: 29200 scale: 0 maxrcvwnd: 16384
rcvwnd: 15909 scale: 0 delrcvwnd: 475
SRTT: 1000 ms, RTTO: 1003 ms, RTV: 3 ms, KRTT: 0 ms
minRTT: 1 ms, maxRTT: 1000 ms, ACK hold: 200 ms
uptime: 1645726624 ms, Sent idletime: 2482 ms, Receive idletime: 2682 ms
Status Flags: active open
Option Flags: VRF id set, nagle, path mtu capable
IP Precedence value : 6
Datagrams (max data segment is 1460 bytes):
Rcvd: 57674 (out of order: 0), with data: 27429, total data bytes: 521304
Sent: 57561 (retransmit: 0, fastretransmit: 0, partialack: 0, Second Congestion: 0), with data: 30187, total data bytes: 573651
Packets received in fast path: 0, fast processed: 0, slow path: 0
fast lock acquisition failures: 0, slow path: 0
TCP Semaphore 0x7F23C7494450 FREE
For address family: VPNv6 Unicast
BGP neighbor is 2001:2:2:2::2, vrf VRF1, remote AS 65000, internal link
BGP version 4, remote router ID 2.2.2.2
BGP state = Established, up for 2w5d
Last read 00:00:05, last write 00:00:26, hold time is 180, keepalive interval is 60 seconds
Neighbor sessions:
1 active, is not multisession capable (disabled)
Neighbor capabilities:
Route refresh: advertised and received(new)
Four-octets ASN Capability: advertised and received
Address family IPv6 Unicast: advertised and received
Enhanced Refresh Capability: advertised
Multisession Capability:
Stateful switchover support enabled: NO for session 1
Message statistics:
InQ depth is 0
OutQ depth is 0
Sent Rcvd
Opens: 1 1
Notifications: 0 0
Updates: 2 2
Keepalives: 30184 27417
Route Refresh: 0 0
Total: 30187 27420
Do log neighbor state changes (via global configuration)
Default minimum time between advertisement runs is 0 seconds
Address tracking is enabled, the RIB does have a route to 2001:2:2:2::2
Route to peer address reachability Up: 1; Down: 0
Last notification 2w5d
Connections established 1; dropped 0
Last reset never
Interface associated: (none) (peering address NOT in same link)
Transport(tcp) path-mtu-discovery is enabled
Graceful-Restart is disabled
SSO is disabled
Connection state is ESTAB, I/O status: 1, unread input bytes: 0
Connection is ECN Disabled, Mininum incoming TTL 0, Outgoing TTL 255
Local host: 2001:1:1:1::1, Local port: 32057
Foreign host: 2001:2:2:2::2, Foreign port: 179
Connection tableid (VRF): 503316482
Maximum output segment queue size: 50
Enqueued packets for retransmit: 0, input: 0 mis-ordered: 0 (0 bytes)
Event Timers (current time is 0x621EDF09):
Timer Starts Wakeups Next
Retrans 30190 4 0x0
TimeWait 0 0 0x0
AckHold 27420 26869 0x0
SendWnd 0 0 0x0
KeepAlive 0 0 0x0
GiveUp 0 0 0x0
PmtuAger 1 1 0x0
DeadWait 0 0 0x0
Linger 0 0 0x0
ProcessQ 0 0 0x0
iss: 3279254845 snduna: 3279828513 sndnxt: 3279828513
irs: 2772915965 rcvnxt: 2773437057
sndwnd: 32217 scale: 0 maxrcvwnd: 16384
rcvwnd: 15225 scale: 0 delrcvwnd: 1159
SRTT: 1000 ms, RTTO: 1003 ms, RTV: 3 ms, KRTT: 0 ms
minRTT: 1 ms, maxRTT: 1000 ms, ACK hold: 200 ms
uptime: 1645068087 ms, Sent idletime: 5770 ms, Receive idletime: 5970 ms
Status Flags: active open
Option Flags: VRF id set, nagle, path mtu capable
IP Precedence value : 6
Datagrams (max data segment is 1220 bytes):
Rcvd: 57637 (out of order: 0), with data: 27419, total data bytes: 521091
Sent: 57604 (retransmit: 4, fastretransmit: 0, partialack: 0, Second Congestion: 0), with data: 57604, total data bytes: 2877835
Packets received in fast path: 0, fast processed: 0, slow path: 0
fast lock acquisition failures: 0, slow path: 0
TCP Semaphore 0x7F23C7640398 FREE
BGP neighbor is 2001:3:3:3::3, vrf VRF1, remote AS 65000, internal link
BGP version 4, remote router ID 3.3.3.3
BGP state = Established, up for 2w5d
Last read 00:00:39, last write 00:00:26, hold time is 180, keepalive interval is 60 seconds
Neighbor sessions:
1 active, is not multisession capable (disabled)
Neighbor capabilities:
Route refresh: advertised and received(new)
Four-octets ASN Capability: advertised and received
Address family IPv6 Unicast: advertised and received
Graceful Restart Capability: received
Remote Restart timer is 120 seconds
Address families advertised by peer:
IPv6 Unicast (was not preserved
Enhanced Refresh Capability: advertised
Multisession Capability:
Stateful switchover support enabled: NO for session 1
Message statistics:
InQ depth is 0
OutQ depth is 0
Sent Rcvd
Opens: 1 1
Notifications: 0 0
Updates: 2 3
Keepalives: 30166 27426
Route Refresh: 0 0
Total: 30169 27430
Do log neighbor state changes (via global configuration)
Default minimum time between advertisement runs is 0 seconds
Address tracking is enabled, the RIB does have a route to 2001:3:3:3::3
Route to peer address reachability Up: 1; Down: 0
Last notification 2w5d
Connections established 1; dropped 0
Last reset never
Interface associated: (none) (peering address NOT in same link)
Transport(tcp) path-mtu-discovery is enabled
Graceful-Restart is disabled
SSO is disabled
Connection state is ESTAB, I/O status: 1, unread input bytes: 0
Connection is ECN Disabled, Mininum incoming TTL 0, Outgoing TTL 255
Local host: 2001:1:1:1::1, Local port: 179
Foreign host: 2001:3:3:3::3, Foreign port: 20838
Connection tableid (VRF): 503316482
Maximum output segment queue size: 50
Enqueued packets for retransmit: 0, input: 0 mis-ordered: 0 (0 bytes)
Event Timers (current time is 0x621EDF0B):
Timer Starts Wakeups Next
Retrans 30168 0 0x0
TimeWait 0 0 0x0
AckHold 27429 26946 0x0
SendWnd 0 0 0x0
KeepAlive 0 0 0x0
GiveUp 0 0 0x0
PmtuAger 0 0 0x0
DeadWait 0 0 0x0
Linger 0 0 0x0
ProcessQ 0 0 0x0
iss: 2912962685 snduna: 2913536011 sndnxt: 2913536011
irs: 3025088327 rcvnxt: 3025609679
sndwnd: 28800 scale: 0 maxrcvwnd: 16384
rcvwnd: 16327 scale: 0 delrcvwnd: 57
SRTT: 1000 ms, RTTO: 1003 ms, RTV: 3 ms, KRTT: 0 ms
minRTT: 1 ms, maxRTT: 1000 ms, ACK hold: 200 ms
uptime: 1645724603 ms, Sent idletime: 26956 ms, Receive idletime: 26955 ms
Status Flags: passive open, gen tcbs
Option Flags: VRF id set, nagle, path mtu capable
IP Precedence value : 6
Datagrams (max data segment is 1440 bytes):
Rcvd: 57671 (out of order: 0), with data: 27428, total data bytes: 521351
Sent: 57550 (retransmit: 0, fastretransmit: 0, partialack: 0, Second Congestion: 0), with data: 57550, total data bytes: 2875333
Packets received in fast path: 0, fast processed: 0, slow path: 0
fast lock acquisition failures: 0, slow path: 0
TCP Semaphore 0x7F23C74942B0 FREE
For address family: IPv4 Multicast
For address family: L2VPN E-VPN
For address family: VPNv4 Multicast
For address family: MVPNv4 Unicast
For address family: MVPNv6 Unicast
For address family: VPNv6 Multicast
For address family: VPNv4 Flowspec
For address family: VPNv6 Flowspec
'''
# Mocked device output for 'show bgp all neighbors 2.2.2.2 policy'
# (IPv4 iBGP neighbor; policies present in both global and VRF1 tables).
show_neighbor_policy_1 = '''\
show bgp all neighbors 2.2.2.2 policy
Neighbor: 2.2.2.2, Address-Family: IPv4 Unicast
Neighbor: 2.2.2.2, Address-Family: VPNv4 Unicast (VRF1)
'''
# Mocked device output for 'show bgp all neighbors 2001:2:2:2::2 policy'
# (IPv6 iBGP neighbor; policies present in both global and VRF1 tables).
show_neighbor_policy_2 = '''\
show bgp all neighbors 2001:2:2:2::2 policy
Neighbor: 2001:2:2:2::2, Address-Family: IPv6 Unicast
Neighbor: 2001:2:2:2::2, Address-Family: VPNv6 Unicast (VRF1)
'''
# Mocked device output for 'show bgp all neighbors 2001:3:3:3::3 policy'
# (second IPv6 iBGP neighbor; global and VRF1 address families).
show_neighbor_policy_3 = '''\
show bgp all neighbors 2001:3:3:3::3 policy
Neighbor: 2001:3:3:3::3, Address-Family: IPv6 Unicast
Neighbor: 2001:3:3:3::3, Address-Family: VPNv6 Unicast (VRF1)
'''
# Mocked device output for 'show bgp all neighbors 3.3.3.3 policy'
# (second IPv4 iBGP neighbor; global and VRF1 address families).
show_neighbor_policy_4 = '''\
show bgp all neighbors 3.3.3.3 policy
Neighbor: 3.3.3.3, Address-Family: IPv4 Unicast
Neighbor: 3.3.3.3, Address-Family: VPNv4 Unicast (VRF1)
'''
# Mocked device output for 'show bgp all': BGP tables for every address
# family (IPv4/IPv6 Unicast plus VPNv4/VPNv6 Unicast under RD 65000:1 for
# vrf VRF1; remaining families are empty). Preserved byte-for-byte as the
# parser under test consumes it verbatim.
show_bgp_all = '''\
show bgp all
For address family: IPv4 Unicast
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
*> 1.1.1.1/32 0.0.0.0 0 32768 i
r>i 2.2.2.2/32 2.2.2.2 0 100 0 i
r>i 3.3.3.3/32 3.3.3.3 100 0 i
For address family: IPv6 Unicast
BGP table version is 5, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
*> 2001:1:1:1::1/128
:: 0 32768 i
r>i 2001:2:2:2::2/128
2001:2:2:2::2 0 100 0 i
r>i 2001:3:3:3::3/128
2001:3:3:3::3 100 0 i
For address family: VPNv4 Unicast
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
*> 1.1.1.1/32 0.0.0.0 0 32768 i
r>i 2.2.2.2/32 2.2.2.2 0 100 0 i
r>i 3.3.3.3/32 3.3.3.3 100 0 i
For address family: VPNv6 Unicast
BGP table version is 5, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
*> 2001:1:1:1::1/128
:: 0 32768 i
r>i 2001:2:2:2::2/128
2001:2:2:2::2 0 100 0 i
r>i 2001:3:3:3::3/128
2001:3:3:3::3 100 0 i
For address family: IPv4 Multicast
For address family: L2VPN E-VPN
For address family: VPNv4 Multicast
For address family: MVPNv4 Unicast
For address family: MVPNv6 Unicast
For address family: VPNv6 Multicast
For address family: VPNv4 Flowspec
For address family: VPNv6 Flowspec
'''
# Mocked device output for 'show bgp all detail': per-prefix path detail
# (origin, metric, localpref, path-ids, extended communities) for each
# address family. Kept byte-for-byte — the parser under test consumes it
# verbatim.
show_bgp_all_detail = '''\
show bgp all detail
For address family: IPv4 Unicast
BGP routing table entry for 1.1.1.1/32, version 2
Paths: (1 available, best #1, table default)
Advertised to update-groups:
1
Refresh Epoch 1
Local
0.0.0.0 from 0.0.0.0 (1.1.1.1)
Origin IGP, metric 0, localpref 100, weight 32768, valid, sourced, local, best
rx pathid: 0, tx pathid: 0x0
BGP routing table entry for 2.2.2.2/32, version 4
Paths: (1 available, best #1, table default, RIB-failure(17))
Flag: 0x100
Not advertised to any peer
Refresh Epoch 1
Local
2.2.2.2 (metric 10752) from 2.2.2.2 (2.2.2.2)
Origin IGP, metric 0, localpref 100, valid, internal, best
rx pathid: 0, tx pathid: 0x0
BGP routing table entry for 3.3.3.3/32, version 3
Paths: (1 available, best #1, table default, RIB-failure(17))
Not advertised to any peer
Refresh Epoch 1
Local
3.3.3.3 (metric 2570240) from 3.3.3.3 (3.3.3.3)
Origin IGP, localpref 100, valid, internal, best
rx pathid: 0, tx pathid: 0x0
For address family: IPv6 Unicast
BGP routing table entry for 2001:1:1:1::1/128, version 3
Paths: (1 available, best #1, table default)
Advertised to update-groups:
1
Refresh Epoch 1
Local
:: from 0.0.0.0 (1.1.1.1)
Origin IGP, metric 0, localpref 100, weight 32768, valid, sourced, local, best
rx pathid: 0, tx pathid: 0x0
BGP routing table entry for 2001:2:2:2::2/128, version 5
Paths: (1 available, best #1, table default, RIB-failure(145))
Flag: 0x100
Not advertised to any peer
Refresh Epoch 1
Local
2001:2:2:2::2 (metric 10752) from 2001:2:2:2::2 (2.2.2.2)
Origin IGP, metric 0, localpref 100, valid, internal, best
rx pathid: 0, tx pathid: 0x0
BGP routing table entry for 2001:3:3:3::3/128, version 4
Paths: (1 available, best #1, table default, RIB-failure(145))
Not advertised to any peer
Refresh Epoch 1
Local
2001:3:3:3::3 (metric 2570240) from 2001:3:3:3::3 (3.3.3.3)
Origin IGP, localpref 100, valid, internal, best
rx pathid: 0, tx pathid: 0x0
For address family: VPNv4 Unicast
Route Distinguisher: 65000:1 (default for vrf VRF1)
BGP routing table entry for 65000:1:1.1.1.1/32, version 2
Paths: (1 available, best #1, table VRF1)
Advertised to update-groups:
1
Refresh Epoch 1
Local
0.0.0.0 (via vrf VRF1) from 0.0.0.0 (1.1.1.1)
Origin IGP, metric 0, localpref 100, weight 32768, valid, sourced, local, best
Extended Community: Cost:pre-bestpath:128:1280 0x8800:32768:0
0x8801:100:32 0x8802:65280:256 0x8803:65281:1514 0x8806:0:16843009
rx pathid: 0, tx pathid: 0x0
BGP routing table entry for 65000:1:2.2.2.2/32, version 4
Paths: (1 available, best #1, table VRF1, RIB-failure(17))
Flag: 0x100
Not advertised to any peer
Refresh Epoch 1
Local
2.2.2.2 (metric 10752) (via vrf VRF1) from 2.2.2.2 (2.2.2.2)
Origin IGP, metric 0, localpref 100, valid, internal, best
rx pathid: 0, tx pathid: 0x0
BGP routing table entry for 65000:1:3.3.3.3/32, version 3
Paths: (1 available, best #1, table VRF1, RIB-failure(17))
Not advertised to any peer
Refresh Epoch 1
Local
3.3.3.3 (metric 2570240) (via vrf VRF1) from 3.3.3.3 (3.3.3.3)
Origin IGP, localpref 100, valid, internal, best
rx pathid: 0, tx pathid: 0x0
For address family: VPNv6 Unicast
Route Distinguisher: 65000:1 (default for vrf VRF1)
BGP routing table entry for [65000:1]2001:1:1:1::1/128, version 3
Paths: (1 available, best #1, table VRF1)
Advertised to update-groups:
1
Refresh Epoch 1
Local
:: (via vrf VRF1) from 0.0.0.0 (1.1.1.1)
Origin IGP, metric 0, localpref 100, weight 32768, valid, sourced, local, best
Extended Community: Cost:pre-bestpath:128:1280 0x8800:32768:0
0x8801:100:32 0x8802:65280:256 0x8803:65281:1514 0x8806:0:16843009
0x8807:53266:318767104
rx pathid: 0, tx pathid: 0x0
BGP routing table entry for [65000:1]2001:2:2:2::2/128, version 5
Paths: (1 available, best #1, table VRF1, RIB-failure(145))
Flag: 0x100
Not advertised to any peer
Refresh Epoch 1
Local
2001:2:2:2::2 (metric 10752) (via vrf VRF1) from 2001:2:2:2::2 (2.2.2.2)
Origin IGP, metric 0, localpref 100, valid, internal, best
rx pathid: 0, tx pathid: 0x0
BGP routing table entry for [65000:1]2001:3:3:3::3/128, version 4
Paths: (1 available, best #1, table VRF1, RIB-failure(145))
Not advertised to any peer
Refresh Epoch 1
Local
2001:3:3:3::3 (metric 2570240) (via vrf VRF1) from 2001:3:3:3::3 (3.3.3.3)
Origin IGP, localpref 100, valid, internal, best
rx pathid: 0, tx pathid: 0x0
For address family: IPv4 Multicast
For address family: L2VPN E-VPN
For address family: VPNv4 Multicast
For address family: MVPNv4 Unicast
For address family: MVPNv6 Unicast
For address family: VPNv6 Multicast
For address family: VPNv4 Flowspec
For address family: VPNv6 Flowspec
'''
# Mocked device output for 'show bgp all neighbors 2.2.2.2 routes':
# routes received from the IPv4 neighbor (IPv4 Unicast + VPNv4 Unicast).
nbr_routes_1 = '''\
show bgp all neighbors 2.2.2.2 routes
For address family: IPv4 Unicast
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
r>i 2.2.2.2/32 2.2.2.2 0 100 0 i
Total number of prefixes 1
For address family: VPNv4 Unicast
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
r>i 2.2.2.2/32 2.2.2.2 0 100 0 i
Total number of prefixes 1
'''
# Mocked device output for 'show bgp all neighbors 2001:2:2:2::2 routes':
# routes received from the IPv6 neighbor (IPv6 Unicast + VPNv6 Unicast).
nbr_routes_2 = '''\
show bgp all neighbors 2001:2:2:2::2 routes
For address family: IPv6 Unicast
BGP table version is 5, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
r>i 2001:2:2:2::2/128
2001:2:2:2::2 0 100 0 i
Total number of prefixes 1
For address family: VPNv6 Unicast
BGP table version is 5, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
r>i 2001:2:2:2::2/128
2001:2:2:2::2 0 100 0 i
Total number of prefixes 1
'''
# Mocked device output for 'show bgp all neighbors 2001:3:3:3::3 routes':
# routes received from the second IPv6 neighbor (note: no metric column
# value for this prefix, exercising the optional-field parse path).
nbr_routes_3 = '''\
show bgp all neighbors 2001:3:3:3::3 routes
For address family: IPv6 Unicast
BGP table version is 5, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
r>i 2001:3:3:3::3/128
2001:3:3:3::3 100 0 i
Total number of prefixes 1
For address family: VPNv6 Unicast
BGP table version is 5, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
r>i 2001:3:3:3::3/128
2001:3:3:3::3 100 0 i
Total number of prefixes 1
'''
# Mocked device output for 'show bgp all neighbors 3.3.3.3 routes':
# routes received from the second IPv4 neighbor (no metric value shown).
nbr_routes_4 = '''\
show bgp all neighbors 3.3.3.3 routes
For address family: IPv4 Unicast
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
r>i 3.3.3.3/32 3.3.3.3 100 0 i
Total number of prefixes 1
For address family: VPNv4 Unicast
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
r>i 3.3.3.3/32 3.3.3.3 100 0 i
Total number of prefixes 1
'''
# Mocked device output for 'show bgp all neighbors 2.2.2.2
# advertised-routes': the local 1.1.1.1/32 prefix advertised to the
# IPv4 neighbor in both global and VRF1 tables.
nbr_adv_routes_1 = '''\
show bgp all neighbors 2.2.2.2 advertised-routes
For address family: IPv4 Unicast
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
*> 1.1.1.1/32 0.0.0.0 0 32768 i
Total number of prefixes 1
For address family: VPNv4 Unicast
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
*> 1.1.1.1/32 0.0.0.0 0 32768 i
Total number of prefixes 1
'''
# Mocked device output for 'show bgp all neighbors 2001:2:2:2::2
# advertised-routes': local 2001:1:1:1::1/128 advertised to the IPv6
# neighbor in global and VRF1 tables.
nbr_adv_routes_2 = '''\
show bgp all neighbors 2001:2:2:2::2 advertised-routes
For address family: IPv6 Unicast
BGP table version is 5, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
*> 2001:1:1:1::1/128
:: 0 32768 i
Total number of prefixes 1
For address family: VPNv6 Unicast
BGP table version is 5, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
*> 2001:1:1:1::1/128
:: 0 32768 i
Total number of prefixes 1
'''
# Mocked device output for 'show bgp all neighbors 2001:3:3:3::3
# advertised-routes': same local prefix advertised to the second IPv6
# neighbor.
nbr_adv_routes_3 = '''\
show bgp all neighbors 2001:3:3:3::3 advertised-routes
For address family: IPv6 Unicast
BGP table version is 5, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
*> 2001:1:1:1::1/128
:: 0 32768 i
Total number of prefixes 1
For address family: VPNv6 Unicast
BGP table version is 5, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
*> 2001:1:1:1::1/128
:: 0 32768 i
Total number of prefixes 1
'''
# Mocked device output for 'show bgp all neighbors 3.3.3.3
# advertised-routes': local 1.1.1.1/32 advertised to the second IPv4
# neighbor.
nbr_adv_routes_4 = '''\
show bgp all neighbors 3.3.3.3 advertised-routes
For address family: IPv4 Unicast
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
*> 1.1.1.1/32 0.0.0.0 0 32768 i
Total number of prefixes 1
For address family: VPNv4 Unicast
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
*> 1.1.1.1/32 0.0.0.0 0 32768 i
Total number of prefixes 1
'''
# Mocked device output for the address-family-specific command
# 'show bgp vpnv4 unicast all neighbors 2.2.2.2': full neighbor detail
# (capabilities, message counters, TCP transport/timers) for the VRF1
# IPv4 neighbor. Preserved byte-for-byte, including the device's own
# "Mininum" typo, since the parser under test consumes it verbatim.
custom_output_1 = '''\
show bgp vpnv4 unicast all neighbors 2.2.2.2
BGP neighbor is 2.2.2.2, vrf VRF1, remote AS 65000, internal link
BGP version 4, remote router ID 2.2.2.2
BGP state = Established, up for 2w5d
Last read 00:00:44, last write 00:00:42, hold time is 180, keepalive interval is 60 seconds
Neighbor sessions:
1 active, is not multisession capable (disabled)
Neighbor capabilities:
Route refresh: advertised and received(new)
Four-octets ASN Capability: advertised and received
Address family IPv4 Unicast: advertised and received
Enhanced Refresh Capability: advertised
Multisession Capability:
Stateful switchover support enabled: NO for session 1
Message statistics:
InQ depth is 0
OutQ depth is 0
Sent Rcvd
Opens: 1 1
Notifications: 0 0
Updates: 2 2
Keepalives: 30228 27466
Route Refresh: 0 0
Total: 30231 27469
Do log neighbor state changes (via global configuration)
Default minimum time between advertisement runs is 0 seconds
For address family: VPNv4 Unicast
Translates address family IPv4 Unicast for VRF VRF1
Session: 2.2.2.2
BGP table version 4, neighbor version 4/0
Output queue size : 0
Index 1, Advertise bit 0
1 update-group member
Slow-peer detection is disabled
Slow-peer split-update-group dynamic is disabled
Sent Rcvd
Prefix activity: ---- ----
Prefixes Current: 1 1 (Consumes 136 bytes)
Prefixes Total: 2 1
Implicit Withdraw: 1 0
Explicit Withdraw: 0 0
Used as bestpath: n/a 1
Used as multipath: n/a 0
Used as secondary: n/a 0
Outbound Inbound
Local Policy Denied Prefixes: -------- -------
Bestpath from this peer: 2 n/a
Bestpath from iBGP peer: 1 n/a
Total: 3 0
Number of NLRIs in the update sent: max 1, min 0
Last detected as dynamic slow peer: never
Dynamic slow peer recovered: never
Refresh Epoch: 1
Last Sent Refresh Start-of-rib: never
Last Sent Refresh End-of-rib: never
Last Received Refresh Start-of-rib: never
Last Received Refresh End-of-rib: never
Sent Rcvd
Refresh activity: ---- ----
Refresh Start-of-RIB 0 0
Refresh End-of-RIB 0 0
Address tracking is enabled, the RIB does have a route to 2.2.2.2
Route to peer address reachability Up: 2; Down: 0
Last notification 2w5d
Connections established 1; dropped 0
Last reset never
Interface associated: (none) (peering address NOT in same link)
Transport(tcp) path-mtu-discovery is enabled
Graceful-Restart is disabled
SSO is disabled
Connection state is ESTAB, I/O status: 1, unread input bytes: 0
Connection is ECN Disabled, Mininum incoming TTL 0, Outgoing TTL 255
Local host: 1.1.1.1, Local port: 179
Foreign host: 2.2.2.2, Foreign port: 50426
Connection tableid (VRF): 2
Maximum output segment queue size: 50
Enqueued packets for retransmit: 0, input: 0 mis-ordered: 0 (0 bytes)
Event Timers (current time is 0x624C3478):
Timer Starts Wakeups Next
Retrans 30234 4 0x0
TimeWait 0 0 0x0
AckHold 27469 26904 0x0
SendWnd 0 0 0x0
KeepAlive 0 0 0x0
GiveUp 0 0 0x0
PmtuAger 0 0 0x0
DeadWait 0 0 0x0
Linger 0 0 0x0
ProcessQ 0 0 0x0
iss: 3028094276 snduna: 3028668745 sndnxt: 3028668745
irs: 568735500 rcvnxt: 569257487
sndwnd: 32711 scale: 0 maxrcvwnd: 16384
rcvwnd: 16061 scale: 0 delrcvwnd: 323
SRTT: 1000 ms, RTTO: 1003 ms, RTV: 3 ms, KRTT: 0 ms
minRTT: 1 ms, maxRTT: 1000 ms, ACK hold: 200 ms
uptime: 1648039160 ms, Sent idletime: 43001 ms, Receive idletime: 42793 ms
Status Flags: passive open, gen tcbs
Option Flags: VRF id set, nagle, path mtu capable
IP Precedence value : 6
Datagrams (max data segment is 1240 bytes):
Rcvd: 57771 (out of order: 0), with data: 27469, total data bytes: 521986
Sent: 57713 (retransmit: 4, fastretransmit: 0, partialack: 0, Second Congestion: 0), with data: 30230, total data bytes: 574468
Packets received in fast path: 0, fast processed: 0, slow path: 0
fast lock acquisition failures: 0, slow path: 0
TCP Semaphore 0x7F23C7640468 FREE
'''
# Mocked device output for 'show bgp vpnv4 unicast all': the VPNv4 table
# only (RD 65000:1 for vrf VRF1).
custom_output_2 = '''\
show bgp vpnv4 unicast all
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
*> 1.1.1.1/32 0.0.0.0 0 32768 i
r>i 2.2.2.2/32 2.2.2.2 0 100 0 i
r>i 3.3.3.3/32 3.3.3.3 100 0 i
'''
# Mocked device output for 'show bgp vpnv4 unicast all neighbors 2.2.2.2
# advertised-routes': single local prefix advertised under RD 65000:1.
custom_output_3 = '''\
show bgp vpnv4 unicast all neighbors 2.2.2.2 advertised-routes
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
*> 1.1.1.1/32 0.0.0.0 0 32768 i
Total number of prefixes 1
'''
# Mocked device output for the piped command
# 'show bgp all neighbors | i BGP neighbor': one summary line per
# neighbor across the default table and vrf VRF1.
custom_output_4 = '''\
show bgp all neighbors | i BGP neighbor
BGP neighbor is 2.2.2.2, remote AS 65000, internal link
BGP neighbor is 3.3.3.3, remote AS 65000, internal link
BGP neighbor is 2001:2:2:2::2, remote AS 65000, internal link
BGP neighbor is 2001:3:3:3::3, remote AS 65000, internal link
BGP neighbor is 2.2.2.2, vrf VRF1, remote AS 65000, internal link
BGP neighbor is 3.3.3.3, vrf VRF1, remote AS 65000, internal link
BGP neighbor is 2001:2:2:2::2, vrf VRF1, remote AS 65000, internal link
BGP neighbor is 2001:3:3:3::3, vrf VRF1, remote AS 65000, internal link
'''
# Mocked device output for 'show bgp vpnv4 unicast all neighbors 2.2.2.2
# routes': single received prefix under RD 65000:1.
custom_output_5 = '''\
show bgp vpnv4 unicast all neighbors 2.2.2.2 routes
BGP table version is 4, local router ID is 1.1.1.1
Status codes: s suppressed, d damped, h history, * valid, > best, i - internal,
r RIB-failure, S Stale, m multipath, b backup-path, f RT-Filter,
x best-external, a additional-path, c RIB-compressed,
t secondary path, L long-lived-stale,
Origin codes: i - IGP, e - EGP, ? - incomplete
RPKI validation codes: V valid, I invalid, N Not found
Network Next Hop Metric LocPrf Weight Path
Route Distinguisher: 65000:1 (default for vrf VRF1)
r>i 2.2.2.2/32 2.2.2.2 0 100 0 i
Total number of prefixes 1
'''
bgp_info = {
'instance': {
'default': {
'bgp_id': 65000,
'vrf': {
'VRF1': {
'cluster_id': '1.1.1.1',
'neighbor': {
'2.2.2.2': {
'bgp_negotiated_capabilities': {
'enhanced_refresh': 'advertised',
'four_octets_asn': 'advertised and received',
'route_refresh': 'advertised and received(new)',
'stateful_switchover': 'NO for session 1',
},
'bgp_negotiated_keepalive_timers': {
'hold_time': 180,
'keepalive_interval': 60,
},
'bgp_neighbor_counters': {
'messages': {
'received': {
'keepalives': 27417,
'notifications': 0,
'opens': 1,
'updates': 2,
},
'sent': {
'keepalives': 30174,
'notifications': 0,
'opens': 1,
'updates': 2,
},
},
},
'bgp_session_transport': {
'connection': {
'last_reset': 'never',
'state': 'Established',
},
'transport': {
'foreign_host': '2.2.2.2',
'foreign_port': '50426',
'local_host': '1.1.1.1',
'local_port': '179',
'mss': 1240,
},
},
'bgp_version': 4,
'remote_as': 65000,
'session_state': 'Established',
'shutdown': False,
},
'2001:2:2:2::2': {
'bgp_negotiated_capabilities': {
'enhanced_refresh': 'advertised',
'four_octets_asn': 'advertised and received',
'route_refresh': 'advertised and received(new)',
'stateful_switchover': 'NO for session 1',
},
'bgp_negotiated_keepalive_timers': {
'hold_time': 180,
'keepalive_interval': 60,
},
'bgp_neighbor_counters': {
'messages': {
'received': {
'keepalives': 27417,
'notifications': 0,
'opens': 1,
'updates': 2,
},
'sent': {
'keepalives': 30184,
'notifications': 0,
'opens': 1,
'updates': 2,
},
},
},
'bgp_session_transport': {
'connection': {
'last_reset': 'never',
'state': 'Established',
},
'transport': {
'foreign_host': '2001:2:2:2::2',
'foreign_port': '179',
'local_host': '2001:1:1:1::1',
'local_port': '32057',
'mss': 1220,
},
},
'bgp_version': 4,
'remote_as': 65000,
'session_state': 'Established',
'shutdown': False,
},
'2001:3:3:3::3': {
'bgp_negotiated_capabilities': {
'enhanced_refresh': 'advertised',
'four_octets_asn': 'advertised and received',
'graceful_restart': 'received',
'route_refresh': 'advertised and received(new)',
'stateful_switchover': 'NO for session 1',
},
'bgp_negotiated_keepalive_timers': {
'hold_time': 180,
'keepalive_interval': 60,
},
'bgp_neighbor_counters': {
'messages': {
'received': {
'keepalives': 27426,
'notifications': 0,
'opens': 1,
'updates': 3,
},
'sent': {
'keepalives': 30166,
'notifications': 0,
'opens': 1,
'updates': 2,
},
},
},
'bgp_session_transport': {
'connection': {
'last_reset': 'never',
'state': 'Established',
},
'transport': {
'foreign_host': '2001:3:3:3::3',
'foreign_port': '20838',
'local_host': '2001:1:1:1::1',
'local_port': '179',
'mss': 1440,
},
},
'bgp_version': 4,
'remote_as': 65000,
'session_state': 'Established',
'shutdown': False,
},
'3.3.3.3': {
'bgp_negotiated_capabilities': {
'enhanced_refresh': 'advertised',
'four_octets_asn': 'advertised and received',
'graceful_restart': 'received',
'route_refresh': 'advertised and received(new)',
'stateful_switchover': 'NO for session 1',
},
'bgp_negotiated_keepalive_timers': {
'hold_time': 180,
'keepalive_interval': 60,
},
'bgp_neighbor_counters': {
'messages': {
'received': {
'keepalives': 27427,
'notifications': 0,
'opens': 1,
'updates': 3,
},
'sent': {
'keepalives': 30185,
'notifications': 0,
'opens': 1,
'updates': 2,
},
},
},
'bgp_session_transport': {
'connection': {
'last_reset': 'never',
'state': 'Established',
},
'transport': {
'foreign_host': '3.3.3.3',
'foreign_port': '179',
'local_host': '1.1.1.1',
'local_port': '43893',
'mss': 1460,
},
},
'bgp_version': 4,
'remote_as': 65000,
'session_state': 'Established',
'shutdown': False,
},
},
},
'default': {
'cluster_id': '1.1.1.1',
'neighbor': {
'2.2.2.2': {
'address_family': {
'ipv4 unicast': {
'bgp_table_version': 4,
'path': {
'memory_usage': 408,
'total_entries': 3,
},
'prefixes': {
'memory_usage': 744,
'total_entries': 3,
},
'routing_table_version': 4,
'total_memory': 2492,
},
'vpnv4 unicast': {
'bgp_table_version': 4,
'path': {
'memory_usage': 408,
'total_entries': 3,
},
'prefixes': {
'memory_usage': 768,
'total_entries': 3,
},
'routing_table_version': 4,
'total_memory': 2564,
},
},
'bgp_negotiated_capabilities': {
'enhanced_refresh': 'advertised',
'four_octets_asn': 'advertised and received',
'route_refresh': 'advertised and received(new)',
'stateful_switchover': 'NO for session 1',
},
'bgp_negotiated_keepalive_timers': {
'hold_time': 180,
'keepalive_interval': 60,
},
'bgp_neighbor_counters': {
'messages': {
'received': {
'keepalives': 27417,
'notifications': 0,
'opens': 1,
'updates': 2,
},
'sent': {
'keepalives': 30186,
'notifications': 0,
'opens': 1,
'updates': 2,
},
},
},
'bgp_session_transport': {
'connection': {
'last_reset': 'never',
'state': 'Established',
},
'transport': {
'foreign_host': '2.2.2.2',
'foreign_port': '25026',
'local_host': '1.1.1.1',
'local_port': '179',
'mss': 1240,
},
},
'bgp_version': 4,
'remote_as': 65000,
'session_state': 'Established',
'shutdown': False,
},
'2001:2:2:2::2': {
'address_family': {
'ipv6 unicast': {
'bgp_table_version': 5,
'path': {
'memory_usage': 456,
'total_entries': 3,
},
'prefixes': {
'memory_usage': 816,
'total_entries': 3,
},
'routing_table_version': 5,
'total_memory': 2612,
},
'vpnv6 unicast': {
'bgp_table_version': 5,
'path': {
'memory_usage': 468,
'total_entries': 3,
},
'prefixes': {
'memory_usage': 840,
'total_entries': 3,
},
'routing_table_version': 5,
'total_memory': 2696,
},
},
'bgp_negotiated_capabilities': {
'enhanced_refresh': 'advertised',
'four_octets_asn': 'advertised and received',
'route_refresh': 'advertised and received(new)',
'stateful_switchover': 'NO for session 1',
},
'bgp_negotiated_keepalive_timers': {
'hold_time': 180,
'keepalive_interval': 60,
},
'bgp_neighbor_counters': {
'messages': {
'received': {
'keepalives': 27417,
'notifications': 0,
'opens': 1,
'updates': 2,
},
'sent': {
'keepalives': 30188,
'notifications': 0,
'opens': 1,
'updates': 2,
},
},
},
'bgp_session_transport': {
'connection': {
'last_reset': 'never',
'state': 'Established',
},
'transport': {
'foreign_host': '2001:2:2:2::2',
'foreign_port': '52223',
'local_host': '2001:1:1:1::1',
'local_port': '179',
'mss': 1220,
},
},
'bgp_version': 4,
'remote_as': 65000,
'session_state': 'Established',
'shutdown': False,
},
'2001:3:3:3::3': {
'address_family': {
'ipv6 unicast': {
'bgp_table_version': 5,
'path': {
'memory_usage': 456,
'total_entries': 3,
},
'prefixes': {
'memory_usage': 816,
'total_entries': 3,
},
'routing_table_version': 5,
'total_memory': 2612,
},
'vpnv6 unicast': {
'bgp_table_version': 5,
'path': {
'memory_usage': 468,
'total_entries': 3,
},
'prefixes': {
'memory_usage': 840,
'total_entries': 3,
},
'routing_table_version': 5,
'total_memory': 2696,
},
},
'bgp_negotiated_capabilities': {
'enhanced_refresh': 'advertised',
'four_octets_asn': 'advertised and received',
'graceful_restart': 'received',
'route_refresh': 'advertised and received(new)',
'stateful_switchover': 'NO for session 1',
},
'bgp_negotiated_keepalive_timers': {
'hold_time': 180,
'keepalive_interval': 60,
},
'bgp_neighbor_counters': {
'messages': {
'received': {
'keepalives': 27427,
'notifications': 0,
'opens': 1,
'updates': 2,
},
'sent': {
'keepalives': 30178,
'notifications': 0,
'opens': 1,
'updates': 2,
},
},
},
'bgp_session_transport': {
'connection': {
'last_reset': 'never',
'state': 'Established',
},
'transport': {
'foreign_host': '2001:3:3:3::3',
'foreign_port': '47133',
'local_host': '2001:1:1:1::1',
'local_port': '179',
'mss': 1440,
},
},
'bgp_version': 4,
'remote_as': 65000,
'session_state': 'Established',
'shutdown': False,
},
'3.3.3.3': {
'address_family': {
'ipv4 unicast': {
'bgp_table_version': 4,
'path': {
'memory_usage': 408,
'total_entries': 3,
},
'prefixes': {
'memory_usage': 744,
'total_entries': 3,
},
'routing_table_version': 4,
'total_memory': 2492,
},
'vpnv4 unicast': {
'bgp_table_version': 4,
'path': {
'memory_usage': 408,
'total_entries': 3,
},
'prefixes': {
'memory_usage': 768,
'total_entries': 3,
},
'routing_table_version': 4,
'total_memory': 2564,
},
},
'bgp_negotiated_capabilities': {
'enhanced_refresh': 'advertised',
'four_octets_asn': 'advertised and received',
'graceful_restart': 'received',
'route_refresh': 'advertised and received(new)',
'stateful_switchover': 'NO for session 1',
},
'bgp_negotiated_keepalive_timers': {
'hold_time': 180,
'keepalive_interval': 60,
},
'bgp_neighbor_counters': {
'messages': {
'received': {
'keepalives': 27427,
'notifications': 0,
'opens': 1,
'updates': 2,
},
'sent': {
'keepalives': 30162,
'notifications': 0,
'opens': 1,
'updates': 2,
},
},
},
'bgp_session_transport': {
'connection': {
'last_reset': 'never',
'state': 'Established',
},
'transport': {
'foreign_host': '3.3.3.3',
'foreign_port': '179',
'local_host': '1.1.1.1',
'local_port': '11084',
'mss': 1460,
},
},
'bgp_version': 4,
'remote_as': 65000,
'session_state': 'Established',
'shutdown': False,
},
},
},
},
},
},
}
bgp_table = {
'instance': {
'default': {
'vrf': {
'VRF1': {
'address_family': {
'vpnv4 unicast RD 65000:1': {
'bgp_table_version': 4,
'default_vrf': 'VRF1',
'prefixes': {
'1.1.1.1/32': {
'index': {
1: {
'gateway': '0.0.0.0',
'localpref': 100,
'metric': 0,
'next_hop': '0.0.0.0',
'origin_codes': 'i',
'originator': '1.1.1.1',
'status_codes': '*>',
'update_group': 1,
'weight': '32768',
},
},
'paths': '1 available, best #1, table VRF1',
'table_version': '2',
},
'2.2.2.2/32': {
'index': {
1: {
'gateway': '2.2.2.2',
'localpref': 100,
'metric': 0,
'next_hop': '2.2.2.2',
'next_hop_igp_metric': '10752',
'origin_codes': 'i',
'originator': '2.2.2.2',
'status_codes': '*>',
},
},
'paths': '1 available, best #1, table VRF1, RIB-failure(17)',
'table_version': '4',
},
'3.3.3.3/32': {
'index': {
1: {
'gateway': '3.3.3.3',
'localpref': 100,
'next_hop': '3.3.3.3',
'next_hop_igp_metric': '2570240',
'origin_codes': 'i',
'originator': '3.3.3.3',
'status_codes': '*>',
},
},
'paths': '1 available, best #1, table VRF1, RIB-failure(17)',
'table_version': '3',
},
},
'route_distinguisher': '65000:1',
'route_identifier': '1.1.1.1',
},
'vpnv6 unicast RD 65000:1': {
'bgp_table_version': 5,
'default_vrf': 'VRF1',
'prefixes': {
'2001:1:1:1::1/128': {
'index': {
1: {
'gateway': '0.0.0.0',
'localpref': 100,
'metric': 0,
'next_hop': '::',
'origin_codes': 'i',
'originator': '1.1.1.1',
'status_codes': '*>',
'update_group': 1,
'weight': '32768',
},
},
'paths': '1 available, best #1, table VRF1',
'table_version': '3',
},
'2001:2:2:2::2/128': {
'index': {
1: {
'gateway': '2001:2:2:2::2',
'localpref': 100,
'metric': 0,
'next_hop': '2001:2:2:2::2',
'next_hop_igp_metric': '10752',
'origin_codes': 'i',
'originator': '2.2.2.2',
'status_codes': '*>',
},
},
'paths': '1 available, best #1, table VRF1, RIB-failure(145)',
'table_version': '5',
},
'2001:3:3:3::3/128': {
'index': {
1: {
'gateway': '2001:3:3:3::3',
'localpref': 100,
'next_hop': '2001:3:3:3::3',
'next_hop_igp_metric': '2570240',
'origin_codes': 'i',
'originator': '3.3.3.3',
'status_codes': '*>',
},
},
'paths': '1 available, best #1, table VRF1, RIB-failure(145)',
'table_version': '4',
},
},
'route_distinguisher': '65000:1',
'route_identifier': '1.1.1.1',
},
},
},
'default': {
'address_family': {
'ipv4 unicast': {
'prefixes': {
'1.1.1.1/32': {
'index': {
1: {
'gateway': '0.0.0.0',
'localpref': 100,
'metric': 0,
'next_hop': '0.0.0.0',
'origin_codes': 'i',
'originator': '1.1.1.1',
'status_codes': '*>',
'update_group': 1,
'weight': '32768',
},
},
'paths': '1 available, best #1, table default',
'table_version': '2',
},
'2.2.2.2/32': {
'index': {
1: {
'gateway': '2.2.2.2',
'localpref': 100,
'metric': 0,
'next_hop': '2.2.2.2',
'next_hop_igp_metric': '10752',
'origin_codes': 'i',
'originator': '2.2.2.2',
'status_codes': '*>',
},
},
'paths': '1 available, best #1, table default, RIB-failure(17)',
'table_version': '4',
},
'3.3.3.3/32': {
'index': {
1: {
'gateway': '3.3.3.3',
'localpref': 100,
'next_hop': '3.3.3.3',
'next_hop_igp_metric': '2570240',
'origin_codes': 'i',
'originator': '3.3.3.3',
'status_codes': '*>',
},
},
'paths': '1 available, best #1, table default, RIB-failure(17)',
'table_version': '3',
},
},
},
'ipv6 unicast': {
'prefixes': {
'2001:1:1:1::1/128': {
'index': {
1: {
'gateway': '0.0.0.0',
'localpref': 100,
'metric': 0,
'next_hop': '::',
'origin_codes': 'i',
'originator': '1.1.1.1',
'status_codes': '*>',
'update_group': 1,
'weight': '32768',
},
},
'paths': '1 available, best #1, table default',
'table_version': '3',
},
'2001:2:2:2::2/128': {
'index': {
1: {
'gateway': '2001:2:2:2::2',
'localpref': 100,
'metric': 0,
'next_hop': '2001:2:2:2::2',
'next_hop_igp_metric': '10752',
'origin_codes': 'i',
'originator': '2.2.2.2',
'status_codes': '*>',
},
},
'paths': '1 available, best #1, table default, RIB-failure(145)',
'table_version': '5',
},
'2001:3:3:3::3/128': {
'index': {
1: {
'gateway': '2001:3:3:3::3',
'localpref': 100,
'next_hop': '2001:3:3:3::3',
'next_hop_igp_metric': '2570240',
'origin_codes': 'i',
'originator': '3.3.3.3',
'status_codes': '*>',
},
},
'paths': '1 available, best #1, table default, RIB-failure(145)',
'table_version': '4',
},
},
},
},
},
},
},
},
}
bgp_routes_per_peer = {
'instance': {
'default': {
'vrf': {
'VRF1': {
'neighbor': {
'2.2.2.2': {
'address_family': {
'ipv4 unicast': {
'advertised': {
'1.1.1.1/32': {
'index': {
1: {
'localprf': 0,
'next_hop': '0.0.0.0',
'origin_codes': 'i',
'status_codes': '*>',
'weight': 32768,
},
},
},
},
'routes': {
'2.2.2.2/32': {
'index': {
1: {
'localprf': 100,
'metric': 0,
'next_hop': '2.2.2.2',
'origin_codes': 'i',
'status_codes': 'r>',
'weight': 0,
},
},
},
},
},
'vpnv4 unicast': {
'advertised': {
},
'routes': {
},
},
'vpnv4 unicast RD 65000:1': {
'advertised': {
'1.1.1.1/32': {
'index': {
1: {
'localprf': 0,
'next_hop': '0.0.0.0',
'origin_codes': 'i',
'status_codes': '*>',
'weight': 32768,
},
},
},
},
'default_vrf': 'VRF1',
'route_distinguisher': '65000:1',
'routes': {
'2.2.2.2/32': {
'index': {
1: {
'localprf': 100,
'metric': 0,
'next_hop': '2.2.2.2',
'origin_codes': 'i',
'status_codes': 'r>',
'weight': 0,
},
},
},
},
},
},
'remote_as': 65000,
},
'2001:2:2:2::2': {
'address_family': {
'ipv4 unicast': {
'advertised': {
'1.1.1.1/32': {
'index': {
1: {
'localprf': 0,
'next_hop': '0.0.0.0',
'origin_codes': 'i',
'status_codes': '*>',
'weight': 32768,
},
},
},
},
},
'ipv6 unicast': {
'routes': {
'2001:2:2:2::2/128': {
'index': {
1: {
'localprf': 100,
'metric': 0,
'next_hop': '2001:2:2:2::2',
'origin_codes': 'i',
'status_codes': 'r>',
'weight': 0,
},
},
},
},
},
'vpnv4 unicast': {
'advertised': {
},
},
'vpnv4 unicast RD 65000:1': {
'advertised': {
'1.1.1.1/32': {
'index': {
1: {
'localprf': 0,
'next_hop': '0.0.0.0',
'origin_codes': 'i',
'status_codes': '*>',
'weight': 32768,
},
},
},
},
'default_vrf': 'VRF1',
'route_distinguisher': '65000:1',
},
'vpnv6 unicast': {
'routes': {
},
},
'vpnv6 unicast RD 65000:1': {
'routes': {
'2001:2:2:2::2/128': {
'index': {
1: {
'localprf': 100,
'metric': 0,
'next_hop': '2001:2:2:2::2',
'origin_codes': 'i',
'status_codes': 'r>',
'weight': 0,
},
},
},
},
},
},
'remote_as': 65000,
},
'2001:3:3:3::3': {
'address_family': {
'ipv4 unicast': {
'advertised': {
'1.1.1.1/32': {
'index': {
1: {
'localprf': 0,
'next_hop': '0.0.0.0',
'origin_codes': 'i',
'status_codes': '*>',
'weight': 32768,
},
},
},
},
},
'ipv6 unicast': {
'routes': {
'2001:3:3:3::3/128': {
'index': {
1: {
'localprf': 100,
'next_hop': '2001:3:3:3::3',
'origin_codes': 'i',
'status_codes': 'r>',
'weight': 0,
},
},
},
},
},
'vpnv4 unicast': {
'advertised': {
},
},
'vpnv4 unicast RD 65000:1': {
'advertised': {
'1.1.1.1/32': {
'index': {
1: {
'localprf': 0,
'next_hop': '0.0.0.0',
'origin_codes': 'i',
'status_codes': '*>',
'weight': 32768,
},
},
},
},
'default_vrf': 'VRF1',
'route_distinguisher': '65000:1',
},
'vpnv6 unicast': {
'routes': {
},
},
'vpnv6 unicast RD 65000:1': {
'routes': {
'2001:3:3:3::3/128': {
'index': {
1: {
'localprf': 100,
'next_hop': '2001:3:3:3::3',
'origin_codes': 'i',
'status_codes': 'r>',
'weight': 0,
},
},
},
},
},
},
'remote_as': 65000,
},
'3.3.3.3': {
'address_family': {
'ipv4 unicast': {
'advertised': {
'1.1.1.1/32': {
'index': {
1: {
'localprf': 0,
'next_hop': '0.0.0.0',
'origin_codes': 'i',
'status_codes': '*>',
'weight': 32768,
},
},
},
},
'routes': {
'3.3.3.3/32': {
'index': {
1: {
'localprf': 100,
'next_hop': '3.3.3.3',
'origin_codes': 'i',
'status_codes': 'r>',
'weight': 0,
},
},
},
},
},
'vpnv4 unicast': {
'advertised': {
},
'routes': {
},
},
'vpnv4 unicast RD 65000:1': {
'advertised': {
'1.1.1.1/32': {
'index': {
1: {
'localprf': 0,
'next_hop': '0.0.0.0',
'origin_codes': 'i',
'status_codes': '*>',
'weight': 32768,
},
},
},
},
'default_vrf': 'VRF1',
'route_distinguisher': '65000:1',
'routes': {
'3.3.3.3/32': {
'index': {
1: {
'localprf': 100,
'next_hop': '3.3.3.3',
'origin_codes': 'i',
'status_codes': 'r>',
'weight': 0,
},
},
},
},
},
},
'remote_as': 65000,
},
},
},
'default': {
'neighbor': {
'2.2.2.2': {
'address_family': {
'ipv4 unicast': {
'input_queue': 0,
'msg_rcvd': 27420,
'msg_sent': 30189,
'output_queue': 0,
'state_pfxrcd': '1',
'tbl_ver': 4,
'up_down': '2w5d',
},
'vpnv4 unicast': {
'input_queue': 0,
'msg_rcvd': 27420,
'msg_sent': 30177,
'output_queue': 0,
'state_pfxrcd': '1',
'tbl_ver': 4,
'up_down': '2w5d',
},
},
'remote_as': 65000,
},
'2001:2:2:2::2': {
'address_family': {
'ipv6 unicast': {
'input_queue': 0,
'msg_rcvd': 27420,
'msg_sent': 30190,
'output_queue': 0,
'state_pfxrcd': '1',
'tbl_ver': 5,
'up_down': '2w5d',
},
'vpnv6 unicast': {
'input_queue': 0,
'msg_rcvd': 27420,
'msg_sent': 30187,
'output_queue': 0,
'state_pfxrcd': '1',
'tbl_ver': 5,
'up_down': '2w5d',
},
},
'remote_as': 65000,
},
'2001:3:3:3::3': {
'address_family': {
'ipv6 unicast': {
'input_queue': 0,
'msg_rcvd': 27430,
'msg_sent': 30181,
'output_queue': 0,
'state_pfxrcd': '1',
'tbl_ver': 5,
'up_down': '2w5d',
},
'vpnv6 unicast': {
'input_queue': 0,
'msg_rcvd': 27430,
'msg_sent': 30169,
'output_queue': 0,
'state_pfxrcd': '1',
'tbl_ver': 5,
'up_down': '2w5d',
},
},
'remote_as': 65000,
},
'3.3.3.3': {
'address_family': {
'ipv4 unicast': {
'input_queue': 0,
'msg_rcvd': 27430,
'msg_sent': 30165,
'output_queue': 0,
'state_pfxrcd': '1',
'tbl_ver': 4,
'up_down': '2w5d',
},
'vpnv4 unicast': {
'input_queue': 0,
'msg_rcvd': 27431,
'msg_sent': 30188,
'output_queue': 0,
'state_pfxrcd': '1',
'tbl_ver': 4,
'up_down': '2w5d',
},
},
'remote_as': 65000,
},
},
},
},
},
},
}
# Expected parsed BGP data for a narrowed ("custom") query: only neighbor
# 2.2.2.2 in the default VRF, with its per-address-family table statistics.
# NOTE(review): presumably the expected output of a filtered learn/parse
# operation -- confirm against the test that consumes this fixture.
bgp_info_custom = {
    'instance': {
        'default': {
            'bgp_id': 65000,
            'vrf': {
                'default': {
                    'cluster_id': '1.1.1.1',
                    'neighbor': {
                        '2.2.2.2': {
                            'address_family': {
                                'ipv4 unicast': {
                                    'bgp_table_version': 4,
                                    'path': {
                                        'memory_usage': 408,
                                        'total_entries': 3,
                                    },
                                    'prefixes': {
                                        'memory_usage': 744,
                                        'total_entries': 3,
                                    },
                                    'routing_table_version': 4,
                                    'total_memory': 2492,
                                },
                                'vpnv4 unicast': {
                                    'bgp_table_version': 4,
                                    'path': {
                                        'memory_usage': 408,
                                        'total_entries': 3,
                                    },
                                    'prefixes': {
                                        'memory_usage': 768,
                                        'total_entries': 3,
                                    },
                                    'routing_table_version': 4,
                                    'total_memory': 2564,
                                },
                            },
                        },
                    },
                },
            },
        },
    },
}
# Expected per-peer route summary for a narrowed ("custom") query: only the
# 'vpnv4 unicast' address family of neighbor 2.2.2.2 in the default VRF.
# NOTE(review): values mirror one entry of the full bgp_routes_per_peer
# fixture above -- confirm against the consuming test.
bgp_routes_per_peer_custom = {
    'instance': {
        'default': {
            'vrf': {
                'default': {
                    'neighbor': {
                        '2.2.2.2': {
                            'address_family': {
                                'vpnv4 unicast': {
                                    'input_queue': 0,
                                    'msg_rcvd': 27420,
                                    'msg_sent': 30177,
                                    'output_queue': 0,
                                    'state_pfxrcd': '1',
                                    'tbl_ver': 4,
                                    'up_down': '2w5d',
                                },
                            },
                        },
                    },
                },
            },
        },
    },
}
| 50.222512
| 136
| 0.366108
| 11,527
| 134,747
| 4.22842
| 0.055695
| 0.014813
| 0.014895
| 0.010669
| 0.921791
| 0.912804
| 0.897048
| 0.889128
| 0.878849
| 0.869001
| 0
| 0.121975
| 0.5654
| 134,747
| 2,682
| 137
| 50.241238
| 0.710336
| 0.000289
| 0
| 0.748363
| 0
| 0.042553
| 0.590376
| 0.008649
| 0
| 0
| 0.004618
| 0
| 0
| 1
| 0
| false
| 0.002455
| 0
| 0
| 0.011866
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
46105bf33eb9a7fac3905cceabc3af2fcd67963c
| 5,021
|
py
|
Python
|
resources/views.py
|
enuchi/swapi
|
5e181c3e4c6782dc49aaae2e2e25a89c6333f275
|
[
"BSD-3-Clause"
] | 1
|
2020-08-24T01:39:38.000Z
|
2020-08-24T01:39:38.000Z
|
resources/views.py
|
enuchi/swapi
|
5e181c3e4c6782dc49aaae2e2e25a89c6333f275
|
[
"BSD-3-Clause"
] | null | null | null |
resources/views.py
|
enuchi/swapi
|
5e181c3e4c6782dc49aaae2e2e25a89c6333f275
|
[
"BSD-3-Clause"
] | 1
|
2020-08-24T01:39:39.000Z
|
2020-08-24T01:39:39.000Z
|
from __future__ import unicode_literals
from rest_framework import viewsets
from .models import (
People,
Planet,
Film,
Species,
Vehicle,
Starship
)
from .serializers import (
PeopleSerializer,
PlanetSerializer,
FilmSerializer,
SpeciesSerializer,
VehicleSerializer,
StarshipSerializer
)
class PeopleViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for People resources, searchable by name.

    ModelViewSet already provides retrieve/list/create/update/destroy;
    the previous no-op overrides delegating verbatim to super() were
    removed, which leaves behavior unchanged.
    """

    queryset = People.objects.all()
    serializer_class = PeopleSerializer
    search_fields = ('name',)
class PlanetViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Planet resources, searchable by name.

    Behavior is inherited unchanged from ModelViewSet; redundant
    pass-through overrides were removed.
    """

    queryset = Planet.objects.all()
    serializer_class = PlanetSerializer
    search_fields = ('name',)
class FilmViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Film resources, searchable by title.

    Behavior is inherited unchanged from ModelViewSet; redundant
    pass-through overrides were removed.
    """

    queryset = Film.objects.all()
    serializer_class = FilmSerializer
    search_fields = ('title',)
class SpeciesViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Species resources, searchable by name.

    Behavior is inherited unchanged from ModelViewSet; redundant
    pass-through overrides were removed.
    """

    queryset = Species.objects.all()
    serializer_class = SpeciesSerializer
    search_fields = ('name',)
class VehicleViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Vehicle resources, searchable by name or model.

    Behavior is inherited unchanged from ModelViewSet; redundant
    pass-through overrides were removed.
    """

    queryset = Vehicle.objects.all()
    serializer_class = VehicleSerializer
    search_fields = ('name', 'model',)
class StarshipViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Starship resources, searchable by name or model.

    Behavior is inherited unchanged from ModelViewSet; redundant
    pass-through overrides were removed.
    """

    queryset = Starship.objects.all()
    serializer_class = StarshipSerializer
    search_fields = ('name', 'model',)
| 32.603896
| 78
| 0.672376
| 534
| 5,021
| 6.28839
| 0.095506
| 0.196546
| 0.303752
| 0.187612
| 0.764443
| 0.757296
| 0.714116
| 0.714116
| 0.575045
| 0.575045
| 0
| 0
| 0.184625
| 5,021
| 153
| 79
| 32.816993
| 0.820225
| 0
| 0
| 0.343137
| 0
| 0
| 0.006971
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.294118
| false
| 0
| 0.039216
| 0.294118
| 0.862745
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
1cbd0da893fcab36bea698bdf53225778dc7516b
| 3,063
|
py
|
Python
|
complexity/tests/views/records/test_the_modulus.py
|
o4dev/Complexity
|
88ef79c2dc68809284a8f48957a521eee77e2e69
|
[
"BSD-3-Clause"
] | 1
|
2018-02-18T18:01:27.000Z
|
2018-02-18T18:01:27.000Z
|
complexity/tests/views/records/test_the_modulus.py
|
o4dev/Complexity
|
88ef79c2dc68809284a8f48957a521eee77e2e69
|
[
"BSD-3-Clause"
] | null | null | null |
complexity/tests/views/records/test_the_modulus.py
|
o4dev/Complexity
|
88ef79c2dc68809284a8f48957a521eee77e2e69
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
complexity: tests/views/records/test_the_modulus.py
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from ..quizzes import Quiz
# Quiz under test and the URL of its public records page.
quiz = Quiz("the_modulus")
records_url = "/records/{}".format("the_modulus")
# Player names used to create records; presumably listed best-to-worst
# score in the ordering test below -- verify against Quiz scoring.
NAMES = ["Jack", "Harry", "Emma", "Hannah", "Tom"]
def test_added(test_client):
    """
    Test that a record is saved once a quiz is complete.
    """
    # The player must not be on the records page before finishing a quiz.
    assert NAMES[0] not in test_client.get(records_url).data

    quiz.new(test_client)
    # Answer all six questions correctly (answers cycle 1, 2, 3, 1, 2, 3,
    # exactly as the previous copy-pasted calls did).
    for answer in (1, 2, 3, 1, 2, 3):
        quiz.answer_correct(test_client, *([answer] * 3))
    # The response of finish() is not needed; the unused `resp` binding
    # was removed.
    quiz.finish(test_client, NAMES[0])

    # Completing the quiz must add the player's name to the records page.
    assert NAMES[0] in test_client.get(records_url).data
def test_order(test_client):
    """
    Test that records are stored in order.
    """
    def play_quiz(name, num_correct):
        # Complete one six-question quiz under `name`, answering the first
        # `num_correct` questions correctly and the rest incorrectly
        # (answers cycle 1, 2, 3, 1, 2, 3 as in the original sequences).
        quiz.new(test_client)
        for position, answer in enumerate((1, 2, 3, 1, 2, 3)):
            if position < num_correct:
                quiz.answer_correct(test_client, *([answer] * 3))
            else:
                quiz.answer_incorrect(test_client, *([answer] * 3))
        quiz.finish(test_client, name)

    # No player may appear on the records page before playing.
    for name in NAMES:
        assert name not in test_client.get(records_url).data

    # Finish quizzes in a scrambled order with distinct scores, so the
    # records page has to sort by score, not by insertion order.
    play_quiz(NAMES[1], 5)
    play_quiz(NAMES[0], 6)
    play_quiz(NAMES[2], 4)
    play_quiz(NAMES[3], 3)
    play_quiz(NAMES[4], 2)

    # NAMES must appear on the page in order: search for each name only
    # after the position where the previous one was found.
    # (Renamed the cursor variable; the original shadowed builtin next().)
    last = 0
    records = test_client.get(records_url).data
    for name in NAMES:
        offset = records[last:].find(name)
        assert offset != -1
        last += offset
| 31.90625
| 60
| 0.642181
| 439
| 3,063
| 4.250569
| 0.138952
| 0.289389
| 0.176849
| 0.292605
| 0.783494
| 0.783494
| 0.769025
| 0.753483
| 0.753483
| 0.753483
| 0
| 0.032877
| 0.16585
| 3,063
| 95
| 61
| 32.242105
| 0.697456
| 0.078028
| 0
| 0.71875
| 0
| 0
| 0.01982
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.03125
| false
| 0
| 0.015625
| 0
| 0.046875
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1ce1b4d3d83ca79403fc1d726c2ac15a56c4f811
| 3,285
|
py
|
Python
|
tests/test_extra.py
|
saxix/django-sysinfo
|
fdcf98a2f2d493487aaaa46610539dd994d2dcd5
|
[
"BSD-3-Clause"
] | 2
|
2017-08-31T20:16:32.000Z
|
2017-09-08T20:54:30.000Z
|
tests/test_extra.py
|
saxix/django-sysinfo
|
fdcf98a2f2d493487aaaa46610539dd994d2dcd5
|
[
"BSD-3-Clause"
] | 1
|
2017-07-10T14:53:54.000Z
|
2017-07-10T14:53:54.000Z
|
tests/test_extra.py
|
saxix/django-sysinfo
|
fdcf98a2f2d493487aaaa46610539dd994d2dcd5
|
[
"BSD-3-Clause"
] | 2
|
2017-07-10T09:35:28.000Z
|
2021-08-31T16:41:37.000Z
|
import json
import logging
import pytest
from demoproject.models import test_sysinfo
from django_sysinfo.api import get_extra
from django_sysinfo.compat import reverse
from django_sysinfo.conf import config
logger = logging.getLogger(__name__)
def test_base(monkeypatch):
    """get_extra() evaluates the configured extras and drops the failing ones.

    The expected result shows that a plain callable ("a", "e") and a dotted
    path to a callable ("c") are resolved and called, while an unresolvable
    string ("b") and a callable that raises ("d") are silently omitted.
    """
    extras = {
        "a": lambda x: 1,
        "b": "func",                             # not a resolvable dotted path
        "c": "demoproject.models.test_sysinfo",  # dotted path to a callable
        "d": lambda x: 1.0 / 0.0,                # raises ZeroDivisionError
        "e": test_sysinfo,                       # direct callable reference
    }
    monkeypatch.setattr(config, "extra", extras)

    result = get_extra(config)
    assert result == {"a": 1, "c": 123, "e": 123}
@pytest.mark.django_db
# @pytest.mark.urls("urls")
def test_extra(client, monkeypatch):
    """The sys-info view renders only "extra" when all other sections are off."""
    # Disable every standard sysinfo section so the JSON response contains
    # nothing but the configured extras (replaces nine copy-pasted setattr
    # lines; the redundant local re-import of test_sysinfo was removed --
    # it is already imported at module level).
    for section in ("host", "os", "python", "process", "modules",
                    "project", "environ", "databases", "installed_apps"):
        monkeypatch.setattr("django_sysinfo.conf.config.{}".format(section), False)
    monkeypatch.setattr("django_sysinfo.api.config.extra",
                        {"test1": "demoproject.models.test_sysinfo",
                         "test2": test_sysinfo})

    response = client.get(reverse("sys-info"))
    data = json.loads(response.content.decode("utf8"))
    assert list(data.keys()) == ["extra"], data.keys()
    assert data["extra"]["test1"] == 123
    assert data["extra"]["test2"] == 123
@pytest.mark.django_db
# @pytest.mark.urls("urls")
def test_extra_url(client, monkeypatch):
    """Same scenario as test_extra, exercised through the sys-info URL.

    NOTE(review): this is currently an exact duplicate of test_extra; it
    presumably predates a URL-specific variant -- confirm intent upstream.
    """
    # Disable every standard sysinfo section so the JSON response contains
    # nothing but the configured extras (collapses nine copy-pasted
    # setattr lines; the shadowing local import of test_sysinfo was
    # removed -- the module-level import already provides it).
    for section in ("host", "os", "python", "process", "modules",
                    "project", "environ", "databases", "installed_apps"):
        monkeypatch.setattr("django_sysinfo.conf.config.{}".format(section), False)
    monkeypatch.setattr("django_sysinfo.api.config.extra",
                        {"test1": "demoproject.models.test_sysinfo",
                         "test2": test_sysinfo})

    response = client.get(reverse("sys-info"))
    data = json.loads(response.content.decode("utf8"))
    assert list(data.keys()) == ["extra"], data.keys()
    assert data["extra"]["test1"] == 123
    assert data["extra"]["test2"] == 123
| 43.8
| 103
| 0.621918
| 351
| 3,285
| 5.689459
| 0.185185
| 0.149725
| 0.240361
| 0.310466
| 0.840761
| 0.840761
| 0.821733
| 0.821733
| 0.821733
| 0.821733
| 0
| 0.013816
| 0.250837
| 3,285
| 74
| 104
| 44.391892
| 0.797643
| 0.015525
| 0
| 0.666667
| 0
| 0
| 0.271742
| 0.237388
| 0
| 0
| 0
| 0
| 0.116667
| 1
| 0.05
| false
| 0
| 0.15
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1ce8b9f55300cdeead35c55fb6b31140ee270a03
| 3,865
|
py
|
Python
|
tests/test_xmldoctotabular.py
|
simonwiles/xmltotabular
|
13c8b60e383aa16db662c744e57578f2b7e2ed57
|
[
"MIT"
] | 2
|
2020-09-26T23:57:00.000Z
|
2021-06-16T23:59:46.000Z
|
tests/test_xmldoctotabular.py
|
simonwiles/xmltotabular
|
13c8b60e383aa16db662c744e57578f2b7e2ed57
|
[
"MIT"
] | 23
|
2020-11-09T05:43:50.000Z
|
2021-10-21T19:29:47.000Z
|
tests/test_xmldoctotabular.py
|
simonwiles/xmltotabular
|
13c8b60e383aa16db662c744e57578f2b7e2ed57
|
[
"MIT"
] | null | null | null |
import yaml
from xmltotabular import XmlDocToTabular
def test_single_simple_entity_per_doc(simple_config):
    """A document containing exactly one entity yields one single-row table."""
    xml = """\
<?xml version="1.0" encoding="UTF-8"?>
<album>
  <name>Five Leaves Left</name>
  <artist>Nick Drake</artist>
  <released>1969</released>
  <label>Island</label>
  <genre>Folk</genre>
</album>
"""
    expected_row = {
        "id": "None_0",
        "name": "Five Leaves Left",
        "artist": "Nick Drake",
        "released": "1969",
        "label": "Island",
        "genre": "Folk",
    }
    transformer = XmlDocToTabular(simple_config)
    assert transformer.process_doc(xml) == {"album": [expected_row]}
def test_multiple_simple_entities_per_doc(simple_config):
    """Three sibling entities in a single document become three rows."""
    xml = """\
<?xml version="1.0" encoding="UTF-8"?>
<albums>
  <album>
    <name>Five Leaves Left</name>
    <artist>Nick Drake</artist>
    <released>1969</released>
    <label>Island</label>
    <genre>Folk</genre>
  </album>
  <album>
    <name>Bryter Layter</name>
    <artist>Nick Drake</artist>
    <released>1971</released>
    <label>Island</label>
    <genre>Folk</genre>
  </album>
  <album>
    <name>Pink Moon</name>
    <artist>Nick Drake</artist>
    <released>1972</released>
    <label>Island</label>
    <genre>Folk</genre>
  </album>
</albums>
"""
    # Rows are expected in document order, with ids numbered sequentially.
    discography = [
        ("Five Leaves Left", "1969"),
        ("Bryter Layter", "1971"),
        ("Pink Moon", "1972"),
    ]
    expected_rows = [
        {
            "id": "None_{}".format(index),
            "name": name,
            "artist": "Nick Drake",
            "released": released,
            "label": "Island",
            "genre": "Folk",
        }
        for index, (name, released) in enumerate(discography)
    ]
    transformer = XmlDocToTabular(simple_config)
    assert transformer.process_doc(xml) == {"album": expected_rows}
def test_attribute_style_xml():
    """Fields can be extracted from XML attributes via "@attr" selectors."""
    config = yaml.safe_load(
        r"""
album:
  <entity>: album
  <fields>:
    "@name": name
    "@artist": artist
    "@released": released
    "@label": label
    "@genre": genre
"""
    )

    xml = """\
<?xml version="1.0" encoding="UTF-8"?>
<albums>
  <album name="Five Leaves Left"
         artist="Nick Drake"
         released="1969"
         label="Island"
         genre="Folk" />
  <album name="Bryter Layter"
         artist="Nick Drake"
         released="1971"
         label="Island"
         genre="Folk" />
  <album name="Pink Moon"
         artist="Nick Drake"
         released="1972"
         label="Island"
         genre="Folk" />
</albums>
"""
    # Attribute-style input must yield exactly the same rows as the
    # element-style input in the test above.
    discography = [
        ("Five Leaves Left", "1969"),
        ("Bryter Layter", "1971"),
        ("Pink Moon", "1972"),
    ]
    expected_rows = [
        {
            "id": "None_{}".format(index),
            "name": name,
            "artist": "Nick Drake",
            "released": released,
            "label": "Island",
            "genre": "Folk",
        }
        for index, (name, released) in enumerate(discography)
    ]
    transformer = XmlDocToTabular(config)
    assert transformer.process_doc(xml) == {"album": expected_rows}
| 24.15625
| 57
| 0.443984
| 331
| 3,865
| 5.099698
| 0.166163
| 0.082938
| 0.124408
| 0.136256
| 0.8359
| 0.8359
| 0.782583
| 0.782583
| 0.757109
| 0.757109
| 0
| 0.030994
| 0.398965
| 3,865
| 159
| 58
| 24.308176
| 0.695652
| 0
| 0
| 0.711111
| 0
| 0
| 0.478177
| 0.050508
| 0
| 0
| 0
| 0
| 0.022222
| 1
| 0.022222
| false
| 0
| 0.014815
| 0
| 0.037037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c4bf00164b0aa69c2337f43e5b460c15e2596c9
| 238,784
|
py
|
Python
|
regular_language/unit_tests/test_sse_forrest_sseASForrest.py
|
ShoYamanishi/nlpregex
|
795b36d5a2fad8bc25264b2093ffa9c3723b282b
|
[
"MIT"
] | 1
|
2021-12-03T07:20:18.000Z
|
2021-12-03T07:20:18.000Z
|
regular_language/unit_tests/test_sse_forrest_sseASForrest.py
|
ShoYamanishi/nlpregex
|
795b36d5a2fad8bc25264b2093ffa9c3723b282b
|
[
"MIT"
] | null | null | null |
regular_language/unit_tests/test_sse_forrest_sseASForrest.py
|
ShoYamanishi/nlpregex
|
795b36d5a2fad8bc25264b2093ffa9c3723b282b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Unit Tests for sse_forrest.sseASForrest """
import unittest
import nlpregex.regular_language.ast
import nlpregex.regular_language.sse_forrest
from nlpregex.regular_language.unit_tests.test_sse_asforrest_helper import test_sse_ASForrest_helper
class test_sseASForrest( unittest.TestCase ):
def __init__( self, *args, **kwargs ):
    # Delegate standard TestCase construction, then attach the shared
    # fixture helper used by the more complex tests.
    super( test_sseASForrest, self ).__init__( *args, **kwargs )
    self.helper = test_sse_ASForrest_helper()
def create_initial_serial_node( self, forrest, children ):
    # Build an AST for the given token sequence:
    #   []    -> single epsilon node
    #   [x]   -> single leaf node for x
    #   else  -> an 's' (serial) root with one leaf child per token,
    #            attached in order, regex refreshed at the end.
    if not children:
        return forrest.create_initial_node('e')
    if len( children ) == 1:
        return forrest.create_initial_node( children[0] )
    root = forrest.create_initial_node('s')
    for token in children:
        leaf = forrest.create_initial_node( token )
        edge = nlpregex.regular_language.sse_forrest.sseASTEdge()
        edge.add_to_graph( forrest, root, leaf, "directed" )
    root.generate_regex()
    return root

def add_question_node( self, forrest, n ):
    # Wrap n under a fresh '?' (optional) node and refresh its regex.
    root = forrest.create_initial_node('?')
    edge = nlpregex.regular_language.sse_forrest.sseASTEdge()
    edge.add_to_graph( forrest, root, n, "directed" )
    root.generate_regex()
    return root
def test_constructor_0001( self ):
    # A fresh forrest starts counting nonterminals at 1 and node ids at 0.
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    self.assertEqual( forrest.next_nonterminal_num, 1 )
    self.assertEqual( forrest.node_id_next, 0 )

def test_next_node_id_allocation_0001( self ):
    # Node ids are handed out sequentially from 0, and the counter
    # advances by one on every allocation.
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    for expected in range( 3 ):
        self.assertEqual( forrest.next_node_id_allocation(), expected )
        self.assertEqual( forrest.node_id_next, expected + 1 )
def test_allocate_nonterminal_0001( self ):
    # Nonterminal names run 'n1', 'n2', ... and the counter restarts
    # from 1 after reset_nonterminal_num_allocation().
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    for i in range( 1, 5 ):
        self.assertEqual( forrest.allocate_nonterminal(), 'n%d' % i )
        self.assertEqual( forrest.next_nonterminal_num, i + 1 )
    forrest.reset_nonterminal_num_allocation()
    for i in range( 1, 4 ):
        self.assertEqual( forrest.allocate_nonterminal(), 'n%d' % i )
        self.assertEqual( forrest.next_nonterminal_num, i + 1 )
def test_remove_duplication_and_order_children_0001( self ):
    # Mixed node kinds come back deduplicated and in canonical order
    # (epsilon first, then nonterminals, then terminals).
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    ordered = forrest.remove_duplication_and_order_children( ['e','t1','t2','t3', 'n4'] )
    self.assertEqual( ordered, [ 'e','n4', 't1','t2','t3' ] )

def test_remove_duplication_and_order_children_0002( self ):
    # Repeated terminals collapse to a single occurrence each.
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    ordered = forrest.remove_duplication_and_order_children( ['t1','t1','t2','t2', 't1'] )
    self.assertEqual( ordered, [ 't1','t2' ] )
def _check_create_initial_node( self, node_type ):
    # Shared check for create_initial_node(): a freshly created node on a
    # fresh forrest carries its type string as both ast_node_type and
    # regex, has no children, zeroed counters, and node id 0.
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    node = forrest.create_initial_node( node_type )
    self.assertEqual( node.ast_node_type, node_type )
    self.assertEqual( node.regex, node_type )
    self.assertEqual( node.children_map, {} )
    self.assertEqual( node.num_terms, 0 )
    self.assertEqual( node.height, 0 )
    self.assertEqual( node.node_id, 0 )

# 'e'
def test_create_initial_node_0001( self ):
    self._check_create_initial_node('e')

# 't1'
def test_create_initial_node_0002( self ):
    self._check_create_initial_node('t1')

# 'n1'
def test_create_initial_node_0003( self ):
    self._check_create_initial_node('n1')

# 'u'
def test_create_initial_node_0004( self ):
    self._check_create_initial_node('u')

# 's'
def test_create_initial_node_0005( self ):
    self._check_create_initial_node('s')

# '*'
# BUG FIX: this case was originally also named test_create_initial_node_0005,
# which shadowed the 's' case above, so unittest never ran it. Renamed to
# 0007 ('0006' is already taken by the '?' case below).
def test_create_initial_node_0007( self ):
    self._check_create_initial_node('*')

# '?'
def test_create_initial_node_0006( self ):
    self._check_create_initial_node('?')
def _check_initial_union_leaf( self, node, node_type, node_id ):
    # A leaf produced by create_initial_union_node(): regex equals its
    # own token, no children, zeroed counters.
    self.assertEqual( node.ast_node_type, node_type )
    self.assertEqual( node.regex, node_type )
    self.assertEqual( node.children_map, {} )
    self.assertEqual( node.num_terms, 0 )
    self.assertEqual( node.height, 0 )
    self.assertEqual( node.node_id, node_id )

def _check_initial_union_root( self, node, regex, num_children, node_id ):
    # The 'u' root created for a union of two or more distinct members.
    self.assertEqual( node.ast_node_type, 'u' )
    self.assertEqual( node.regex, regex )
    self.assertEqual( len(node.children_map), num_children )
    self.assertEqual( node.num_terms, 0 )
    self.assertEqual( node.height, 0 )
    self.assertEqual( node.node_id, node_id )

# []
def test_create_initial_union_node_0001( self ):
    # An empty member list yields no node at all.
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    self.assertEqual( forrest.create_initial_union_node([]), None )

# ['e']
def test_create_initial_union_node_0002( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    node = forrest.create_initial_union_node(['e'])
    self._check_initial_union_leaf( node, 'e', 0 )

# ['t1']
def test_create_initial_union_node_0003( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    node = forrest.create_initial_union_node(['t1'])
    self._check_initial_union_leaf( node, 't1', 0 )

# ['n1']
def test_create_initial_union_node_0004( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    node = forrest.create_initial_union_node(['n1'])
    self._check_initial_union_leaf( node, 'n1', 0 )

# ['e', 't1']
def test_create_initial_union_node_0005( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    root = forrest.create_initial_union_node(['e', 't1'])
    self._check_initial_union_root( root, '( e | t1 )', 2, 0 )
    self._check_initial_union_leaf( root.children_map['e'], 'e', 1 )
    self._check_initial_union_leaf( root.children_map['t1'], 't1', 2 )

# ['t1', 't2']
def test_create_initial_union_node_0006( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    root = forrest.create_initial_union_node(['t1', 't2'])
    self._check_initial_union_root( root, '( t1 | t2 )', 2, 0 )
    self._check_initial_union_leaf( root.children_map['t1'], 't1', 1 )
    self._check_initial_union_leaf( root.children_map['t2'], 't2', 2 )

# ['t1', 't1']
def test_create_initial_union_node_0007( self ):
    # Duplicates collapse: a one-member union degenerates to the leaf itself.
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    node = forrest.create_initial_union_node(['t1', 't1'])
    self._check_initial_union_leaf( node, 't1', 0 )

# ['t1', 't1', 'e']
def test_create_initial_union_node_0008( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    root = forrest.create_initial_union_node(['t1', 't1', 'e'])
    self._check_initial_union_root( root, '( e | t1 )', 2, 0 )
    self._check_initial_union_leaf( root.children_map['e'], 'e', 1 )
    self._check_initial_union_leaf( root.children_map['t1'], 't1', 2 )

# ['t1', 't3', 't3', 't1']
def test_create_initial_union_node_0009( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    root = forrest.create_initial_union_node(['t1', 't3', 't3', 't1'])
    self._check_initial_union_root( root, '( t1 | t3 )', 2, 0 )
    self._check_initial_union_leaf( root.children_map['t1'], 't1', 1 )
    self._check_initial_union_leaf( root.children_map['t3'], 't3', 2 )
# --- shared checks for union_two_ASTs() results --------------------------

def _check_union2_result_leaf( self, node, token ):
    # Union of two identical one-member ASTs degenerates to a single leaf.
    self.assertEqual( node.ast_node_type, token )
    self.assertEqual( node.regex, token )
    self.assertEqual( node.children_map, {} )
    self.assertEqual( node.num_terms, 0 )
    self.assertEqual( node.height, 0 )
    self.assertEqual( node.node_id, 0 )

def _check_union2_root( self, node, regex, num_children, node_id ):
    # The 'u' node returned for a non-degenerate union.
    self.assertEqual( node.ast_node_type, 'u' )
    self.assertEqual( node.regex, regex )
    self.assertEqual( len(node.children_map), num_children )
    self.assertEqual( node.num_terms, 0 )
    self.assertEqual( node.height, 0 )
    self.assertEqual( node.node_id, node_id )

def _check_union2_child( self, root, key, node_type, node_id ):
    # Children are indexed by their regex string (so key == child.regex
    # in every fixture below); each child is detached with zeroed counters.
    child = root.children_map[ key ]
    self.assertEqual( child.ast_node_type, node_type )
    self.assertEqual( child.regex, key )
    self.assertEqual( child.children_map, {} )
    self.assertEqual( child.num_terms, 0 )
    self.assertEqual( child.height, 0 )
    self.assertEqual( child.node_id, node_id )

# e e
def test_union_two_ASTs_0001( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['e'])
    rhs = forrest.create_initial_union_node(['e'])
    self._check_union2_result_leaf( forrest.union_two_ASTs(lhs, rhs), 'e' )

# e t1
def test_union_two_ASTs_0002( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['e'])
    rhs = forrest.create_initial_union_node(['t1'])
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( e | t1 )', 2, 2 )
    self._check_union2_child( merged, 'e', 'e', 0 )
    self._check_union2_child( merged, 't1', 't1', 1 )

# e t2
def test_union_two_ASTs_0003( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['e'])
    rhs = forrest.create_initial_union_node(['t2'])
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( e | t2 )', 2, 2 )
    self._check_union2_child( merged, 'e', 'e', 0 )
    self._check_union2_child( merged, 't2', 't2', 1 )

# e n1
def test_union_two_ASTs_0004( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['e'])
    rhs = forrest.create_initial_union_node(['n1'])
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( e | n1 )', 2, 2 )
    self._check_union2_child( merged, 'e', 'e', 0 )
    self._check_union2_child( merged, 'n1', 'n1', 1 )

# e n2
def test_union_two_ASTs_0005( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['e'])
    rhs = forrest.create_initial_union_node(['n2'])
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( e | n2 )', 2, 2 )
    self._check_union2_child( merged, 'e', 'e', 0 )
    self._check_union2_child( merged, 'n2', 'n2', 1 )

# e u
def test_union_two_ASTs_0006( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['e'])
    rhs = forrest.create_initial_union_node(['t1', 't2'])
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( e | t1 | t2 )', 3, 4 )
    self._check_union2_child( merged, 'e', 'e', 0 )
    self._check_union2_child( merged, 't1', 't1', 2 )
    self._check_union2_child( merged, 't2', 't2', 3 )

# e s
def test_union_two_ASTs_0007( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['e'])
    rhs = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( e | t1 t2 )', 2, 4 )
    self._check_union2_child( merged, 'e', 'e', 0 )
    self._check_union2_child( merged, 't1 t2', 's', 1 )

# e *
def test_union_two_ASTs_0008( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['e'])
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    star = forrest.repeat_AST(serial)
    merged = forrest.union_two_ASTs(lhs, star)
    self._check_union2_root( merged, '( ( t1 t2 ) * | e )', 2, 5 )
    self._check_union2_child( merged, 'e', 'e', 0 )
    self._check_union2_child( merged, '( t1 t2 ) *', '*', 4 )

# e ?
def test_union_two_ASTs_0009( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['e'])
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    opt = self.add_question_node( forrest, serial )
    merged = forrest.union_two_ASTs(lhs, opt)
    self._check_union2_root( merged, '( ( t1 t2 ) ? | e )', 2, 5 )
    self._check_union2_child( merged, 'e', 'e', 0 )
    self._check_union2_child( merged, '( t1 t2 ) ?', '?', 4 )

# t1 e
def test_union_two_ASTs_0011( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['t1'])
    rhs = forrest.create_initial_union_node(['e'])
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( e | t1 )', 2, 2 )
    self._check_union2_child( merged, 'e', 'e', 1 )
    self._check_union2_child( merged, 't1', 't1', 0 )

# t1 t1
def test_union_two_ASTs_0012( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['t1'])
    rhs = forrest.create_initial_union_node(['t1'])
    self._check_union2_result_leaf( forrest.union_two_ASTs(lhs, rhs), 't1' )

# t1 t2
def test_union_two_ASTs_0013( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['t1'])
    rhs = forrest.create_initial_union_node(['t2'])
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( t1 | t2 )', 2, 2 )
    self._check_union2_child( merged, 't1', 't1', 0 )
    self._check_union2_child( merged, 't2', 't2', 1 )

# t1 n1
def test_union_two_ASTs_0014( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['t1'])
    rhs = forrest.create_initial_union_node(['n1'])
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( n1 | t1 )', 2, 2 )
    self._check_union2_child( merged, 't1', 't1', 0 )
    self._check_union2_child( merged, 'n1', 'n1', 1 )

# t1 n2
def test_union_two_ASTs_0015( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['t1'])
    rhs = forrest.create_initial_union_node(['n2'])
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( n2 | t1 )', 2, 2 )
    self._check_union2_child( merged, 't1', 't1', 0 )
    self._check_union2_child( merged, 'n2', 'n2', 1 )

# t1 u
def test_union_two_ASTs_0016( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['t1'])
    rhs = forrest.create_initial_union_node(['t1', 't2'])
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( t1 | t2 )', 2, 4 )
    self._check_union2_child( merged, 't1', 't1', 0 )
    self._check_union2_child( merged, 't2', 't2', 3 )

# t1 s
def test_union_two_ASTs_0017( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['t1'])
    rhs = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( t1 | t1 t2 )', 2, 4 )
    self._check_union2_child( merged, 't1', 't1', 0 )
    self._check_union2_child( merged, 't1 t2', 's', 1 )

# t1 *
def test_union_two_ASTs_0018( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['t1'])
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    star = forrest.repeat_AST(serial)
    merged = forrest.union_two_ASTs(lhs, star)
    self._check_union2_root( merged, '( ( t1 t2 ) * | t1 )', 2, 5 )
    self._check_union2_child( merged, 't1', 't1', 0 )
    self._check_union2_child( merged, '( t1 t2 ) *', '*', 4 )

# t1 ?
def test_union_two_ASTs_0019( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['t1'])
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    opt = self.add_question_node( forrest, serial )
    merged = forrest.union_two_ASTs(lhs, opt)
    self._check_union2_root( merged, '( ( t1 t2 ) ? | t1 )', 2, 5 )
    self._check_union2_child( merged, 't1', 't1', 0 )
    self._check_union2_child( merged, '( t1 t2 ) ?', '?', 4 )

# t2 e
def test_union_two_ASTs_0021( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['t2'])
    rhs = forrest.create_initial_union_node(['e'])
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( e | t2 )', 2, 2 )
    self._check_union2_child( merged, 'e', 'e', 1 )
    self._check_union2_child( merged, 't2', 't2', 0 )

# t2 t1
def test_union_two_ASTs_0022( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['t2'])
    rhs = forrest.create_initial_union_node(['t1'])
    merged = forrest.union_two_ASTs(lhs, rhs)
    self._check_union2_root( merged, '( t1 | t2 )', 2, 2 )
    self._check_union2_child( merged, 't1', 't1', 1 )
    self._check_union2_child( merged, 't2', 't2', 0 )

# t2 t2
def test_union_two_ASTs_0023( self ):
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node(['t2'])
    rhs = forrest.create_initial_union_node(['t2'])
    self._check_union2_result_leaf( forrest.union_two_ASTs(lhs, rhs), 't2' )
# t2 n1
def test_union_two_ASTs_0024( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['t2'])
node02 = forrest01.create_initial_union_node(['n1'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( n1 | t2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
node04 = node03.children_map['t2']
self.assertEqual( node04.ast_node_type, 't2' )
self.assertEqual( node04.regex, 't2' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 0 )
node05 = node03.children_map['n1']
self.assertEqual( node05.ast_node_type, 'n1' )
self.assertEqual( node05.regex, 'n1' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 1 )
# t2 n2
def test_union_two_ASTs_0025( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['t2'])
node02 = forrest01.create_initial_union_node(['n2'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( n2 | t2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
node04 = node03.children_map['t2']
self.assertEqual( node04.ast_node_type, 't2' )
self.assertEqual( node04.regex, 't2' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 0 )
node05 = node03.children_map['n2']
self.assertEqual( node05.ast_node_type, 'n2' )
self.assertEqual( node05.regex, 'n2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 1 )
# t2 u
def test_union_two_ASTs_0026( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['t2'])
node02 = forrest01.create_initial_union_node(['t1', 't2'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( t1 | t2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
node04 = node03.children_map['t1']
self.assertEqual( node04.ast_node_type, 't1' )
self.assertEqual( node04.regex, 't1' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 2 )
node05 = node03.children_map['t2']
self.assertEqual( node05.ast_node_type, 't2' )
self.assertEqual( node05.regex, 't2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
# t2 s
def test_union_two_ASTs_0027( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['t2'])
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( t1 t2 | t2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
node04 = node03.children_map['t2']
self.assertEqual( node04.ast_node_type, 't2' )
self.assertEqual( node04.regex, 't2' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 0 )
node05 = node03.children_map['t1 t2']
self.assertEqual( node05.ast_node_type, 's' )
self.assertEqual( node05.regex, 't1 t2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 1 )
# t2 *
def test_union_two_ASTs_0028( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['t2'])
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.repeat_AST(node02)
node04 = forrest01.union_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, 'u' )
self.assertEqual( node04.regex, '( ( t1 t2 ) * | t2 )' )
self.assertEqual( len(node04.children_map), 2 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
node05 = node04.children_map['t2']
self.assertEqual( node05.ast_node_type, 't2' )
self.assertEqual( node05.regex, 't2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
node06 = node04.children_map['( t1 t2 ) *']
self.assertEqual( node06.ast_node_type, '*' )
self.assertEqual( node06.regex, '( t1 t2 ) *' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 4 )
# t2 ?
def test_union_two_ASTs_0019( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['t2'])
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = self.add_question_node( forrest01, node02 )
node04 = forrest01.union_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, 'u' )
self.assertEqual( node04.regex, '( ( t1 t2 ) ? | t2 )' )
self.assertEqual( len(node04.children_map), 2 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
node05 = node04.children_map['t2']
self.assertEqual( node05.ast_node_type, 't2' )
self.assertEqual( node05.regex, 't2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
node06 = node04.children_map['( t1 t2 ) ?']
self.assertEqual( node06.ast_node_type, '?' )
self.assertEqual( node06.regex, '( t1 t2 ) ?' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 4 )
# n1 e
def test_union_two_ASTs_0031( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n1'])
node02 = forrest01.create_initial_union_node(['e'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( e | n1 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
node04 = node03.children_map['e']
self.assertEqual( node04.ast_node_type, 'e' )
self.assertEqual( node04.regex, 'e' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 1 )
node05 = node03.children_map['n1']
self.assertEqual( node05.ast_node_type, 'n1' )
self.assertEqual( node05.regex, 'n1' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
# n1 t1
def test_union_two_ASTs_0032( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n1'])
node02 = forrest01.create_initial_union_node(['t1'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( n1 | t1 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
node04 = node03.children_map['t1']
self.assertEqual( node04.ast_node_type, 't1' )
self.assertEqual( node04.regex, 't1' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 1 )
node05 = node03.children_map['n1']
self.assertEqual( node05.ast_node_type, 'n1' )
self.assertEqual( node05.regex, 'n1' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
# n1 t2
def test_union_two_ASTs_0033( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n1'])
node02 = forrest01.create_initial_union_node(['t2'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( n1 | t2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
node04 = node03.children_map['t2']
self.assertEqual( node04.ast_node_type, 't2' )
self.assertEqual( node04.regex, 't2' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 1 )
node05 = node03.children_map['n1']
self.assertEqual( node05.ast_node_type, 'n1' )
self.assertEqual( node05.regex, 'n1' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
# n1 n1
def test_union_two_ASTs_0034( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n1'])
node02 = forrest01.create_initial_union_node(['n1'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'n1' )
self.assertEqual( node03.regex, 'n1' )
self.assertEqual( node03.children_map, {} )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 0 )
# n1 n2
def test_union_two_ASTs_0035( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n1'])
node02 = forrest01.create_initial_union_node(['n2'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( n1 | n2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
node04 = node03.children_map['n1']
self.assertEqual( node04.ast_node_type, 'n1' )
self.assertEqual( node04.regex, 'n1' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 0 )
node05 = node03.children_map['n2']
self.assertEqual( node05.ast_node_type, 'n2' )
self.assertEqual( node05.regex, 'n2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 1 )
# n1 u
def test_union_two_ASTs_0036( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n1'])
node02 = forrest01.create_initial_union_node(['t1', 't2'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( n1 | t1 | t2 )' )
self.assertEqual( len(node03.children_map), 3 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
node04 = node03.children_map['t1']
self.assertEqual( node04.ast_node_type, 't1' )
self.assertEqual( node04.regex, 't1' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 2 )
node05 = node03.children_map['t2']
self.assertEqual( node05.ast_node_type, 't2' )
self.assertEqual( node05.regex, 't2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 3 )
node06 = node03.children_map['n1']
self.assertEqual( node06.ast_node_type, 'n1' )
self.assertEqual( node06.regex, 'n1' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 0 )
# n1 s
def test_union_two_ASTs_0037( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n1'])
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( n1 | t1 t2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
node04 = node03.children_map['n1']
self.assertEqual( node04.ast_node_type, 'n1' )
self.assertEqual( node04.regex, 'n1' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 0 )
node05 = node03.children_map['t1 t2']
self.assertEqual( node05.ast_node_type, 's' )
self.assertEqual( node05.regex, 't1 t2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 1 )
# n1 *
def test_union_two_ASTs_0038( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n1'])
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.repeat_AST(node02)
node04 = forrest01.union_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, 'u' )
self.assertEqual( node04.regex, '( ( t1 t2 ) * | n1 )' )
self.assertEqual( len(node04.children_map), 2 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
node05 = node04.children_map['n1']
self.assertEqual( node05.ast_node_type, 'n1' )
self.assertEqual( node05.regex, 'n1' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
node06 = node04.children_map['( t1 t2 ) *']
self.assertEqual( node06.ast_node_type, '*' )
self.assertEqual( node06.regex, '( t1 t2 ) *' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 4 )
# n1 ?
def test_union_two_ASTs_0039( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n1'])
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = self.add_question_node( forrest01, node02 )
node04 = forrest01.union_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, 'u' )
self.assertEqual( node04.regex, '( ( t1 t2 ) ? | n1 )' )
self.assertEqual( len(node04.children_map), 2 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
node05 = node04.children_map['n1']
self.assertEqual( node05.ast_node_type, 'n1' )
self.assertEqual( node05.regex, 'n1' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
node06 = node04.children_map['( t1 t2 ) ?']
self.assertEqual( node06.ast_node_type, '?' )
self.assertEqual( node06.regex, '( t1 t2 ) ?' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 4 )
# n2 e
def test_union_two_ASTs_0041( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n2'])
node02 = forrest01.create_initial_union_node(['e'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( e | n2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
node04 = node03.children_map['e']
self.assertEqual( node04.ast_node_type, 'e' )
self.assertEqual( node04.regex, 'e' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 1 )
node05 = node03.children_map['n2']
self.assertEqual( node05.ast_node_type, 'n2' )
self.assertEqual( node05.regex, 'n2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
# n2 t1
def test_union_two_ASTs_0042( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n2'])
node02 = forrest01.create_initial_union_node(['t1'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( n2 | t1 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
node04 = node03.children_map['t1']
self.assertEqual( node04.ast_node_type, 't1' )
self.assertEqual( node04.regex, 't1' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 1 )
node05 = node03.children_map['n2']
self.assertEqual( node05.ast_node_type, 'n2' )
self.assertEqual( node05.regex, 'n2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
# n2 t2
def test_union_two_ASTs_0043( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n2'])
node02 = forrest01.create_initial_union_node(['t2'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( n2 | t2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
node04 = node03.children_map['t2']
self.assertEqual( node04.ast_node_type, 't2' )
self.assertEqual( node04.regex, 't2' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 1 )
node05 = node03.children_map['n2']
self.assertEqual( node05.ast_node_type, 'n2' )
self.assertEqual( node05.regex, 'n2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
# n2 n1
def test_union_two_ASTs_0044( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n2'])
node02 = forrest01.create_initial_union_node(['n1'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( n1 | n2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
node04 = node03.children_map['n1']
self.assertEqual( node04.ast_node_type, 'n1' )
self.assertEqual( node04.regex, 'n1' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 1 )
node05 = node03.children_map['n2']
self.assertEqual( node05.ast_node_type, 'n2' )
self.assertEqual( node05.regex, 'n2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
# n2 n2
def test_union_two_ASTs_0045( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n2'])
node02 = forrest01.create_initial_union_node(['n2'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'n2' )
self.assertEqual( node03.regex, 'n2' )
self.assertEqual( node03.children_map, {} )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 0 )
# n2 u
def test_union_two_ASTs_0046( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n2'])
node02 = forrest01.create_initial_union_node(['t1', 't2'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( n2 | t1 | t2 )' )
self.assertEqual( len(node03.children_map), 3 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
node04 = node03.children_map['t1']
self.assertEqual( node04.ast_node_type, 't1' )
self.assertEqual( node04.regex, 't1' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 2 )
node05 = node03.children_map['t2']
self.assertEqual( node05.ast_node_type, 't2' )
self.assertEqual( node05.regex, 't2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 3 )
node06 = node03.children_map['n2']
self.assertEqual( node06.ast_node_type, 'n2' )
self.assertEqual( node06.regex, 'n2' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 0 )
# n2 s
def test_union_two_ASTs_0047( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n2'])
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( n2 | t1 t2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
node04 = node03.children_map['n2']
self.assertEqual( node04.ast_node_type, 'n2' )
self.assertEqual( node04.regex, 'n2' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 0 )
node05 = node03.children_map['t1 t2']
self.assertEqual( node05.ast_node_type, 's' )
self.assertEqual( node05.regex, 't1 t2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 1 )
# n2 *
def test_union_two_ASTs_0048( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n2'])
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.repeat_AST(node02)
node04 = forrest01.union_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, 'u' )
self.assertEqual( node04.regex, '( ( t1 t2 ) * | n2 )' )
self.assertEqual( len(node04.children_map), 2 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
node05 = node04.children_map['n2']
self.assertEqual( node05.ast_node_type, 'n2' )
self.assertEqual( node05.regex, 'n2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
node06 = node04.children_map['( t1 t2 ) *']
self.assertEqual( node06.ast_node_type, '*' )
self.assertEqual( node06.regex, '( t1 t2 ) *' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 4 )
# n2 ?
def test_union_two_ASTs_0049( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['n2'])
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = self.add_question_node( forrest01, node02 )
node04 = forrest01.union_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, 'u' )
self.assertEqual( node04.regex, '( ( t1 t2 ) ? | n2 )' )
self.assertEqual( len(node04.children_map), 2 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
node05 = node04.children_map['n2']
self.assertEqual( node05.ast_node_type, 'n2' )
self.assertEqual( node05.regex, 'n2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 0 )
node06 = node04.children_map['( t1 t2 ) ?']
self.assertEqual( node06.ast_node_type, '?' )
self.assertEqual( node06.regex, '( t1 t2 ) ?' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 4 )
# u e
def test_union_two_ASTs_0051( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['t1', 't2'])
node02 = forrest01.create_initial_union_node(['e'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( e | t1 | t2 )' )
self.assertEqual( len(node03.children_map), 3 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
node04 = node03.children_map['e']
self.assertEqual( node04.ast_node_type, 'e' )
self.assertEqual( node04.regex, 'e' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 3 )
node05 = node03.children_map['t1']
self.assertEqual( node05.ast_node_type, 't1' )
self.assertEqual( node05.regex, 't1' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 1 )
node06 = node03.children_map['t2']
self.assertEqual( node06.ast_node_type, 't2' )
self.assertEqual( node06.regex, 't2' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 2 )
# u t1
def test_union_two_ASTs_0052( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['t1', 't2'])
node02 = forrest01.create_initial_union_node(['t1'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( t1 | t2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
node04 = node03.children_map['t1']
self.assertEqual( node04.ast_node_type, 't1' )
self.assertEqual( node04.regex, 't1' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 1 )
node05 = node03.children_map['t2']
self.assertEqual( node05.ast_node_type, 't2' )
self.assertEqual( node05.regex, 't2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 2 )
# u t2
def test_union_two_ASTs_0053( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node(['t1', 't3'])
node02 = forrest01.create_initial_union_node(['t2'])
node03 = forrest01.union_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( t1 | t2 | t3 )' )
self.assertEqual( len(node03.children_map), 3 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
node04 = node03.children_map['t1']
self.assertEqual( node04.ast_node_type, 't1' )
self.assertEqual( node04.regex, 't1' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 1 )
node05 = node03.children_map['t2']
self.assertEqual( node05.ast_node_type, 't2' )
self.assertEqual( node05.regex, 't2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 3 )
node06 = node03.children_map['t3']
self.assertEqual( node06.ast_node_type, 't3' )
self.assertEqual( node06.regex, 't3' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 2 )
# u n1
def test_union_two_ASTs_0054( self ):
    """Union of ( t1 | t2 ) and ( n1 ) yields ( n1 | t1 | t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node( ['t1', 't2'] )
    rhs = forrest.create_initial_union_node( ['n1'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( n1 | t1 | t2 )' )
    self.assertEqual( len(result.children_map), 3 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 4 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 'n1', 'n1', 3 ),
        ( 't1', 't1', 1 ),
        ( 't2', 't2', 2 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# u n2
def test_union_two_ASTs_0055( self ):
    """Union of ( t1 | t2 ) and ( n2 ) yields ( n2 | t1 | t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node( ['t1', 't2'] )
    rhs = forrest.create_initial_union_node( ['n2'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( n2 | t1 | t2 )' )
    self.assertEqual( len(result.children_map), 3 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 4 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 'n2', 'n2', 3 ),
        ( 't1', 't1', 1 ),
        ( 't2', 't2', 2 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# u u
def test_union_two_ASTs_0056( self ):
    """Union of two identical unions ( t1 | t2 ) collapses to ( t1 | t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node( ['t1', 't2'] )
    rhs = forrest.create_initial_union_node( ['t1', 't2'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( t1 | t2 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 6 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 't1', 't1', 1 ),
        ( 't2', 't2', 2 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# u s
def test_union_two_ASTs_0057( self ):
    """Union of ( t1 | t2 ) and serial 't1 t2' yields ( t1 | t1 t2 | t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node( ['t1', 't2'] )
    rhs = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( t1 | t1 t2 | t2 )' )
    self.assertEqual( len(result.children_map), 3 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 6 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 't1', 't1', 1 ),
        ( 't1 t2', 's', 3 ),
        ( 't2', 't2', 2 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# u *
def test_union_two_ASTs_0058( self ):
    """Union of ( t1 | t2 ) and ( t1 t2 ) * yields ( ( t1 t2 ) * | t1 | t2 ).

    Fix: the original test checked every attribute of the 't2' child except
    its node_id (unlike the otherwise-parallel test_union_two_ASTs_0059,
    which asserts node07.node_id == 2); the missing assertion is restored.
    """
    forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
    node01 = forrest01.create_initial_union_node(['t1', 't2'])
    node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
    node03 = forrest01.repeat_AST(node02)
    node04 = forrest01.union_two_ASTs(node01, node03)
    self.assertEqual( node04.ast_node_type, 'u' )
    self.assertEqual( node04.regex, '( ( t1 t2 ) * | t1 | t2 )' )
    self.assertEqual( len(node04.children_map), 3 )
    self.assertEqual( node04.num_terms, 0 )
    self.assertEqual( node04.height, 0 )
    self.assertEqual( node04.node_id, 7 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 't1', 't1', 1 ),
        ( '( t1 t2 ) *', '*', 6 ),
        ( 't2', 't2', 2 ),   # node_id check was missing in the original
    ]:
        child = node04.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# u ?
def test_union_two_ASTs_0059( self ):
    """Union of ( t1 | t2 ) and ( t1 t2 ) ? yields ( ( t1 t2 ) ? | t1 | t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = forrest.create_initial_union_node( ['t1', 't2'] )
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    rhs = self.add_question_node( forrest, serial )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) ? | t1 | t2 )' )
    self.assertEqual( len(result.children_map), 3 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 7 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 't1', 't1', 1 ),
        ( '( t1 t2 ) ?', '?', 6 ),
        ( 't2', 't2', 2 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# s e
def test_union_two_ASTs_0061( self ):
    """Union of serial 't1 t2' and ( e ) yields ( e | t1 t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    rhs = forrest.create_initial_union_node( ['e'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( e | t1 t2 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 4 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 'e', 'e', 3 ),
        ( 't1 t2', 's', 0 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# s t1
def test_union_two_ASTs_0062( self ):
    """Union of serial 't1 t2' and ( t1 ) yields ( t1 | t1 t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    rhs = forrest.create_initial_union_node( ['t1'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( t1 | t1 t2 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 4 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 't1 t2', 's', 0 ),
        ( 't1', 't1', 3 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# s t2
def test_union_two_ASTs_0063( self ):
    """Union of serial 't1 t3' and ( t2 ) yields ( t1 t3 | t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = self.create_initial_serial_node( forrest, ['t1', 't3'] )
    rhs = forrest.create_initial_union_node( ['t2'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( t1 t3 | t2 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 4 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 't1 t3', 's', 0 ),
        ( 't2', 't2', 3 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# s n1
def test_union_two_ASTs_0064( self ):
    """Union of serial 't1 t2' and ( n1 ) yields ( n1 | t1 t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    rhs = forrest.create_initial_union_node( ['n1'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( n1 | t1 t2 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 4 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 'n1', 'n1', 3 ),
        ( 't1 t2', 's', 0 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# s n2
def test_union_two_ASTs_0065( self ):
    """Union of serial 't1 t2' and ( n2 ) yields ( n2 | t1 t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    rhs = forrest.create_initial_union_node( ['n2'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( n2 | t1 t2 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 4 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 'n2', 'n2', 3 ),
        ( 't1 t2', 's', 0 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# s u
def test_union_two_ASTs_0066( self ):
    """Union of serial 't1 t2' and ( t1 | t2 ) yields ( t1 | t1 t2 | t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    rhs = forrest.create_initial_union_node( ['t1', 't2'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( t1 | t1 t2 | t2 )' )
    self.assertEqual( len(result.children_map), 3 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 6 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 't1', 't1', 4 ),
        ( 't2', 't2', 5 ),
        ( 't1 t2', 's', 0 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# s s
def test_union_two_ASTs_0067( self ):
    """Union of two identical serials 't1 t2' collapses to the serial itself."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    rhs = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 's' )
    self.assertEqual( result.regex, 't1 t2' )
    self.assertEqual( result.children_map, {} )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 0 )
# s *
def test_union_two_ASTs_0068( self ):
    """Union of serial 't1 t2' and ( t1 t2 ) * yields ( ( t1 t2 ) * | t1 t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    rhs = forrest.repeat_AST( serial )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) * | t1 t2 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 7 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 't1 t2', 's', 0 ),
        ( '( t1 t2 ) *', '*', 6 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# s ?
def test_union_two_ASTs_0069( self ):
    """Union of serial 't3 t4' and ( t1 t2 ) ? yields ( ( t1 t2 ) ? | t3 t4 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    lhs = self.create_initial_serial_node( forrest, ['t3', 't4'] )
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    rhs = self.add_question_node( forrest, serial )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) ? | t3 t4 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 7 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 't3 t4', 's', 0 ),
        ( '( t1 t2 ) ?', '?', 6 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# * e
def test_union_two_ASTs_0071( self ):
    """Union of ( t1 t2 ) * and ( e ) yields ( ( t1 t2 ) * | e )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    lhs = forrest.repeat_AST( serial )
    rhs = forrest.create_initial_union_node( ['e'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) * | e )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 5 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 'e', 'e', 4 ),
        ( '( t1 t2 ) *', '*', 3 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# * t1
def test_union_two_ASTs_0072( self ):
    """Union of ( t1 t2 ) * and ( t1 ) yields ( ( t1 t2 ) * | t1 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    lhs = forrest.repeat_AST( serial )
    rhs = forrest.create_initial_union_node( ['t1'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) * | t1 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 5 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( '( t1 t2 ) *', '*', 3 ),
        ( 't1', 't1', 4 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# * t2
def test_union_two_ASTs_0073( self ):
    """Union of ( t1 t2 ) * and ( t2 ) yields ( ( t1 t2 ) * | t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    lhs = forrest.repeat_AST( serial )
    rhs = forrest.create_initial_union_node( ['t2'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) * | t2 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 5 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( '( t1 t2 ) *', '*', 3 ),
        ( 't2', 't2', 4 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# * n1
def test_union_two_ASTs_0074( self ):
    """Union of ( t1 t2 ) * and ( n1 ) yields ( ( t1 t2 ) * | n1 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    lhs = forrest.repeat_AST( serial )
    rhs = forrest.create_initial_union_node( ['n1'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) * | n1 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 5 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 'n1', 'n1', 4 ),
        ( '( t1 t2 ) *', '*', 3 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# * n2
def test_union_two_ASTs_0075( self ):
    """Union of ( t1 t2 ) * and ( n2 ) yields ( ( t1 t2 ) * | n2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    lhs = forrest.repeat_AST( serial )
    rhs = forrest.create_initial_union_node( ['n2'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) * | n2 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 5 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 'n2', 'n2', 4 ),
        ( '( t1 t2 ) *', '*', 3 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# * u
def test_union_two_ASTs_0076( self ):
    """Union of ( t1 t2 ) * and ( t1 | t2 ) yields ( ( t1 t2 ) * | t1 | t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    lhs = forrest.repeat_AST( serial )
    rhs = forrest.create_initial_union_node( ['t1', 't2'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) * | t1 | t2 )' )
    self.assertEqual( len(result.children_map), 3 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 7 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 't1', 't1', 5 ),
        ( 't2', 't2', 6 ),
        ( '( t1 t2 ) *', '*', 3 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# * s
def test_union_two_ASTs_0077( self ):
    """Union of ( t1 t2 ) * and serial 't1 t2' yields ( ( t1 t2 ) * | t1 t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    lhs = forrest.repeat_AST( serial )
    rhs = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) * | t1 t2 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 7 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 't1 t2', 's', 4 ),
        ( '( t1 t2 ) *', '*', 3 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# * *
def test_union_two_ASTs_0078( self ):
    # Union of two identical '( t1 t2 ) *' repetition ASTs.
    forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
    node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
    node02 = forrest01.repeat_AST(node01)
    node03 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
    node04 = forrest01.repeat_AST(node03)
    node05 = forrest01.union_two_ASTs(node02, node04)
    # NOTE(review): the assertions below inspect node04 (the second operand),
    # not node05 (the union result). The parallel test_union_two_ASTs_0067
    # asserts on the union result, so this looks like a typo — but the
    # expected attributes of node05 cannot be confirmed from this file alone,
    # so the code is left unchanged. TODO: verify against union_two_ASTs.
    self.assertEqual( node04.ast_node_type, '*' )
    self.assertEqual( node04.regex, '( t1 t2 ) *' )
    self.assertEqual( node04.children_map, {} )
    self.assertEqual( node04.num_terms, 0 )
    self.assertEqual( node04.height, 0 )
    self.assertEqual( node04.node_id, 7 )
# * ?
def test_union_two_ASTs_0079( self ):
    """Union of ( t3 t4 ) * and ( t1 t2 ) ? yields ( ( t1 t2 ) ? | ( t3 t4 ) * )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    serial_a = self.create_initial_serial_node( forrest, ['t3', 't4'] )
    lhs = forrest.repeat_AST( serial_a )
    serial_b = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    rhs = self.add_question_node( forrest, serial_b )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) ? | ( t3 t4 ) * )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 8 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( '( t3 t4 ) *', '*', 3 ),
        ( '( t1 t2 ) ?', '?', 7 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# ? e
def test_union_two_ASTs_0081( self ):
    """Union of ( t1 t2 ) ? and ( e ) yields ( ( t1 t2 ) ? | e )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    lhs = self.add_question_node( forrest, serial )
    rhs = forrest.create_initial_union_node( ['e'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) ? | e )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 5 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( 'e', 'e', 4 ),
        ( '( t1 t2 ) ?', '?', 3 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# ? t1
def test_union_two_ASTs_0082( self ):
    """Union of ( t1 t2 ) ? and ( t1 ) yields ( ( t1 t2 ) ? | t1 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    lhs = self.add_question_node( forrest, serial )
    rhs = forrest.create_initial_union_node( ['t1'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) ? | t1 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 5 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( '( t1 t2 ) ?', '?', 3 ),
        ( 't1', 't1', 4 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# ? t2
def test_union_two_ASTs_0083( self ):
    """Union of ( t1 t2 ) ? and ( t2 ) yields ( ( t1 t2 ) ? | t2 )."""
    forrest = nlpregex.regular_language.sse_forrest.sseASForrest()
    serial = self.create_initial_serial_node( forrest, ['t1', 't2'] )
    lhs = self.add_question_node( forrest, serial )
    rhs = forrest.create_initial_union_node( ['t2'] )
    result = forrest.union_two_ASTs( lhs, rhs )
    self.assertEqual( result.ast_node_type, 'u' )
    self.assertEqual( result.regex, '( ( t1 t2 ) ? | t2 )' )
    self.assertEqual( len(result.children_map), 2 )
    self.assertEqual( result.num_terms, 0 )
    self.assertEqual( result.height, 0 )
    self.assertEqual( result.node_id, 5 )
    # (child key, expected ast_node_type, expected node_id)
    for key, expected_type, expected_id in [
        ( '( t1 t2 ) ?', '?', 3 ),
        ( 't2', 't2', 4 ),
    ]:
        child = result.children_map[key]
        self.assertEqual( child.ast_node_type, expected_type )
        self.assertEqual( child.regex, key )
        self.assertEqual( child.children_map, {} )
        self.assertEqual( child.num_terms, 0 )
        self.assertEqual( child.height, 0 )
        self.assertEqual( child.node_id, expected_id )
# ? n1
def test_union_two_ASTs_0084( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node02 = self.add_question_node( forrest01, node01 )
node03 = forrest01.create_initial_union_node(['n1'])
node04 = forrest01.union_two_ASTs(node02, node03)
self.assertEqual( node04.ast_node_type, 'u' )
self.assertEqual( node04.regex, '( ( t1 t2 ) ? | n1 )' )
self.assertEqual( len(node04.children_map), 2 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
node05 = node04.children_map['n1']
self.assertEqual( node05.ast_node_type, 'n1' )
self.assertEqual( node05.regex, 'n1' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 4 )
node06 = node04.children_map['( t1 t2 ) ?']
self.assertEqual( node06.ast_node_type, '?' )
self.assertEqual( node06.regex, '( t1 t2 ) ?' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 3 )
# ? n2
def test_union_two_ASTs_0085( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node02 = self.add_question_node( forrest01, node01 )
node03 = forrest01.create_initial_union_node(['n2'])
node04 = forrest01.union_two_ASTs(node02, node03)
self.assertEqual( node04.ast_node_type, 'u' )
self.assertEqual( node04.regex, '( ( t1 t2 ) ? | n2 )' )
self.assertEqual( len(node04.children_map), 2 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
node05 = node04.children_map['n2']
self.assertEqual( node05.ast_node_type, 'n2' )
self.assertEqual( node05.regex, 'n2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 4 )
node06 = node04.children_map['( t1 t2 ) ?']
self.assertEqual( node06.ast_node_type, '?' )
self.assertEqual( node06.regex, '( t1 t2 ) ?' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 3 )
# ? u
def test_union_two_ASTs_0086( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node02 = self.add_question_node( forrest01, node01 )
node03 = forrest01.create_initial_union_node(['t1', 't2'])
node04 = forrest01.union_two_ASTs(node02, node03)
self.assertEqual( node04.ast_node_type, 'u' )
self.assertEqual( node04.regex, '( ( t1 t2 ) ? | t1 | t2 )' )
self.assertEqual( len(node04.children_map), 3 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 7 )
node05 = node04.children_map['t1']
self.assertEqual( node05.ast_node_type, 't1' )
self.assertEqual( node05.regex, 't1' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 5 )
node06 = node04.children_map['t2']
self.assertEqual( node06.ast_node_type, 't2' )
self.assertEqual( node06.regex, 't2' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 6 )
node07 = node04.children_map['( t1 t2 ) ?']
self.assertEqual( node07.ast_node_type, '?' )
self.assertEqual( node07.regex, '( t1 t2 ) ?' )
self.assertEqual( node07.children_map, {} )
self.assertEqual( node07.num_terms, 0 )
self.assertEqual( node07.height, 0 )
self.assertEqual( node07.node_id , 3 )
# ? s
def test_union_two_ASTs_0087( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node02 = self.add_question_node( forrest01, node01 )
node03 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node04 = forrest01.union_two_ASTs(node02, node03)
self.assertEqual( node04.ast_node_type, 'u' )
self.assertEqual( node04.regex, '( ( t1 t2 ) ? | t1 t2 )' )
self.assertEqual( len(node04.children_map), 2 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 7 )
node05 = node04.children_map['t1 t2']
self.assertEqual( node05.ast_node_type, 's' )
self.assertEqual( node05.regex, 't1 t2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 4 )
node06 = node04.children_map['( t1 t2 ) ?']
self.assertEqual( node06.ast_node_type, '?' )
self.assertEqual( node06.regex, '( t1 t2 ) ?' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 3 )
# ? *
def test_union_two_ASTs_0078( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node02 = self.add_question_node( forrest01, node01 )
node03 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node04 = forrest01.repeat_AST(node03)
node05 = forrest01.union_two_ASTs(node02, node04)
self.assertEqual( node05.ast_node_type, 'u' )
self.assertEqual( node05.regex, '( ( t1 t2 ) * | ( t1 t2 ) ? )' )
self.assertEqual( len(node05.children_map), 2 )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id, 8 )
node06 = node05.children_map['( t1 t2 ) *']
self.assertEqual( node06.ast_node_type, '*' )
self.assertEqual( node06.regex, '( t1 t2 ) *' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 7 )
node07 = node05.children_map['( t1 t2 ) ?']
self.assertEqual( node07.ast_node_type, '?' )
self.assertEqual( node07.regex, '( t1 t2 ) ?' )
self.assertEqual( node07.children_map, {} )
self.assertEqual( node07.num_terms, 0 )
self.assertEqual( node07.height, 0 )
self.assertEqual( node07.node_id , 3 )
# ? ?
def test_union_two_ASTs_0079( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = self.create_initial_serial_node( forrest01, ['t3', 't4'] )
node02 = self.add_question_node( forrest01, node01 )
node03 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node04 = self.add_question_node( forrest01, node03 )
node05 = forrest01.union_two_ASTs(node02, node04)
self.assertEqual( node05.ast_node_type, 'u' )
self.assertEqual( node05.regex, '( ( t1 t2 ) ? | ( t3 t4 ) ? )' )
self.assertEqual( len(node05.children_map), 2 )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id, 8 )
node06 = node05.children_map['( t3 t4 ) ?']
self.assertEqual( node06.ast_node_type, '?' )
self.assertEqual( node06.regex, '( t3 t4 ) ?' )
self.assertEqual( node06.children_map, {} )
self.assertEqual( node06.num_terms, 0 )
self.assertEqual( node06.height, 0 )
self.assertEqual( node06.node_id , 3 )
node07 = node05.children_map['( t1 t2 ) ?']
self.assertEqual( node07.ast_node_type, '?' )
self.assertEqual( node07.regex, '( t1 t2 ) ?' )
self.assertEqual( node07.children_map, {} )
self.assertEqual( node07.num_terms, 0 )
self.assertEqual( node07.height, 0 )
self.assertEqual( node07.node_id , 7 )
# e e
def test_concat_two_ASTs_0001( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('e')
node02 = forrest01.create_initial_node('e')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'e' )
self.assertEqual( node03.regex, 'e' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 1 )
# e t1
def test_concat_two_ASTs_0002( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('e')
node02 = forrest01.create_initial_node('t1')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 't1' )
self.assertEqual( node03.regex, 't1' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 1 )
# e t2
def test_concat_two_ASTs_0003( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('e')
node02 = forrest01.create_initial_node('t2')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 't2' )
self.assertEqual( node03.regex, 't2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 1 )
# e n1
def test_concat_two_ASTs_0004( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('e')
node02 = forrest01.create_initial_node('n1')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'n1' )
self.assertEqual( node03.regex, 'n1' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 1 )
# e n2
def test_concat_two_ASTs_0005( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('e')
node02 = forrest01.create_initial_node('n2')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'n2' )
self.assertEqual( node03.regex, 'n2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 1 )
# e u
def test_concat_two_ASTs_0006( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('e')
node02 = forrest01.create_initial_union_node(['t1', 't2'])
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'u' )
self.assertEqual( node03.regex, '( t1 | t2 )' )
self.assertEqual( len(node03.children_map), 2 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 1 )
node04 = node03.children_map['t1']
self.assertEqual( node04.ast_node_type, 't1' )
self.assertEqual( node04.regex, 't1' )
self.assertEqual( node04.children_map, {} )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id , 2 )
node05 = node03.children_map['t2']
self.assertEqual( node05.ast_node_type, 't2' )
self.assertEqual( node05.regex, 't2' )
self.assertEqual( node05.children_map, {} )
self.assertEqual( node05.num_terms, 0 )
self.assertEqual( node05.height, 0 )
self.assertEqual( node05.node_id , 3 )
# e s
def test_concat_two_ASTs_0007( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('e')
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't1 t2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 1 )
# e *
def test_concat_two_ASTs_0008( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('e')
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.repeat_AST(node02)
node04 = forrest01.concat_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, '*' )
self.assertEqual( node04.regex, '( t1 t2 ) *' )
self.assertEqual( len(node04.children_map), 0 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 4 )
# e ?
def test_concat_two_ASTs_0009( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('e')
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = self.add_question_node( forrest01, node02 )
node04 = forrest01.concat_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, '?' )
self.assertEqual( node04.regex, '( t1 t2 ) ?' )
self.assertEqual( len(node04.children_map), 0 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 4 )
# t1 e
def test_concat_two_ASTs_0011( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t1')
node02 = forrest01.create_initial_node('e')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 't1' )
self.assertEqual( node03.regex, 't1' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 0 )
# t1 t1
def test_concat_two_ASTs_0012( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t1')
node02 = forrest01.create_initial_node('t1')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't1 t1' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# t1 t2
def test_concat_two_ASTs_0013( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t1')
node02 = forrest01.create_initial_node('t2')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't1 t2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# t1 n1
def test_concat_two_ASTs_0014( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t1')
node02 = forrest01.create_initial_node('n1')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't1 n1' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# t1 n2
def test_concat_two_ASTs_0015( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t1')
node02 = forrest01.create_initial_node('n2')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't1 n2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# t1 u
def test_concat_two_ASTs_0016( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t1')
node02 = forrest01.create_initial_union_node(['t1', 't2'])
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't1 ( t1 | t2 )' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
# t1 s
def test_concat_two_ASTs_0017( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t1')
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't1 t1 t2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
# t1 *
def test_concat_two_ASTs_0018( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t1')
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.repeat_AST(node02)
node04 = forrest01.concat_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, 's' )
self.assertEqual( node04.regex, 't1 ( t1 t2 ) *' )
self.assertEqual( len(node04.children_map), 0 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
# t1 ?
def test_concat_two_ASTs_0019( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t1')
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = self.add_question_node( forrest01, node02 )
node04 = forrest01.concat_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, 's' )
self.assertEqual( node04.regex, 't1 ( t1 t2 ) ?' )
self.assertEqual( len(node04.children_map), 0 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
# t2 e
def test_concat_two_ASTs_0021( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t2')
node02 = forrest01.create_initial_node('e')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 't2' )
self.assertEqual( node03.regex, 't2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 0 )
# t2 t1
def test_concat_two_ASTs_0022( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t2')
node02 = forrest01.create_initial_node('t1')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't2 t1' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# t2 t2
def test_concat_two_ASTs_0023( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t2')
node02 = forrest01.create_initial_node('t2')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't2 t2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# t2 n1
def test_concat_two_ASTs_0024( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t2')
node02 = forrest01.create_initial_node('n1')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't2 n1' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# t2 n2
def test_concat_two_ASTs_0025( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t2')
node02 = forrest01.create_initial_node('n2')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't2 n2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# t2 u
def test_concat_two_ASTs_0026( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t2')
node02 = forrest01.create_initial_union_node(['t1', 't2'])
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't2 ( t1 | t2 )' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
# t2 s
def test_concat_two_ASTs_0027( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t2')
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't2 t1 t2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
# t2 *
def test_concat_two_ASTs_0028( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t2')
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.repeat_AST(node02)
node04 = forrest01.concat_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, 's' )
self.assertEqual( node04.regex, 't2 ( t1 t2 ) *' )
self.assertEqual( len(node04.children_map), 0 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
# t2 ?
def test_concat_two_ASTs_0029( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('t2')
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = self.add_question_node( forrest01, node02 )
node04 = forrest01.concat_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, 's' )
self.assertEqual( node04.regex, 't2 ( t1 t2 ) ?' )
self.assertEqual( len(node04.children_map), 0 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
# n1 e
def test_concat_two_ASTs_0031( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n1')
node02 = forrest01.create_initial_node('e')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'n1' )
self.assertEqual( node03.regex, 'n1' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 0 )
# n1 t1
def test_concat_two_ASTs_0032( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n1')
node02 = forrest01.create_initial_node('t1')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 'n1 t1' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# n1 t2
def test_concat_two_ASTs_0033( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n1')
node02 = forrest01.create_initial_node('t2')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 'n1 t2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# n1 n1
def test_concat_two_ASTs_0034( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n1')
node02 = forrest01.create_initial_node('n1')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 'n1 n1' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# n1 n2
def test_concat_two_ASTs_0035( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n1')
node02 = forrest01.create_initial_node('n2')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 'n1 n2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# n1 u
def test_concat_two_ASTs_0036( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n1')
node02 = forrest01.create_initial_union_node(['t1', 't2'])
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 'n1 ( t1 | t2 )' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
# n1 s
def test_concat_two_ASTs_0037( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n1')
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 'n1 t1 t2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
# n1 *
def test_concat_two_ASTs_0038( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n1')
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = forrest01.repeat_AST(node02)
node04 = forrest01.concat_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, 's' )
self.assertEqual( node04.regex, 'n1 ( t1 t2 ) *' )
self.assertEqual( len(node04.children_map), 0 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
# n1 ?
def test_concat_two_ASTs_0039( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n1')
node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
node03 = self.add_question_node( forrest01, node02 )
node04 = forrest01.concat_two_ASTs(node01, node03)
self.assertEqual( node04.ast_node_type, 's' )
self.assertEqual( node04.regex, 'n1 ( t1 t2 ) ?' )
self.assertEqual( len(node04.children_map), 0 )
self.assertEqual( node04.num_terms, 0 )
self.assertEqual( node04.height, 0 )
self.assertEqual( node04.node_id, 5 )
# n2 e
def test_concat_two_ASTs_0041( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n2')
node02 = forrest01.create_initial_node('e')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 'n2' )
self.assertEqual( node03.regex, 'n2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 0 )
# n2 t1
def test_concat_two_ASTs_0042( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n2')
node02 = forrest01.create_initial_node('t1')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 'n2 t1' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# n2 t2
def test_concat_two_ASTs_0043( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n2')
node02 = forrest01.create_initial_node('t2')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 'n2 t2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# n2 n1
def test_concat_two_ASTs_0044( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n2')
node02 = forrest01.create_initial_node('n1')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 'n2 n1' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# n2 n2
def test_concat_two_ASTs_0045( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n2')
node02 = forrest01.create_initial_node('n2')
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 'n2 n2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 2 )
# n2 u
def test_concat_two_ASTs_0046( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_node('n2')
node02 = forrest01.create_initial_union_node(['t1', 't2'])
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 'n2 ( t1 | t2 )' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 4 )
    # n2 s : the right-hand serial is flattened into a new serial 'n2 t1 t2'.
    def test_concat_two_ASTs_0047( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_node('n2')
        node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, 'n2 t1 t2' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 4 )
    # n2 * : a starred operand keeps its parentheses inside the new serial.
    def test_concat_two_ASTs_0048( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_node('n2')
        node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node03 = forrest01.repeat_AST(node02)
        node04 = forrest01.concat_two_ASTs(node01, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, 'n2 ( t1 t2 ) *' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 5 )
    # n2 ? : an optional operand keeps its parentheses inside the new serial.
    def test_concat_two_ASTs_0049( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_node('n2')
        node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node03 = self.add_question_node( forrest01, node02 )
        node04 = forrest01.concat_two_ASTs(node01, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, 'n2 ( t1 t2 ) ?' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 5 )
    # u e : epsilon is absorbed; the union (node_id 0) comes back unchanged,
    # with both of its terminal children still in children_map.
    def test_concat_two_ASTs_0051( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_union_node(['t1', 't2'])
        node02 = forrest01.create_initial_node('e')
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 'u' )
        self.assertEqual( node03.regex, '( t1 | t2 )' )
        self.assertEqual( len(node03.children_map), 2 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 0 )
        node04 = node03.children_map['t1']
        self.assertEqual( node04.ast_node_type, 't1' )
        self.assertEqual( node04.regex, 't1' )
        self.assertEqual( node04.children_map, {} )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id , 1 )
        node05 = node03.children_map['t2']
        self.assertEqual( node05.ast_node_type, 't2' )
        self.assertEqual( node05.regex, 't2' )
        self.assertEqual( node05.children_map, {} )
        self.assertEqual( node05.num_terms, 0 )
        self.assertEqual( node05.height, 0 )
        self.assertEqual( node05.node_id , 2 )
    # u t1 : the union is parenthesized and the terminal appended in a new serial.
    def test_concat_two_ASTs_0052( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_union_node(['t1', 't2'])
        node02 = forrest01.create_initial_node('t1')
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, '( t1 | t2 ) t1' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 4 )
    # u t2 : same as above with the other terminal.
    def test_concat_two_ASTs_0053( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_union_node(['t1', 't2'])
        node02 = forrest01.create_initial_node('t2')
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, '( t1 | t2 ) t2' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 4 )
    # u n1 : union followed by a nonterminal.
    def test_concat_two_ASTs_0054( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_union_node(['t1', 't2'])
        node02 = forrest01.create_initial_node('n1')
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, '( t1 | t2 ) n1' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 4 )
    # u n2 : union followed by the other nonterminal.
    def test_concat_two_ASTs_0055( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_union_node(['t1', 't2'])
        node02 = forrest01.create_initial_node('n2')
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, '( t1 | t2 ) n2' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 4 )
    # u u : both unions stay parenthesized in the resulting serial.
    def test_concat_two_ASTs_0056( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_union_node(['t1', 't2'])
        node02 = forrest01.create_initial_union_node(['t1', 't2'])
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, '( t1 | t2 ) ( t1 | t2 )' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 6 )
    # u s : the right serial is flattened after the parenthesized union.
    def test_concat_two_ASTs_0057( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_union_node(['t1', 't2'])
        node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, '( t1 | t2 ) t1 t2' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 6 )
    # u * : union followed by a starred serial.
    def test_concat_two_ASTs_0058( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_union_node(['t1', 't2'])
        node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node03 = forrest01.repeat_AST(node02)
        node04 = forrest01.concat_two_ASTs(node01, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 | t2 ) ( t1 t2 ) *' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 7 )
    # u ? : union followed by an optional serial.
    def test_concat_two_ASTs_0059( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_union_node(['t1', 't2'])
        node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node03 = self.add_question_node( forrest01, node02 )
        node04 = forrest01.concat_two_ASTs(node01, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 | t2 ) ( t1 t2 ) ?' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 7 )
    # s e : epsilon is absorbed; the left serial (node_id 0) comes back unchanged.
    def test_concat_two_ASTs_0061( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.create_initial_node('e')
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, 't1 t2' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 0 )
    # s t1 : the terminal is appended to the flattened serial.
    def test_concat_two_ASTs_0062( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.create_initial_node('t1')
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, 't1 t2 t1' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 4 )
    # s t2 : same as above with the other terminal.
    def test_concat_two_ASTs_0063( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.create_initial_node('t2')
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, 't1 t2 t2' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 4 )
    # s n1 : serial followed by a nonterminal.
    def test_concat_two_ASTs_0064( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.create_initial_node('n1')
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, 't1 t2 n1' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 4 )
    # s n2 : serial followed by the other nonterminal.
    def test_concat_two_ASTs_0065( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.create_initial_node('n2')
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, 't1 t2 n2' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 4 )
    # s u : the union is parenthesized when appended to the serial.
    def test_concat_two_ASTs_0066( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.create_initial_union_node(['t1', 't2'])
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, 't1 t2 ( t1 | t2 )' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 6 )
    # s s : both serials are flattened into one.
    def test_concat_two_ASTs_0067( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node03 = forrest01.concat_two_ASTs(node01, node02)
        self.assertEqual( node03.ast_node_type, 's' )
        self.assertEqual( node03.regex, 't1 t2 t1 t2' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 6 )
    # s * : the starred serial stays parenthesized.
    def test_concat_two_ASTs_0068( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node03 = forrest01.repeat_AST(node02)
        node04 = forrest01.concat_two_ASTs(node01, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, 't1 t2 ( t1 t2 ) *' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 7 )
    # s ? : the optional serial stays parenthesized.
    def test_concat_two_ASTs_0069( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node03 = self.add_question_node( forrest01, node02 )
        node04 = forrest01.concat_two_ASTs(node01, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, 't1 t2 ( t1 t2 ) ?' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 7 )
    # * e : epsilon is absorbed; the star node (node_id 3) comes back unchanged.
    def test_concat_two_ASTs_0071( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.repeat_AST(node01)
        node03 = forrest01.create_initial_node('e')
        node04 = forrest01.concat_two_ASTs(node02, node03)
        self.assertEqual( node04.ast_node_type, '*' )
        self.assertEqual( node04.regex, '( t1 t2 ) *' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 3 )
    # * t1 : star on the left, terminal appended in a new serial.
    def test_concat_two_ASTs_0072( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.repeat_AST(node01)
        node03 = forrest01.create_initial_node('t1')
        node04 = forrest01.concat_two_ASTs(node02, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 t2 ) * t1' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 5 )
    # * t2 : same as above with the other terminal.
    def test_concat_two_ASTs_0073( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.repeat_AST(node01)
        node03 = forrest01.create_initial_node('t2')
        node04 = forrest01.concat_two_ASTs(node02, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 t2 ) * t2' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 5 )
    # * n1 : star followed by a nonterminal.
    def test_concat_two_ASTs_0074( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.repeat_AST(node01)
        node03 = forrest01.create_initial_node('n1')
        node04 = forrest01.concat_two_ASTs(node02, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 t2 ) * n1' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 5 )
    # * n2 : star followed by the other nonterminal.
    def test_concat_two_ASTs_0075( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.repeat_AST(node01)
        node03 = forrest01.create_initial_node('n2')
        node04 = forrest01.concat_two_ASTs(node02, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 t2 ) * n2' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 5 )
    # * u : star followed by a parenthesized union.
    def test_concat_two_ASTs_0076( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.repeat_AST(node01)
        node03 = forrest01.create_initial_union_node(['t1', 't2'])
        node04 = forrest01.concat_two_ASTs(node02, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 t2 ) * ( t1 | t2 )' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 7 )
    # * s : the right serial is flattened after the star.
    def test_concat_two_ASTs_0077( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.repeat_AST(node01)
        node03 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node04 = forrest01.concat_two_ASTs( node02, node03 )
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 t2 ) * t1 t2' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 7 )
    # * * : two stars concatenate into a serial of two parenthesized stars.
    def test_concat_two_ASTs_0078( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.repeat_AST(node01)
        node03 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node04 = forrest01.repeat_AST(node03)
        node05 = forrest01.concat_two_ASTs(node02, node04)
        self.assertEqual( node05.ast_node_type, 's' )
        self.assertEqual( node05.regex, '( t1 t2 ) * ( t1 t2 ) *' )
        self.assertEqual( len(node05.children_map), 0 )
        self.assertEqual( node05.num_terms, 0 )
        self.assertEqual( node05.height, 0 )
        self.assertEqual( node05.node_id, 8 )
    # * ? : star followed by an optional serial.
    def test_concat_two_ASTs_0079( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = forrest01.repeat_AST(node01)
        node03 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node04 = self.add_question_node( forrest01, node03 )
        node05 = forrest01.concat_two_ASTs(node02, node04)
        self.assertEqual( node05.ast_node_type, 's' )
        self.assertEqual( node05.regex, '( t1 t2 ) * ( t1 t2 ) ?' )
        self.assertEqual( len(node05.children_map), 0 )
        self.assertEqual( node05.num_terms, 0 )
        self.assertEqual( node05.height, 0 )
        self.assertEqual( node05.node_id, 8 )
    # ? e : epsilon is absorbed; the '?' node (node_id 3) comes back unchanged.
    def test_concat_two_ASTs_0081( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = self.add_question_node( forrest01, node01 )
        node03 = forrest01.create_initial_node('e')
        node04 = forrest01.concat_two_ASTs(node02, node03)
        self.assertEqual( node04.ast_node_type, '?' )
        self.assertEqual( node04.regex, '( t1 t2 ) ?' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 3 )
    # ? t1 : optional on the left, terminal appended in a new serial.
    def test_concat_two_ASTs_0082( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = self.add_question_node( forrest01, node01 )
        node03 = forrest01.create_initial_node('t1')
        node04 = forrest01.concat_two_ASTs(node02, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 t2 ) ? t1' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 5 )
    # ? t2 : same as above with the other terminal.
    def test_concat_two_ASTs_0083( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = self.add_question_node( forrest01, node01 )
        node03 = forrest01.create_initial_node('t2')
        node04 = forrest01.concat_two_ASTs(node02, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 t2 ) ? t2' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 5 )
    # ? n1 : optional followed by a nonterminal.
    def test_concat_two_ASTs_0084( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = self.add_question_node( forrest01, node01 )
        node03 = forrest01.create_initial_node('n1')
        node04 = forrest01.concat_two_ASTs(node02, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 t2 ) ? n1' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 5 )
    # ? n2 : optional followed by the other nonterminal.
    def test_concat_two_ASTs_0085( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = self.add_question_node( forrest01, node01 )
        node03 = forrest01.create_initial_node('n2')
        node04 = forrest01.concat_two_ASTs(node02, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 t2 ) ? n2' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 5 )
    # ? u : optional followed by a parenthesized union.
    def test_concat_two_ASTs_0086( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = self.add_question_node( forrest01, node01 )
        node03 = forrest01.create_initial_union_node(['t1', 't2'])
        node04 = forrest01.concat_two_ASTs(node02, node03)
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 t2 ) ? ( t1 | t2 )' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 7 )
    # ? s : the right serial is flattened after the optional.
    def test_concat_two_ASTs_0087( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = self.add_question_node( forrest01, node01 )
        node03 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node04 = forrest01.concat_two_ASTs( node02, node03 )
        self.assertEqual( node04.ast_node_type, 's' )
        self.assertEqual( node04.regex, '( t1 t2 ) ? t1 t2' )
        self.assertEqual( len(node04.children_map), 0 )
        self.assertEqual( node04.num_terms, 0 )
        self.assertEqual( node04.height, 0 )
        self.assertEqual( node04.node_id, 7 )
    # ? * : optional followed by a starred serial.
    def test_concat_two_ASTs_0088( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = self.add_question_node( forrest01, node01 )
        node03 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node04 = forrest01.repeat_AST(node03)
        node05 = forrest01.concat_two_ASTs(node02, node04)
        self.assertEqual( node05.ast_node_type, 's' )
        self.assertEqual( node05.regex, '( t1 t2 ) ? ( t1 t2 ) *' )
        self.assertEqual( len(node05.children_map), 0 )
        self.assertEqual( node05.num_terms, 0 )
        self.assertEqual( node05.height, 0 )
        self.assertEqual( node05.node_id, 8 )
    # ? ? : two optionals concatenate into a serial of two parenthesized optionals.
    def test_concat_two_ASTs_0089( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node02 = self.add_question_node( forrest01, node01 )
        node03 = self.create_initial_serial_node( forrest01, ['t1', 't2'] )
        node04 = self.add_question_node( forrest01, node03 )
        node05 = forrest01.concat_two_ASTs(node02, node04)
        self.assertEqual( node05.ast_node_type, 's' )
        self.assertEqual( node05.regex, '( t1 t2 ) ? ( t1 t2 ) ?' )
        self.assertEqual( len(node05.children_map), 0 )
        self.assertEqual( node05.num_terms, 0 )
        self.assertEqual( node05.height, 0 )
        self.assertEqual( node05.node_id, 8 )
# ? ?
def test_concat_two_ASTs_0089( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = self.create_initial_serial_node( forrest01, ['t1', 'e', 't2'] )
node02 = self.create_initial_serial_node( forrest01, ['t1', 'e', 't2'] )
node03 = forrest01.concat_two_ASTs(node01, node02)
self.assertEqual( node03.ast_node_type, 's' )
self.assertEqual( node03.regex, 't1 t2 t1 t2' )
self.assertEqual( len(node03.children_map), 0 )
self.assertEqual( node03.num_terms, 0 )
self.assertEqual( node03.height, 0 )
self.assertEqual( node03.node_id, 8 )
    # 'e' : repeating epsilon is a no-op; the epsilon node is returned as-is.
    def test_repeat_AST_0001( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_node('e')
        node02 = forrest01.repeat_AST( node01 )
        self.assertEqual( node02.ast_node_type, 'e' )
        self.assertEqual( node02.regex, 'e' )
        self.assertEqual( len(node02.children_map), 0 )
        self.assertEqual( node02.num_terms, 0 )
        self.assertEqual( node02.height, 0 )
        self.assertEqual( node02.node_id, 0 )
    # 't1' : a bare terminal is starred without parentheses.
    def test_repeat_AST_0002( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_node('t1')
        node02 = forrest01.repeat_AST( node01 )
        self.assertEqual( node02.ast_node_type, '*' )
        self.assertEqual( node02.regex, 't1 *' )
        self.assertEqual( len(node02.children_map), 0 )
        self.assertEqual( node02.num_terms, 0 )
        self.assertEqual( node02.height, 0 )
        self.assertEqual( node02.node_id, 1 )
    # 'n1' : a bare nonterminal is starred without parentheses.
    def test_repeat_AST_0003( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_node('n1')
        node02 = forrest01.repeat_AST( node01 )
        self.assertEqual( node02.ast_node_type, '*' )
        self.assertEqual( node02.regex, 'n1 *' )
        self.assertEqual( len(node02.children_map), 0 )
        self.assertEqual( node02.num_terms, 0 )
        self.assertEqual( node02.height, 0 )
        self.assertEqual( node02.node_id, 1 )
    # 'u' : a union is parenthesized before starring.
    def test_repeat_AST_0004( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_union_node( ['t1', 't2' ] )
        node02 = forrest01.repeat_AST( node01 )
        self.assertEqual( node02.ast_node_type, '*' )
        self.assertEqual( node02.regex, '( t1 | t2 ) *' )
        self.assertEqual( len(node02.children_map), 0 )
        self.assertEqual( node02.num_terms, 0 )
        self.assertEqual( node02.height, 0 )
        self.assertEqual( node02.node_id, 3 )
    # 's' : a serial is parenthesized before starring.
    def test_repeat_AST_0005( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2' ] )
        node02 = forrest01.repeat_AST( node01 )
        self.assertEqual( node02.ast_node_type, '*' )
        self.assertEqual( node02.regex, '( t1 t2 ) *' )
        self.assertEqual( len(node02.children_map), 0 )
        self.assertEqual( node02.num_terms, 0 )
        self.assertEqual( node02.height, 0 )
        self.assertEqual( node02.node_id, 3 )
    # '*' : repeating a star is idempotent — same node (id 3), no extra star.
    def test_repeat_AST_0006( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2' ] )
        node02 = forrest01.repeat_AST( node01 )
        node03 = forrest01.repeat_AST( node02 )
        self.assertEqual( node03.ast_node_type, '*' )
        self.assertEqual( node03.regex, '( t1 t2 ) *' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 3 )
    # '?' : repeating an optional collapses to a single star: ( x ? ) * == x *.
    def test_repeat_AST_0007( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = self.create_initial_serial_node( forrest01, ['t1', 't2' ] )
        node02 = self.add_question_node( forrest01, node01 )
        node03 = forrest01.repeat_AST( node02 )
        self.assertEqual( node03.ast_node_type, '*' )
        self.assertEqual( node03.regex, '( t1 t2 ) *' )
        self.assertEqual( len(node03.children_map), 0 )
        self.assertEqual( node03.num_terms, 0 )
        self.assertEqual( node03.height, 0 )
        self.assertEqual( node03.node_id, 3 )
    # epsilon in union : 'e' is dropped from the union before starring,
    # since ( t1 | t2 | e ) * == ( t1 | t2 ) *.
    def test_repeat_AST_0008( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        node01 = forrest01.create_initial_union_node([ 't1', 't2', 'e' ])
        node02 = forrest01.repeat_AST( node01 )
        self.assertEqual( node02.ast_node_type, '*' )
        self.assertEqual( node02.regex, '( t1 | t2 ) *' )
        self.assertEqual( len(node02.children_map), 0 )
        self.assertEqual( node02.num_terms, 0 )
        self.assertEqual( node02.height, 0 )
        self.assertEqual( node02.node_id, 4 )
# epsilon in union leaves one node.
def test_repeat_AST_0008( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
node01 = forrest01.create_initial_union_node([ 't1', 'e' ])
node02 = forrest01.repeat_AST( node01 )
self.assertEqual( node02.ast_node_type, '*' )
self.assertEqual( node02.regex, 't1 *' )
self.assertEqual( len(node02.children_map), 0 )
self.assertEqual( node02.num_terms, 0 )
self.assertEqual( node02.height, 0 )
self.assertEqual( node02.node_id, 3 )
    # (e) => none : removing a lone epsilon root empties the forrest.
    def test_remove_AST_0001( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            E_1
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node01 = self.helper.get_node(forrest01, 1)
        forrest01.remove_AST( node01 )
        self.assertEqual( forrest01.num_nodes(), 0 )
    # (t) => none : removing a lone terminal root empties the forrest.
    def test_remove_AST_0002( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            T1_1
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node01 = self.helper.get_node(forrest01, 1)
        forrest01.remove_AST( node01 )
        self.assertEqual( forrest01.num_nodes(), 0 )
    # (n) => none : removing a lone nonterminal root empties the forrest.
    def test_remove_AST_0003( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            N1_1
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node01 = self.helper.get_node(forrest01, 1)
        forrest01.remove_AST( node01 )
        self.assertEqual( forrest01.num_nodes(), 0 )
    # (u) => none : removing a union root removes its children too.
    def test_remove_AST_0004( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            |_1:T2_2 T3_3
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node01 = self.helper.get_node(forrest01, 1)
        forrest01.remove_AST( node01 )
        self.assertEqual( forrest01.num_nodes(), 0 )
    # (s) => none : removing a serial root removes its children too.
    def test_remove_AST_0005( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            S_1:T3_2 T3_3
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node01 = self.helper.get_node(forrest01, 1)
        forrest01.remove_AST( node01 )
        self.assertEqual( forrest01.num_nodes(), 0 )
    # (*) => none : removing a star root removes its child too.
    def test_remove_AST_0006( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            *_1:T2_2
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node01 = self.helper.get_node(forrest01, 1)
        forrest01.remove_AST( node01 )
        self.assertEqual( forrest01.num_nodes(), 0 )
    # (?) => none : removing an optional root removes its child too.
    def test_remove_AST_0007( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            ?_1:T2_2
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node01 = self.helper.get_node(forrest01, 1)
        forrest01.remove_AST( node01 )
        self.assertEqual( forrest01.num_nodes(), 0 )
    # |_1:(T2_2) T3_3 : removing the first child of a union leaves the second.
    def test_remove_AST_0008( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            |_1:T2_2 T3_3
        '''
        spec_expected = '''
            |_1:T3_3
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node02 = self.helper.get_node(forrest01, 2)
        forrest01.remove_AST( node02 )
        spec03 = self.helper.display_tree( root01 )
        self.assertEqual( self.helper.compare_specs( spec03, spec_expected ), True )
    # |_1:T2_2 (T3_3) : removing the second child of a union leaves the first.
    def test_remove_AST_0009( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            |_1:T2_2 T3_3
        '''
        spec_expected = '''
            |_1:T2_2
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node03 = self.helper.get_node(forrest01, 3)
        forrest01.remove_AST( node03 )
        spec03 = self.helper.display_tree( root01 )
        self.assertEqual( self.helper.compare_specs( spec03, spec_expected ), True )
    # S_1:(T2_2) T2_3 : removing the first child of a serial leaves the second.
    # (Comment previously said 'T3_3'; the spec actually uses T2_3.)
    def test_remove_AST_0010( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            S_1:T2_2 T2_3
        '''
        spec_expected = '''
            S_1:T2_3
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node01 = self.helper.get_node(forrest01, 2)
        forrest01.remove_AST( node01 )
        spec03 = self.helper.display_tree( root01 )
        self.assertEqual( self.helper.compare_specs( spec03, spec_expected ), True )
    # S_1:T2_2 (T3_3) : removing the second child of a serial leaves the first.
    def test_remove_AST_0011( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            S_1:T2_2 T3_3
        '''
        spec_expected = '''
            S_1:T2_2
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node01 = self.helper.get_node(forrest01, 3)
        forrest01.remove_AST( node01 )
        spec03 = self.helper.display_tree( root01 )
        self.assertEqual( self.helper.compare_specs( spec03, spec_expected ), True )
    # *_1:(T2_2) : removing the only child leaves a bare star node.
    def test_remove_AST_0012( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            *_1:T2_2
        '''
        spec_expected = '''
            *_1
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node01 = self.helper.get_node(forrest01, 2)
        forrest01.remove_AST( node01 )
        spec03 = self.helper.display_tree( root01 )
        self.assertEqual( self.helper.compare_specs( spec03, spec_expected ), True )
    # ?_1:(T2_2) : removing the only child leaves a bare optional node.
    def test_remove_AST_0013( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            ?_1:T2_2
        '''
        spec_expected = '''
            ?_1
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node01 = self.helper.get_node(forrest01, 2)
        forrest01.remove_AST( node01 )
        spec03 = self.helper.display_tree( root01 )
        self.assertEqual( self.helper.compare_specs( spec03, spec_expected ), True )
    # General pattern : removing an interior serial (S_3) removes its whole
    # subtree, leaving the remaining union children intact.
    def test_remove_AST_0014( self ):
        forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
        spec01 = '''
            |_1:T2_2 S_3 T4_4 E_5
            S_3:T6_6 |_7 T8_8
            |_7:T9_9 T10_10
        '''
        spec_expected = '''
            |_1:T2_2 T4_4 E_5
        '''
        root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
        node01 = self.helper.get_node(forrest01, 3)
        forrest01.remove_AST( node01 )
        spec03 = self.helper.display_tree( root01 )
        self.assertEqual( self.helper.compare_specs( spec03, spec_expected ), True )
def test_clone_AST_0001( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
spec01 = '''
|_0:T1_1 S_2 T3_3 E_4
S_2:T5_5 |_6 T7_7
|_6:T8_8 T9_9
'''
spec_expected = '''
|_10:T1_11 S_12 T3_18 E_19
S_12:T5_13 |_14 T7_17
|_14:T8_15 T9_16
'''
root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
root02 = forrest01.clone_AST( root01 )
spec02 = self.helper.display_tree( root02 )
self.assertEqual( self.helper.compare_specs( spec02, spec_expected ), True )
def test_clone_AST_0002(self):
    """Cloning a terminal leaf yields a single fresh T-node."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
|_0:T1_1 S_2 T3_3 E_4
S_2:T1_5 |_6 T7_7
|_6:T8_8 T9_9
'''
    expected = '''
T1_10
'''
    self.helper.construct_ast_from_spec(forest, src_spec)
    leaf = self.helper.get_node(forest, 1)
    duplicate = forest.clone_AST(leaf)
    rendered = self.helper.display_tree(duplicate)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_clone_AST_0003(self):
    """Cloning an inner S-node copies its whole subtree with fresh ids."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
|_0:T1_1 S_2 T3_3 E_4
S_2:T5_5 |_6 T7_7
|_6:T8_8 T9_9
'''
    expected = '''
S_10:T5_11 |_12 T7_15
|_12:T8_13 T9_14
'''
    self.helper.construct_ast_from_spec(forest, src_spec)
    subtree = self.helper.get_node(forest, 2)
    duplicate = forest.clone_AST(subtree)
    rendered = self.helper.display_tree(duplicate)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_clone_AST_0004(self):
    """Cloning leaf T3 produces a lone T3 clone."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
|_0:T1_1 S_2 T3_3 E_4
S_2:T5_5 |_6 T7_7
|_6:T8_8 T9_9
'''
    expected = '''
T3_10
'''
    self.helper.construct_ast_from_spec(forest, src_spec)
    leaf = self.helper.get_node(forest, 3)
    duplicate = forest.clone_AST(leaf)
    rendered = self.helper.display_tree(duplicate)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_clone_AST_0005(self):
    """Cloning an epsilon leaf produces a lone E clone."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
|_0:T1_1 S_2 T3_3 E_4
S_2:T5_5 |_6 T7_7
|_6:T8_8 T9_9
'''
    expected = '''
E_10
'''
    self.helper.construct_ast_from_spec(forest, src_spec)
    leaf = self.helper.get_node(forest, 4)
    duplicate = forest.clone_AST(leaf)
    rendered = self.helper.display_tree(duplicate)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_clone_AST_0006(self):
    """Cloning leaf T5 (nested one level deep) produces a lone T5 clone."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
|_0:T1_1 S_2 T3_3 E_4
S_2:T5_5 |_6 T7_7
|_6:T8_8 T9_9
'''
    expected = '''
T5_10
'''
    self.helper.construct_ast_from_spec(forest, src_spec)
    leaf = self.helper.get_node(forest, 5)
    duplicate = forest.clone_AST(leaf)
    rendered = self.helper.display_tree(duplicate)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_clone_AST_0007(self):
    """Cloning the nested union copies both of its children."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
|_0:T1_1 S_2 T3_3 E_4
S_2:T5_5 |_6 T7_7
|_6:T8_8 T9_9
'''
    expected = '''
|_10:T8_11 T9_12
'''
    self.helper.construct_ast_from_spec(forest, src_spec)
    subtree = self.helper.get_node(forest, 6)
    duplicate = forest.clone_AST(subtree)
    rendered = self.helper.display_tree(duplicate)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_clone_AST_0008(self):
    """Cloning leaf T7 produces a lone T7 clone."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
|_0:T1_1 S_2 T3_3 E_4
S_2:T5_5 |_6 T7_7
|_6:T8_8 T9_9
'''
    expected = '''
T7_10
'''
    self.helper.construct_ast_from_spec(forest, src_spec)
    leaf = self.helper.get_node(forest, 7)
    duplicate = forest.clone_AST(leaf)
    rendered = self.helper.display_tree(duplicate)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_clone_AST_0009(self):
    """Cloning leaf T8 (two levels deep) produces a lone T8 clone."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
|_0:T1_1 S_2 T3_3 E_4
S_2:T5_5 |_6 T7_7
|_6:T8_8 T9_9
'''
    expected = '''
T8_10
'''
    self.helper.construct_ast_from_spec(forest, src_spec)
    leaf = self.helper.get_node(forest, 8)
    duplicate = forest.clone_AST(leaf)
    rendered = self.helper.display_tree(duplicate)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_clone_AST_0010(self):
    """Cloning leaf T9 (two levels deep) produces a lone T9 clone."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
|_0:T1_1 S_2 T3_3 E_4
S_2:T5_5 |_6 T7_7
|_6:T8_8 T9_9
'''
    expected = '''
T9_10
'''
    self.helper.construct_ast_from_spec(forest, src_spec)
    leaf = self.helper.get_node(forest, 9)
    duplicate = forest.clone_AST(leaf)
    rendered = self.helper.display_tree(duplicate)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_clone_AST_0011(self):
    """Cloning a single-epsilon tree yields a fresh epsilon node."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
E_0
'''
    expected = '''
E_1
'''
    original = self.helper.construct_ast_from_spec(forest, src_spec)
    duplicate = forest.clone_AST(original)
    rendered = self.helper.display_tree(duplicate)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_remove_epsilons_from_unions_0001(self):
    """A union with no epsilon child is left untouched."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 |_2 T3_3
|_2:T4_4 T5_5 T6_6
'''
    expected = '''
S_0:T1_1 |_2 T3_3
|_2:T4_4 T5_5 T6_6
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.remove_epsilons_from_unions(root)
    rendered = self.helper.display_tree(root)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_remove_epsilons_from_unions_0002(self):
    """An epsilon among 3+ union children: union is wrapped in ? and epsilon dropped."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 |_2 T3_3
|_2:T4_4 E_5 T6_6
'''
    expected = '''
S_0:T1_1 ?_2 T3_3
?_2:|_7
|_7:T4_4 T6_6
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.remove_epsilons_from_unions(root)
    rendered = self.helper.display_tree(root)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_remove_epsilons_from_unions_0003(self):
    """A two-way union (T | E) collapses to an optional terminal."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 |_2 T3_3
|_2:T4_4 E_5
'''
    expected = '''
S_0:T1_1 ?_2 T3_3
?_2:T4_6
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.remove_epsilons_from_unions(root)
    rendered = self.helper.display_tree(root)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_remove_epsilons_from_unions_0004(self):
    """Epsilon removal keeps a sibling S-subtree intact under the new ? wrapper."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 |_2 T3_3
|_2:S_4 E_5 T6_6
S_4:T7_7 T8_8 T9_9
'''
    expected = '''
S_0:T1_1 ?_2 T3_3
?_2:|_10
|_10:S_4 T6_6
S_4:T7_7 T8_8 T9_9
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.remove_epsilons_from_unions(root)
    rendered = self.helper.display_tree(root)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_remove_epsilons_from_unions_0005(self):
    """A two-way union (S | E) collapses to an optional S-subtree."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 |_2 T3_3
|_2:S_4 E_5
S_4:T6_6 T7_7 T8_8
'''
    expected = '''
S_0:T1_1 ?_2 T3_3
?_2:S_9
S_9:T6_6 T7_7 T8_8
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.remove_epsilons_from_unions(root)
    rendered = self.helper.display_tree(root)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_create_tree_root_for_reduction_0001(self):
    """create_tree_root_for_reduction wraps the AST under a new R node stored on the forest."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 |_2 T3_3
|_2:S_4 T5_5 T6_6
S_4:T7_7 T8_8 T9_9
'''
    expected = '''
R_10:S_0
S_0:T1_1 |_2 T3_3
|_2:S_4 T5_5 T6_6
S_4:T7_7 T8_8 T9_9
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.create_tree_root_for_reduction(root)
    rendered = self.helper.display_tree(forest.root)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
def test_prepare_for_reduction_0001( self ):
    """prepare_for_reduction annotates every node with regex, num_terms, height,
    and children_map after the reduction root has been created.

    Fix: the node03 verification block previously re-checked
    len(node04.children_map) (copy-paste from the node04 block), leaving
    node03.children_map unverified. It now checks node03.
    """
    forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
    spec01 = '''
S_0:T1_1 |_2 T3_3
|_2:S_4 T5_5 T6_6
S_4:T7_7 T8_8 T9_9
'''
    spec_expected = '''
R_10:S_0
S_0:T1_1 |_2 T3_3
|_2:S_4 T5_5 T6_6
S_4:T7_7 T8_8 T9_9
'''
    root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
    forrest01.create_tree_root_for_reduction( root01 )
    forrest01.prepare_for_reduction()
    # The tree shape itself must be unchanged by the preparation pass.
    spec02 = self.helper.display_tree( forrest01.root )
    self.assertEqual( self.helper.compare_specs( spec02, spec_expected ), True )
    node10 = self.helper.get_node( forrest01, 10 )
    node00 = self.helper.get_node( forrest01, 0 )
    node01 = self.helper.get_node( forrest01, 1 )
    node02 = self.helper.get_node( forrest01, 2 )
    node03 = self.helper.get_node( forrest01, 3 )
    node04 = self.helper.get_node( forrest01, 4 )
    node05 = self.helper.get_node( forrest01, 5 )
    node06 = self.helper.get_node( forrest01, 6 )
    node07 = self.helper.get_node( forrest01, 7 )
    node08 = self.helper.get_node( forrest01, 8 )
    node09 = self.helper.get_node( forrest01, 9 )
    # R node: whole-tree aggregate; maps its single child by nonterminal name.
    self.assertEqual( node10.regex, 'r' )
    self.assertEqual( node10.num_terms, 7 )
    self.assertEqual( node10.height, 5 )
    self.assertEqual( len(node10.children_map), 1 )
    self.assertEqual( node10.children_map['n0'], node00 )
    # Terminal leaves: regex is the token, one term, height 1, no children.
    self.assertEqual( node09.regex, 't9' )
    self.assertEqual( node09.num_terms, 1 )
    self.assertEqual( node09.height, 1 )
    self.assertEqual( len(node09.children_map), 0 )
    self.assertEqual( node08.regex, 't8' )
    self.assertEqual( node08.num_terms, 1 )
    self.assertEqual( node08.height, 1 )
    self.assertEqual( len(node08.children_map), 0 )
    self.assertEqual( node07.regex, 't7' )
    self.assertEqual( node07.num_terms, 1 )
    self.assertEqual( node07.height, 1 )
    self.assertEqual( len(node07.children_map), 0 )
    self.assertEqual( node06.regex, 't6' )
    self.assertEqual( node06.num_terms, 1 )
    self.assertEqual( node06.height, 1 )
    self.assertEqual( len(node06.children_map), 0 )
    self.assertEqual( node05.regex, 't5' )
    self.assertEqual( node05.num_terms, 1 )
    self.assertEqual( node05.height, 1 )
    self.assertEqual( len(node05.children_map), 0 )
    # Inner sequence S_4 concatenates its children's regexes.
    self.assertEqual( node04.regex, 't7 t8 t9' )
    self.assertEqual( node04.num_terms, 3 )
    self.assertEqual( node04.height, 2 )
    self.assertEqual( len(node04.children_map), 0 )
    self.assertEqual( node03.regex, 't3' )
    self.assertEqual( node03.num_terms, 1 )
    self.assertEqual( node03.height, 1 )
    # Fix: was len(node04.children_map) — copy-paste error; check node03 here.
    self.assertEqual( len(node03.children_map), 0 )
    # Union node: alternatives keyed by each child's regex string.
    self.assertEqual( node02.regex, '( t7 t8 t9 | t5 | t6 )' )
    self.assertEqual( node02.num_terms, 5 )
    self.assertEqual( node02.height, 3 )
    self.assertEqual( len(node02.children_map), 3 )
    self.assertEqual( node02.children_map['t7 t8 t9'], node04 )
    self.assertEqual( node02.children_map['t5'], node05 )
    self.assertEqual( node02.children_map['t6'], node06 )
    self.assertEqual( node01.regex, 't1' )
    self.assertEqual( node01.num_terms, 1 )
    self.assertEqual( node01.height, 1 )
    self.assertEqual( len(node01.children_map), 0 )
    self.assertEqual( node00.regex, 't1 ( t7 t8 t9 | t5 | t6 ) t3' )
    self.assertEqual( node00.num_terms, 7 )
    self.assertEqual( node00.height, 4 )
    self.assertEqual( len(node00.children_map), 0 )
def test_find_nonterminals_0001(self):
    """find_nonterminals reports the tree root plus every N-node in the tree."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 |_2 T3_3
|_2:S_4 N5_5 T6_6
S_4:T7_7 T8_8 N9_9
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.create_tree_root_for_reduction(root)
    forest.prepare_for_reduction()
    found = forest.find_nonterminals()
    self.assertEqual(len(found), 3)
    for symbol in ('n0', 'n5', 'n9'):
        self.assertEqual(symbol in found, True)
def test_isolate_balanced_children_0001(self):
    """Isolating the full child span strips the first/last tokens into the
    parent's balanced_out_pre/post without creating a new S-node."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 T2_2 |_3 T4_4 T5_5
|_3:S_6 N7_7 T8_8
S_6:T9_9 T10_10 N11_11
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.create_tree_root_for_reduction(root)
    forest.prepare_for_reduction()
    parent = self.helper.get_node(forest, 0)
    result = forest.isolate_balanced_children(parent, 0, 4)
    rendered = self.helper.display_tree(forest.root)
    expected = '''
R_12:S_0
S_0:T2_2 |_3 T4_4
|_3:S_6 N7_7 T8_8
S_6:T9_9 T10_10 N11_11
'''
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
    self.assertEqual(parent.balanced_out_pre, 't1')
    self.assertEqual(parent.balanced_out_post, 't5')
# Isolate the span [3, 5) of S_0's children: T4 is stripped into node05's
# balanced_out_pre/post and the remaining span needs no new S-node wrapper.
# NOTE(review): a later test in this file reuses this exact method name;
# while both definitions share the name, Python keeps only the later one,
# so this copy is shadowed and never collected by unittest.
def test_isolate_balanced_children_0002( self ):
forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
spec01 = '''
S_0:T1_1 T2_2 |_3 T4_4 T5_5 T6_6
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
forrest01.create_tree_root_for_reduction( root01 )
forrest01.prepare_for_reduction()
node01 = self.helper.get_node( forrest01, 0 )
pos_begin = 3
pos_end = 5
list01 = forrest01.isolate_balanced_children( node01, pos_begin, pos_end )
spec02 = self.helper.display_tree( forrest01.root )
node05 = self.helper.get_node( forrest01, 5 )
# print (spec02)
spec_expected = '''
R_13:S_0
S_0:T1_1 T2_2 |_3 T5_5
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
self.assertEqual( self.helper.compare_specs( spec02, spec_expected ), True )
# The tokens flanking the isolated span are recorded on the surviving node.
self.assertEqual( node05.balanced_out_pre, 't4' )
self.assertEqual( node05.balanced_out_post, 't6' )
def test_isolate_balanced_children_0002b( self ):
    """Isolate span [3, 5) of S_0's children; T4 becomes node05's pre/post tokens.

    Fix: this method previously redefined test_isolate_balanced_children_0002,
    shadowing the earlier identical definition so only one of the two ever ran.
    Renamed to 0002b so both are collected by unittest.
    """
    forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
    spec01 = '''
S_0:T1_1 T2_2 |_3 T4_4 T5_5 T6_6
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
    root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
    forrest01.create_tree_root_for_reduction( root01 )
    forrest01.prepare_for_reduction()
    node01 = self.helper.get_node( forrest01, 0 )
    pos_begin = 3
    pos_end = 5
    list01 = forrest01.isolate_balanced_children( node01, pos_begin, pos_end )
    spec02 = self.helper.display_tree( forrest01.root )
    node05 = self.helper.get_node( forrest01, 5 )
    spec_expected = '''
R_13:S_0
S_0:T1_1 T2_2 |_3 T5_5
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
    self.assertEqual( self.helper.compare_specs( spec02, spec_expected ), True )
    self.assertEqual( node05.balanced_out_pre, 't4' )
    self.assertEqual( node05.balanced_out_post, 't6' )
def test_isolate_balanced_children_0003(self):
    """Isolating an inner multi-child span wraps it in a new S-node (S_14)
    that records the flanking tokens."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 T2_2 |_3 T4_4 T5_5 T6_6
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.create_tree_root_for_reduction(root)
    forest.prepare_for_reduction()
    parent = self.helper.get_node(forest, 0)
    result = forest.isolate_balanced_children(parent, 1, 4)
    rendered = self.helper.display_tree(forest.root)
    wrapper = self.helper.get_node(forest, 14)
    expected = '''
R_13:S_0
S_0:T1_1 S_14 T6_6
S_14:|_3 T4_4
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
    self.assertEqual(wrapper.balanced_out_pre, 't2')
    self.assertEqual(wrapper.balanced_out_post, 't5')
def test_isolate_balanced_children_0004(self):
    """Isolating a single-child inner span still creates the S-node wrapper."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 T2_2 |_3 T4_4 T5_5 T6_6
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.create_tree_root_for_reduction(root)
    forest.prepare_for_reduction()
    parent = self.helper.get_node(forest, 0)
    result = forest.isolate_balanced_children(parent, 1, 3)
    rendered = self.helper.display_tree(forest.root)
    wrapper = self.helper.get_node(forest, 14)
    expected = '''
R_13:S_0
S_0:T1_1 S_14 T5_5 T6_6
S_14:|_3
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
    self.assertEqual(wrapper.balanced_out_pre, 't2')
    self.assertEqual(wrapper.balanced_out_post, 't4')
def test_isolate_balanced_children_0005(self):
    """When the stripped node already carries balanced-out tokens, a wrapper
    S-node is created to hold the new flanking tokens instead."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 T2_2 |_3 T4_4 T5_5 T6_6
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.create_tree_root_for_reduction(root)
    forest.prepare_for_reduction()
    parent = self.helper.get_node(forest, 0)
    # Pre-load node 5 with balanced-out tokens so it cannot absorb new ones.
    occupied = self.helper.get_node(forest, 5)
    occupied.balanced_out_pre = 't100'
    occupied.balanced_out_post = 't200'
    result = forest.isolate_balanced_children(parent, 3, 5)
    rendered = self.helper.display_tree(forest.root)
    wrapper = self.helper.get_node(forest, 14)
    expected = '''
R_13:S_0
S_0:T1_1 T2_2 |_3 S_14
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
S_14:T5_5
'''
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
    self.assertEqual(wrapper.balanced_out_pre, 't4')
    self.assertEqual(wrapper.balanced_out_post, 't6')
def test_visit_and_balance_trees_for_out_tokens_0001( self ):
    """An empty balancing dict must leave the tree untouched.

    Fix: dropped the unused 'list01' binding and the dead
    self.helper.get_node(forrest01, 14) lookup — no node 14 exists in this
    test (the highest allocated id is R_13) and its result was never used.
    """
    forrest01 = nlpregex.regular_language.sse_forrest.sseASForrest()
    spec01 = '''
S_0:T1_1 T2_2 |_3 T4_4 T5_5 T6_6
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
    root01 = self.helper.construct_ast_from_spec( forrest01, spec01 )
    forrest01.create_tree_root_for_reduction( root01 )
    forrest01.prepare_for_reduction()
    node01 = self.helper.get_node( forrest01, 0 )
    forrest01.visit_and_balance_trees_for_out_tokens( node01, {} )
    spec02 = self.helper.display_tree( forrest01.root )
    spec_expected = '''
R_13:S_0
S_0:T1_1 T2_2 |_3 T4_4 T5_5 T6_6
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
    self.assertEqual( self.helper.compare_specs( spec02, spec_expected ), True )
def test_visit_and_balance_trees_for_out_tokens_0002(self):
    """A {t1: t6} balancing pair strips both tokens off S_0 into its
    balanced_out_pre/post fields."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 T2_2 |_3 T4_4 T5_5 T6_6
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.create_tree_root_for_reduction(root)
    forest.prepare_for_reduction()
    target = self.helper.get_node(forest, 0)
    result = forest.visit_and_balance_trees_for_out_tokens(target, {'t1': 't6'})
    rendered = self.helper.display_tree(forest.root)
    # NOTE(review): this lookup is unused; kept verbatim from the original.
    node14 = self.helper.get_node(forest, 14)
    expected = '''
R_13:S_0
S_0:T2_2 |_3 T4_4 T5_5
|_3:S_7 N8_8 T9_9
S_7:T10_10 T11_11 N12_12
'''
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
    self.assertEqual(target.balanced_out_pre, 't1')
    self.assertEqual(target.balanced_out_post, 't6')
def test_visit_and_balance_trees_for_out_tokens_0003(self):
    """Nested balancing pairs peel off layer by layer, each layer recorded on
    the node (or new S-wrapper) that survives it."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 T2_2 T3_3 T4_4 T5_5 T6_6 T7_7
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.create_tree_root_for_reduction(root)
    forest.prepare_for_reduction()
    target = self.helper.get_node(forest, 0)
    pairs = {'t1': 't7', 't2': 't6', 't3': 't5'}
    result = forest.visit_and_balance_trees_for_out_tokens(target, pairs)
    rendered = self.helper.display_tree(forest.root)
    # NOTE(review): this lookup is unused; kept verbatim from the original.
    node14 = self.helper.get_node(forest, 14)
    expected = '''
R_8:S_0
S_0:S_9
S_9:T4_4
'''
    inner_leaf = self.helper.get_node(forest, 4)
    wrapper = self.helper.get_node(forest, 9)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
    self.assertEqual(target.balanced_out_pre, 't1')
    self.assertEqual(target.balanced_out_post, 't7')
    self.assertEqual(wrapper.balanced_out_pre, 't2')
    self.assertEqual(wrapper.balanced_out_post, 't6')
    self.assertEqual(inner_leaf.balanced_out_pre, 't3')
    self.assertEqual(inner_leaf.balanced_out_post, 't5')
def test_visit_and_balance_trees_for_out_tokens_0004(self):
    """Two disjoint balancing pairs: one span gets an S-wrapper, the other is
    absorbed by the remaining leaf; S_0 itself stays unbalanced."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 T2_2 T3_3 T4_4 T5_5 T6_6 T7_7
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.create_tree_root_for_reduction(root)
    forest.prepare_for_reduction()
    target = self.helper.get_node(forest, 0)
    pairs = {'t1': 't4', 't5': 't7'}
    result = forest.visit_and_balance_trees_for_out_tokens(target, pairs)
    rendered = self.helper.display_tree(forest.root)
    # NOTE(review): this lookup is unused; kept verbatim from the original.
    node14 = self.helper.get_node(forest, 14)
    expected = '''
R_8:S_0
S_0:S_9 T6_6
S_9:T2_2 T3_3
'''
    wrapper = self.helper.get_node(forest, 9)
    leaf6 = self.helper.get_node(forest, 6)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
    self.assertEqual(target.balanced_out_pre, '')
    self.assertEqual(target.balanced_out_post, '')
    self.assertEqual(wrapper.balanced_out_pre, 't1')
    self.assertEqual(wrapper.balanced_out_post, 't4')
    self.assertEqual(leaf6.balanced_out_pre, 't5')
    self.assertEqual(leaf6.balanced_out_post, 't7')
def test_balance_trees_for_out_tokens_0001(self):
    """balance_trees_for_out_tokens applies a list of (pre, post) pairs across
    the whole forest, balancing nested sequences at multiple depths."""
    forest = nlpregex.regular_language.sse_forrest.sseASForrest()
    src_spec = '''
S_0:T1_1 T2_2 |_3 T4_4 T5_5 T6_6 T7_7
|_3:S_8 N9_9 T10_10
S_8:T11_11 T12_12 N13_13 T14_14
'''
    root = self.helper.construct_ast_from_spec(forest, src_spec)
    forest.create_tree_root_for_reduction(root)
    forest.prepare_for_reduction()
    target = self.helper.get_node(forest, 0)
    pairs = [('t11', 't14'), ('t1', 't4'), ('t5', 't7')]
    result = forest.balance_trees_for_out_tokens(pairs)
    rendered = self.helper.display_tree(forest.root)
    expected = '''
R_15:S_0
S_0:S_16 T6_6
S_16:T2_2 |_3
|_3:S_8 N9_9 T10_10
S_8:T12_12 N13_13
'''
    inner_seq = self.helper.get_node(forest, 8)
    wrapper = self.helper.get_node(forest, 16)
    leaf6 = self.helper.get_node(forest, 6)
    self.assertEqual(self.helper.compare_specs(rendered, expected), True)
    self.assertEqual(inner_seq.balanced_out_pre, 't11')
    self.assertEqual(inner_seq.balanced_out_post, 't14')
    self.assertEqual(wrapper.balanced_out_pre, 't1')
    self.assertEqual(wrapper.balanced_out_post, 't4')
    self.assertEqual(leaf6.balanced_out_pre, 't5')
    self.assertEqual(leaf6.balanced_out_post, 't7')
# Allow running this test module directly from the command line.
if __name__ == '__main__':
unittest.main()
| 37.562372
| 101
| 0.578226
| 25,914
| 238,784
| 5.087713
| 0.010612
| 0.262926
| 0.099148
| 0.035497
| 0.984717
| 0.972225
| 0.954165
| 0.945966
| 0.943046
| 0.937198
| 0
| 0.094341
| 0.316692
| 238,784
| 6,356
| 102
| 37.568282
| 0.713703
| 0.008711
| 0
| 0.865454
| 0
| 0
| 0.050129
| 0
| 0
| 0
| 0
| 0
| 0.553268
| 1
| 0.057458
| false
| 0
| 0.000958
| 0
| 0.059612
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1c7dd66a92799563793fe0a5f001de32daebcef9
| 57,420
|
py
|
Python
|
tests/test_SpiralArmsPotential.py
|
gusbeane/galpy
|
d6db971285f163456c81775fc2fdc7d75189762c
|
[
"BSD-3-Clause"
] | 147
|
2015-01-01T14:06:17.000Z
|
2022-03-24T14:47:41.000Z
|
tests/test_SpiralArmsPotential.py
|
gusbeane/galpy
|
d6db971285f163456c81775fc2fdc7d75189762c
|
[
"BSD-3-Clause"
] | 269
|
2015-01-07T15:58:31.000Z
|
2022-03-30T18:42:08.000Z
|
tests/test_SpiralArmsPotential.py
|
gusbeane/galpy
|
d6db971285f163456c81775fc2fdc7d75189762c
|
[
"BSD-3-Clause"
] | 110
|
2015-02-08T10:57:24.000Z
|
2021-12-28T07:56:49.000Z
|
from __future__ import division
from galpy.potential import SpiralArmsPotential as spiral
import numpy as np
from numpy import pi
from numpy.testing import assert_allclose
from scipy.misc import derivative as deriv
import unittest
class TestSpiralArmsPotential(unittest.TestCase):
def test_constructor(self):
    """Test that constructor initializes and converts units correctly."""
    sp = spiral()  # default values
    expected_attrs = [
        ('_amp', 1),
        ('_N', -2),  # trick to change to left handed coordinate system
        ('_alpha', -0.2),
        ('_r_ref', 1),
        ('_phi_ref', 0),
        ('_Rs', 0.3),
        ('_H', 0.125),
        ('_Cs', [1]),
        ('_omega', 0),
        ('_rho0', 1 / (4 * pi)),
        ('isNonAxi', True),
        ('hasC', True),
        ('hasC_dxdv', True),
        ('_ro', 8),
        ('_vo', 220),
    ]
    for attr, value in expected_attrs:
        assert getattr(sp, attr) == value
def test_Rforce(self):
    """Tests Rforce against a numerical derivative -d(Potential) / dR.

    Fix: one check below evaluated the numerical derivative at t=0 while the
    analytic Rforce used t. With this pot's omega=0 the potential is
    time-independent so the check happened to pass, but the comparison was
    inconsistent with every neighbouring line; it now uses t on both sides.
    """
    dx = 1e-8
    rtol = 1e-5  # relative tolerance
    pot = spiral()
    assert_allclose(pot.Rforce(1., 0.), -deriv(lambda x: pot(x, 0.), 1., dx=dx), rtol=rtol)
    R, z, t = 0.3, 0, 0
    assert_allclose(pot.Rforce(R, z, 0, t), -deriv(lambda x: pot(x, z, 0, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi/2.2, t), -deriv(lambda x: pot(x, z, pi/2.2, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi, t), -deriv(lambda x: pot(x, z, pi, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 3.7*pi/2, t), -deriv(lambda x: pot(x, z, 3.7*pi/2, t), R, dx=dx), rtol=rtol)
    R, z, t = 1, -.7, 3
    assert_allclose(pot.Rforce(R, z, 0, t), -deriv(lambda x: pot(x, z, 0, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi/2, t), -deriv(lambda x: pot(x, z, pi/2, t), R, dx=dx), rtol=rtol)
    # Fix: was pot(x, z, pi, 0) — time argument now matches the Rforce call.
    assert_allclose(pot.Rforce(R, z, pi, t), -deriv(lambda x: pot(x, z, pi, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 3.3*pi/2, t), -deriv(lambda x: pot(x, z, 3.3*pi/2, t), R, dx=dx), rtol=rtol)
    R, z = 3.14, .7
    assert_allclose(pot.Rforce(R, z, 0), -deriv(lambda x: pot(x, z, 0), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi / 2), -deriv(lambda x: pot(x, z, pi / 2), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi), -deriv(lambda x: pot(x, z, pi), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 3*pi/2), -deriv(lambda x: pot(x, z, 3*pi/2), R, dx=dx), rtol=rtol)
    pot = spiral(amp=13, N=7, alpha=-0.3, r_ref=0.5, phi_ref=0.3, Rs=0.7, H=0.7, Cs=[1, 2, 3], omega=3)
    assert_allclose(pot.Rforce(1., 0.), -deriv(lambda x: pot(x, 0.), 1., dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(0.01, 0.), -deriv(lambda x: pot(x, 0.), 0.01, dx=dx), rtol=rtol)
    R, z, t = 0.3, 0, 1.123
    assert_allclose(pot.Rforce(R, z, 0, t), -deriv(lambda x: pot(x, z, 0, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi/2, t), -deriv(lambda x: pot(x, z, pi/2, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi, t), -deriv(lambda x: pot(x, z, pi, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 3*pi/2, t), -deriv(lambda x: pot(x, z, 3*pi/2, t), R, dx=dx), rtol=rtol)
    R, z, t = 1, -.7, 121
    assert_allclose(pot.Rforce(R, z, 0, t), -deriv(lambda x: pot(x, z, 0, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi / 2, t), -deriv(lambda x: pot(x, z, pi / 2, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi, t), -deriv(lambda x: pot(x, z, pi, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 3*pi/2, t), -deriv(lambda x: pot(x, z, 3*pi/2, t), R, dx=dx), rtol=rtol)
    R, z, t = 3.14, .7, 0.123
    assert_allclose(pot.Rforce(R, z, 0, t), -deriv(lambda x: pot(x, z, 0, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi/2, t), -deriv(lambda x: pot(x, z, pi / 2, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi, t), -deriv(lambda x: pot(x, z, pi, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 3*pi/2, t), -deriv(lambda x: pot(x, z, 3*pi/2, t), R, dx=dx), rtol=rtol)
    pot = spiral(amp=13, N=1, alpha=0.01, r_ref=1.12, phi_ref=0, Cs=[1, 1.5, 8.], omega=-3)
    assert_allclose(pot.Rforce(1., 0.), -deriv(lambda x: pot(x, 0.), 1., dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(0.1, 0.), -deriv(lambda x: pot(x, 0.), 0.1, dx=dx), rtol=rtol)
    R, z, t = 0.3, 0, -4.5
    assert_allclose(pot.Rforce(R, z, 0, t), -deriv(lambda x: pot(x, z, 0, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi/2, t), -deriv(lambda x: pot(x, z, pi/2, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi, t), -deriv(lambda x: pot(x, z, pi, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 3*pi/2, t), -deriv(lambda x: pot(x, z, 3*pi/2, t), R, dx=dx), rtol=rtol)
    R, z, t = 1, -.7, -123
    assert_allclose(pot.Rforce(R, z, 0, t), -deriv(lambda x: pot(x, z, 0, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi / 2, t), -deriv(lambda x: pot(x, z, pi / 2, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi, t), -deriv(lambda x: pot(x, z, pi, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 3*pi/2, t), -deriv(lambda x: pot(x, z, 3*pi/2, t), R, dx=dx), rtol=rtol)
    R, z, t = 3.14, .7, -123.123
    assert_allclose(pot.Rforce(R, z, 0, t), -deriv(lambda x: pot(x, z, 0, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi/2, t), -deriv(lambda x: pot(x, z, pi/2, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi, t), -deriv(lambda x: pot(x, z, pi, t), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 3*pi/2, t), -deriv(lambda x: pot(x, z, 3*pi/2, t), R, dx=dx), rtol=rtol)
    pot = spiral(N=10, r_ref=15, phi_ref=5, Cs=[8./(3.*pi), 0.5, 8./(15.*pi)])
    assert_allclose(pot.Rforce(1., 0.), -deriv(lambda x: pot(x, 0.), 1., dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(0.01, 0.), -deriv(lambda x: pot(x, 0.), 0.01, dx=dx), rtol=rtol)
    R, z = 0.3, 0
    assert_allclose(pot.Rforce(R, z, 0), -deriv(lambda x: pot(x, z, 0), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi/2.1), -deriv(lambda x: pot(x, z, pi/2.1), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 1.3*pi), -deriv(lambda x: pot(x, z, 1.3*pi), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 3*pi/2), -deriv(lambda x: pot(x, z, 3*pi/2), R, dx=dx), rtol=rtol)
    R, z = 1, -.7
    assert_allclose(pot.Rforce(R, z, 0), -deriv(lambda x: pot(x, z, 0), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi / 2), -deriv(lambda x: pot(x, z, pi / 2), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, .9*pi), -deriv(lambda x: pot(x, z, .9*pi), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 3.3*pi/2), -deriv(lambda x: pot(x, z, 3.3*pi/2), R, dx=dx), rtol=rtol)
    R, z = 3.14, .7
    assert_allclose(pot.Rforce(R, z, 0), -deriv(lambda x: pot(x, z, 0), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, pi / 2.3), -deriv(lambda x: pot(x, z, pi / 2.3), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 1.1*pi), -deriv(lambda x: pot(x, z, 1.1*pi), R, dx=dx), rtol=rtol)
    assert_allclose(pot.Rforce(R, z, 3.5*pi/2), -deriv(lambda x: pot(x, z, 3.5*pi/2), R, dx=dx), rtol=rtol)
def test_zforce(self):
    """Test zforce against a numerical derivative -d(Potential) / dz."""
    dx = 1e-8
    rtol = 1e-6  # relative tolerance

    def check_plane(pot, R, phi, t=0):
        # zforce must vanish in the plane of the galaxy (z = 0)
        assert_allclose(0, pot.zforce(R, 0, phi, t), rtol=rtol)

    def check_deriv(pot, R, z, phi, t=0):
        # zforce must agree with the numerical derivative -dPhi/dz
        assert_allclose(pot.zforce(R, z, phi, t),
                        -deriv(lambda x: pot(R, x, phi, t), z, dx=dx), rtol=rtol)

    pot = spiral()
    for phi in [0, pi/2, pi, 3*pi/2]:
        check_plane(pot, 0.3, phi)
    for R, z in [(1, -.7), (3.7, .7)]:
        for phi in [0, pi/2, pi, 3*pi/2]:
            check_deriv(pot, R, z, phi)

    pot = spiral(amp=13, N=3, alpha=-.3, r_ref=0.5, phi_ref=0.3, Rs=0.7, H=0.7, Cs=[1, 2], omega=3)
    for R, phi, t in [(0.3, 0, 1), (0.6, pi/2, 2), (0.9, pi, 3), (1.2, 2*pi, 4)]:
        check_plane(pot, R, phi, t)
    for phi in [0, pi/2, pi, 3*pi/2]:
        check_deriv(pot, 1, -.7, phi, 123)
    for phi in [0, pi/2, pi, 3*pi/2]:
        check_deriv(pot, 3.7, .7, phi)

    pot = spiral(N=1, alpha=-0.2, r_ref=.5, Cs=[1, 1.5], omega=-3)
    for R, phi, t in [(0.3, 0, 123), (0.3, pi/2, -321), (32, pi, 1.23), (0.123, 3.33*pi/2, -3.21)]:
        check_plane(pot, R, phi, t)
    for phi in [0, pi/2, pi, 3*pi/2.1]:
        check_deriv(pot, 1, -1.5, phi)
    for phi in [0, pi/2, pi, 3.4*pi/2]:
        check_deriv(pot, 3.7, .7, phi, -100)

    pot = spiral(N=5, r_ref=1.5, phi_ref=0.5, Cs=[8./(3.*pi), 0.5, 8./(15.*pi)])
    for R, phi in [(0.3, 0), (0.4, pi/2), (0.5, pi*1.1), (0.6, 3*pi/2)]:
        check_plane(pot, R, phi)
    for R, z in [(1, -.7), (37, 1.7)]:
        for phi in [0, pi/2, pi, 3*pi/2]:
            check_deriv(pot, R, z, phi)
def test_phiforce(self):
    """Test phiforce against a numerical derivative -d(Potential) / d(phi)."""
    dx = 1e-8
    rtol = 1e-5  # relative tolerance

    def check(pot, R, z, phi, t=0):
        # phiforce must agree with the numerical derivative -dPhi/dphi
        assert_allclose(pot.phiforce(R, z, phi, t),
                        -deriv(lambda x: pot(R, z, x, t), phi, dx=dx), rtol=rtol)

    pot = spiral()
    for R, z in [(.3, 0), (.1, -.3)]:
        for phi in [0, pi/2, pi, 3*pi/2]:
            check(pot, R, z, phi)
    for phi in [0, pi/2.1, pi, 3*pi/2]:
        check(pot, 3, 7, phi)

    pot = spiral(N=7, alpha=-0.3, r_ref=0.5, phi_ref=0.3, Rs=0.7, H=0.7, Cs=[1, 1, 1], omega=2*pi)
    # NOTE: the first point is deliberately evaluated at t=0, the rest at t=1.2
    for phi, t in [(0, 0), (pi/2, 1.2), (pi, 1.2), (3*pi/2, 1.2)]:
        check(pot, .3, 0, phi, t)
    for phi in [0, pi/2, pi, 3*pi/2]:
        check(pot, 1, -.7, phi)
    for phi in [0, pi/2, pi, 3.2*pi/2]:
        check(pot, 3.7, .7, phi, -5.1)

    pot = spiral(N=1, alpha=0.1, phi_ref=0, Cs=[1, 1.5], omega=-.333)
    for phi in [0, pi/2, pi, 3.2*pi/2]:
        check(pot, .3, 0, phi)
    for R, z, t in [(1, -.7, 123), (3, 4, 5)]:
        for phi in [0, pi/2, pi, 3*pi/2]:
            check(pot, R, z, phi, t)

    pot = spiral(N=4, r_ref=1.5, phi_ref=5, Cs=[8./(3.*pi), 0.5, 8./(15.*pi)])
    for R, z in [(.3, 0), (1, -.7)]:
        for phi in [0, pi/2, pi, 3*pi/2]:
            check(pot, R, z, phi)
    for phi in [0, pi/2, pi, 2*pi]:
        check(pot, 2.1, .12345, phi)
def test_R2deriv(self):
    """Test R2deriv against a numerical derivative -d(Rforce) / dR.

    Only the default (time-independent) spiral is exercised here; see the
    NOTE at the bottom about configurations that are not yet checked.
    """
    dx = 1e-8
    rtol = 1e-6  # relative tolerance

    pot = spiral()
    assert_allclose(pot.R2deriv(1., 0.), -deriv(lambda x: pot.Rforce(x, 0.), 1., dx=dx), rtol=rtol)
    # compare R2deriv to -dRforce/dR at several (R, z) points and azimuths
    for (R, z), phis in [((0.3, 0), [0, pi/2, pi, 3.1*pi/2]),
                         ((1, -.7), [0, pi/2, pi, 2*pi]),
                         ((5, .9), [0, pi/2, pi, 3*pi/2])]:
        for phi in phis:
            assert_allclose(pot.R2deriv(R, z, phi),
                            -deriv(lambda x: pot.Rforce(x, z, phi), R, dx=dx), rtol=rtol)
    # NOTE(review): additional checks for non-default configurations
    # (N=1/alpha=-.3/omega=3; N=7/alpha=.1/omega=-.3; N=4/alpha=pi/2/omega=-1.3)
    # were previously present as commented-out code, presumably because
    # R2deriv disagreed with the numerical derivative there. They were removed
    # as dead code; re-add them once R2deriv is fixed for those cases.
def test_z2deriv(self):
    """Test z2deriv against a numerical derivative -d(zforce) / dz."""
    dx = 1e-8
    rtol = 1e-6  # relative tolerance
    phis = [0, pi/2, pi, 3*pi/2]
    # (potential, [(R, z), ...]) pairs; every point is checked at each azimuth
    cases = [
        (spiral(),
         [(.3, 0), (1, -.3), (1.2, .1)]),
        (spiral(N=3, alpha=-0.3, r_ref=.25, Cs=[8./(3.*pi), 0.5, 8./(15.*pi)]),
         [(.3, 0), (1, -.3), (3.3, .7)]),
        (spiral(amp=5, N=1, alpha=0.1, r_ref=0.5, phi_ref=0.3, Rs=0.7, H=0.7, Cs=[1, 2], omega=3),
         [(.3, 0), (1, -.3), (3.3, .7)]),
        (spiral(N=1, alpha=1, r_ref=3, phi_ref=pi, Cs=[1, 2], omega=-3),
         [(.7, 0), (1, -.3), (2.1, .99)]),
    ]
    for pot, points in cases:
        for R, z in points:
            for phi in phis:
                assert_allclose(pot.z2deriv(R, z, phi),
                                -deriv(lambda x: pot.zforce(R, x, phi), z, dx=dx),
                                rtol=rtol)
def test_phi2deriv(self):
    """Test phi2deriv against a numerical derivative -d(phiforce) / d(phi)."""
    dx = 1e-8
    rtol = 1e-7  # relative tolerance

    def check(pot, R, z, phi):
        # phi2deriv must agree with the numerical derivative -dphiforce/dphi
        assert_allclose(pot.phi2deriv(R, z, phi),
                        -deriv(lambda x: pot.phiforce(R, z, x), phi, dx=dx), rtol=rtol)

    pot = spiral()
    for phi in [0, pi/2.1, pi, 3*pi/2.5]:
        check(pot, .3, 0, phi)
    for phi in [0, pi/2, pi, 3*pi/2]:
        check(pot, 1, -.3, phi)
    for phi in [0, pi/2.1, pi, 3*pi/2]:
        check(pot, 3.3, .7, phi)

    pot = spiral(amp=13, N=1, alpha=-.3, r_ref=0.5, phi_ref=0.1, Rs=0.7, H=0.7, Cs=[1, 2, 3], omega=3)
    for phi in [0, pi/2, pi, 3.3*pi/2]:
        check(pot, .3, 0, phi)
    for phi in [0, pi/2, pi, 3*pi/2]:
        check(pot, 1, -.3, phi)
    for phi in [0, pi/2.1, pi, 3*pi/2]:
        check(pot, 3.3, .7, phi)

    pot = spiral(amp=13, N=5, alpha=0.1, r_ref=.3, phi_ref=.1, Rs=0.77, H=0.747, Cs=[3, 2], omega=-3)
    for R, z in [(.3, 0), (1, -.3)]:
        for phi in [0, pi/2, pi, 3*pi/2]:
            check(pot, R, z, phi)
    for phi in [0, pi/2.1, pi, 3*pi/2]:
        check(pot, 3.3, .7, phi)

    pot = spiral(amp=11, N=7, alpha=.777, r_ref=7, phi_ref=.7, Cs=[8./(3.*pi), 0.5, 8./(15.*pi)])
    for phi in [0, pi/2, pi, 3*pi/2]:
        check(pot, .7, 0, phi)
    for phi in [0, pi/2.2, pi, 3*pi/2]:
        check(pot, 1, -.33, phi)
    for phi in [0, pi/2.1, pi, 3*pi/2]:
        check(pot, 1.123, .123, phi)
def test_dens(self):
    """Test dens against density obtained using Poisson's equation."""
    rtol = 1e-2  # relative tolerance (this one isn't as precise)

    def check(pot, R, z, phi):
        # closed-form density must agree with the density implied by Poisson's equation
        assert_allclose(pot.dens(R, z, phi, forcepoisson=False),
                        pot.dens(R, z, phi, forcepoisson=True), rtol=rtol)

    pot = spiral()
    for R, z, phi in [(1, 0, 0), (1, 1, .5), (1, -1, -1), (.1, .1, .1), (33, .777, .747)]:
        check(pot, R, z, phi)

    pot = spiral(amp=3, N=5, alpha=.3, r_ref=.7, omega=5)
    for R, z, phi in [(1, 0, 0), (1.2, 1.2, 1.2), (1, -1, -1), (.1, .1, .1), (33.3, .007, .747)]:
        check(pot, R, z, phi)

    pot = spiral(amp=0.6, N=3, alpha=.24, r_ref=1, phi_ref=pi, Cs=[8./(3.*pi), 0.5, 8./(15.*pi)], omega=-3)
    # the (.1, .1, .1) point was already disabled for this configuration
    for R, z, phi in [(1, 0, 0), (1, 1, 1), (1, -1, -1), (3.33, -7.77, -.747)]:
        check(pot, R, z, phi)

    pot = spiral(amp=100, N=4, alpha=pi/2, r_ref=1, phi_ref=1, Rs=7, H=77, Cs=[3, 1, 1], omega=-1.3)
    for R, z, phi in [(1, 0, 0), (3, 2, pi), (1, -1, -1), (.1, .123, .1), (333, -.777, .747)]:
        check(pot, R, z, phi)
def test_Rzderiv(self):
    """Check the analytic Rzderiv against -d(Rforce)/dz computed numerically."""
    dx = 1e-8
    rtol = 1e-6
    # Each entry pairs a potential with the (R, z, phi, t) points to probe.
    cases = [
        (spiral(),
         [(1, 0, 0, 0), (0.7, 0.3, pi/3, 0), (1.1, -0.3, pi/4.2, 3),
          (.777, .747, .343, 2.5), (12, 1, 2, 3), (3, 4, 5, 6),
          (5, -.7, 3*pi/2, 5), (11, 11, 11, 1.123), (4, 7, 2, 10000),
          (.01, 0, 0, 0), (1.23, 0, 44, 343), (7, 7, 7, 7)]),
        (spiral(amp=13, N=7, alpha=.1, r_ref=1.123, phi_ref=.3, Rs=0.777,
                H=.5, Cs=[4.5], omega=-3.4),
         [(1, 0, 0, 0), (.777, 0.333, pi/3, 0.), (1.1, -0.3, pi/4.2, 3),
          (.777, .747, .343, 2.5), (12, 1, 2, 3), (3, 4, 5, 6),
          (2, -.7, 3*pi/2, 5), (11, 11, 11, 1.123), (4, 7, 2, 10000),
          (.01, 0, 0, 0), (1.23, 0, 44, 343), (7, 7, 7, 7)]),
        (spiral(amp=11, N=2, alpha=.777, r_ref=7, Cs=[8.], omega=0.1),
         [(1, 0, 0, 0), (0.7, 0.3, pi/12, 0), (1.1, -0.3, pi/4.2, 3),
          (.777, .747, .343, 2.5), (2, 1, 2, 3), (3, 4, 5, 6),
          (5, -.7, 3*pi/2, 5), (11, 11, 11, 1.123), (4, 7, 2, 10000),
          (.01, 0, 0, 0), (1.23, 0, 44, 343), (7, 7, 7, 7)]),
        (spiral(amp=2, N=1, alpha=-0.1, r_ref=5, Rs=5, H=.7, Cs=[3.5],
                omega=3),
         [(1, 0, 0, 0), (0.77, 0.3, pi/3, 0), (3.1, -0.3, pi/5, 2),
          (.777, .747, .343, 2.5), (12, 1, 2, 3), (3, 4, 5, 6),
          (5, -.7, 3*pi/2, 5), (11, 11, 11, 1.123), (4, 7, 2, 10000),
          (.01, 0, 0, 0), (1.23, 0, 44, 343), (7, 7, 7, 7)]),
    ]
    for pot, points in cases:
        for R, z, phi, t in points:
            # The mixed second derivative must equal minus the z-derivative
            # of the radial force at the same point.
            assert_allclose(pot.Rzderiv(R, z, phi, t),
                            -deriv(lambda x: pot.Rforce(R, x, phi, t),
                                   z, dx=dx),
                            rtol=rtol)
def test_Rphideriv(self):
    """Check the analytic Rphideriv against -d(Rforce)/dphi computed numerically."""
    dx = 1e-8
    rtol = 5e-5
    # Each entry pairs a potential with the (R, z, phi, t) points to probe.
    cases = [
        (spiral(),
         [(1, 0, 0, 0), (0.7, 0.3, pi/3, 0), (1.1, -0.3, pi/4.2, 3),
          (.777, .747, .343, 2.5), (12, 1, 2, 3), (3, 4, 5, 6),
          (5, -.7, 3*pi/2, 5), (11, 11, 11, 1.123), (4, 7, 2, 1000),
          (.01, 0, 0, 0), (1.23, 0, 44, 343), (7, 1, 7, 7)]),
        (spiral(N=3, alpha=.21, r_ref=.5, phi_ref=pi, Cs=[2.], omega=-3),
         [(1, 0, 0, 0), (0.7, 0.3, pi/3, 0), (1.1, -0.3, pi/4.2, 3),
          (.777, .747, .343, 2.5), (12, 1, 2, 3), (3, 4, 5, 6),
          (5, -.7, 3*pi/2, 5), (11, 11, 11, 1.123), (3, 2, 1, 100),
          (.01, 0, 0, 0), (1.12, 0, 2, 343), (7, 7, 7, 7)]),
    ]
    for pot, points in cases:
        for R, z, phi, t in points:
            # The mixed second derivative must equal minus the phi-derivative
            # of the radial force at the same point.
            assert_allclose(pot.Rphideriv(R, z, phi, t),
                            -deriv(lambda x: pot.Rforce(R, z, x, t),
                                   phi, dx=dx),
                            rtol=rtol)
def test_OmegaP(self):
    """OmegaP must report exactly the pattern speed given as ``omega``."""
    # Default construction: no rotation.
    assert spiral().OmegaP() == 0
    # Full keyword construction with a negative pattern speed.
    pot = spiral(N=1, alpha=2, r_ref=.1, phi_ref=.5, Rs=0.2, H=0.7,
                 Cs=[1, 2], omega=-123)
    assert pot.OmegaP() == -123
    # Positive non-integer pattern speed.
    assert spiral(omega=123.456).OmegaP() == 123.456
def test_K(self):
    """_K(R) must equal n * N / (R * sin(alpha)) at several radii."""
    pot = spiral()
    for R in (1, 1e-6, 0.5):
        expected = pot._ns * pot._N / R / np.sin(pot._alpha)
        assert_allclose([pot._K(R)], [expected])
def test_B(self):
    """_B(R) must equal K*H * (1 + 0.4*K*H) at several radii."""
    pot = spiral()
    for R in (1, 1e-6, 0.3):
        kh = pot._K(R) * pot._H
        assert_allclose([pot._B(R)], [kh * (1 + 0.4 * kh)])
def test_D(self):
    """_D(R) must equal (1 + K*H + 0.3*(K*H)^2) / (1 + 0.3*K*H) at several radii."""
    pot = spiral()
    for R in (3, 1e-6, .5):
        K = pot._K(R)
        H = pot._H
        expected = (1. + K * H + 0.3 * K**2 * H**2.) / (1. + 0.3 * K * H)
        assert_allclose([pot._D(R)], [expected])
def test_dK_dR(self):
    """_dK_dR must match a numerical derivative of _K."""
    pot = spiral()
    dx = 1e-8
    for R in (3, 2.3, -2.3):
        assert_allclose(pot._dK_dR(R), deriv(pot._K, R, dx=dx))
def test_dB_dR(self):
    """_dB_dR must match a numerical derivative of _B."""
    pot = spiral()
    dx = 1e-8
    for R in (3.3, 1e-3, 3):
        assert_allclose(pot._dB_dR(R), deriv(pot._B, R, dx=dx))
def test_dD_dR(self):
    """_dD_dR must match a numerical derivative of _D."""
    pot = spiral()
    dx = 1e-8
    for R in (1e-3, 2):
        assert_allclose(pot._dD_dR(R), deriv(pot._D, R, dx=dx))
def test_gamma(self):
    """_gamma(R, phi) must equal N * (phi - phi_ref - log(R / r_ref) / tan(alpha))."""
    pot = spiral()
    for R, phi in ((1, 2), (.1, -.2), (0.01, 0)):
        expected = pot._N * (float(phi) - pot._phi_ref
                             - np.log(float(R) / pot._r_ref)
                             / np.tan(pot._alpha))
        assert_allclose(pot._gamma(R, phi), [expected])
def test_dgamma_dR(self):
    """_dgamma_dR must match a numerical derivative of _gamma at fixed phi."""
    pot = spiral()
    dx = 1e-8
    for R in (3., 3, 0.01):
        assert_allclose(pot._dgamma_dR(R),
                        deriv(lambda x: pot._gamma(x, 1), R, dx=dx))
if __name__ == '__main__':
    # Run only this TestCase, with verbose per-test output.
    loader = unittest.TestLoader()
    tests = loader.loadTestsFromTestCase(TestSpiralArmsPotential)
    unittest.TextTestRunner(verbosity=2).run(tests)
| 74.765625
| 141
| 0.556583
| 10,676
| 57,420
| 2.936306
| 0.016017
| 0.038216
| 0.214751
| 0.170346
| 0.949534
| 0.939996
| 0.922802
| 0.912403
| 0.896261
| 0.881587
| 0
| 0.052149
| 0.240247
| 57,420
| 767
| 142
| 74.863103
| 0.66643
| 0.100035
| 0
| 0.718601
| 0
| 0
| 0.000155
| 0
| 0
| 0
| 0
| 0
| 0.624801
| 1
| 0.030207
| false
| 0
| 0.011129
| 0
| 0.042925
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
98c39418e30f914edb662c2a8305aa24b24cb1ac
| 171
|
py
|
Python
|
Lista_Heranca_Polimorfismo/ex_02/test.py
|
lcs-amorim/LP2_2s2017
|
1b5c21cbb49e3b7806bee66e0a23c926673941de
|
[
"Apache-2.0"
] | null | null | null |
Lista_Heranca_Polimorfismo/ex_02/test.py
|
lcs-amorim/LP2_2s2017
|
1b5c21cbb49e3b7806bee66e0a23c926673941de
|
[
"Apache-2.0"
] | null | null | null |
Lista_Heranca_Polimorfismo/ex_02/test.py
|
lcs-amorim/LP2_2s2017
|
1b5c21cbb49e3b7806bee66e0a23c926673941de
|
[
"Apache-2.0"
] | 1
|
2018-08-14T19:25:14.000Z
|
2018-08-14T19:25:14.000Z
|
from main import ex01
def test_ex_01():
    """ex01 must return the salary raised by the given percentage."""
    cases = [
        ("cesar", 1500, 10, 1650),
        ("vinicius", 1000, 10, 1100),
        ("lucas", 3500, 15, 4025),
    ]
    for name, salary, percent, expected in cases:
        assert ex01(name, salary, percent) == expected
| 24.428571
| 45
| 0.637427
| 26
| 171
| 4.115385
| 0.769231
| 0.280374
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 0.204678
| 171
| 6
| 46
| 28.5
| 0.492647
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 0
| 0
| 0
| 0
| 0
| 0.6
| 1
| 0.2
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98c88322a44c8932d4d684a706a8efa3c7bc0d74
| 2,508
|
py
|
Python
|
tests/algos/torch/test_bc_impl.py
|
alxlampe/d3rlpy
|
af7e6bd018a51f95138d121f59c50dc36ec87e3a
|
[
"MIT"
] | null | null | null |
tests/algos/torch/test_bc_impl.py
|
alxlampe/d3rlpy
|
af7e6bd018a51f95138d121f59c50dc36ec87e3a
|
[
"MIT"
] | null | null | null |
tests/algos/torch/test_bc_impl.py
|
alxlampe/d3rlpy
|
af7e6bd018a51f95138d121f59c50dc36ec87e3a
|
[
"MIT"
] | null | null | null |
import pytest
from d3rlpy.algos.torch.bc_impl import BCImpl, DiscreteBCImpl
from d3rlpy.augmentation import AugmentationPipeline
from d3rlpy.optimizers import AdamFactory
from d3rlpy.encoders import DefaultEncoderFactory
from tests.algos.algo_test import torch_impl_tester, DummyScaler
@pytest.mark.parametrize('observation_shape', [(100, ), (4, 84, 84)])
@pytest.mark.parametrize('action_size', [2])
@pytest.mark.parametrize('learning_rate', [1e-3])
@pytest.mark.parametrize('optim_factory', [AdamFactory()])
@pytest.mark.parametrize('encoder_factory', [DefaultEncoderFactory()])
@pytest.mark.parametrize('scaler', [None, DummyScaler()])
@pytest.mark.parametrize('augmentation', [AugmentationPipeline()])
@pytest.mark.parametrize('n_augmentations', [1])
def test_bc_impl(observation_shape, action_size, learning_rate, optim_factory,
                 encoder_factory, scaler, augmentation, n_augmentations):
    """Build a continuous-action BC impl and run it through the shared tester."""
    options = dict(use_gpu=False,
                   scaler=scaler,
                   augmentation=augmentation,
                   n_augmentations=n_augmentations)
    impl = BCImpl(observation_shape, action_size, learning_rate,
                  optim_factory, encoder_factory, **options)
    torch_impl_tester(impl, discrete=False, imitator=True)
@pytest.mark.parametrize('observation_shape', [(100, ), (4, 84, 84)])
@pytest.mark.parametrize('action_size', [2])
@pytest.mark.parametrize('learning_rate', [1e-3])
@pytest.mark.parametrize('optim_factory', [AdamFactory()])
@pytest.mark.parametrize('encoder_factory', [DefaultEncoderFactory()])
@pytest.mark.parametrize('beta', [0.5])
@pytest.mark.parametrize('scaler', [None, DummyScaler()])
@pytest.mark.parametrize('augmentation', [AugmentationPipeline()])
@pytest.mark.parametrize('n_augmentations', [1])
def test_discrete_bc_impl(observation_shape, action_size, learning_rate,
                          optim_factory, encoder_factory, beta, scaler,
                          augmentation, n_augmentations):
    """Build a discrete-action BC impl and run it through the shared tester."""
    options = dict(use_gpu=False,
                   scaler=scaler,
                   augmentation=augmentation,
                   n_augmentations=n_augmentations)
    impl = DiscreteBCImpl(observation_shape, action_size, learning_rate,
                          optim_factory, encoder_factory, beta, **options)
    torch_impl_tester(impl, discrete=True, imitator=True)
| 45.6
| 78
| 0.655104
| 245
| 2,508
| 6.489796
| 0.216327
| 0.106918
| 0.224528
| 0.065409
| 0.801887
| 0.756604
| 0.756604
| 0.756604
| 0.756604
| 0.756604
| 0
| 0.015593
| 0.232855
| 2,508
| 54
| 79
| 46.444444
| 0.810811
| 0
| 0
| 0.653061
| 0
| 0
| 0.082935
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040816
| false
| 0
| 0.122449
| 0
| 0.163265
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98edeb41a17afcb4a6a7453e46629c22b6963229
| 130
|
py
|
Python
|
snnw/nn/weight_init.py
|
juliustao/SNNW
|
2051c81b4013030d67fdfdcb1dd2973ba550ddd9
|
[
"MIT"
] | null | null | null |
snnw/nn/weight_init.py
|
juliustao/SNNW
|
2051c81b4013030d67fdfdcb1dd2973ba550ddd9
|
[
"MIT"
] | null | null | null |
snnw/nn/weight_init.py
|
juliustao/SNNW
|
2051c81b4013030d67fdfdcb1dd2973ba550ddd9
|
[
"MIT"
] | null | null | null |
import numpy as np
def kaiming(in_size, out_size):
    """Return a Kaiming/He-normal weight matrix of shape (in_size, out_size).

    Entries are drawn from N(0, 2 / in_size), the scaling recommended for
    ReLU networks (He et al., 2015).
    """
    scale = np.sqrt(2.0 / in_size)
    weights = np.random.normal(size=(in_size, out_size))
    return weights * scale
| 21.666667
| 76
| 0.723077
| 25
| 130
| 3.56
| 0.6
| 0.202247
| 0.202247
| 0.292135
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017857
| 0.138462
| 130
| 5
| 77
| 26
| 0.776786
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
c725ae947c72540c38d0fc830f40c06c3cf7b5f1
| 39,658
|
py
|
Python
|
looker_client_31/api/render_task_api.py
|
ContrastingSounds/looker_sdk_31
|
f973434049fff1b605b10086ab8b84f2f62e3489
|
[
"MIT"
] | null | null | null |
looker_client_31/api/render_task_api.py
|
ContrastingSounds/looker_sdk_31
|
f973434049fff1b605b10086ab8b84f2f62e3489
|
[
"MIT"
] | null | null | null |
looker_client_31/api/render_task_api.py
|
ContrastingSounds/looker_sdk_31
|
f973434049fff1b605b10086ab8b84f2f62e3489
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Experimental Looker API 3.1 Preview
This API 3.1 is in active development. Breaking changes are likely to occur to some API functions in future Looker releases until API 3.1 is officially launched and upgraded to beta status. If you have time and interest to experiment with new or modified services exposed in this embryonic API 3.1, we welcome your participation and feedback! For large development efforts or critical line-of-business projects, we strongly recommend you stick with the API 3.0 while API 3.1 is under construction. # noqa: E501
OpenAPI spec version: 3.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from looker_client_31.api_client import ApiClient
class RenderTaskApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Store the ApiClient used for all requests; build a default one if omitted."""
    self.api_client = ApiClient() if api_client is None else api_client
def create_dashboard_render_task(self, dashboard_id, result_format, body, width, height, **kwargs):  # noqa: E501
    """Create Dashboard Render Task  # noqa: E501

    Start a render task that renders a dashboard to a document or image and
    return the resulting render task object.  # noqa: E501
    This method is synchronous by default; pass async=True to get a request
    thread instead (call .get() on it for the result).

    :param async bool
    :param int dashboard_id: Id of dashboard to render (required)
    :param str result_format: Output type: pdf, png, or jpg (required)
    :param CreateDashboardRenderTask body: Dashboard render task parameters (required)
    :param int width: Output width in pixels (required)
    :param int height: Output height in pixels (required)
    :param str fields: Requested fields.
    :return: RenderTask
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Delegate to the *_with_http_info variant either way; only the async
    # flag decides whether the thread or the unwrapped data is returned.
    if kwargs.get('async'):
        return self.create_dashboard_render_task_with_http_info(
            dashboard_id, result_format, body, width, height, **kwargs)  # noqa: E501
    (data) = self.create_dashboard_render_task_with_http_info(
        dashboard_id, result_format, body, width, height, **kwargs)  # noqa: E501
    return data
def create_dashboard_render_task_with_http_info(self, dashboard_id, result_format, body, width, height, **kwargs):  # noqa: E501
    """Create Dashboard Render Task  # noqa: E501

    ### Create a new task to render a dashboard to a document or image.  Returns a render task object. To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results).  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_dashboard_render_task_with_http_info(dashboard_id, result_format, body, width, height, async=True)
    >>> result = thread.get()

    :param async bool
    :param int dashboard_id: Id of dashboard to render (required)
    :param str result_format: Output type: pdf, png, or jpg (required)
    :param CreateDashboardRenderTask body: Dashboard render task parameters (required)
    :param int width: Output width in pixels (required)
    :param int height: Output height in pixels (required)
    :param str fields: Requested fields.
    :return: RenderTask
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['dashboard_id', 'result_format', 'body', 'width', 'height', 'fields']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Fold **kwargs into the locals() dict, rejecting unknown names.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_dashboard_render_task" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'dashboard_id' is set
    if ('dashboard_id' not in params or
            params['dashboard_id'] is None):
        raise ValueError("Missing the required parameter `dashboard_id` when calling `create_dashboard_render_task`")  # noqa: E501
    # verify the required parameter 'result_format' is set
    if ('result_format' not in params or
            params['result_format'] is None):
        raise ValueError("Missing the required parameter `result_format` when calling `create_dashboard_render_task`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_dashboard_render_task`")  # noqa: E501
    # verify the required parameter 'width' is set
    if ('width' not in params or
            params['width'] is None):
        raise ValueError("Missing the required parameter `width` when calling `create_dashboard_render_task`")  # noqa: E501
    # verify the required parameter 'height' is set
    if ('height' not in params or
            params['height'] is None):
        raise ValueError("Missing the required parameter `height` when calling `create_dashboard_render_task`")  # noqa: E501

    collection_formats = {}

    # dashboard_id and result_format are URL path segments.
    path_params = {}
    if 'dashboard_id' in params:
        path_params['dashboard_id'] = params['dashboard_id']  # noqa: E501
    if 'result_format' in params:
        path_params['result_format'] = params['result_format']  # noqa: E501

    # width, height and fields travel as query-string parameters.
    query_params = []
    if 'width' in params:
        query_params.append(('width', params['width']))  # noqa: E501
    if 'height' in params:
        query_params.append(('height', params['height']))  # noqa: E501
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # NOTE(review): 'async' used as a keyword argument below is a reserved
    # word (SyntaxError) on Python >= 3.7; this generated client targets
    # older Python versions — confirm before upgrading the interpreter.
    return self.api_client.call_api(
        '/render_tasks/dashboards/{dashboard_id}/{result_format}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='RenderTask',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_look_render_task(self, look_id, result_format, width, height, **kwargs):  # noqa: E501
    """Create Look Render Task  # noqa: E501

    Start a render task that renders a look to an image and return the
    resulting render task object.  # noqa: E501
    This method is synchronous by default; pass async=True to get a request
    thread instead (call .get() on it for the result).

    :param async bool
    :param int look_id: Id of look to render (required)
    :param str result_format: Output type: png, or jpg (required)
    :param int width: Output width in pixels (required)
    :param int height: Output height in pixels (required)
    :param str fields: Requested fields.
    :return: RenderTask
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Delegate to the *_with_http_info variant either way; only the async
    # flag decides whether the thread or the unwrapped data is returned.
    if kwargs.get('async'):
        return self.create_look_render_task_with_http_info(
            look_id, result_format, width, height, **kwargs)  # noqa: E501
    (data) = self.create_look_render_task_with_http_info(
        look_id, result_format, width, height, **kwargs)  # noqa: E501
    return data
def create_look_render_task_with_http_info(self, look_id, result_format, width, height, **kwargs):  # noqa: E501
    """Create Look Render Task  # noqa: E501

    ### Create a new task to render a look to an image.  Returns a render task object. To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results).  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.create_look_render_task_with_http_info(look_id, result_format, width, height, async=True)
    >>> result = thread.get()

    :param async bool
    :param int look_id: Id of look to render (required)
    :param str result_format: Output type: png, or jpg (required)
    :param int width: Output width in pixels (required)
    :param int height: Output height in pixels (required)
    :param str fields: Requested fields.
    :return: RenderTask
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['look_id', 'result_format', 'width', 'height', 'fields']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Fold **kwargs into the locals() dict, rejecting unknown names.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_look_render_task" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'look_id' is set
    if ('look_id' not in params or
            params['look_id'] is None):
        raise ValueError("Missing the required parameter `look_id` when calling `create_look_render_task`")  # noqa: E501
    # verify the required parameter 'result_format' is set
    if ('result_format' not in params or
            params['result_format'] is None):
        raise ValueError("Missing the required parameter `result_format` when calling `create_look_render_task`")  # noqa: E501
    # verify the required parameter 'width' is set
    if ('width' not in params or
            params['width'] is None):
        raise ValueError("Missing the required parameter `width` when calling `create_look_render_task`")  # noqa: E501
    # verify the required parameter 'height' is set
    if ('height' not in params or
            params['height'] is None):
        raise ValueError("Missing the required parameter `height` when calling `create_look_render_task`")  # noqa: E501

    collection_formats = {}

    # look_id and result_format are URL path segments.
    path_params = {}
    if 'look_id' in params:
        path_params['look_id'] = params['look_id']  # noqa: E501
    if 'result_format' in params:
        path_params['result_format'] = params['result_format']  # noqa: E501

    # width, height and fields travel as query-string parameters.
    query_params = []
    if 'width' in params:
        query_params.append(('width', params['width']))  # noqa: E501
    if 'height' in params:
        query_params.append(('height', params['height']))  # noqa: E501
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # This endpoint sends no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # NOTE(review): 'async' used as a keyword argument below is a reserved
    # word (SyntaxError) on Python >= 3.7; this generated client targets
    # older Python versions — confirm before upgrading the interpreter.
    return self.api_client.call_api(
        '/render_tasks/looks/{look_id}/{result_format}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='RenderTask',  # noqa: E501
        auth_settings=auth_settings,
        async=params.get('async'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_lookml_dashboard_render_task(self, dashboard_id, result_format, body, width, height, **kwargs):  # noqa: E501
    """Create Lookml Dashboard Render Task  # noqa: E501

    ### Create a new task to render a lookml dashboard to a document or
    image. Returns a render task object. Poll [Get Render Task]
    (#!/RenderTask/get_render_task) with the render_task.id until the task
    completes, then fetch the output via [Get Render Task Results]
    (#!/RenderTask/get_render_task_results). Synchronous by default; pass
    async=True for an asynchronous request.  # noqa: E501

    :param async bool
    :param str dashboard_id: Id of lookml dashboard to render (required)
    :param str result_format: Output type: pdf, png, or jpg (required)
    :param CreateDashboardRenderTask body: Dashboard render task parameters (required)
    :param int width: Output width in pixels (required)
    :param int height: Output height in pixels (required)
    :param str fields: Requested fields.
    :return: RenderTask
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The helper already returns the request thread in async mode and the
    # deserialized data otherwise, so its result can be returned directly.
    return self.create_lookml_dashboard_render_task_with_http_info(
        dashboard_id, result_format, body, width, height, **kwargs)  # noqa: E501
def create_lookml_dashboard_render_task_with_http_info(self, dashboard_id, result_format, body, width, height, **kwargs):  # noqa: E501
    """Create Lookml Dashboard Render Task  # noqa: E501

    ### Create a new task to render a lookml dashboard to a document or
    image. Returns a render task object. To check the status of a render
    task, pass the render_task.id to [Get Render Task]
    (#!/RenderTask/get_render_task). Once the render task is complete, you
    can download the resulting document or image using [Get Render Task
    Results](#!/RenderTask/get_render_task_results).  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    :param async bool
    :param str dashboard_id: Id of lookml dashboard to render (required)
    :param str result_format: Output type: pdf, png, or jpg (required)
    :param CreateDashboardRenderTask body: Dashboard render task parameters (required)
    :param int width: Output width in pixels (required)
    :param int height: Output height in pixels (required)
    :param str fields: Requested fields.
    :return: RenderTask
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is missing or None.
    """
    all_params = ['dashboard_id', 'result_format', 'body', 'width', 'height', 'fields']  # noqa: E501
    all_params.extend(['async', '_return_http_data_only', '_preload_content', '_request_timeout'])

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_lookml_dashboard_render_task" % key
            )
        params[key] = val
    del params['kwargs']

    # Validate every required parameter in one place instead of five
    # copy-pasted checks; the error message text is unchanged.
    for required in ('dashboard_id', 'result_format', 'body', 'width', 'height'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`create_lookml_dashboard_render_task`" % required)

    collection_formats = {}

    path_params = {}
    if 'dashboard_id' in params:
        path_params['dashboard_id'] = params['dashboard_id']  # noqa: E501
    if 'result_format' in params:
        path_params['result_format'] = params['result_format']  # noqa: E501

    query_params = []
    for name in ('width', 'height', 'fields'):
        if name in params:
            query_params.append((name, params[name]))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # NOTE: `async` is a reserved keyword on Python >= 3.7, so it cannot be
    # written as a literal keyword argument (that is a SyntaxError). Route
    # all keywords through a dict so the call parses on both Python 2 and
    # modern Python 3 while call_api still receives the same arguments.
    call_kwargs = {
        'body': body_params,
        'post_params': form_params,
        'files': local_var_files,
        'response_type': 'RenderTask',  # noqa: E501
        'auth_settings': auth_settings,
        'async': params.get('async'),
        '_return_http_data_only': params.get('_return_http_data_only'),
        '_preload_content': params.get('_preload_content', True),
        '_request_timeout': params.get('_request_timeout'),
        'collection_formats': collection_formats,
    }
    return self.api_client.call_api(
        '/render_tasks/lookml_dashboards/{dashboard_id}/{result_format}',
        'POST', path_params, query_params, header_params, **call_kwargs)
def create_query_render_task(self, query_id, result_format, width, height, **kwargs):  # noqa: E501
    """Create Query Render Task  # noqa: E501

    ### Create a new task to render an existing query to an image. Returns
    a render task object. Poll [Get Render Task]
    (#!/RenderTask/get_render_task) with the render_task.id until the task
    completes, then fetch the output via [Get Render Task Results]
    (#!/RenderTask/get_render_task_results). Synchronous by default; pass
    async=True for an asynchronous request.  # noqa: E501

    :param async bool
    :param int query_id: Id of the query to render (required)
    :param str result_format: Output type: png or jpg (required)
    :param int width: Output width in pixels (required)
    :param int height: Output height in pixels (required)
    :param str fields: Requested fields.
    :return: RenderTask
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The helper already returns the request thread in async mode and the
    # deserialized data otherwise, so its result can be returned directly.
    return self.create_query_render_task_with_http_info(
        query_id, result_format, width, height, **kwargs)  # noqa: E501
def create_query_render_task_with_http_info(self, query_id, result_format, width, height, **kwargs):  # noqa: E501
    """Create Query Render Task  # noqa: E501

    ### Create a new task to render an existing query to an image. Returns
    a render task object. To check the status of a render task, pass the
    render_task.id to [Get Render Task](#!/RenderTask/get_render_task).
    Once the render task is complete, you can download the resulting
    document or image using [Get Render Task Results]
    (#!/RenderTask/get_render_task_results).  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    :param async bool
    :param int query_id: Id of the query to render (required)
    :param str result_format: Output type: png or jpg (required)
    :param int width: Output width in pixels (required)
    :param int height: Output height in pixels (required)
    :param str fields: Requested fields.
    :return: RenderTask
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when a required parameter is missing or None.
    """
    all_params = ['query_id', 'result_format', 'width', 'height', 'fields']  # noqa: E501
    all_params.extend(['async', '_return_http_data_only', '_preload_content', '_request_timeout'])

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_query_render_task" % key
            )
        params[key] = val
    del params['kwargs']

    # Validate every required parameter in one place instead of four
    # copy-pasted checks; the error message text is unchanged.
    for required in ('query_id', 'result_format', 'width', 'height'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`create_query_render_task`" % required)

    collection_formats = {}

    path_params = {}
    if 'query_id' in params:
        path_params['query_id'] = params['query_id']  # noqa: E501
    if 'result_format' in params:
        path_params['result_format'] = params['result_format']  # noqa: E501

    query_params = []
    for name in ('width', 'height', 'fields'):
        if name in params:
            query_params.append((name, params[name]))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # NOTE: `async` is a reserved keyword on Python >= 3.7, so it cannot be
    # written as a literal keyword argument (that is a SyntaxError). Route
    # all keywords through a dict so the call parses on both Python 2 and
    # modern Python 3 while call_api still receives the same arguments.
    call_kwargs = {
        'body': body_params,
        'post_params': form_params,
        'files': local_var_files,
        'response_type': 'RenderTask',  # noqa: E501
        'auth_settings': auth_settings,
        'async': params.get('async'),
        '_return_http_data_only': params.get('_return_http_data_only'),
        '_preload_content': params.get('_preload_content', True),
        '_request_timeout': params.get('_request_timeout'),
        'collection_formats': collection_formats,
    }
    return self.api_client.call_api(
        '/render_tasks/queries/{query_id}/{result_format}', 'POST',
        path_params, query_params, header_params, **call_kwargs)
def render_task(self, render_task_id, **kwargs):  # noqa: E501
    """Get Render Task  # noqa: E501

    ### Get information about a render task. Returns a render task object.
    Poll this endpoint (by passing render_task.id) to track progress; once
    the task completes, download the output with [Get Render Task Results]
    (#!/RenderTask/get_render_task_results). Synchronous by default; pass
    async=True for an asynchronous request.  # noqa: E501

    :param async bool
    :param str render_task_id: Id of render task (required)
    :param str fields: Requested fields.
    :return: RenderTask
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The helper already returns the request thread in async mode and the
    # deserialized data otherwise, so its result can be returned directly.
    return self.render_task_with_http_info(render_task_id, **kwargs)  # noqa: E501
def render_task_with_http_info(self, render_task_id, **kwargs):  # noqa: E501
    """Get Render Task  # noqa: E501

    ### Get information about a render task. Returns a render task object.
    To check the status of a render task, pass the render_task.id to
    [Get Render Task](#!/RenderTask/get_render_task). Once the render task
    is complete, you can download the resulting document or image using
    [Get Render Task Results](#!/RenderTask/get_render_task_results).  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    :param async bool
    :param str render_task_id: Id of render task (required)
    :param str fields: Requested fields.
    :return: RenderTask
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when render_task_id is missing or None.
    """
    all_params = ['render_task_id', 'fields']  # noqa: E501
    all_params.extend(['async', '_return_http_data_only', '_preload_content', '_request_timeout'])

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method render_task" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'render_task_id' is set
    if params.get('render_task_id') is None:
        raise ValueError("Missing the required parameter `render_task_id` when calling `render_task`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'render_task_id' in params:
        path_params['render_task_id'] = params['render_task_id']  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # NOTE: `async` is a reserved keyword on Python >= 3.7, so it cannot be
    # written as a literal keyword argument (that is a SyntaxError). Route
    # all keywords through a dict so the call parses on both Python 2 and
    # modern Python 3 while call_api still receives the same arguments.
    call_kwargs = {
        'body': body_params,
        'post_params': form_params,
        'files': local_var_files,
        'response_type': 'RenderTask',  # noqa: E501
        'auth_settings': auth_settings,
        'async': params.get('async'),
        '_return_http_data_only': params.get('_return_http_data_only'),
        '_preload_content': params.get('_preload_content', True),
        '_request_timeout': params.get('_request_timeout'),
        'collection_formats': collection_formats,
    }
    return self.api_client.call_api(
        '/render_tasks/{render_task_id}', 'GET',
        path_params, query_params, header_params, **call_kwargs)
def render_task_results(self, render_task_id, **kwargs):  # noqa: E501
    """Render Task Results  # noqa: E501

    ### Get the document or image produced by a completed render task. The
    PDF or image result is a binary blob in the HTTP response (see the
    response Content-Type), which may need different handling than JSON. If
    the task has not finished, the response is **202 Accepted** with an
    empty body and a Retry-After header. Returns 404 if the render task
    cannot be found, the cached result has expired, or the caller lacks
    permission. Poll [Render Task](#!/RenderTask/render_task) for detailed
    task status rather than polling this endpoint alone. Synchronous by
    default; pass async=True for an asynchronous request.  # noqa: E501

    :param async bool
    :param str render_task_id: Id of render task (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The helper already returns the request thread in async mode and the
    # deserialized data otherwise, so its result can be returned directly.
    return self.render_task_results_with_http_info(render_task_id, **kwargs)  # noqa: E501
def render_task_results_with_http_info(self, render_task_id, **kwargs):  # noqa: E501
    """Render Task Results  # noqa: E501

    ### Get the document or image produced by a completed render task. The
    PDF or image result is a binary blob in the HTTP response, as indicated
    by the Content-Type in the response headers. If the render task exists
    but has not finished rendering, the response HTTP status is **202
    Accepted** with an empty body and a Retry-After header. Returns 404 if
    the render task cannot be found, the cached result has expired, or the
    caller does not have permission to view the results. For detailed task
    status use [Render Task](#!/RenderTask/render_task); polling loops are
    better served by polling **render_task(id)** until completion instead
    of polling **render_task_results(id)** alone.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    :param async bool
    :param str render_task_id: Id of render task (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when render_task_id is missing or None.
    """
    all_params = ['render_task_id']  # noqa: E501
    all_params.extend(['async', '_return_http_data_only', '_preload_content', '_request_timeout'])

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method render_task_results" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'render_task_id' is set
    if params.get('render_task_id') is None:
        raise ValueError("Missing the required parameter `render_task_id` when calling `render_task_results`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'render_task_id' in params:
        path_params['render_task_id'] = params['render_task_id']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept` -- the result is binary image/PDF data.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['image/jpeg', 'image/png', 'application/pdf'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # NOTE: `async` is a reserved keyword on Python >= 3.7, so it cannot be
    # written as a literal keyword argument (that is a SyntaxError). Route
    # all keywords through a dict so the call parses on both Python 2 and
    # modern Python 3 while call_api still receives the same arguments.
    call_kwargs = {
        'body': body_params,
        'post_params': form_params,
        'files': local_var_files,
        'response_type': 'str',  # noqa: E501
        'auth_settings': auth_settings,
        'async': params.get('async'),
        '_return_http_data_only': params.get('_return_http_data_only'),
        '_preload_content': params.get('_preload_content', True),
        '_request_timeout': params.get('_request_timeout'),
        'collection_formats': collection_formats,
    }
    return self.api_client.call_api(
        '/render_tasks/{render_task_id}/results', 'GET',
        path_params, query_params, header_params, **call_kwargs)
| 52.113009
| 1,192
| 0.648066
| 4,999
| 39,658
| 4.947389
| 0.057411
| 0.087336
| 0.022319
| 0.021106
| 0.960335
| 0.955604
| 0.953016
| 0.9478
| 0.947194
| 0.939107
| 0
| 0.014382
| 0.265369
| 39,658
| 760
| 1,193
| 52.181579
| 0.834523
| 0.06551
| 0
| 0.794189
| 0
| 0
| 0.233499
| 0.064374
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.009685
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c75b70c846e48413c56ab469dc30166f6643df49
| 34,579
|
py
|
Python
|
mistral/tests/unit/notifiers/test_notify.py
|
liuzheng/mistral
|
3fd66f3b0575d909158595b19e687e7f1a6126fe
|
[
"Apache-2.0"
] | 3
|
2015-08-28T04:57:56.000Z
|
2017-03-27T10:59:56.000Z
|
mistral/tests/unit/notifiers/test_notify.py
|
liuzheng/mistral
|
3fd66f3b0575d909158595b19e687e7f1a6126fe
|
[
"Apache-2.0"
] | 21
|
2015-04-14T22:41:53.000Z
|
2019-02-20T09:30:10.000Z
|
mistral/tests/unit/notifiers/test_notify.py
|
liuzheng/mistral
|
3fd66f3b0575d909158595b19e687e7f1a6126fe
|
[
"Apache-2.0"
] | 12
|
2015-08-14T02:27:37.000Z
|
2020-12-31T10:09:21.000Z
|
# Copyright 2018 - Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import mock
from oslo_config import cfg
from mistral.db.v2 import api as db_api
from mistral.notifiers import base as notif
from mistral.notifiers import notification_events as events
from mistral.services import workbooks as wb_svc
from mistral.services import workflows as wf_svc
from mistral.tests.unit.notifiers import base
from mistral.workflow import states
from mistral_lib import actions as ml_actions
# Use set_default() (rather than direct assignment) to change config values;
# otherwise, in certain test cases, the change does not persist.
cfg.CONF.set_default('auth_enable', False, group='pecan')
# Module-level sink the mocked publishers append into via log_event().
EVENT_LOGS = []


def log_event(ex_id, data, event, timestamp, **kwargs):
    """Record the (execution id, event name) pair of a published notification.

    Used as the side_effect of the mocked publisher.publish() so tests can
    assert which events were emitted and in what order; the payload and
    timestamp are deliberately ignored.
    """
    entry = (ex_id, event)
    EVENT_LOGS.append(entry)
class NotifyEventsTest(base.NotifierTestCase):
def setUp(self):
    super(NotifyEventsTest, self).setUp()

    # Replace publish() on both publishers with mocks whose side effect
    # records every (execution id, event) pair into EVENT_LOGS.
    self.publishers = {}
    for alias, publisher_name in (('wbhk', 'webhook'), ('noop', 'noop')):
        publisher = notif.get_notification_publisher(publisher_name)
        publisher.publish = mock.MagicMock(side_effect=log_event)
        publisher.publish.reset_mock()
        self.publishers[alias] = publisher

    # Start each test with an empty event log.
    del EVENT_LOGS[:]
def tearDown(self):
# Reset the notifier config that some tests override via set_default().
cfg.CONF.set_default('notify', None, group='notifier')
super(NotifyEventsTest, self).tearDown()
def test_notify_all_explicit(self):
"""Explicitly subscribing to every event type publishes all six events."""
# NOTE(review): the indentation of this YAML literal appears lost in this
# copy of the file -- confirm against upstream before relying on it.
wf_def = """
version: '2.0'
wf:
tasks:
t1:
action: std.noop
on-success:
- t2
t2:
action: std.noop
"""
wf_svc.create_workflows(wf_def)
# Subscribe the webhook publisher to the full event list explicitly.
notify_options = [
{
'type': 'webhook',
'events': events.EVENTS
}
]
params = {'notify': notify_options}
wf_ex = self.engine.start_workflow('wf', '', **params)
self.await_workflow_success(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.SUCCESS, wf_ex.state)
self.assertIsNone(wf_ex.state_info)
self.assertEqual(2, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
t2_ex = self._assert_single_item(task_exs, name='t2')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
self.assertEqual(states.SUCCESS, t2_ex.state)
self.assertIsNone(t2_ex.state_info)
# One LAUNCHED/SUCCEEDED pair per task plus one pair for the workflow.
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertEqual(6, len(EVENT_LOGS))
self.assertIn((wf_ex.id, events.WORKFLOW_LAUNCHED), EVENT_LOGS)
self.assertIn((t1_ex.id, events.TASK_LAUNCHED), EVENT_LOGS)
self.assertIn((t1_ex.id, events.TASK_SUCCEEDED), EVENT_LOGS)
self.assertIn((t2_ex.id, events.TASK_LAUNCHED), EVENT_LOGS)
self.assertIn((t2_ex.id, events.TASK_SUCCEEDED), EVENT_LOGS)
self.assertIn((wf_ex.id, events.WORKFLOW_SUCCEEDED), EVENT_LOGS)
def test_notify_all_implicit(self):
"""A notify option without an 'events' key still yields all six events."""
wf_def = """
version: '2.0'
wf:
tasks:
t1:
action: std.noop
on-success:
- t2
t2:
action: std.noop
"""
wf_svc.create_workflows(wf_def)
# No 'events' key: contrast with test_notify_all_explicit.
notify_options = [{'type': 'webhook'}]
params = {'notify': notify_options}
wf_ex = self.engine.start_workflow('wf', '', **params)
self.await_workflow_success(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.SUCCESS, wf_ex.state)
self.assertIsNone(wf_ex.state_info)
self.assertEqual(2, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
t2_ex = self._assert_single_item(task_exs, name='t2')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
self.assertEqual(states.SUCCESS, t2_ex.state)
self.assertIsNone(t2_ex.state_info)
# Same six events as the explicit-subscription test.
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertEqual(6, len(EVENT_LOGS))
self.assertIn((wf_ex.id, events.WORKFLOW_LAUNCHED), EVENT_LOGS)
self.assertIn((t1_ex.id, events.TASK_LAUNCHED), EVENT_LOGS)
self.assertIn((t1_ex.id, events.TASK_SUCCEEDED), EVENT_LOGS)
self.assertIn((t2_ex.id, events.TASK_LAUNCHED), EVENT_LOGS)
self.assertIn((t2_ex.id, events.TASK_SUCCEEDED), EVENT_LOGS)
self.assertIn((wf_ex.id, events.WORKFLOW_SUCCEEDED), EVENT_LOGS)
def test_notify_order(self):
"""Events for a sequential two-task workflow arrive in execution order."""
wf_def = """
version: '2.0'
wf:
tasks:
t1:
action: std.noop
on-success:
- t2
t2:
action: std.noop
"""
wf_svc.create_workflows(wf_def)
notify_options = [
{'type': 'webhook'}
]
params = {'notify': notify_options}
wf_ex = self.engine.start_workflow('wf', '', **params)
self.await_workflow_success(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.SUCCESS, wf_ex.state)
self.assertIsNone(wf_ex.state_info)
self.assertEqual(2, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
t2_ex = self._assert_single_item(task_exs, name='t2')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
self.assertEqual(states.SUCCESS, t2_ex.state)
self.assertIsNone(t2_ex.state_info)
# Unlike the assertIn-based tests above, assertListEqual pins the order.
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_SUCCEEDED),
(t2_ex.id, events.TASK_LAUNCHED),
(t2_ex.id, events.TASK_SUCCEEDED),
(wf_ex.id, events.WORKFLOW_SUCCEEDED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
def test_notify_multiple(self):
"""With two publishers configured, every event is published once per publisher."""
self.assertFalse(self.publishers['wbhk'].publish.called)
self.assertFalse(self.publishers['noop'].publish.called)
wf_def = """
version: '2.0'
wf:
tasks:
t1:
action: std.noop
on-success:
- t2
t2:
action: std.noop
"""
wf_svc.create_workflows(wf_def)
# Two publishers; both share log_event, so EVENT_LOGS gets duplicates.
notify_options = [
{'type': 'webhook'},
{'type': 'noop'}
]
params = {'notify': notify_options}
wf_ex = self.engine.start_workflow('wf', '', **params)
self.await_workflow_success(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.SUCCESS, wf_ex.state)
self.assertIsNone(wf_ex.state_info)
self.assertEqual(2, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
t2_ex = self._assert_single_item(task_exs, name='t2')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
self.assertEqual(states.SUCCESS, t2_ex.state)
self.assertIsNone(t2_ex.state_info)
# Each event appears twice, back to back -- once per publisher.
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_SUCCEEDED),
(t1_ex.id, events.TASK_SUCCEEDED),
(t2_ex.id, events.TASK_LAUNCHED),
(t2_ex.id, events.TASK_LAUNCHED),
(t2_ex.id, events.TASK_SUCCEEDED),
(t2_ex.id, events.TASK_SUCCEEDED),
(wf_ex.id, events.WORKFLOW_SUCCEEDED),
(wf_ex.id, events.WORKFLOW_SUCCEEDED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertTrue(self.publishers['noop'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
def test_notify_from_cfg(self):
"""Publishers configured via the 'notifier.notify' config option receive all events."""
self.assertFalse(self.publishers['wbhk'].publish.called)
self.assertFalse(self.publishers['noop'].publish.called)
wf_def = """
version: '2.0'
wf:
tasks:
t1:
action: std.noop
on-success:
- t2
t2:
action: std.noop
"""
wf_svc.create_workflows(wf_def)
notify_options = [
{'type': 'webhook'},
{'type': 'noop'}
]
# Configure notification via config (JSON string) instead of start params;
# tearDown resets this option.
cfg.CONF.set_default(
'notify',
json.dumps(notify_options),
group='notifier'
)
# Note: no notify params are passed to start_workflow here.
wf_ex = self.engine.start_workflow('wf', '')
self.await_workflow_success(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.SUCCESS, wf_ex.state)
self.assertIsNone(wf_ex.state_info)
self.assertEqual(2, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
t2_ex = self._assert_single_item(task_exs, name='t2')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
self.assertEqual(states.SUCCESS, t2_ex.state)
self.assertIsNone(t2_ex.state_info)
# Each event appears twice, back to back -- once per publisher.
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_SUCCEEDED),
(t1_ex.id, events.TASK_SUCCEEDED),
(t2_ex.id, events.TASK_LAUNCHED),
(t2_ex.id, events.TASK_LAUNCHED),
(t2_ex.id, events.TASK_SUCCEEDED),
(t2_ex.id, events.TASK_SUCCEEDED),
(wf_ex.id, events.WORKFLOW_SUCCEEDED),
(wf_ex.id, events.WORKFLOW_SUCCEEDED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertTrue(self.publishers['noop'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
def test_notify_from_cfg_and_params(self):
"""Config-based and param-based notify options combine; both publishers fire."""
self.assertFalse(self.publishers['wbhk'].publish.called)
self.assertFalse(self.publishers['noop'].publish.called)
wf_def = """
version: '2.0'
wf:
tasks:
t1:
action: std.noop
on-success:
- t2
t2:
action: std.noop
"""
wf_svc.create_workflows(wf_def)
# 'noop' comes from config, 'webhook' from start params; tearDown resets
# the config option.
cfg.CONF.set_default(
'notify',
json.dumps([{'type': 'noop'}]),
group='notifier'
)
params = {'notify': [{'type': 'webhook'}]}
wf_ex = self.engine.start_workflow('wf', '', **params)
self.await_workflow_success(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.SUCCESS, wf_ex.state)
self.assertIsNone(wf_ex.state_info)
self.assertEqual(2, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
t2_ex = self._assert_single_item(task_exs, name='t2')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
self.assertEqual(states.SUCCESS, t2_ex.state)
self.assertIsNone(t2_ex.state_info)
# Each event appears twice, back to back -- once per publisher.
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_SUCCEEDED),
(t1_ex.id, events.TASK_SUCCEEDED),
(t2_ex.id, events.TASK_LAUNCHED),
(t2_ex.id, events.TASK_LAUNCHED),
(t2_ex.id, events.TASK_SUCCEEDED),
(t2_ex.id, events.TASK_SUCCEEDED),
(wf_ex.id, events.WORKFLOW_SUCCEEDED),
(wf_ex.id, events.WORKFLOW_SUCCEEDED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertTrue(self.publishers['noop'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
def test_workbook_notify(self):
# The 'notify' start param must propagate from the parent workflow
# (wb.wf1) into the subworkflow execution (wf2) launched by task t1,
# so subworkflow events are interleaved into the same event log.
wb_def = """
version: '2.0'
name: wb
workflows:
wf1:
tasks:
t1:
workflow: wf2
on-success:
- t2
t2:
action: std.noop
wf2:
tasks:
t1:
action: std.noop
"""
wb_svc.create_workbook_v2(wb_def)
notify_options = [{'type': 'webhook'}]
params = {'notify': notify_options}
wf1_ex = self.engine.start_workflow('wb.wf1', '', **params)
self.await_workflow_success(wf1_ex.id)
with db_api.transaction():
wf1_ex = db_api.get_workflow_execution(wf1_ex.id)
wf1_task_exs = wf1_ex.task_executions
wf1_t1_ex = self._assert_single_item(wf1_task_exs, name='t1')
wf1_t2_ex = self._assert_single_item(wf1_task_exs, name='t2')
# The subworkflow execution is found via its parent task execution.
wf1_t1_act_exs = db_api.get_workflow_executions(
task_execution_id=wf1_t1_ex.id
)
wf2_ex = wf1_t1_act_exs[0]
wf2_task_exs = wf2_ex.task_executions
wf2_t1_ex = self._assert_single_item(wf2_task_exs, name='t1')
self.assertEqual(states.SUCCESS, wf1_ex.state)
self.assertIsNone(wf1_ex.state_info)
self.assertEqual(2, len(wf1_task_exs))
self.assertEqual(states.SUCCESS, wf1_t1_ex.state)
self.assertIsNone(wf1_t1_ex.state_info)
self.assertEqual(states.SUCCESS, wf1_t2_ex.state)
self.assertIsNone(wf1_t2_ex.state_info)
self.assertEqual(1, len(wf1_t1_act_exs))
self.assertEqual(states.SUCCESS, wf2_ex.state)
self.assertIsNone(wf2_ex.state_info)
self.assertEqual(1, len(wf2_task_exs))
self.assertEqual(states.SUCCESS, wf2_t1_ex.state)
self.assertIsNone(wf2_t1_ex.state_info)
# wf2's full lifecycle nests inside wf1.t1's LAUNCHED/SUCCEEDED pair.
expected_order = [
(wf1_ex.id, events.WORKFLOW_LAUNCHED),
(wf1_t1_ex.id, events.TASK_LAUNCHED),
(wf2_ex.id, events.WORKFLOW_LAUNCHED),
(wf2_t1_ex.id, events.TASK_LAUNCHED),
(wf2_t1_ex.id, events.TASK_SUCCEEDED),
(wf2_ex.id, events.WORKFLOW_SUCCEEDED),
(wf1_t1_ex.id, events.TASK_SUCCEEDED),
(wf1_t2_ex.id, events.TASK_LAUNCHED),
(wf1_t2_ex.id, events.TASK_SUCCEEDED),
(wf1_ex.id, events.WORKFLOW_SUCCEEDED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
def test_notify_task_error(self):
# A task failure (std.fail in t2) must produce TASK_FAILED followed by
# WORKFLOW_FAILED notifications, with state_info populated on both the
# failed task and the workflow.
wf_def = """
version: '2.0'
wf:
tasks:
t1:
action: std.noop
on-success:
- t2
t2:
action: std.fail
"""
wf_svc.create_workflows(wf_def)
notify_options = [{'type': 'webhook'}]
params = {'notify': notify_options}
wf_ex = self.engine.start_workflow('wf', '', **params)
self.await_workflow_error(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.ERROR, wf_ex.state)
self.assertIsNotNone(wf_ex.state_info)
self.assertEqual(2, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
t2_ex = self._assert_single_item(task_exs, name='t2')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
self.assertEqual(states.ERROR, t2_ex.state)
self.assertIsNotNone(t2_ex.state_info)
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_SUCCEEDED),
(t2_ex.id, events.TASK_LAUNCHED),
(t2_ex.id, events.TASK_FAILED),
(wf_ex.id, events.WORKFLOW_FAILED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
def test_notify_task_transition_fail(self):
# The 'fail' engine command in on-complete fails the workflow even
# though t1 itself succeeded: t1 emits TASK_SUCCEEDED (no TASK_FAILED)
# and the workflow emits WORKFLOW_FAILED with no state_info.
wf_def = """
version: '2.0'
wf:
tasks:
t1:
action: std.noop
on-complete:
- fail
"""
wf_svc.create_workflows(wf_def)
notify_options = [{'type': 'webhook'}]
params = {'notify': notify_options}
wf_ex = self.engine.start_workflow('wf', '', **params)
self.await_workflow_error(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.ERROR, wf_ex.state)
self.assertIsNone(wf_ex.state_info)
self.assertEqual(1, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_SUCCEEDED),
(wf_ex.id, events.WORKFLOW_FAILED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
def test_notify_with_items_task(self):
# A with-items task must emit a single TASK_LAUNCHED/TASK_SUCCEEDED
# pair regardless of how many items (here 3) it iterates over.
wf_def = """
version: '2.0'
wf:
tasks:
t1:
with-items: i in <% list(range(0, 3)) %>
action: std.noop
on-success:
- t2
t2:
action: std.noop
"""
wf_svc.create_workflows(wf_def)
notify_options = [{'type': 'webhook'}]
params = {'notify': notify_options}
wf_ex = self.engine.start_workflow('wf', '', **params)
self.await_workflow_success(wf_ex.id)
# NOTE(review): presumably gives async notifications time to drain
# before asserting on EVENT_LOGS — confirm.
self._sleep(1)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.SUCCESS, wf_ex.state)
self.assertIsNone(wf_ex.state_info)
self.assertEqual(2, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
t2_ex = self._assert_single_item(task_exs, name='t2')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
self.assertEqual(states.SUCCESS, t2_ex.state)
self.assertIsNone(t2_ex.state_info)
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_SUCCEEDED),
(t2_ex.id, events.TASK_LAUNCHED),
(t2_ex.id, events.TASK_SUCCEEDED),
(wf_ex.id, events.WORKFLOW_SUCCEEDED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
def test_notify_pause_resume(self):
# Full pause/resume cycle driven through the engine API:
# WORKFLOW_PAUSED arrives while t1 (std.async_noop) is still running,
# the late TASK_SUCCEEDED lands after the pause, and WORKFLOW_RESUMED
# precedes the remaining task events.
wf_def = """
version: '2.0'
wf:
tasks:
t1:
action: std.async_noop
on-success:
- t2
t2:
action: std.noop
"""
wf_svc.create_workflows(wf_def)
notify_options = [{'type': 'webhook'}]
params = {'notify': notify_options}
wf_ex = self.engine.start_workflow('wf', '', **params)
self.await_workflow_running(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
t1_ex = self._assert_single_item(task_exs, name='t1')
t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id)
self.assertEqual(states.RUNNING, wf_ex.state)
self.assertEqual(1, len(task_exs))
self.assertEqual(states.RUNNING, t1_ex.state)
self.assertEqual(1, len(t1_act_exs))
self.assertEqual(states.RUNNING, t1_act_exs[0].state)
# Pause the workflow.
self.engine.pause_workflow(wf_ex.id)
self.await_workflow_paused(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
t1_ex = self._assert_single_item(task_exs, name='t1')
t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id)
# Workflow is paused but the task is still running as expected.
self.assertEqual(states.PAUSED, wf_ex.state)
self.assertEqual(1, len(task_exs))
self.assertEqual(states.RUNNING, t1_ex.state)
self.assertEqual(1, len(t1_act_exs))
self.assertEqual(states.RUNNING, t1_act_exs[0].state)
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(wf_ex.id, events.WORKFLOW_PAUSED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
# Complete action execution of task 1.
self.engine.on_action_complete(
t1_act_exs[0].id,
ml_actions.Result(data={'result': 'foobar'})
)
self.await_workflow_paused(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.PAUSED, wf_ex.state)
self.assertIsNone(wf_ex.state_info)
self.assertEqual(1, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
# t1 completes while the workflow stays paused; t2 must not start yet.
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(wf_ex.id, events.WORKFLOW_PAUSED),
(t1_ex.id, events.TASK_SUCCEEDED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
# Resume the workflow.
self.engine.resume_workflow(wf_ex.id)
self.await_workflow_success(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.SUCCESS, wf_ex.state)
self.assertIsNone(wf_ex.state_info)
self.assertEqual(2, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
t2_ex = self._assert_single_item(task_exs, name='t2')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
self.assertEqual(states.SUCCESS, t2_ex.state)
self.assertIsNone(t2_ex.state_info)
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(wf_ex.id, events.WORKFLOW_PAUSED),
(t1_ex.id, events.TASK_SUCCEEDED),
(wf_ex.id, events.WORKFLOW_RESUMED),
(t2_ex.id, events.TASK_LAUNCHED),
(t2_ex.id, events.TASK_SUCCEEDED),
(wf_ex.id, events.WORKFLOW_SUCCEEDED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
def test_notify_pause_resume_task(self):
# Pause/resume driven from the ACTION side (on_action_update) rather
# than the workflow API: TASK_PAUSED/TASK_RESUMED events appear and
# precede their WORKFLOW_* counterparts.
wf_def = """
version: '2.0'
wf:
tasks:
t1:
action: std.async_noop
on-success:
- t2
t2:
action: std.noop
"""
wf_svc.create_workflows(wf_def)
notify_options = [{'type': 'webhook'}]
params = {'notify': notify_options}
wf_ex = self.engine.start_workflow('wf', '', **params)
self.await_workflow_running(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
t1_ex = self._assert_single_item(task_exs, name='t1')
t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id)
self.assertEqual(states.RUNNING, wf_ex.state)
self.assertEqual(1, len(task_exs))
self.assertEqual(states.RUNNING, t1_ex.state)
self.assertEqual(1, len(t1_act_exs))
self.assertEqual(states.RUNNING, t1_act_exs[0].state)
# Pause the action execution of task 1.
self.engine.on_action_update(t1_act_exs[0].id, states.PAUSED)
self.await_workflow_paused(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
t1_ex = self._assert_single_item(task_exs, name='t1')
t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id)
# Pausing the action cascades: action, task and workflow all PAUSED.
self.assertEqual(states.PAUSED, wf_ex.state)
self.assertEqual(1, len(task_exs))
self.assertEqual(states.PAUSED, t1_ex.state)
self.assertEqual(1, len(t1_act_exs))
self.assertEqual(states.PAUSED, t1_act_exs[0].state)
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_PAUSED),
(wf_ex.id, events.WORKFLOW_PAUSED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
# Resume the action execution of task 1.
self.engine.on_action_update(t1_act_exs[0].id, states.RUNNING)
self.await_task_running(t1_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
t1_ex = self._assert_single_item(task_exs, name='t1')
t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id)
self.assertEqual(states.RUNNING, wf_ex.state)
self.assertEqual(1, len(task_exs))
self.assertEqual(states.RUNNING, t1_ex.state)
self.assertEqual(1, len(t1_act_exs))
self.assertEqual(states.RUNNING, t1_act_exs[0].state)
# Complete action execution of task 1.
self.engine.on_action_complete(
t1_act_exs[0].id,
ml_actions.Result(data={'result': 'foobar'})
)
# Wait for the workflow execution to complete.
self.await_workflow_success(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.SUCCESS, wf_ex.state)
self.assertIsNone(wf_ex.state_info)
self.assertEqual(2, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
t2_ex = self._assert_single_item(task_exs, name='t2')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
self.assertEqual(states.SUCCESS, t2_ex.state)
self.assertIsNone(t2_ex.state_info)
# TASK_RESUMED comes before WORKFLOW_RESUMED because
# this test resumed the workflow with on_action_update.
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_PAUSED),
(wf_ex.id, events.WORKFLOW_PAUSED),
(t1_ex.id, events.TASK_RESUMED),
(wf_ex.id, events.WORKFLOW_RESUMED),
(t1_ex.id, events.TASK_SUCCEEDED),
(t2_ex.id, events.TASK_LAUNCHED),
(t2_ex.id, events.TASK_SUCCEEDED),
(wf_ex.id, events.WORKFLOW_SUCCEEDED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
def test_notify_cancel(self):
# Cancelling the workflow (stop_workflow with CANCELLED) emits
# WORKFLOW_CANCELLED while the in-flight task keeps running; the task
# may still complete afterwards, appending TASK_SUCCEEDED, but the
# successor task t2 never starts.
wf_def = """
version: '2.0'
wf:
tasks:
t1:
action: std.async_noop
on-success:
- t2
t2:
action: std.noop
"""
wf_svc.create_workflows(wf_def)
notify_options = [{'type': 'webhook'}]
params = {'notify': notify_options}
wf_ex = self.engine.start_workflow('wf', '', **params)
self.await_workflow_running(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
t1_ex = self._assert_single_item(task_exs, name='t1')
t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id)
self.assertEqual(states.RUNNING, wf_ex.state)
self.assertEqual(1, len(task_exs))
self.assertEqual(states.RUNNING, t1_ex.state)
self.assertEqual(1, len(t1_act_exs))
self.assertEqual(states.RUNNING, t1_act_exs[0].state)
# Cancel the workflow.
self.engine.stop_workflow(wf_ex.id, states.CANCELLED)
self.await_workflow_cancelled(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
t1_ex = self._assert_single_item(task_exs, name='t1')
t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id)
# Workflow is cancelled but the task is still running as expected.
self.assertEqual(states.CANCELLED, wf_ex.state)
self.assertEqual(1, len(task_exs))
self.assertEqual(states.RUNNING, t1_ex.state)
self.assertEqual(1, len(t1_act_exs))
self.assertEqual(states.RUNNING, t1_act_exs[0].state)
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(wf_ex.id, events.WORKFLOW_CANCELLED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
# Complete action execution of task 1.
self.engine.on_action_complete(
t1_act_exs[0].id,
ml_actions.Result(data={'result': 'foobar'})
)
self.await_workflow_cancelled(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
self.assertEqual(states.CANCELLED, wf_ex.state)
self.assertIsNone(wf_ex.state_info)
self.assertEqual(1, len(task_exs))
t1_ex = self._assert_single_item(task_exs, name='t1')
self.assertEqual(states.SUCCESS, t1_ex.state)
self.assertIsNone(t1_ex.state_info)
# t1's late success is logged after the cancellation event.
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(wf_ex.id, events.WORKFLOW_CANCELLED),
(t1_ex.id, events.TASK_SUCCEEDED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
def test_notify_cancel_task(self):
# Cancelling from the ACTION side (on_action_update with CANCELLED)
# cascades upward: TASK_CANCELLED is emitted before
# WORKFLOW_CANCELLED, and action, task and workflow all end CANCELLED.
wf_def = """
version: '2.0'
wf:
tasks:
t1:
action: std.async_noop
on-success:
- t2
t2:
action: std.noop
"""
wf_svc.create_workflows(wf_def)
notify_options = [{'type': 'webhook'}]
params = {'notify': notify_options}
wf_ex = self.engine.start_workflow('wf', '', **params)
self.await_workflow_running(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
t1_ex = self._assert_single_item(task_exs, name='t1')
t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id)
self.assertEqual(states.RUNNING, wf_ex.state)
self.assertEqual(1, len(task_exs))
self.assertEqual(states.RUNNING, t1_ex.state)
self.assertEqual(1, len(t1_act_exs))
self.assertEqual(states.RUNNING, t1_act_exs[0].state)
# Cancel the action execution of task 1.
self.engine.on_action_update(t1_act_exs[0].id, states.CANCELLED)
self.await_workflow_cancelled(wf_ex.id)
with db_api.transaction():
wf_ex = db_api.get_workflow_execution(wf_ex.id)
task_exs = wf_ex.task_executions
t1_ex = self._assert_single_item(task_exs, name='t1')
t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id)
self.assertEqual(states.CANCELLED, wf_ex.state)
self.assertEqual(1, len(task_exs))
self.assertEqual(states.CANCELLED, t1_ex.state)
self.assertEqual(1, len(t1_act_exs))
self.assertEqual(states.CANCELLED, t1_act_exs[0].state)
expected_order = [
(wf_ex.id, events.WORKFLOW_LAUNCHED),
(t1_ex.id, events.TASK_LAUNCHED),
(t1_ex.id, events.TASK_CANCELLED),
(wf_ex.id, events.WORKFLOW_CANCELLED)
]
self.assertTrue(self.publishers['wbhk'].publish.called)
self.assertListEqual(expected_order, EVENT_LOGS)
| 33.345227
| 79
| 0.614072
| 4,367
| 34,579
| 4.566293
| 0.04992
| 0.036508
| 0.060178
| 0.050549
| 0.893185
| 0.877037
| 0.857229
| 0.841282
| 0.836417
| 0.824984
| 0
| 0.017384
| 0.276353
| 34,579
| 1,036
| 80
| 33.377413
| 0.779523
| 0.036554
| 0
| 0.816667
| 0
| 0
| 0.108127
| 0
| 0
| 0
| 0
| 0
| 0.301282
| 1
| 0.021795
| false
| 0
| 0.014103
| 0
| 0.037179
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c771b42bc981ae56a631b97c13177c07870316a9
| 13,103
|
py
|
Python
|
tests/test_find_dependencies.py
|
mrbean-bremen/pytest-find-dependencies
|
5a0869bf755d504ff433a93514ebb9ad2817bdd3
|
[
"MIT"
] | 2
|
2021-04-07T19:22:02.000Z
|
2022-01-16T15:58:28.000Z
|
tests/test_find_dependencies.py
|
mrbean-bremen/pytest-find-dependencies
|
5a0869bf755d504ff433a93514ebb9ad2817bdd3
|
[
"MIT"
] | 4
|
2021-12-25T07:09:15.000Z
|
2022-01-15T15:47:57.000Z
|
tests/test_find_dependencies.py
|
mrbean-bremen/pytest-find-dependencies
|
5a0869bf755d504ff433a93514ebb9ad2817bdd3
|
[
"MIT"
] | null | null | null |
import pytest
pytest_plugins = ["pytester"]
@pytest.fixture
def test_path(testdir):
testdir.tmpdir.join("pytest.ini").write(
"[pytest]\n" "console_output_style = classic"
)
yield testdir
def test_no_checks_if_not_configured(test_path):
    """Without --find-dependencies the suite runs exactly once, untouched."""
    test_path.makepyfile(
        test_one="""
def test_a(): pass
def test_b(): pass
def test_c(): pass
"""
    )
    res = test_path.runpytest("-v", "-p", "no:randomly", "-W ignore")
    res.assert_outcomes(passed=3, failed=0)
    res.stdout.fnmatch_lines([
        "test_one.py::test_a PASSED",
        "test_one.py::test_b PASSED",
        "test_one.py::test_c PASSED",
    ])
def test_no_checks_for_single_test(test_path):
    """A single collected test cannot depend on anything: option is ignored."""
    test_path.makepyfile(
        test_one="""
def test_a(): pass
"""
    )
    res = test_path.runpytest("-v", "--find-dependencies", "-W ignore")
    res.assert_outcomes(passed=1, failed=0)
    res.stdout.fnmatch_lines([
        "Only one test collected: ignoring option --find-dependencies",
        "test_one.py::test_a PASSED",
    ])
# A collection failure (forced via the generated conftest below) must
# abort the run before any dependency analysis happens.
def test_no_checks_if_collection_failed(test_path):
test_path.makepyfile(
test_one="""
def test_a(): pass
def test_b(): pass
"""
)
# The generated conftest marks collection as failed after collecting.
test_path.makepyfile(
conftest="""
def pytest_collection(session):
session.perform_collect()
session.testsfailed = 1
"""
)
result = test_path.runpytest("--find-dependencies", "-W ignore")
result.assert_outcomes(passed=0, failed=0)
result.stdout.fnmatch_lines([
"*Interrupted: 1 errors during collection*",
"*no tests ran*"
])
def test_no_dependencies(test_path):
    """Always-failing tests are excluded; no dependency must be reported."""
    test_path.makepyfile(
        test_one="""
def test_a(): pass
def test_b(): assert False
def test_c(): pass
"""
    )
    res = test_path.runpytest("--find-dependencies")
    res.stdout.fnmatch_lines([
        "The following tests are always failing and are "
        "excluded from the analysis:",
        " test_one.py::test_b",
        "No dependent tests found.",
    ])
def test_single_dependency_collect_only(test_path):
    """--collect-only only lists tests, even when analysis is requested."""
    test_path.makepyfile(
        test_one="""
flag = True
def test_a(): pass
def test_b(): assert flag
def test_c(): global flag; flag = False
"""
    )
    res = test_path.runpytest(
        "--find-dependencies", "--collect-only", "-p", "no:randomly"
    )
    res.stdout.fnmatch_lines([
        "collected 3 items",
        "<Module test_one.py>",
        " <Function test_a>",
        " <Function test_b>",
        " <Function test_c>",
    ])
def test_single_dependency_last_index(test_path):
    """A dependency on the last test in the module is found in 3 runs."""
    test_path.makepyfile(
        test_one="""
flag = True
def test_a(): pass
def test_b(): assert flag
def test_c(): global flag; flag = False
"""
    )
    res = test_path.runpytest("--find-dependencies", "-p", "no:randomly")
    res.stdout.fnmatch_lines([
        "Run dependency analysis for 3 tests.",
        "Executed 7 tests in 3 test runs.",
        "Dependent tests:",
        " test_one.py::test_b depends on test_one.py::test_c",
    ])
def test_single_dependency_first_index(test_path):
    """A dependency on the first test in the module is found in 2 runs."""
    test_path.makepyfile(
        test_one="""
flag = True
def test_a(): global flag; flag = False
def test_b(): assert flag
def test_c(): pass
"""
    )
    res = test_path.runpytest("--find-dependencies", "-p", "no:randomly")
    res.stdout.fnmatch_lines([
        "Run dependency analysis for 3 tests.",
        "Executed 6 tests in 2 test runs.",
        "Dependent tests:",
        " test_one.py::test_b depends on test_one.py::test_a",
    ])
def test_single_dependency1(test_path):
    """One dependency in a four-test module is resolved in 4 runs."""
    test_path.makepyfile(
        test_one="""
flag = True
def test_a(): pass
def test_b(): assert flag
def test_c(): global flag; flag = False
def test_d(): pass
"""
    )
    res = test_path.runpytest("--find-dependencies", "-p", "no:randomly")
    res.stdout.fnmatch_lines([
        "Run dependency analysis for 4 tests.",
        "Executed 11 tests in 4 test runs.",
        "Dependent tests:",
        " test_one.py::test_b depends on test_one.py::test_c",
    ])
def test_single_reversed_first(test_path):
    """--reversed-first finds the same dependency with one run fewer."""
    test_path.makepyfile(
        test_one="""
flag = True
def test_a(): pass
def test_b(): assert flag
def test_c(): global flag; flag = False
def test_d(): pass
"""
    )
    res = test_path.runpytest(
        "--find-dependencies", "--reversed-first", "-p", "no:randomly"
    )
    res.stdout.fnmatch_lines([
        "Run dependency analysis for 4 tests.",
        "Executed 10 tests in 3 test runs.",
        "Dependent tests:",
        " test_one.py::test_b depends on test_one.py::test_c",
    ])
def test_single_dependency2(test_path):
    """Binary search narrows one dependency among nine tests to 6 runs."""
    test_path.makepyfile(
        test_one="""
flag = True
def test_a(): pass
def test_b(): assert flag
def test_c(): pass
def test_d(): pass
def test_e(): pass
def test_f(): pass
def test_g(): global flag; flag = False
def test_h(): pass
def test_i(): pass
"""
    )
    res = test_path.runpytest("--find-dependencies", "-p", "no:randomly")
    res.stdout.fnmatch_lines([
        "Run dependency analysis for 9 tests.",
        "Executed 27 tests in 6 test runs.",
        "Dependent tests:",
        " test_one.py::test_b depends on test_one.py::test_g",
    ])
def test_single_dependency3(test_path):
    """A dependent test placed after its dependency is resolved in 4 runs."""
    test_path.makepyfile(
        test_one="""
flag = True
def test_a(): pass
def test_b(): global flag; flag = False
def test_c(): pass
def test_d(): pass
def test_e(): assert flag
def test_f(): pass
def test_g(): pass
"""
    )
    res = test_path.runpytest("--find-dependencies", "-p", "no:randomly")
    res.stdout.fnmatch_lines([
        "Run dependency analysis for 7 tests.",
        "Executed 19 tests in 4 test runs.",
        "Dependent tests:",
        " test_one.py::test_e depends on test_one.py::test_b",
    ])
def test_single_dependency1_with_randomly(test_path):
    """With pytest-randomly active, only the found dependency is stable
    enough to assert (run counts vary with the random order).
    """
    test_path.makepyfile(
        test_one="""
flag = True
def test_a(): pass
def test_b(): assert flag
def test_c(): global flag; flag = False
def test_d(): pass
"""
    )
    res = test_path.runpytest("--find-dependencies")
    res.stdout.fnmatch_lines([
        "Run dependency analysis for 4 tests.",
        "Dependent tests:",
        " test_one.py::test_b depends on test_one.py::test_c",
    ])
def test_single_dependency2_with_randomly(test_path):
    """Same scenario as test_single_dependency2, but with pytest-randomly
    active, so only the reported dependency (not run counts) is asserted.
    """
    test_path.makepyfile(
        test_one="""
flag = True
def test_a(): pass
def test_b(): assert flag
def test_c(): pass
def test_d(): pass
def test_e(): pass
def test_f(): pass
def test_g(): global flag; flag = False
def test_h(): pass
def test_i(): pass
"""
    )
    # Fix: this test previously passed "-p no:randomly", disabling the very
    # plugin it is named after; its *_with_randomly siblings run without it.
    res = test_path.runpytest("--find-dependencies")
    res.stdout.fnmatch_lines([
        "Run dependency analysis for 9 tests.",
        "Dependent tests:",
        " test_one.py::test_b depends on test_one.py::test_g",
    ])
def test_single_dependency3_with_randomly(test_path):
    """The dependency is still found when the run order is randomized."""
    test_path.makepyfile(
        test_one="""
flag = True
def test_a(): pass
def test_b(): global flag; flag = False
def test_c(): pass
def test_d(): pass
def test_e(): assert flag
def test_f(): pass
def test_g(): pass
"""
    )
    res = test_path.runpytest("--find-dependencies")
    res.stdout.fnmatch_lines([
        "Run dependency analysis for 7 tests.",
        "Dependent tests:",
        " test_one.py::test_e depends on test_one.py::test_b",
    ])
def test_two_dependencies(test_path):
    """Two tests depending on the same culprit are both reported."""
    test_path.makepyfile(
        test_one="""
flag = True
def test_a(): pass
def test_b(): assert flag
def test_c(): pass
def test_d(): assert flag
def test_e(): global flag; flag = False
def test_f(): pass
"""
    )
    res = test_path.runpytest("--find-dependencies", "-p", "no:randomly")
    res.stdout.fnmatch_lines([
        "Run dependency analysis for 6 tests.",
        "Executed 21 tests in 7 test runs.",
        "Dependent tests:",
        " test_one.py::test_b depends on test_one.py::test_e",
        " test_one.py::test_d depends on test_one.py::test_e",
    ])
# The shared state lives in a helper module (util.flag), not in the test
# module itself; the dependency must still be attributed to the tests.
# Here the writer (test_b) comes before the reader (test_e).
def test_single_dependency_in_other_module1(test_path):
test_path.makepyfile(
test_one="""
import util
def test_a(): pass
def test_b(): util.set_flag(False)
def test_c(): pass
def test_d(): pass
def test_e(): assert util.flag
def test_f(): pass
def test_g(): pass
"""
)
test_path.makepyfile(
util="""
flag = True
def set_flag(new_flag):
global flag
flag = new_flag
"""
)
result = test_path.runpytest("--find-dependencies", "-p", "no:randomly")
result.stdout.fnmatch_lines([
"Run dependency analysis for 7 tests.",
"Executed 19 tests in 4 test runs.",
"Dependent tests:",
" test_one.py::test_e depends on test_one.py::test_b"
])
# Mirror of test_single_dependency_in_other_module1 with the reader
# (test_b) BEFORE the writer (test_e); one extra run is needed.
def test_single_dependency_in_other_module2(test_path):
test_path.makepyfile(
test_one="""
import util
def test_a(): pass
def test_b(): assert util.flag
def test_c(): pass
def test_d(): pass
def test_e(): util.set_flag(False)
def test_f(): pass
def test_g(): pass
"""
)
test_path.makepyfile(
util="""
flag = True
def set_flag(new_flag):
global flag
flag = new_flag
"""
)
result = test_path.runpytest("--find-dependencies", "-p", "no:randomly")
result.stdout.fnmatch_lines([
"Run dependency analysis for 7 tests.",
"Executed 20 tests in 5 test runs.",
"Dependent tests:",
" test_one.py::test_b depends on test_one.py::test_e"
])
# A lock file persists on disk across test runs, so once test_d has run,
# test_b fails in every later run: reported as "failing permanently"
# rather than as an ordinary (reversible) dependency.
def test_permanent_dependency(test_path):
test_path.makepyfile(
test_one="""
import util
def test_a(): pass
def test_b(): assert not util.lock_exists()
def test_c(): pass
def test_d(): util.create_lock()
def test_e(): pass
"""
)
test_path.makepyfile(
util="""
import os
def create_lock():
with open("lock.lck", "w") as f:
f.write("test")
def lock_exists():
return os.path.exists("lock.lck")
"""
)
result = test_path.runpytest("--find-dependencies", "-p", "no:randomly")
result.stdout.fnmatch_lines([
"Run dependency analysis for 5 tests.",
"Executed 11 tests in 3 test runs.",
"Tests failing permanently after all tests have run:",
" test_one.py::test_b"
])
# Same persistent-lock scenario but with --reversed-first: the reversed
# run happens first, so test_b already fails in the initial run and is
# classified as always-failing / excluded instead of "permanent".
# NOTE(review): a distinct lock file name (lock1.lck) is used, presumably
# to avoid interference with test_permanent_dependency — confirm.
def test_permanent_dependency_reversed_first(test_path):
test_path.makepyfile(
test_one="""
import util
def test_a(): pass
def test_b(): assert not util.lock_exists()
def test_c(): pass
def test_d(): util.create_lock()
def test_e(): pass
"""
)
test_path.makepyfile(
util="""
import os
def create_lock():
with open("lock1.lck", "w") as f:
f.write("test")
def lock_exists():
return os.path.exists("lock1.lck")
"""
)
result = test_path.runpytest("--find-dependencies", "--reversed-first",
"-p", "no:randomly")
result.stdout.fnmatch_lines([
"Run dependency analysis for 5 tests.",
"Executed 10 tests in 2 test runs.",
"The following tests are always failing and are "
"excluded from the analysis:",
" test_one.py::test_b",
"No dependent tests found."
])
def test_ignored_tests_with_marker_no_dependency(test_path):
    """Tests carrying markers listed in --markers-to-ignore are excluded,
    leaving only 3 of the 6 tests in the analysis.
    """
    test_path.makepyfile(
        test_one="""
import pytest
def test_a(): pass
@pytest.mark.order(2)
def test_b(): pass
def test_c(): pass
def test_d(): pass
@pytest.mark.order(1)
def test_e(): pass
@pytest.mark.dependency
def test_f(): pass
"""
    )
    res = test_path.runpytest(
        "--find-dependencies",
        "-vv",
        "--markers-to-ignore=order,dependency",
        "-p",
        "no:randomly",
    )
    res.stdout.fnmatch_lines([
        "Run dependency analysis for 3 tests.",
        "Executed 6 tests in 2 test runs.",
        "No dependent tests found.",
    ])
| 27.819533
| 76
| 0.570862
| 1,637
| 13,103
| 4.342089
| 0.089188
| 0.117192
| 0.06964
| 0.060355
| 0.875352
| 0.855234
| 0.805571
| 0.796849
| 0.768852
| 0.748593
| 0
| 0.007583
| 0.305579
| 13,103
| 470
| 77
| 27.878723
| 0.773601
| 0
| 0
| 0.744526
| 0
| 0
| 0.583073
| 0.015722
| 0
| 0
| 0
| 0
| 0.048662
| 1
| 0.051095
| false
| 0.177616
| 0.019465
| 0
| 0.075426
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
c7b3b9486f38920de4e71ce9ed52320027acb2f0
| 55,347
|
py
|
Python
|
congress/tests/datalog/test_builtin.py
|
mail2nsrajesh/congress
|
a724dfb59c43a5e88e2b03e714a5f962d6976762
|
[
"Apache-2.0"
] | null | null | null |
congress/tests/datalog/test_builtin.py
|
mail2nsrajesh/congress
|
a724dfb59c43a5e88e2b03e714a5f962d6976762
|
[
"Apache-2.0"
] | null | null | null |
congress/tests/datalog/test_builtin.py
|
mail2nsrajesh/congress
|
a724dfb59c43a5e88e2b03e714a5f962d6976762
|
[
"Apache-2.0"
] | null | null | null |
#! /usr/bin/python
#
# Copyright (c) 2014 IBM, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from oslo_log import log as logging
from congress.datalog import base as datalog_base
from congress.datalog import builtin
from congress.datalog import compile
from congress import exception
from congress.policy_engines import agnostic
from congress.tests import base
from congress.tests import helper
LOG = logging.getLogger(__name__)
# Extra builtin definitions used by the TestBuiltins add/delete tests:
# 'comparison' gains f(x,y) (returns the larger argument) and a brand-new
# category 'newcategory' gains g(x,y) (returns the sum).
addmap = {
'comparison': [
{'func': 'f(x,y)', 'num_inputs': 2,
'code': lambda x, y: x if x > y else y}],
'newcategory': [
{'func': 'g(x,y)', 'num_inputs': 2, 'code': lambda x, y: x + y}]}
# Appends div(x,y) to the existing 'arithmetic' category.
# NOTE(review): 'code' here is a *string*, unlike the callables in addmap —
# looks deliberate for these map add/delete tests, but verify.
append_builtin = {'arithmetic': [{'func': 'div(x,y)',
'num_inputs': 2,
'code': 'lambda x,y: x / y'}]}
class TestBuiltins(base.TestCase):
    """Exercise add/delete/lookup operations of CongressBuiltinCategoryMap."""

    def setUp(self):
        super(TestBuiltins, self).setUp()
        # Fresh category map seeded from the stock builtin table; 'lt' is
        # kept around as a known-registered predicate for later tests.
        self.cbcmap = builtin.CongressBuiltinCategoryMap(
            builtin._builtin_map)
        self.predl = self.cbcmap.builtin('lt')

    def test_add_and_delete_map(self):
        # Adding then deleting the same map must leave the map unchanged.
        snapshot = self.cbcmap
        self.cbcmap.add_map(append_builtin)
        self.cbcmap.delete_map(append_builtin)
        self.assertTrue(self.cbcmap.mapequal(snapshot))

    def test_add_map_only(self):
        # After each add_map the new (or pre-existing) builtin is resolvable.
        self.cbcmap.add_map(append_builtin)
        self.assertIsNotNone(self.cbcmap.builtin('div'))
        self.cbcmap.add_map(addmap)
        self.assertIsNotNone(self.cbcmap.builtin('max'))

    def test_add_and_delete_builtin(self):
        # Deleting a single builtin undoes its addition via add_map.
        snapshot = self.cbcmap
        self.cbcmap.add_map(append_builtin)
        self.cbcmap.delete_builtin('arithmetic', 'div', 2)
        self.assertTrue(self.cbcmap.mapequal(snapshot))

    def test_string_pred_string(self):
        # The string form of the 'lt' predicate is not this malformed text.
        self.assertNotEqual(str(self.predl), 'ltc(x,y')

    def test_add_and_delete_to_category(self):
        # Insert/delete of a predicate within a category round-trips.
        snapshot = self.cbcmap
        pred = builtin.CongressBuiltinPred(
            'testfunc', ['x', 'y', 'z'], 1, lambda x: not x)
        self.cbcmap.insert_to_category('arithmetic', pred)
        self.cbcmap.delete_from_category('arithmetic', pred)
        self.assertTrue(self.cbcmap.mapequal(snapshot))

    def test_all_checks(self):
        # A predicate looked up from the map reports as registered.
        self.assertTrue(
            self.cbcmap.builtin_is_registered(self.cbcmap.builtin('lt')))

    def test_eval_builtin(self):
        # Builtin code objects are directly callable: plus(1,2)=3, gt(1,2)=False.
        plus_pred = self.cbcmap.builtin('plus')
        self.assertEqual(plus_pred.code(1, 2), 3)
        gt_pred = self.cbcmap.builtin('gt')
        self.assertFalse(gt_pred.code(1, 2))
# NOTE(thinrichs): this test will be removed once we remove bare builtins
class TestReorder(base.TestCase):
    """Tests for compile.reorder_for_safety using bare (un-namespaced) builtins.

    Each test feeds a Datalog rule through reorder_for_safety and checks
    either the reordered result (check) or that unsafe literals are reported
    in a PolicyException (check_err).
    """

    def check(self, input_string, correct_string, msg):
        """Reorder input_string and assert it parses equal to correct_string."""
        rule = compile.parse1(input_string)
        actual = compile.reorder_for_safety(rule)
        correct = compile.parse1(correct_string)
        if correct != actual:
            emsg = "Correct: " + str(correct)
            emsg += "; Actual: " + str(actual)
            self.fail(msg + " :: " + emsg)

    def check_err(self, input_string, unsafe_lit_strings, msg):
        """Assert reordering raises and names every literal in unsafe_lit_strings."""
        rule = compile.parse1(input_string)
        try:
            compile.reorder_for_safety(rule)
            self.fail("Failed to raise exception for " + input_string)
        except exception.PolicyException as e:
            errmsg = str(e)
            # parse then print to string so string rep same in err msg
            unsafe_lits = [str(compile.parse1(x)) for x in unsafe_lit_strings]
            missing_lits = [m for m in unsafe_lits
                            if m + " (vars" not in errmsg]
            if len(missing_lits) > 0:
                self.fail(
                    "Unsafe literals {} not reported in error: {}".format(
                        ";".join(missing_lits), errmsg))

    def test_reorder_builtins(self):
        self.check("p(x, z) :- q(x, y), plus(x, y, z)",
                   "p(x, z) :- q(x, y), plus(x, y, z)",
                   "No reorder")
        self.check("p(x, z) :- plus(x, y, z), q(x, y)",
                   "p(x, z) :- q(x, y), plus(x, y, z)",
                   "Basic reorder")
        self.check("p(x, z) :- q(x, y), r(w), plus(x, y, z), plus(z, w, y)",
                   "p(x, z) :- q(x, y), r(w), plus(x, y, z), plus(z, w, y)",
                   "Chaining: no reorder")
        self.check("p(x, z) :- q(x, y), plus(x, y, z), plus(z, w, y), r(w)",
                   "p(x, z) :- q(x, y), plus(x, y, z), r(w), plus(z, w, y)",
                   "Chaining: reorder")
        self.check("p(x) :- lt(t, v), plus(z, w, t), plus(z, u, v), "
                   "   plus(x, y, z), q(y), r(x), s(u), t(w) ",
                   "p(x) :- q(y), r(x), plus(x, y, z), s(u), plus(z, u, v), "
                   "   t(w), plus(z, w, t), lt(t, v)",
                   "Partial-order chaining")

    def test_unsafe_builtins(self):
        # an output
        self.check_err("p(x) :- q(x), plus(x, y, z)",
                       ["plus(x,y,z)"],
                       "Basic Unsafe input")
        self.check_err("p(x) :- q(x), r(z), plus(x, y, z)",
                       ["plus(x,y,z)"],
                       "Basic Unsafe input 2")
        self.check_err("p(x, z) :- plus(x, y, z), plus(z, y, x), "
                       "   plus(x, z, y)",
                       ["plus(x, y, z)", "plus(z, y, x)", "plus(x, z, y)"],
                       "Unsafe with cycle")
        # no outputs
        self.check_err("p(x) :- q(x), lt(x, y)",
                       ["lt(x,y)"],
                       "Basic Unsafe input, no outputs")
        self.check_err("p(x) :- q(y), lt(x, y)",
                       ["lt(x,y)"],
                       "Basic Unsafe input, no outputs 2")
        self.check_err("p(x, z) :- lt(x, y), lt(y, x)",
                       ["lt(x,y)", "lt(y, x)"],
                       "Unsafe with cycle, no outputs")
        # chaining
        self.check_err("p(x) :- q(x, y), plus(x, y, z), plus(z, 3, w), "
                       "   plus(w, t, u)",
                       ["plus(w, t, u)"],
                       "Unsafe chaining")
        self.check_err("p(x) :- q(x, y), plus(x, y, z), plus(z, 3, w), "
                       "   lt(w, t)",
                       ["lt(w, t)"],
                       "Unsafe chaining 2")

    def test_reorder_negation(self):
        self.check("p(x) :- q(x), not u(x), r(y), not s(x, y)",
                   "p(x) :- q(x), not u(x), r(y), not s(x, y)",
                   "No reordering")
        self.check("p(x) :- not q(x), r(x)",
                   "p(x) :- r(x), not q(x)",
                   "Basic")
        self.check("p(x) :- r(x), not q(x, y), s(y)",
                   "p(x) :- r(x), s(y), not q(x,y)",
                   "Partially safe")
        self.check("p(x) :- not q(x, y), not r(x), not r(x, z), "
                   "   t(x, y), u(x), s(z)",
                   "p(x) :- t(x,y), not q(x,y), not r(x), u(x), s(z), "
                   "   not r(x, z)",
                   "Complex")

    def test_unsafe_negation(self):
        self.check_err("p(x) :- not q(x)",
                       ["q(x)"],
                       "Basic")
        self.check_err("p(x) :- not q(x), not r(x)",
                       ["q(x)", "r(x)"],
                       "Cycle")
        self.check_err("p(x) :- not q(x, y), r(y)",
                       ["q(x, y)"],
                       "Partially safe")

    def test_reorder_builtins_negation(self):
        self.check("p(x) :- not q(z), plus(x, y, z), s(x), s(y)",
                   "p(x) :- s(x), s(y), plus(x, y, z), not q(z)",
                   "Basic")
        self.check("p(x) :- not q(z, w), plus(x, y, z), lt(z, w), "
                   "   plus(x, 3, w), s(x, y)",
                   "p(x) :- s(x,y), plus(x, y, z), plus(x, 3, w), "
                   "   not q(z, w), lt(z, w)",
                   "Partial order")

    def test_unsafe_builtins_negation(self):
        self.check_err("p(x) :- plus(x, y, z), not q(x, y)",
                       ['plus(x,y,z)', 'q(x,y)'],
                       'Unsafe cycle')
        self.check_err("p(x) :- plus(x, y, z), plus(z, w, t), not q(z, t),"
                       "   s(x), t(y)",
                       ['plus(z, w, t)', 'q(z, t)'],
                       'Unsafety propagates')
# Policy (theory) names shared by the runtime-based test classes below.
NREC_THEORY = 'non-recursive theory test'
MAT_THEORY = 'materialized view theory test'
# NOTE(thinrichs): this test will be removed once we remove bare builtins
class TestTheories(base.TestCase):
    """End-to-end tests of bare builtins through the agnostic.Runtime.

    test_builtins checks the evaluation *mechanism*; test_builtins_content
    checks the semantics of the individual builtin functions.  Each also
    runs against the materialized theory via the test_materialized_* entry
    points, which call the same method with th=MAT_THEORY.
    """

    def prep_runtime(self, code=None, msg=None, target=None):
        """Create a Runtime with both theories and insert `code` into `target`."""
        # compile source
        if msg is not None:
            LOG.debug(msg)
        if code is None:
            code = ""
        if target is None:
            target = NREC_THEORY
        run = agnostic.Runtime()
        run.create_policy(NREC_THEORY, abbr="NRT",
                          kind=datalog_base.NONRECURSIVE_POLICY_TYPE)
        run.create_policy(MAT_THEORY, abbr="MAT",
                          kind=datalog_base.MATERIALIZED_POLICY_TYPE)
        run.debug_mode()
        run.insert(code, target=target)
        return run

    def check_equal(self, actual_string, correct_string, msg):
        """Assert two Datalog result strings are equivalent (order-insensitive)."""
        self.assertTrue(helper.datalog_equal(
            actual_string, correct_string, msg))

    def test_materialized_builtins(self):
        self.test_builtins(MAT_THEORY)

    def test_builtins(self, th=NREC_THEORY):
        """Test the mechanism that implements builtins."""
        # NOTE: adjacent string literals below concatenate into one program
        # string with no separator between statements; the Datalog parser is
        # presumably statement-delimited, so this still parses as multiple
        # rules/facts -- confirm against compile's grammar.
        run = self.prep_runtime()
        run.insert('p(x) :- q(x,y), plus(x,y,z), r(z)'
                   'q(1,2)'
                   'q(2,3)'
                   'r(3)'
                   'r(5)', target=th)
        self.check_equal(run.select('p(x)', target=th), "p(1) p(2)", "Plus")
        run.delete('r(5)', target=th)
        self.check_equal(run.select('p(x)', target=th), "p(1)", "Plus")
        run = self.prep_runtime()
        run.insert('p(x) :- q(x,y), minus(x,y,z), r(z)'
                   'q(2,1)'
                   'q(3,1)'
                   'r(1)'
                   'r(4)', target=th)
        self.check_equal(run.select('p(x)', target=th), "p(2)", "Minus")
        run.delete('r(4)', target=th)
        run.insert('r(2)', target=th)
        self.check_equal(run.select('p(x)', target=th), "p(2) p(3)", "Minus")
        run = self.prep_runtime()
        run.insert('p(x, z) :- q(x,y), plus(x,y,z)'
                   'q(1,2)'
                   'q(2,3)', target=th)
        self.check_equal(run.select('p(x, y)', target=th),
                         "p(1, 3) p(2, 5)", "Plus")
        run = self.prep_runtime()
        run.insert('m(x) :- j(x,y), lt(x,y)'
                   'j(1,2)'
                   'j(3,2)', target=th)
        self.check_equal(run.select('m(x)', target=th), 'm(1)', "LT")
        run = self.prep_runtime()
        run.insert('m(x) :- j(x,y), lt(x,y), r(y)'
                   'j(1,2)'
                   'j(2,3)'
                   'j(3,2)'
                   'r(2)', target=th)
        self.check_equal(run.select('m(x)', target=th), 'm(1)', "LT 2")
        run = self.prep_runtime()
        run.insert('p(x,z) :- q(x), plus(x,1,z)'
                   'q(3)'
                   'q(5)', target=th)
        self.check_equal(run.select('p(x,z)', target=th),
                         'p(3, 4) p(5,6)', "Bound input")
        run = self.prep_runtime()
        run.insert('p(x) :- q(x), plus(x,1,5)'
                   'q(4)'
                   'q(5)', target=th)
        self.check_equal(run.select('p(x)', target=th),
                         'p(4)', "Bound output")
        run = self.prep_runtime()
        run.insert('p(x, z) :- plus(x,y,z), q(x), r(y)'
                   'q(4)'
                   'r(5)', target=th)
        self.check_equal(run.select('p(x, y)', target=th),
                         'p(4, 9)',
                         "Reordering")
        run = self.prep_runtime()
        run.insert('p(x, z) :- plus(x,y,z), q(x), q(y)'
                   'q(4)'
                   'q(5)', target=th)
        self.check_equal(run.select('p(x, y)', target=th),
                         'p(4, 9) p(4, 8) p(5, 9) p(5, 10)',
                         "Reordering with self joins")

    def test_materialized_builtins_content(self):
        self.test_builtins_content(MAT_THEORY)

    def test_builtins_content(self, th=NREC_THEORY):
        """Test the content of the builtins, not the mechanism."""
        def check_true(code, msg):
            # Expect exactly p(1) to be derivable in theory `th`.
            run = self.prep_runtime('')
            run.insert(code, target=th)
            self.check_equal(
                run.select('p(x)', target=th),
                'p(1)',
                msg)

        def check_false(code, msg):
            # Expect nothing to be derivable.
            # NOTE(review): this local assignment shadows the enclosing `th`,
            # so the false cases always run against NREC_THEORY even when
            # called via test_materialized_builtins_content -- confirm
            # whether that is intentional.
            th = NREC_THEORY
            run = self.prep_runtime('')
            run.insert(code, target=th)
            self.check_equal(
                run.select('p(x)', target=th),
                '',
                msg)

        #
        # Numbers
        #
        # int
        code = 'p(1) :- int(2,2)'
        check_true(code, "int")
        code = 'p(1) :- int(2.3, 2)'
        check_true(code, "int")
        code = 'p(1) :- int(2, 3.3)'
        check_false(code, "int")
        # float
        code = 'p(1) :- float(2,2.0)'
        check_true(code, "float")
        code = 'p(1) :- float(2.3,2.3)'
        check_true(code, "float")
        code = 'p(1) :- float(2,3.3)'
        # NOTE(review): msg says "int" but this exercises float -- looks like
        # a copy/paste slip in the failure label (harmless to behavior).
        check_false(code, "int")
        # plus
        code = 'p(1) :- plus(2,3,5)'
        check_true(code, "plus")
        code = 'p(1) :- plus(2,3,1)'
        check_false(code, "plus")
        # minus
        code = 'p(1) :- minus(5, 3, 2)'
        check_true(code, "minus")
        code = 'p(1) :- minus(5, 3, 6)'
        check_false(code, "minus")
        # minus negative: negative numbers should not be supported
        # code = 'p(1) :- minus(3, 5, x)'
        # check_false(code, "minus")
        # times
        code = 'p(1) :- mul(3, 5, 15)'
        check_true(code, "multiply")
        code = 'p(1) :- mul(2, 5, 1)'
        check_false(code, "multiply")
        # divides
        code = 'p(1) :- div(10, 2, 5)'
        check_true(code, "divides")
        code = 'p(1) :- div(10, 4, 2)'
        check_true(code, "integer divides")
        code = 'p(1) :- div(10, 4.0, 2.5)'
        check_true(code, "float divides")
        code = 'p(1) :- div(10.0, 3, 3.3)'
        check_false(code, "divides")
        #
        # Comparison
        #
        # less than
        code = 'p(1) :- lt(1, 3)'
        check_true(code, "lessthan")
        code = 'p(1) :- lt(5, 2)'
        check_false(code, "lessthan")
        # less than equal
        code = 'p(1) :- lteq(1, 3)'
        check_true(code, "lessthaneq")
        code = 'p(1) :- lteq(3, 3)'
        check_true(code, "lessthaneq")
        code = 'p(1) :- lteq(4, 3)'
        check_false(code, "lessthaneq")
        # greater than
        code = 'p(1) :- gt(9, 5)'
        check_true(code, "greaterthan")
        code = 'p(1) :- gt(5, 9)'
        check_false(code, "greaterthan")
        # greater than equal
        code = 'p(1) :- gteq(10, 5)'
        check_true(code, "greaterthaneq")
        code = 'p(1) :- gteq(10, 10)'
        check_true(code, "greaterthaneq")
        code = 'p(1) :- gteq(5, 20)'
        check_false(code, "greaterthaneq")
        # equal
        code = 'p(1) :- equal(5, 5)'
        check_true(code, "equal")
        code = 'p(1) :- equal(5, 7)'
        check_false(code, "equal")
        # max
        code = 'p(1) :- max(3, 4, 4)'
        check_true(code, "max")
        code = 'p(1) :- max(3, 7, 3)'
        check_false(code, "max")
        #
        # Strings
        #
        # len
        code = 'p(1) :- len("abcde", 5)'
        check_true(code, "Len")
        code = 'p(1) :- len("abcde", 7)'
        check_false(code, "Len")
        # concat
        code = 'p(1) :- concat("abc", "def", "abcdef")'
        check_true(code, "concat")
        code = 'p(1) :- concat("abc", "def", "zxy")'
        check_false(code, "concat")
        #
        # Datetime
        # We should make some of these more robust but can't do
        # that with the safety restrictions in place at the time
        # of writing.
        #
        # lessthan
        code = ('p(1) :- datetime_lt('
                '"Jan 1, 2014 10:00:00", "2014-01-02 10:00:00")')
        check_true(code, "True datetime_lt")
        code = ('p(1) :- datetime_lt('
                '"2014-01-03 10:00:00", "Jan 2, 2014 10:00:00")')
        check_false(code, "False datetime_lt")
        # lessthanequal
        code = ('p(1) :- datetime_lteq('
                '"Jan 1, 2014 10:00:00", "2014-01-02 10:00:00")')
        check_true(code, "True datetime_lteq")
        code = ('p(1) :- datetime_lteq('
                '"Jan 1, 2014 10:00:00", "2014-01-01 10:00:00")')
        check_true(code, "True datetime_lteq")
        code = ('p(1) :- datetime_lteq('
                '"2014-01-02 10:00:00", "Jan 1, 2014 10:00:00")')
        check_false(code, "False datetime_lteq")
        # greaterthan
        code = ('p(1) :- datetime_gt('
                '"Jan 5, 2014 10:00:00", "2014-01-02 10:00:00")')
        check_true(code, "True datetime_gt")
        code = ('p(1) :- datetime_gt('
                '"2014-01-03 10:00:00", "Feb 2, 2014 10:00:00")')
        check_false(code, "False datetime_gt")
        # greaterthanequal
        code = ('p(1) :- datetime_gteq('
                '"Jan 5, 2014 10:00:00", "2014-01-02 10:00:00")')
        check_true(code, "True datetime_gteq")
        code = ('p(1) :- datetime_gteq('
                '"Jan 5, 2014 10:00:00", "2014-01-05 10:00:00")')
        check_true(code, "True datetime_gteq")
        code = ('p(1) :- datetime_gteq('
                '"2014-01-02 10:00:00", "Mar 1, 2014 10:00:00")')
        check_false(code, "False datetime_gteq")
        # equal
        code = ('p(1) :- datetime_equal('
                '"Jan 5, 2014 10:00:00", "2014-01-05 10:00:00")')
        check_true(code, "True datetime_equal")
        code = ('p(1) :- datetime_equal('
                '"Jan 5, 2014 10:00:00", "2014-01-02 10:00:00")')
        check_false(code, "False datetime_equal")
        # plus
        code = ('p(1) :- datetime_plus('
                '"Jan 5, 2014 10:00:00", 3600, "2014-01-05 11:00:00")')
        check_true(code, "True datetime_plus")
        code = ('p(1) :- datetime_plus('
                '"Jan 5, 2014 10:00:00", "1:00:00", "2014-01-05 11:00:00")')
        check_true(code, "True datetime_plus")
        code = ('p(1) :- datetime_plus('
                '"Jan 5, 2014 10:00:00", 3600, "2014-01-05 12:00:00")')
        check_false(code, "False datetime_plus")
        # minus
        code = ('p(1) :- datetime_minus('
                '"Jan 5, 2014 10:00:00", "25:00:00", "2014-01-04 09:00:00")')
        check_true(code, "True datetime_minus")
        code = ('p(1) :- datetime_minus('
                '"Jan 5, 2014 10:00:00", 3600, "2014-01-05 09:00:00")')
        check_true(code, "True datetime_minus")
        code = ('p(1) :- datetime_minus('
                '"Jan 5, 2014 10:00:00", "9:00:00", "Jan 4, 2014 10:00:00")')
        check_false(code, "False datetime_minus")
        # to_seconds
        code = ('p(1) :- datetime_to_seconds('
                '"Jan 1, 1900 1:00:00", 3600)')
        check_true(code, "True datetime_to_seconds")
        code = ('p(1) :- datetime_to_seconds('
                '"Jan 1, 1900 1:00:00", 3601)')
        check_false(code, "False datetime_to_seconds")
        # extract_time
        code = ('p(1) :- extract_time('
                '"Jan 1, 1900 1:00:00", "01:00:00")')
        check_true(code, "True extract_time")
        code = ('p(1) :- extract_time('
                '"Jan 1, 1900 1:00:00", "02:00:00")')
        check_false(code, "False extract_time")
        # extract_date
        code = ('p(1) :- extract_date('
                '"Jan 1, 1900 1:00:00", "1900-01-01")')
        check_true(code, "True extract_date")
        code = ('p(1) :- extract_date('
                '"Jan 1, 1900 1:00:00", "2000-01-01")')
        check_false(code, "False extract_date")
        # pack_datetime
        code = ('p(1) :- pack_datetime(2000, 1, 1, 10, 5, 6, '
                '"2000-1-1 10:5:6")')
        check_true(code, "True pack_datetime")
        code = ('p(1) :- pack_datetime(2000, 1, 1, 10, 5, 6, '
                '"2000-1-1 10:5:20")')
        check_false(code, "False pack_datetime")
        # pack_date
        code = ('p(1) :- pack_date(2000, 1, 1, '
                '"2000-1-1")')
        check_true(code, "True pack_date")
        code = ('p(1) :- pack_date(2000, 1, 1, '
                '"2000-1-2")')
        check_false(code, "False pack_date")
        # pack_time
        code = ('p(1) :- pack_time(5, 6, 7, '
                '"5:6:7")')
        check_true(code, "True pack_time")
        code = ('p(1) :- pack_time(5, 6, 7, '
                '"10:6:7")')
        check_false(code, "False pack_time")
        # unpack_datetime
        code = ('p(1) :- unpack_datetime("2000-1-1 10:5:6", '
                '2000, 1, 1, 10, 5, 6)')
        check_true(code, "True unpack_datetime")
        code = ('p(1) :- unpack_datetime("2000-1-1 10:5:6", '
                '2000, 1, 1, 12, 5, 6)')
        check_false(code, "False unpack_datetime")
        # unpack_date
        code = ('p(1) :- unpack_date("2000-1-1 10:5:6", '
                '2000, 1, 1)')
        check_true(code, "True unpack_date")
        code = ('p(1) :- unpack_date("2000-1-1 10:5:6", '
                '2000, 1, 5)')
        check_false(code, "False unpack_date")
        # unpack_time
        code = ('p(1) :- unpack_time("2000-1-1 10:5:6", '
                '10, 5, 6)')
        check_true(code, "True unpack_time")
        code = ('p(1) :- unpack_time("2000-1-1 10:5:6", '
                '12, 5, 6)')
        check_false(code, "False unpack_time")
        # now
        # NOTE(review): this exercises now(x); the section label and the msg
        # string "True unpack_time" look copy-pasted from the block above.
        code = 'p(1) :- now(x)'
        check_true(code, "True unpack_time")
        #
        # Network Address IPv4
        #
        # ip equal
        code = ('p(1) :- ips_equal("192.0.2.1", "192.0.2.1")')
        check_true(code, "True ip_equal")
        code = ('p(1) :- ips_equal("192.0.2.1", "192.0.2.2")')
        check_false(code, "False ip_equal")
        # ip less than
        code = ('p(1) :- ips_lt("192.0.2.1", "192.0.2.2")')
        check_true(code, "True ip_lt")
        code = ('p(1) :- ips_lt("192.0.2.1", "192.0.2.1")')
        check_false(code, "False ip_lt")
        code = ('p(1) :- ips_lt("192.0.2.2", "192.0.2.1")')
        check_false(code, "False ip_lt")
        # ip less than equal
        code = ('p(1) :- ips_lteq("192.0.2.1", "192.0.2.1")')
        check_true(code, "True ip_lteq")
        code = ('p(1) :- ips_lteq("192.0.2.1", "192.0.2.2")')
        check_true(code, "True ip_lteq")
        code = ('p(1) :- ips_lteq("192.0.2.2", "192.0.2.1")')
        check_false(code, "False ip_lteq")
        # ip greater than
        code = ('p(1) :- ips_gt("192.0.2.2", "192.0.2.1")')
        check_true(code, "True ip_gt")
        code = ('p(1) :- ips_gt("192.0.2.1", "192.0.2.1")')
        check_false(code, "False ip_gt")
        code = ('p(1) :- ips_gt("192.0.2.1", "192.0.2.2")')
        check_false(code, "False ip_gt")
        # ip greater than equal
        code = ('p(1) :- ips_gteq("192.0.2.2", "192.0.2.1")')
        check_true(code, "True ip_gteq")
        code = ('p(1) :- ips_gteq("192.0.2.2", "192.0.2.2")')
        check_true(code, "True ip_gteq")
        code = ('p(1) :- ips_gteq("192.0.2.1", "192.0.2.2")')
        check_false(code, "False ip_gteq")
        # networks equal
        code = ('p(1) :- networks_equal("192.0.2.0/24", "192.0.2.112/24")')
        check_true(code, "True networks_equal")
        code = ('p(1) :- networks_equal("192.0.2.0/24", "192.0.3.0/24")')
        check_false(code, "False networks_equal")
        # networks overlap
        code = ('p(1) :- networks_overlap("192.0.2.0/23", "192.0.2.0/24")')
        check_true(code, "True networks_overlap")
        code = ('p(1) :- networks_overlap("192.0.2.0/24", "192.0.3.0/24")')
        check_false(code, "False networks_overlap")
        # ip in network
        code = ('p(1) :- ip_in_network("192.168.0.1", "192.168.0.0/24")')
        check_true(code, "True ip_in_network")
        code = ('p(1) :- ip_in_network("192.168.10.1", "192.168.0.0/24")')
        check_false(code, "False ip_in_network")
        #
        # Network Address IPv6
        #
        # ip equal
        code = ('p(1) :- ips_equal("::ffff:192.0.2.1", "::ffff:192.0.2.1")')
        check_true(code, "True ip_equal v6")
        code = ('p(1) :- ips_equal("::ffff:192.0.2.1", "::ffff:192.0.2.2")')
        check_false(code, "False ip_equal v6")
        # ip less than
        code = ('p(1) :- ips_lt("::ffff:192.0.2.1", "::ffff:192.0.2.2")')
        check_true(code, "True ip_lt v6")
        code = ('p(1) :- ips_lt("::ffff:192.0.2.1", "::ffff:192.0.2.1")')
        check_false(code, "False ip_lt v6")
        code = ('p(1) :- ips_lt("::ffff:192.0.2.2", "::ffff:192.0.2.1")')
        check_false(code, "False ip_lt v6")
        # ip less than equal
        code = ('p(1) :- ips_lteq("::ffff:192.0.2.1", "::ffff:192.0.2.1")')
        check_true(code, "True ip_lteq v6")
        code = ('p(1) :- ips_lteq("::ffff:192.0.2.1", "::ffff:192.0.2.2")')
        check_true(code, "True ip_lteq v6")
        code = ('p(1) :- ips_lteq("::ffff:192.0.2.2", "::ffff:192.0.2.1")')
        check_false(code, "False ip_lteq v6")
        # ip greater than
        code = ('p(1) :- ips_gt("::ffff:192.0.2.2", "::ffff:192.0.2.1")')
        check_true(code, "True ip_gt v6")
        code = ('p(1) :- ips_gt("::ffff:192.0.2.1", "::ffff:192.0.2.1")')
        check_false(code, "False ip_gt v6")
        code = ('p(1) :- ips_gt("::ffff:192.0.2.1", "::ffff:192.0.2.2")')
        check_false(code, "False ip_gt v6")
        # ip greater than equal
        code = ('p(1) :- ips_gteq("::ffff:192.0.2.2", "::ffff:192.0.2.1")')
        check_true(code, "True ip_gteq v6")
        code = ('p(1) :- ips_gteq("::ffff:192.0.2.2", "::ffff:192.0.2.2")')
        check_true(code, "True ip_gteq v6")
        code = ('p(1) :- ips_gteq("::ffff:192.0.2.1", "::ffff:192.0.2.2")')
        check_false(code, "False ip_gteq v6")
        # networks equal
        code = ('p(1) :- networks_equal("fe80::ffff:192.0.2.0/24",'
                ' "fe80::ffff:192.0.2.112/24")')
        check_true(code, "True networks_equal v6")
        code = ('p(1) :- networks_equal("fe80::ffff:192.0.2.0/24",'
                ' "ae80::ffff:192.0.2.0/24")')
        check_false(code, "False networks_equal v6")
        # networks overlap
        code = ('p(1) :- networks_overlap("fe80::ffff:192.0.2.0/23",'
                ' "fe80::ffff:192.0.2.0/24")')
        check_true(code, "True networks_overlap v6")
        code = ('p(1) :- networks_overlap("fe80::ffff:192.0.2.0/24",'
                ' "ae80::ffff:192.0.3.0/24")')
        check_false(code, "False networks_overlap v6")
        # ip in network
        code = ('p(1) :- ip_in_network("fe80::ffff:192.168.0.1",'
                ' "fe80::ffff:192.168.0.0/24")')
        check_true(code, "True ip_in_network v6")
        code = ('p(1) :- ip_in_network("fe80::ffff:192.168.10.1",'
                ' "ae80::ffff:192.168.10.1/24")')
        check_false(code, "False ip_in_network v6")
class TestNamedspacedReorder(base.TestCase):
    """Mirror of TestReorder using the 'builtin:' namespaced builtin syntax.

    NOTE(review): the class name looks like a typo for "Namespaced" (compare
    TestNamespacedTheories below); renaming would be safe only after checking
    for external references.
    """

    def check(self, input_string, correct_string, msg):
        """Reorder input_string and assert it parses equal to correct_string."""
        rule = compile.parse1(input_string)
        actual = compile.reorder_for_safety(rule)
        correct = compile.parse1(correct_string)
        if correct != actual:
            emsg = "Correct: " + str(correct)
            emsg += "; Actual: " + str(actual)
            self.fail(msg + " :: " + emsg)

    def check_err(self, input_string, unsafe_lit_strings, msg):
        """Assert reordering raises and names every literal in unsafe_lit_strings."""
        rule = compile.parse1(input_string)
        try:
            compile.reorder_for_safety(rule)
            self.fail("Failed to raise exception for " + input_string)
        except exception.PolicyException as e:
            errmsg = str(e)
            # parse then print to string so string rep same in err msg
            unsafe_lits = [str(compile.parse1(x)) for x in unsafe_lit_strings]
            missing_lits = [m for m in unsafe_lits
                            if m + " (vars" not in errmsg]
            if len(missing_lits) > 0:
                self.fail(
                    "Unsafe literals {} not reported in error: {}".format(
                        ";".join(missing_lits), errmsg))

    def test_reorder_builtins(self):
        self.check("p(x, z) :- q(x, y), builtin:plus(x, y, z)",
                   "p(x, z) :- q(x, y), builtin:plus(x, y, z)",
                   "No reorder")
        self.check("p(x, z) :- builtin:plus(x, y, z), q(x, y)",
                   "p(x, z) :- q(x, y), builtin:plus(x, y, z)",
                   "Basic reorder")
        self.check("p(x, z) :- q(x, y), r(w), builtin:plus(x, y, z), "
                   "   builtin:plus(z, w, y)",
                   "p(x, z) :- q(x, y), r(w), builtin:plus(x, y, z), "
                   "   builtin:plus(z, w, y)",
                   "Chaining: no reorder")
        self.check("p(x, z) :- q(x, y), builtin:plus(x, y, z), "
                   "   builtin:plus(z, w, y), r(w)",
                   "p(x, z) :- q(x, y), builtin:plus(x, y, z), r(w), "
                   "   builtin:plus(z, w, y)",
                   "Chaining: reorder")
        self.check("p(x) :- builtin:lt(t, v), builtin:plus(z, w, t), "
                   "   builtin:plus(z, u, v), "
                   "   builtin:plus(x, y, z), q(y), r(x), s(u), t(w) ",
                   "p(x) :- q(y), r(x), builtin:plus(x, y, z), s(u), "
                   "   builtin:plus(z, u, v), "
                   "   t(w), builtin:plus(z, w, t), builtin:lt(t, v)",
                   "Partial-order chaining")

    def test_unsafe_builtins(self):
        # an output
        self.check_err("p(x) :- q(x), builtin:plus(x, y, z)",
                       ["builtin:plus(x,y,z)"],
                       "Basic Unsafe input")
        self.check_err("p(x) :- q(x), r(z), builtin:plus(x, y, z)",
                       ["builtin:plus(x,y,z)"],
                       "Basic Unsafe input 2")
        self.check_err("p(x, z) :- builtin:plus(x, y, z), "
                       "   builtin:plus(z, y, x), builtin:plus(x, z, y)",
                       ["builtin:plus(x, y, z)", "builtin:plus(z, y, x)",
                        "builtin:plus(x, z, y)"],
                       "Unsafe with cycle")
        # no outputs
        self.check_err("p(x) :- q(x), builtin:lt(x, y)",
                       ["builtin:lt(x,y)"],
                       "Basic Unsafe input, no outputs")
        self.check_err("p(x) :- q(y), builtin:lt(x, y)",
                       ["builtin:lt(x,y)"],
                       "Basic Unsafe input, no outputs 2")
        self.check_err("p(x, z) :- builtin:lt(x, y), builtin:lt(y, x)",
                       ["builtin:lt(x,y)", "builtin:lt(y, x)"],
                       "Unsafe with cycle, no outputs")
        # chaining
        self.check_err("p(x) :- q(x, y), builtin:plus(x, y, z), "
                       "   builtin:plus(z, 3, w), builtin:plus(w, t, u)",
                       ["builtin:plus(w, t, u)"],
                       "Unsafe chaining")
        self.check_err("p(x) :- q(x, y), builtin:plus(x, y, z), "
                       "   builtin:plus(z, 3, w), builtin:lt(w, t)",
                       ["builtin:lt(w, t)"],
                       "Unsafe chaining 2")

    def test_reorder_negation(self):
        self.check("p(x) :- q(x), not u(x), r(y), not s(x, y)",
                   "p(x) :- q(x), not u(x), r(y), not s(x, y)",
                   "No reordering")
        self.check("p(x) :- not q(x), r(x)",
                   "p(x) :- r(x), not q(x)",
                   "Basic")
        self.check("p(x) :- r(x), not q(x, y), s(y)",
                   "p(x) :- r(x), s(y), not q(x,y)",
                   "Partially safe")
        self.check("p(x) :- not q(x, y), not r(x), not r(x, z), "
                   "   t(x, y), u(x), s(z)",
                   "p(x) :- t(x,y), not q(x,y), not r(x), u(x), s(z), "
                   "   not r(x, z)",
                   "Complex")

    def test_unsafe_negation(self):
        self.check_err("p(x) :- not q(x)",
                       ["q(x)"],
                       "Basic")
        self.check_err("p(x) :- not q(x), not r(x)",
                       ["q(x)", "r(x)"],
                       "Cycle")
        self.check_err("p(x) :- not q(x, y), r(y)",
                       ["q(x, y)"],
                       "Partially safe")

    def test_reorder_builtins_negation(self):
        self.check("p(x) :- not q(z), builtin:plus(x, y, z), s(x), s(y)",
                   "p(x) :- s(x), s(y), builtin:plus(x, y, z), not q(z)",
                   "Basic")
        self.check("p(x) :- not q(z, w), builtin:plus(x, y, z), "
                   "   builtin:lt(z, w), builtin:plus(x, 3, w), s(x, y)",
                   "p(x) :- s(x,y), builtin:plus(x, y, z), "
                   "   builtin:plus(x, 3, w), not q(z, w), builtin:lt(z, w)",
                   "Partial order")

    def test_unsafe_builtins_negation(self):
        self.check_err("p(x) :- builtin:plus(x, y, z), not q(x, y)",
                       ['builtin:plus(x,y,z)', 'q(x,y)'],
                       'Unsafe cycle')
        self.check_err("p(x) :- builtin:plus(x, y, z), builtin:plus(z, w, t),"
                       "   not q(z, t), s(x), t(y)",
                       ['builtin:plus(z, w, t)', 'q(z, t)'],
                       'Unsafety propagates')
class TestNamespacedTheories(base.TestCase):
def prep_runtime(self, code=None, msg=None, target=None):
# compile source
if msg is not None:
LOG.debug(msg)
if code is None:
code = ""
if target is None:
target = NREC_THEORY
run = agnostic.Runtime()
run.create_policy(NREC_THEORY, abbr="NRT",
kind=datalog_base.NONRECURSIVE_POLICY_TYPE)
run.create_policy(MAT_THEORY, abbr="MAT",
kind=datalog_base.MATERIALIZED_POLICY_TYPE)
run.debug_mode()
run.insert(code, target=target)
return run
def check_equal(self, actual_string, correct_string, msg):
self.assertTrue(helper.datalog_equal(
actual_string, correct_string, msg))
def test_materialized_builtins(self):
self.test_builtins(MAT_THEORY)
def test_builtins(self, th=NREC_THEORY):
"""Test the mechanism that implements builtins."""
run = self.prep_runtime()
run.insert('p(x) :- q(x,y), builtin:plus(x,y,z), r(z)'
'q(1,2)'
'q(2,3)'
'r(3)'
'r(5)', target=th)
self.check_equal(run.select('p(x)', target=th), "p(1) p(2)", "Plus")
run.delete('r(5)', target=th)
self.check_equal(run.select('p(x)', target=th), "p(1)", "Plus")
run = self.prep_runtime()
run.insert('p(x) :- q(x,y), builtin:minus(x,y,z), r(z)'
'q(2,1)'
'q(3,1)'
'r(1)'
'r(4)', target=th)
self.check_equal(run.select('p(x)', target=th), "p(2)", "Minus")
run.delete('r(4)', target=th)
run.insert('r(2)', target=th)
self.check_equal(run.select('p(x)', target=th), "p(2) p(3)", "Minus")
run = self.prep_runtime()
run.insert('p(x, z) :- q(x,y), builtin:plus(x,y,z)'
'q(1,2)'
'q(2,3)', target=th)
self.check_equal(run.select('p(x, y)', target=th),
"p(1, 3) p(2, 5)", "Plus")
run = self.prep_runtime()
run.insert('m(x) :- j(x,y), builtin:lt(x,y)'
'j(1,2)'
'j(3,2)', target=th)
self.check_equal(run.select('m(x)', target=th), 'm(1)', "LT")
run = self.prep_runtime()
run.insert('m(x) :- j(x,y), builtin:lt(x,y), r(y)'
'j(1,2)'
'j(2,3)'
'j(3,2)'
'r(2)', target=th)
self.check_equal(run.select('m(x)', target=th), 'm(1)', "LT 2")
run = self.prep_runtime()
run.insert('p(x,z) :- q(x), builtin:plus(x,1,z)'
'q(3)'
'q(5)', target=th)
self.check_equal(run.select('p(x,z)', target=th),
'p(3, 4) p(5,6)', "Bound input")
run = self.prep_runtime()
run.insert('p(x) :- q(x), builtin:plus(x,1,5)'
'q(4)'
'q(5)', target=th)
self.check_equal(run.select('p(x)', target=th),
'p(4)', "Bound output")
run = self.prep_runtime()
run.insert('p(x, z) :- builtin:plus(x,y,z), q(x), r(y)'
'q(4)'
'r(5)', target=th)
self.check_equal(run.select('p(x, y)', target=th),
'p(4, 9)',
"Reordering")
run = self.prep_runtime()
run.insert('p(x, z) :- builtin:plus(x,y,z), q(x), q(y)'
'q(4)'
'q(5)', target=th)
self.check_equal(run.select('p(x, y)', target=th),
'p(4, 9) p(4, 8) p(5, 9) p(5, 10)',
"Reordering with self joins")
def test_materialized_builtins_content(self):
self.test_builtins_content(MAT_THEORY)
def test_builtins_content(self, th=NREC_THEORY):
"""Test the content of the builtins, not the mechanism."""
def check_true(code, msg):
run = self.prep_runtime('')
run.insert(code, target=th)
self.check_equal(
run.select('p(x)', target=th),
'p(1)',
msg)
def check_false(code, msg):
th = NREC_THEORY
run = self.prep_runtime('')
run.insert(code, target=th)
self.check_equal(
run.select('p(x)', target=th),
'',
msg)
#
# Numbers
#
# int
code = 'p(1) :- builtin:int(2,2)'
check_true(code, "int")
code = 'p(1) :- builtin:int(2.3, 2)'
check_true(code, "int")
code = 'p(1) :- builtin:int(2, 3.3)'
check_false(code, "int")
# float
code = 'p(1) :- builtin:float(2,2.0)'
check_true(code, "float")
code = 'p(1) :- builtin:float(2.3,2.3)'
check_true(code, "float")
code = 'p(1) :- builtin:float(2,3.3)'
check_false(code, "int")
# plus
code = 'p(1) :- builtin:plus(2,3,5)'
check_true(code, "plus")
code = 'p(1) :- builtin:plus(2,3,1)'
check_false(code, "plus")
# minus
code = 'p(1) :- builtin:minus(5, 3, 2)'
check_true(code, "minus")
code = 'p(1) :- builtin:minus(5, 3, 6)'
check_false(code, "minus")
# minus negative: negative numbers should not be supported
# code = 'p(1) :- minus(3, 5, x)'
# check_false(code, "minus")
# times
code = 'p(1) :- builtin:mul(3, 5, 15)'
check_true(code, "multiply")
code = 'p(1) :- builtin:mul(2, 5, 1)'
check_false(code, "multiply")
# divides
code = 'p(1) :- builtin:div(10, 2, 5)'
check_true(code, "divides")
code = 'p(1) :- builtin:div(10, 4, 2)'
check_true(code, "integer divides")
code = 'p(1) :- builtin:div(10, 4.0, 2.5)'
check_true(code, "float divides")
code = 'p(1) :- builtin:div(10.0, 3, 3.3)'
check_false(code, "divides")
#
# Comparison
#
# less than
code = 'p(1) :- builtin:lt(1, 3)'
check_true(code, "lessthan")
code = 'p(1) :- builtin:lt(5, 2)'
check_false(code, "lessthan")
# less than equal
code = 'p(1) :- builtin:lteq(1, 3)'
check_true(code, "lessthaneq")
code = 'p(1) :- builtin:lteq(3, 3)'
check_true(code, "lessthaneq")
code = 'p(1) :- builtin:lteq(4, 3)'
check_false(code, "lessthaneq")
# greater than
code = 'p(1) :- builtin:gt(9, 5)'
check_true(code, "greaterthan")
code = 'p(1) :- builtin:gt(5, 9)'
check_false(code, "greaterthan")
# greater than equal
code = 'p(1) :- builtin:gteq(10, 5)'
check_true(code, "greaterthaneq")
code = 'p(1) :- builtin:gteq(10, 10)'
check_true(code, "greaterthaneq")
code = 'p(1) :- builtin:gteq(5, 20)'
check_false(code, "greaterthaneq")
# equal
code = 'p(1) :- builtin:equal(5, 5)'
check_true(code, "equal")
code = 'p(1) :- builtin:equal(5, 7)'
check_false(code, "equal")
# max
code = 'p(1) :- builtin:max(3, 4, 4)'
check_true(code, "max")
code = 'p(1) :- builtin:max(3, 7, 3)'
check_false(code, "max")
#
# Strings
#
# len
code = 'p(1) :- builtin:len("abcde", 5)'
check_true(code, "Len")
code = 'p(1) :- builtin:len("abcde", 7)'
check_false(code, "Len")
# concat
code = 'p(1) :- builtin:concat("abc", "def", "abcdef")'
check_true(code, "concat")
code = 'p(1) :- builtin:concat("abc", "def", "zxy")'
check_false(code, "concat")
#
# Datetime
# We should make some of these more robust but can't do
# that with the safety restrictions in place at the time
# of writing.
#
# lessthan
code = ('p(1) :- builtin:datetime_lt('
'"Jan 1, 2014 10:00:00", "2014-01-02 10:00:00")')
check_true(code, "True datetime_lt")
code = ('p(1) :- builtin:datetime_lt('
'"2014-01-03 10:00:00", "Jan 2, 2014 10:00:00")')
check_false(code, "False datetime_lt")
# lessthanequal
code = ('p(1) :- builtin:datetime_lteq('
'"Jan 1, 2014 10:00:00", "2014-01-02 10:00:00")')
check_true(code, "True datetime_lteq")
code = ('p(1) :- builtin:datetime_lteq('
'"Jan 1, 2014 10:00:00", "2014-01-01 10:00:00")')
check_true(code, "True datetime_lteq")
code = ('p(1) :- builtin:datetime_lteq('
'"2014-01-02 10:00:00", "Jan 1, 2014 10:00:00")')
check_false(code, "False datetime_lteq")
# greaterthan
code = ('p(1) :- builtin:datetime_gt('
'"Jan 5, 2014 10:00:00", "2014-01-02 10:00:00")')
check_true(code, "True datetime_gt")
code = ('p(1) :- builtin:datetime_gt('
'"2014-01-03 10:00:00", "Feb 2, 2014 10:00:00")')
check_false(code, "False datetime_gt")
# greaterthanequal
code = ('p(1) :- builtin:datetime_gteq('
'"Jan 5, 2014 10:00:00", "2014-01-02 10:00:00")')
check_true(code, "True datetime_gteq")
code = ('p(1) :- builtin:datetime_gteq('
'"Jan 5, 2014 10:00:00", "2014-01-05 10:00:00")')
check_true(code, "True datetime_gteq")
code = ('p(1) :- builtin:datetime_gteq('
'"2014-01-02 10:00:00", "Mar 1, 2014 10:00:00")')
check_false(code, "False datetime_gteq")
# equal
code = ('p(1) :- builtin:datetime_equal('
'"Jan 5, 2014 10:00:00", "2014-01-05 10:00:00")')
check_true(code, "True datetime_equal")
code = ('p(1) :- builtin:datetime_equal('
'"Jan 5, 2014 10:00:00", "2014-01-02 10:00:00")')
check_false(code, "False datetime_equal")
# plus
code = ('p(1) :- builtin:datetime_plus('
'"Jan 5, 2014 10:00:00", 3600, "2014-01-05 11:00:00")')
check_true(code, "True datetime_plus")
code = ('p(1) :- builtin:datetime_plus('
'"Jan 5, 2014 10:00:00", "1:00:00", "2014-01-05 11:00:00")')
check_true(code, "True datetime_plus")
code = ('p(1) :- builtin:datetime_plus('
'"Jan 5, 2014 10:00:00", 3600, "2014-01-05 12:00:00")')
check_false(code, "False datetime_plus")
# minus
code = ('p(1) :- builtin:datetime_minus('
'"Jan 5, 2014 10:00:00", "25:00:00", "2014-01-04 09:00:00")')
check_true(code, "True datetime_minus")
code = ('p(1) :- builtin:datetime_minus('
'"Jan 5, 2014 10:00:00", 3600, "2014-01-05 09:00:00")')
check_true(code, "True datetime_minus")
code = ('p(1) :- builtin:datetime_minus('
'"Jan 5, 2014 10:00:00", "9:00:00", "Jan 4, 2014 10:00:00")')
check_false(code, "False datetime_minus")
# to_seconds
code = ('p(1) :- builtin:datetime_to_seconds('
'"Jan 1, 1900 1:00:00", 3600)')
check_true(code, "True datetime_to_seconds")
code = ('p(1) :- builtin:datetime_to_seconds('
'"Jan 1, 1900 1:00:00", 3601)')
check_false(code, "False datetime_to_seconds")
# extract_time
code = ('p(1) :- builtin:extract_time('
'"Jan 1, 1900 1:00:00", "01:00:00")')
check_true(code, "True extract_time")
code = ('p(1) :- builtin:extract_time('
'"Jan 1, 1900 1:00:00", "02:00:00")')
check_false(code, "False extract_time")
# extract_date
code = ('p(1) :- builtin:extract_date('
'"Jan 1, 1900 1:00:00", "1900-01-01")')
check_true(code, "True extract_date")
code = ('p(1) :- builtin:extract_date('
'"Jan 1, 1900 1:00:00", "2000-01-01")')
check_false(code, "False extract_date")
# pack_datetime
code = ('p(1) :- builtin:pack_datetime(2000, 1, 1, 10, 5, 6, '
'"2000-1-1 10:5:6")')
check_true(code, "True pack_datetime")
code = ('p(1) :- builtin:pack_datetime(2000, 1, 1, 10, 5, 6, '
'"2000-1-1 10:5:20")')
check_false(code, "False pack_datetime")
# pack_date
code = ('p(1) :- builtin:pack_date(2000, 1, 1, '
'"2000-1-1")')
check_true(code, "True pack_date")
code = ('p(1) :- builtin:pack_date(2000, 1, 1, '
'"2000-1-2")')
check_false(code, "False pack_date")
# pack_time
code = ('p(1) :- builtin:pack_time(5, 6, 7, '
'"5:6:7")')
check_true(code, "True pack_time")
code = ('p(1) :- builtin:pack_time(5, 6, 7, '
'"10:6:7")')
check_false(code, "False pack_time")
# unpack_datetime
code = ('p(1) :- builtin:unpack_datetime("2000-1-1 10:5:6", '
'2000, 1, 1, 10, 5, 6)')
check_true(code, "True unpack_datetime")
code = ('p(1) :- builtin:unpack_datetime("2000-1-1 10:5:6", '
'2000, 1, 1, 12, 5, 6)')
check_false(code, "False unpack_datetime")
# unpack_date
code = ('p(1) :- builtin:unpack_date("2000-1-1 10:5:6", '
'2000, 1, 1)')
check_true(code, "True unpack_date")
code = ('p(1) :- builtin:unpack_date("2000-1-1 10:5:6", '
'2000, 1, 5)')
check_false(code, "False unpack_date")
# unpack_time
code = ('p(1) :- builtin:unpack_time("2000-1-1 10:5:6", '
'10, 5, 6)')
check_true(code, "True unpack_time")
code = ('p(1) :- builtin:unpack_time("2000-1-1 10:5:6", '
'12, 5, 6)')
check_false(code, "False unpack_time")
# unpack_time
code = 'p(1) :- builtin:now(x)'
check_true(code, "True unpack_time")
#
# Network Address IPv4
#
# ip equal
code = ('p(1) :- builtin:ips_equal("192.0.2.1", "192.0.2.1")')
check_true(code, "True ip_equal")
code = ('p(1) :- builtin:ips_equal("192.0.2.1", "192.0.2.2")')
check_false(code, "False ip_equal")
# ip less than
code = ('p(1) :- builtin:ips_lt("192.0.2.1", "192.0.2.2")')
check_true(code, "True ip_lt")
code = ('p(1) :- builtin:ips_lt("192.0.2.1", "192.0.2.1")')
check_false(code, "False ip_lt")
code = ('p(1) :- builtin:ips_lt("192.0.2.2", "192.0.2.1")')
check_false(code, "False ip_lt")
# ip less than equal
code = ('p(1) :- builtin:ips_lteq("192.0.2.1", "192.0.2.1")')
check_true(code, "True ip_lteq")
code = ('p(1) :- builtin:ips_lteq("192.0.2.1", "192.0.2.2")')
check_true(code, "True ip_lteq")
code = ('p(1) :- builtin:ips_lteq("192.0.2.2", "192.0.2.1")')
check_false(code, "False ip_lteq")
# ip greater than
code = ('p(1) :- builtin:ips_gt("192.0.2.2", "192.0.2.1")')
check_true(code, "True ip_gt")
code = ('p(1) :- builtin:ips_gt("192.0.2.1", "192.0.2.1")')
check_false(code, "False ip_gt")
code = ('p(1) :- builtin:ips_gt("192.0.2.1", "192.0.2.2")')
check_false(code, "False ip_gt")
# ip greater than equal
code = ('p(1) :- builtin:ips_gteq("192.0.2.2", "192.0.2.1")')
check_true(code, "True ip_gteq")
code = ('p(1) :- builtin:ips_gteq("192.0.2.2", "192.0.2.2")')
check_true(code, "True ip_gteq")
code = ('p(1) :- builtin:ips_gteq("192.0.2.1", "192.0.2.2")')
check_false(code, "False ip_gteq")
# networks equal
code = ('p(1) :- builtin:networks_equal("192.0.2.0/24", '
'"192.0.2.112/24")')
check_true(code, "True networks_equal")
code = ('p(1) :- builtin:networks_equal("192.0.2.0/24", '
'"192.0.3.0/24")')
check_false(code, "False networks_equal")
# networks overlap
code = ('p(1) :- builtin:networks_overlap("192.0.2.0/23", '
'"192.0.2.0/24")')
check_true(code, "True networks_overlap")
code = ('p(1) :- builtin:networks_overlap("192.0.2.0/24", '
'"192.0.3.0/24")')
check_false(code, "False networks_overlap")
# ip in network
code = ('p(1) :- builtin:ip_in_network("192.168.0.1", '
'"192.168.0.0/24")')
check_true(code, "True ip_in_network")
code = ('p(1) :- builtin:ip_in_network("192.168.10.1", '
'"192.168.0.0/24")')
check_false(code, "False ip_in_network")
#
# Network Address IPv6
#
# ip equal
code = ('p(1) :- builtin:ips_equal("::ffff:192.0.2.1", '
' "::ffff:192.0.2.1")')
check_true(code, "True ip_equal v6")
code = ('p(1) :- builtin:ips_equal("::ffff:192.0.2.1", '
' "::ffff:192.0.2.2")')
check_false(code, "False ip_equal v6")
# ip less than
code = ('p(1) :- builtin:ips_lt("::ffff:192.0.2.1", '
' "::ffff:192.0.2.2")')
check_true(code, "True ip_lt v6")
code = ('p(1) :- builtin:ips_lt("::ffff:192.0.2.1", '
' "::ffff:192.0.2.1")')
check_false(code, "False ip_lt v6")
code = ('p(1) :- builtin:ips_lt("::ffff:192.0.2.2", '
' "::ffff:192.0.2.1")')
check_false(code, "False ip_lt v6")
# ip less than equal
code = ('p(1) :- builtin:ips_lteq("::ffff:192.0.2.1", '
' "::ffff:192.0.2.1")')
check_true(code, "True ip_lteq v6")
code = ('p(1) :- builtin:ips_lteq("::ffff:192.0.2.1", '
' "::ffff:192.0.2.2")')
check_true(code, "True ip_lteq v6")
code = ('p(1) :- builtin:ips_lteq("::ffff:192.0.2.2", '
' "::ffff:192.0.2.1")')
check_false(code, "False ip_lteq v6")
# ip greater than
code = ('p(1) :- builtin:ips_gt("::ffff:192.0.2.2", '
' "::ffff:192.0.2.1")')
check_true(code, "True ip_gt v6")
code = ('p(1) :- builtin:ips_gt("::ffff:192.0.2.1", '
' "::ffff:192.0.2.1")')
check_false(code, "False ip_gt v6")
code = ('p(1) :- builtin:ips_gt("::ffff:192.0.2.1", '
' "::ffff:192.0.2.2")')
check_false(code, "False ip_gt v6")
# ip greater than equal
code = ('p(1) :- builtin:ips_gteq("::ffff:192.0.2.2", '
' "::ffff:192.0.2.1")')
check_true(code, "True ip_gteq v6")
code = ('p(1) :- builtin:ips_gteq("::ffff:192.0.2.2", '
' "::ffff:192.0.2.2")')
check_true(code, "True ip_gteq v6")
code = ('p(1) :- builtin:ips_gteq("::ffff:192.0.2.1", '
' "::ffff:192.0.2.2")')
check_false(code, "False ip_gteq v6")
# networks equal
code = ('p(1) :- builtin:networks_equal("fe80::ffff:192.0.2.0/24",'
' "fe80::ffff:192.0.2.112/24")')
check_true(code, "True networks_equal v6")
code = ('p(1) :- builtin:networks_equal("fe80::ffff:192.0.2.0/24",'
' "ae80::ffff:192.0.2.0/24")')
check_false(code, "False networks_equal v6")
# networks overlap
code = ('p(1) :- builtin:networks_overlap("fe80::ffff:192.0.2.0/23",'
' "fe80::ffff:192.0.2.0/24")')
check_true(code, "True networks_overlap v6")
code = ('p(1) :- builtin:networks_overlap("fe80::ffff:192.0.2.0/24",'
' "ae80::ffff:192.0.3.0/24")')
check_false(code, "False networks_overlap v6")
# ip in network
code = ('p(1) :- builtin:ip_in_network("fe80::ffff:192.168.0.1",'
' "fe80::ffff:192.168.0.0/24")')
check_true(code, "True ip_in_network v6")
code = ('p(1) :- builtin:ip_in_network("fe80::ffff:192.168.10.1",'
' "ae80::ffff:192.168.10.1/24")')
check_false(code, "False ip_in_network v6")
| 35.410749
| 78
| 0.476105
| 7,827
| 55,347
| 3.256037
| 0.044589
| 0.018207
| 0.052737
| 0.056622
| 0.928115
| 0.919443
| 0.909751
| 0.904807
| 0.899784
| 0.888523
| 0
| 0.089419
| 0.338465
| 55,347
| 1,562
| 79
| 35.433419
| 0.606626
| 0.052595
| 0
| 0.731429
| 0
| 0.155238
| 0.401868
| 0.070321
| 0
| 0
| 0
| 0
| 0.010476
| 1
| 0.038095
| false
| 0
| 0.010476
| 0
| 0.055238
| 0.000952
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c7be613c508eb22fc72f784e7057778b90208596
| 29,438
|
py
|
Python
|
spider.seniortesting.club/projects/spider_yanzhi/spider_yanzhi/useragents.py
|
seniortesting/websites
|
a54a5384f2e987cb5217233fa28dfb8271806677
|
[
"MIT"
] | null | null | null |
spider.seniortesting.club/projects/spider_yanzhi/spider_yanzhi/useragents.py
|
seniortesting/websites
|
a54a5384f2e987cb5217233fa28dfb8271806677
|
[
"MIT"
] | null | null | null |
spider.seniortesting.club/projects/spider_yanzhi/spider_yanzhi/useragents.py
|
seniortesting/websites
|
a54a5384f2e987cb5217233fa28dfb8271806677
|
[
"MIT"
] | null | null | null |
# User-Agent strings for the WeChat in-app browser ("MicroMessenger"),
# covering both Android and iOS clients.  Presumably used to rotate the
# User-Agent header when crawling pages that expect WeChat traffic --
# TODO confirm against the spider's request middleware.
USER_AGENT_WECHAT = [
    'Mozilla/5.0 (Linux; Android 7.1.1; MI 6 Build/NMF26X; wv) AppleWebKit/537.36 (KHTML, like Gecko) Mobile/15E148 MicroMessenger/6.6.1.1220(0x26060135) NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (Linux; Android 7.1.1; OD103 Build/NMF26F; wv) AppleWebKit/537.36 (KHTML, like Gecko) Mobile/11D257 MicroMessenger/6.6.1.1220(0x26060135) NetType/4G Language/zh_CN',
    'Mozilla/5.0 (Linux; Android 6.0.1; SM919 Build/MXB48T; wv) AppleWebKit/537.36 (KHTML, like Gecko) Mobile/12A365 MicroMessenger/6.6.1.1220(0x26060135) NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (Linux; Android 5.1.1; vivo X6S A Build/LMY47V; wv) AppleWebKit/537.36 (KHTML, like Gecko) Mobile/13F69 MicroMessenger/6.6.1.1220(0x26060135) NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (Linux; Android 5.1; HUAWEI TAG-AL00 Build/HUAWEITAG-AL00; wv) AppleWebKit/537.36 (KHTML, like Gecko) Mobile/15C202 MicroMessenger/6.6.1.1220(0x26060135) NetType/4G Language/zh_CN',
    'Mozilla/5.0 (Linux; Android 5.0; SM-N9100 Build/LRX21V) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/37.0.0.0 Mobile Safari/537.36 MicroMessenger/6.0.2.56_r958800.520 NetType/WIFI',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Mobile/11D257 MicroMessenger/6.0.1 NetType/WIFI',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12A365 MicroMessenger/5.4.1 NetType/WIFI',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13F69 MicroMessenger/6.6.1 NetType/4G Language/zh_CN',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 11_2_2 like Mac OS X) AppleWebKit/604.4.7 (KHTML, like Gecko) Mobile/15C202 MicroMessenger/6.6.1 NetType/4G Language/zh_CN',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 12_2 like Mac OS X) AppleWebKit/604.3.5 (KHTML, like Gecko) Mobile/15B150 MicroMessenger/6.6.1 NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 12_2 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148 MicroMessenger/7.0.5(0x17000523) NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (iphone x Build/MXB48T; wv) AppleWebKit/537.36 (KHTML, like Gecko) Mobile/15C202 MicroMessenger/6.6.1.1220(0x26060135) NetType/WIFI Language/zh_CN',
]
# Mobile-browser User-Agent pool: all WeChat UAs plus a broad mix of
# iOS, Android (stock, vendor and third-party browsers), BlackBerry,
# WebOS, Symbian and Windows Phone strings.
# NOTE(review): several entries below are exact duplicates of earlier
# ones (e.g. the "MI 4S Build/LMY47V" and "Sprint APA9292KT" strings).
# If this list is sampled with random.choice, duplicates skew the
# selection weights; removing them would change behavior -- confirm
# with the callers before deduplicating.
USER_AGENT_MOBILE = USER_AGENT_WECHAT + [
    # IPhone
    "Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
    'Mozilla/5.0 (iPhone; CPU iPhone OS 9_3 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13E233 Safari/601.1',
    # IPod
    "Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
    # IPAD
    "Mozilla/5.0 (iPad; U; CPU OS 4_2_1 like Mac OS X; zh-cn) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148 Safari/6533.18.5",
    "Mozilla/5.0 (iPad; U; CPU OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
    # Android
    "Mozilla/5.0 (Linux; U; Android 2.2.1; zh-cn; HTC_Wildfire_A3333 Build/FRG83D) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    'Mozilla/5.0 (Linux; Android 7.1.1; MI 6 Build/NMF26X; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/57.0.2987.132 MQQBrowser/6.2 TBS/043807 Mobile Safari/537.36 MicroMessenger/6.6.1.1220(0x26060135) NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (Linux; Android 7.1.1; OD103 Build/NMF26F; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043632 Safari/537.36 MicroMessenger/6.6.1.1220(0x26060135) NetType/4G Language/zh_CN',
    'Mozilla/5.0 (Linux; Android 6.0.1; SM919 Build/MXB48T; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043632 Safari/537.36 MicroMessenger/6.6.1.1220(0x26060135) NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (Linux; Android 5.1.1; vivo X6S A Build/LMY47V; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043632 Safari/537.36 MicroMessenger/6.6.1.1220(0x26060135) NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (Linux; Android 5.1; HUAWEI TAG-AL00 Build/HUAWEITAG-AL00; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043622 Safari/537.36 MicroMessenger/6.6.1.1220(0x26060135) NetType/4G Language/zh_CN',
    'Mozilla/5.0 (Linux; Android 7.0; FRD-AL10 Build/HUAWEIFRD-AL10; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/62.0.3202.84 Mobile Safari/537.36 MicroMessenger/6.7.3.1360(0x26070336) NetType/WIFI Language/zh_CN Process/appbrand0',
    "Mozilla/5.0 (Linux; U; Android 1.5; en-us; sdk Build/CUPCAKE) AppleWebkit/528.5 (KHTML, like Gecko) Version/3.1.2 Mobile Safari/525.20.1",
    "Mozilla/5.0 (Linux; U; Android 2.1; en-us; Nexus One Build/ERD62) AppleWebKit/530.17 (KHTML, like Gecko) Version/4.0 Mobile Safari/530.17",
    "Mozilla/5.0 (Linux; U; Android 2.2; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (Linux; U; Android 1.5; en-us; htc_bahamas Build/CRB17) AppleWebKit/528.5 (KHTML, like Gecko) Version/3.1.2 Mobile Safari/525.20.1",
    "Mozilla/5.0 (Linux; U; Android 2.1-update1; de-de; HTC Desire 1.19.161.5 Build/ERE27) AppleWebKit/530.17 (KHTML, like Gecko) Version/4.0 Mobile Safari/530.17",
    "Mozilla/5.0 (Linux; U; Android 2.2; en-us; Sprint APA9292KT Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (Linux; U; Android 1.5; de-ch; HTC Hero Build/CUPCAKE) AppleWebKit/528.5 (KHTML, like Gecko) Version/3.1.2 Mobile Safari/525.20.1",
    "Mozilla/5.0 (Linux; U; Android 2.2; en-us; ADR6300 Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (Linux; U; Android 2.1; en-us; HTC Legend Build/cupcake) AppleWebKit/530.17 (KHTML, like Gecko) Version/4.0 Mobile Safari/530.17",
    "Mozilla/5.0 (Linux; U; Android 1.5; de-de; HTC Magic Build/PLAT-RC33) AppleWebKit/528.5 (KHTML, like Gecko) Version/3.1.2 Mobile Safari/525.20.1 FirePHP/0.3",
    "Mozilla/5.0 (Linux; U; Android 1.6; en-us; HTC_TATTOO_A3288 Build/DRC79) AppleWebKit/528.5 (KHTML, like Gecko) Version/3.1.2 Mobile Safari/525.20.1",
    "Mozilla/5.0 (Linux; U; Android 1.0; en-us; dream) AppleWebKit/525.10 (KHTML, like Gecko) Version/3.0.4 Mobile Safari/523.12.2",
    "Mozilla/5.0 (Linux; U; Android 1.5; en-us; T-Mobile G1 Build/CRB43) AppleWebKit/528.5 (KHTML, like Gecko) Version/3.1.2 Mobile Safari 525.20.1",
    "Mozilla/5.0 (Linux; U; Android 1.5; en-gb; T-Mobile_G2_Touch Build/CUPCAKE) AppleWebKit/528.5 (KHTML, like Gecko) Version/3.1.2 Mobile Safari/525.20.1",
    "Mozilla/5.0 (Linux; U; Android 2.0; en-us; Droid Build/ESD20) AppleWebKit/530.17 (KHTML, like Gecko) Version/4.0 Mobile Safari/530.17",
    "Mozilla/5.0 (Linux; U; Android 2.2; en-us; Droid Build/FRG22D) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (Linux; U; Android 2.0; en-us; Milestone Build/ SHOLS_U2_01.03.1) AppleWebKit/530.17 (KHTML, like Gecko) Version/4.0 Mobile Safari/530.17",
    "Mozilla/5.0 (Linux; U; Android 2.0.1; de-de; Milestone Build/SHOLS_U2_01.14.0) AppleWebKit/530.17 (KHTML, like Gecko) Version/4.0 Mobile Safari/530.17",
    "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/525.10 (KHTML, like Gecko) Version/3.0.4 Mobile Safari/523.12.2",
    "Mozilla/5.0 (Linux; U; Android 0.5; en-us) AppleWebKit/522 (KHTML, like Gecko) Safari/419.3",
    "Mozilla/5.0 (Linux; U; Android 1.1; en-gb; dream) AppleWebKit/525.10 (KHTML, like Gecko) Version/3.0.4 Mobile Safari/523.12.2",
    "Mozilla/5.0 (Linux; U; Android 2.0; en-us; Droid Build/ESD20) AppleWebKit/530.17 (KHTML, like Gecko) Version/4.0 Mobile Safari/530.17",
    "Mozilla/5.0 (Linux; U; Android 2.1; en-us; Nexus One Build/ERD62) AppleWebKit/530.17 (KHTML, like Gecko) Version/4.0 Mobile Safari/530.17",
    "Mozilla/5.0 (Linux; U; Android 2.2; en-us; Sprint APA9292KT Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (Linux; U; Android 2.2; en-us; ADR6300 Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (Linux; U; Android 2.2; en-ca; GT-P1000M Build/FROYO) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    "Mozilla/5.0 (Linux; U; Android 3.0.1; fr-fr; A500 Build/HRI66) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13",
    "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/525.10 (KHTML, like Gecko) Version/3.0.4 Mobile Safari/523.12.2",
    "Mozilla/5.0 (Linux; U; Android 1.6; es-es; SonyEricssonX10i Build/R1FA016) AppleWebKit/528.5 (KHTML, like Gecko) Version/3.1.2 Mobile Safari/525.20.1",
    "Mozilla/5.0 (Linux; U; Android 1.6; en-us; SonyEricssonX10i Build/R1AA056) AppleWebKit/528.5 (KHTML, like Gecko) Version/3.1.2 Mobile Safari/525.20.1",
    'Mozilla/5.0 (iPhone 84; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/603.3.8 (KHTML, like Gecko) Version/10.0 MQQBrowser/7.8.0 Mobile/14G60 Safari/8536.25 MttCustomUA/2 QBWebViewType/1 WKType/1',
    'Mozilla/5.0 (Linux; Android 7.0; STF-AL10 Build/HUAWEISTF-AL10; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 V1_AND_SQ_7.2.0_730_YYB_D QQ/7.2.0.3270 NetType/4G WebP/0.3.0 Pixel/1080',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/603.3.8 (KHTML, like Gecko) Mobile/14G60 MicroMessenger/6.5.18 NetType/WIFI Language/en',
    'Mozilla/5.0 (Linux; Android 5.1.1; vivo Xplay5A Build/LMY47V; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/48.0.2564.116 Mobile Safari/537.36 T7/9.3 baiduboxapp/9.3.0.10 (Baidu; P1 5.1.1)',
    'Mozilla/5.0 (Linux; U; Android 7.0; zh-cn; STF-AL00 Build/HUAWEISTF-AL00) AppleWebKit/537.36 (KHTML, like Gecko)Version/4.0 Chrome/37.0.0.0 MQQBrowser/7.9 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0; LEX626 Build/HEXCNFN5902606111S) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/35.0.1916.138 Mobile Safari/537.36 T7/7.4 baiduboxapp/8.3.1 (Baidu; P1 6.0)',
    'Mozilla/5.0 (iPhone 92; CPU iPhone OS 10_3_2 like Mac OS X) AppleWebKit/603.2.4 (KHTML, like Gecko) Version/10.0 MQQBrowser/7.7.2 Mobile/14F89 Safari/8536.25 MttCustomUA/2 QBWebViewType/1 WKType/1',
    'Mozilla/5.0 (Linux; U; Android 7.0; zh-CN; ZUK Z2121 Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/40.0.2214.89 UCBrowser/11.6.8.952 Mobile Safari/537.36',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Mobile/15A372 MicroMessenger/6.5.17 NetType/WIFI Language/zh_HK',
    'Mozilla/5.0 (Linux; U; Android 6.0.1; zh-CN; SM-C7000 Build/MMB29M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/40.0.2214.89 UCBrowser/11.6.2.948 Mobile Safari/537.36',
    'MQQBrowser/5.3/Mozilla/5.0 (Linux; Android 6.0; TCL 580 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/52.0.2743.98 Mobile Safari/537.36',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 10_2 like Mac OS X) AppleWebKit/602.3.12 (KHTML, like Gecko) Mobile/14C92 MicroMessenger/6.5.16 NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (Linux; U; Android 5.1.1; zh-cn; MI 4S Build/LMY47V) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.146 Mobile Safari/537.36 XiaoMi/MiuiBrowser/9.1.3',
    'Mozilla/5.0 (Linux; U; Android 7.0; zh-CN; SM-G9550 Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/40.0.2214.89 UCBrowser/11.7.0.953 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 5.1; m3 note Build/LMY47I; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/48.0.2564.116 Mobile Safari/537.36 T7/9.3 baiduboxapp/9.3.0.10 (Baidu; P1 5.1)',
    'Mozilla/5.0 (Linux; U; Android 5.1.1; zh-cn; MI 4S Build/LMY47V) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.146 Mobile Safari/537.36 XiaoMi/MiuiBrowser/9.1.3',
    'Mozilla/5.0 (Linux; U; Android 7.1.2; zh-cn; MI 5X Build/N2G47H) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.146 Mobile Safari/537.36 XiaoMi/MiuiBrowser/9.2.2',
    'Mozilla/5.0 (Linux; Android 7.0; MIX Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 V1_AND_SQ_7.2.0_730_YYB_D QQ/7.2.0.3270 NetType/WIFI WebP/0.3.0 Pixel/1080',
    'Mozilla/5.0 (Linux; Android 6.0.1; MI 4LTE Build/MMB29M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 V1_AND_SQ_7.1.5_708_YYB_D QQ/7.1.5.3215 NetType/4G WebP/0.3.0 Pixel/1080',
    'Mozilla/5.0 (Linux; Android 7.1.1; MI 6 Build/NMF26X) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/57.0.2987.132 Mobile Safari/537.36 Maxthon/3047',
    'Mozilla/5.0 (Linux; Android 6.0.1; MI 5s Build/MXB48T; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 V1_AND_SQ_7.2.0_730_YYB_D QQ/7.2.0.3270 NetType/WIFI WebP/0.3.0 Pixel/1080',
    'Mozilla/5.0 (Linux; Android 7.0; MI 5 Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 MicroMessenger/6.5.13.1100 NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (Linux; U; Android 6.0.1; zh-cn; MI 4LTE Build/MMB29M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.146 Mobile Safari/537.36 XiaoMi/MiuiBrowser/9.2.2',
    'Mozilla/5.0 (Linux; Android 7.1.1; MI 6 Build/NMF26X; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 MicroMessenger/6.5.13.1100 NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (Linux; U; Android 7.0; zh-CN; MI 5s Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/40.0.2214.89 UCBrowser/11.7.0.953 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 7.1.1; MI 6 Build/NMF26X; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 MicroMessenger/6.5.10.1080 NetType/4G Language/zh_CN',
    'Mozilla/5.0 (Linux; U; Android 6.0.1; zh-CN; MI 4LTE Build/MMB29M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/40.0.2214.89 UCBrowser/11.7.0.953 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; MI 5s Plus Build/MXB48T; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 MicroMessenger/6.5.8.1060 NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (Linux; U; Android 7.0; zh-cn; MIX Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.146 Mobile Safari/537.36 XiaoMi/MiuiBrowser/9.2.2',
    'Mozilla/5.0 (Linux; Android 6.0; MI 5 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 MicroMessenger/6.5.13.1100 NetType/4G Language/zh_CN',
    'Mozilla/5.0 (Linux; Android 4.4.4; HM NOTE 1LTE Build/KTU84P; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 V1_AND_SQ_6.6.9_482_YYB_D QQ/6.6.9.3060 NetType/WIFI WebP/0.3.0 Pixel/720',
    'Mozilla/5.0 (Linux; Android 6.0; Redmi Note 4X Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/55.0.2883.91 Mobile Safari/537.36 rabbit/1.0 baiduboxapp/7.1 (Baidu; P1 6.0)',
    'Mozilla/5.0 (Linux; Android 6.0; Redmi Note 4 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 MicroMessenger/6.5.13.1100 NetType/4G Language/zh_CN',
    'Mozilla/5.0 (Linux; U; Android 5.1.1; zh-cn; Redmi 3 Build/LMY47V) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.146 Mobile Safari/537.36 XiaoMi/MiuiBrowser/9.1.3',
    'Mozilla/5.0 (Linux; U; Android 6.0; zh-CN; Redmi Note 4 Build/MRA58K) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/40.0.2214.89 UCBrowser/11.7.0.953 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; Android 6.0.1; Redmi Note 3 Build/MMB29M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 MicroMessenger/6.5.13.1100 NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (Linux; Android 7.0; FRD-AL00 Build/HUAWEIFRD-AL00; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 V1_AND_SQ_7.1.0_0_TIM_D TIM2.0/1.2.0.1692 QQ/6.5.5 NetType/2G WebP/0.3.0 Pixel/1080 IMEI/869953022249635',
    'Mozilla/5.0 (Linux; Android 7.0; VTR-AL00 Build/HUAWEIVTR-AL00; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/53.0.2785.49 Mobile MQQBrowser/6.2 TBS/043508 Safari/537.36 wxwork/2.1.3 MicroMessenger/6.3.22',
    'Mozilla/5.0 (Linux; Android 6.0; HUAWEI NXT-AL10 Build/HUAWEINXT-AL10; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/48.0.2564.116 Mobile Safari/537.36 T7/9.3 baiduboxapp/9.3.0.10 (Baidu; P1 6.0)',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/603.3.8 (KHTML, like Gecko) Mobile/14G60 MicroMessenger/6.5.18 NetType/WIFI Language/en',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 10_2 like Mac OS X) AppleWebKit/602.3.12 (KHTML, like Gecko) Mobile/14C92 MicroMessenger/6.5.16 NetType/WIFI Language/zh_CN',
    'Mozilla/5.0 (Linux; U; Android 6.0.1; zh-CN; SM-C7000 Build/MMB29M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/40.0.2214.89 UCBrowser/11.6.2.948 Mobile Safari/537.36',
    'Mozilla/5.0 (Linux; U; Android 5.1; zh-cn; MX5 Build/LMY47I) AppleWebKit/537.36 (KHTML, like Gecko)Version/4.0 Chrome/37.0.0.0 MQQBrowser/7.8 Mobile Safari/537.36',
    # QQ Browser, Android version
    "MQQBrowser/26 Mozilla/5.0 (Linux; U; Android 2.3.7; zh-cn; MB200 Build/GRJ22; CyanogenMod-7) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1",
    # Android Opera Mobile
    "Opera/9.80 (Android 2.3.4; Linux; Opera Mobi/build-1107180945; U; en-GB) Presto/2.8.149 Version/11.10",
    # Android Pad Moto Xoom
    "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/534.13 (KHTML, like Gecko) Version/4.0 Safari/534.13",
    # BlackBerry
    "Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+",
    # WebOS HP Touchpad
    "Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0",
    # Nokia N97
    "Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124",
    # Windows Phone Mango
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)",
    # UC Browser
    "UCWEB7.0.2.37/28/999",
    "NOKIA5700/ UCWEB7.0.2.37/28/999",
    # UCOpenwave
    "Openwave/ UCWEB7.0.2.37/28/999",
    # UC Opera
    "Mozilla/4.0 (compatible; MSIE 6.0; ) Opera/UCWEB7.0.2.37/28/999",
    'Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13F69 MicroMessenger/6.6.1 NetType/4G Language/zh_CN',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 11_2_2 like Mac OS X) AppleWebKit/604.4.7 (KHTML, like Gecko) Mobile/15C202 MicroMessenger/6.6.1 NetType/4G Language/zh_CN',
    'Mozilla/5.0 (iPhone; CPU iPhone OS 11_1_1 like Mac OS X) AppleWebKit/604.3.5 (KHTML, like Gecko) Mobile/15B150 MicroMessenger/6.6.1 NetType/WIFI Language/zh_CN'
]
USER_AGENT_PC = [
# Opera
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 OPR/26.0.1656.60",
"Opera/8.0 (Windows NT 5.1; U; en)",
"Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 9.50",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; en) Opera 9.50",
# Firefox
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0",
"Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10",
# Safari
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2",
# chrome
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11",
"Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16",
# 360
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko",
# 淘宝浏览器
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11",
# 猎豹浏览器
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; LBBROWSER)",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E; LBBROWSER)",
# QQ浏览器
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; QQBrowser/7.0.3698.400)",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)",
# sogou浏览器
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; SE 2.X MetaSr 1.0)",
# maxthon浏览器
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.3.4000 Chrome/30.0.1599.101 Safari/537.36",
# UC浏览器
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 UBrowser/4.0.3214.0 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
"Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; AcooBrowser; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Acoo Browser; SLCC1; .NET CLR 2.0.50727; Media Center PC 5.0; .NET CLR 3.0.04506)",
"Mozilla/4.0 (compatible; MSIE 7.0; AOL 9.5; AOLBuild 4337.35; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
"Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)",
"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)",
"Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 3.0.04506.30)",
"Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/523.15 (KHTML, like Gecko, Safari/419.3) Arora/0.3 (Change: 287 c9dfb30)",
"Mozilla/5.0 (X11; U; Linux; en-US) AppleWebKit/527+ (KHTML, like Gecko, Safari/419.3) Arora/0.6",
"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.2pre) Gecko/20070215 K-Ninja/2.1.1",
"Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9) Gecko/20080705 Firefox/3.0 Kapiko/3.0",
"Mozilla/5.0 (X11; Linux i686; U;) Gecko/20070322 Kazehakase/0.4.5",
"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.8) Gecko Fedora/1.9.0.8-1.fc10 Kazehakase/0.5.6",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 Safari/535.20",
"Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; fr) Presto/2.9.168 Version/11.52",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; LBBROWSER)",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E; LBBROWSER)",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 LBBROWSER",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; QQBrowser/7.0.3698.400)",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; 360SE)",
"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)",
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.89 Safari/537.1",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.89 Safari/537.1",
"Mozilla/5.0 (iPad; U; CPU OS 4_2_1 like Mac OS X; zh-cn) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148 Safari/6533.18.5",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0b13pre) Gecko/20110307 Firefox/4.0b13pre",
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:16.0) Gecko/20100101 Firefox/16.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11",
"Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36",
# 微信
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 MicroMessenger/6.5.2.501 NetType/WIFI WindowsWechat'
]
| 132.008969
| 297
| 0.707045
| 5,720
| 29,438
| 3.615734
| 0.080594
| 0.017793
| 0.073107
| 0.087322
| 0.86481
| 0.849096
| 0.825017
| 0.805531
| 0.78005
| 0.756455
| 0
| 0.201376
| 0.131089
| 29,438
| 222
| 298
| 132.603604
| 0.607178
| 0.008391
| 0
| 0.210256
| 0
| 0.948718
| 0.940715
| 0.051913
| 0
| 0
| 0.004458
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.010256
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
c7c99327a66b0875059a6dbfa9fb6bcafa0eaa7c
| 34,880
|
py
|
Python
|
fhir/resources/DSTU2/tests/test_bundle.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
fhir/resources/DSTU2/tests/test_bundle.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
fhir/resources/DSTU2/tests/test_bundle.py
|
mmabey/fhir.resources
|
cc73718e9762c04726cd7de240c8f2dd5313cbe1
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 1.0.2.7202 on 2019-05-14.
# 2019, SMART Health IT.
import io
import json
import os
import unittest
from . import bundle
from .fhirdate import FHIRDate
class BundleTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get("FHIR_UNITTEST_DATADIR") or ""
with io.open(os.path.join(datadir, filename), "r", encoding="utf-8") as handle:
js = json.load(handle)
self.assertEqual("Bundle", js["resourceType"])
return bundle.Bundle(js)
def testBundle1(self):
inst = self.instantiate_from("practitioner-examples-general.json")
self.assertIsNotNone(inst, "Must have instantiated a Bundle instance")
self.implBundle1(inst)
js = inst.as_json()
self.assertEqual("Bundle", js["resourceType"])
inst2 = bundle.Bundle(js)
self.implBundle1(inst2)
def implBundle1(self, inst):
self.assertEqual(inst.entry[0].fullUrl, "http://hl7.org/fhir/Practitioner/13")
self.assertEqual(inst.entry[0].resource.id, "13")
self.assertEqual(
inst.entry[0].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[0].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[1].fullUrl, "http://hl7.org/fhir/Practitioner/14")
self.assertEqual(inst.entry[1].resource.id, "14")
self.assertEqual(
inst.entry[1].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[1].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[2].fullUrl, "http://hl7.org/fhir/Practitioner/15")
self.assertEqual(inst.entry[2].resource.id, "15")
self.assertEqual(
inst.entry[2].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[2].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[3].fullUrl, "http://hl7.org/fhir/Practitioner/16")
self.assertEqual(inst.entry[3].resource.id, "16")
self.assertEqual(
inst.entry[3].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[3].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[4].fullUrl, "http://hl7.org/fhir/Practitioner/17")
self.assertEqual(inst.entry[4].resource.id, "17")
self.assertEqual(
inst.entry[4].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[4].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[5].fullUrl, "http://hl7.org/fhir/Practitioner/18")
self.assertEqual(inst.entry[5].resource.id, "18")
self.assertEqual(
inst.entry[5].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[5].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[6].fullUrl, "http://hl7.org/fhir/Practitioner/19")
self.assertEqual(inst.entry[6].resource.id, "19")
self.assertEqual(
inst.entry[6].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[6].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[7].fullUrl, "http://hl7.org/fhir/Practitioner/20")
self.assertEqual(inst.entry[7].resource.id, "20")
self.assertEqual(
inst.entry[7].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[7].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[8].fullUrl, "http://hl7.org/fhir/Practitioner/21")
self.assertEqual(inst.entry[8].resource.id, "21")
self.assertEqual(
inst.entry[8].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[8].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[9].fullUrl, "http://hl7.org/fhir/Practitioner/22")
self.assertEqual(inst.entry[9].resource.id, "22")
self.assertEqual(
inst.entry[9].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[9].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.id, "3ad0687e-f477-468c-afd5-fcc2bf897809")
self.assertEqual(
inst.meta.lastUpdated.date, FHIRDate("2012-05-29T23:45:32Z").date
)
self.assertEqual(inst.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z")
self.assertEqual(inst.type, "collection")
def testBundle2(self):
inst = self.instantiate_from("bundle-example.json")
self.assertIsNotNone(inst, "Must have instantiated a Bundle instance")
self.implBundle2(inst)
js = inst.as_json()
self.assertEqual("Bundle", js["resourceType"])
inst2 = bundle.Bundle(js)
self.implBundle2(inst2)
def implBundle2(self, inst):
self.assertEqual(
inst.entry[0].fullUrl, "https://example.com/base/MedicationOrder/3123"
)
self.assertEqual(inst.entry[0].resource.id, "3123")
self.assertEqual(inst.entry[0].search.mode, "match")
self.assertEqual(inst.entry[0].search.score, 1)
self.assertEqual(
inst.entry[1].fullUrl, "https://example.com/base/Medication/example"
)
self.assertEqual(inst.entry[1].resource.id, "example")
self.assertEqual(inst.entry[1].search.mode, "include")
self.assertEqual(inst.id, "bundle-example")
self.assertEqual(inst.link[0].relation, "self")
self.assertEqual(
inst.link[0].url,
"https://example.com/base/MedicationOrder?patient=347&_include=MedicationOrder.medication",
)
self.assertEqual(inst.link[1].relation, "next")
self.assertEqual(
inst.link[1].url,
"https://example.com/base/MedicationOrder?patient=347&searchId=ff15fd40-ff71-4b48-b366-09c706bed9d0&page=2",
)
self.assertEqual(
inst.meta.lastUpdated.date, FHIRDate("2014-08-18T01:43:30Z").date
)
self.assertEqual(inst.meta.lastUpdated.as_json(), "2014-08-18T01:43:30Z")
self.assertEqual(inst.total, 3)
self.assertEqual(inst.type, "searchset")
def testBundle3(self):
inst = self.instantiate_from("document-example-dischargesummary.json")
self.assertIsNotNone(inst, "Must have instantiated a Bundle instance")
self.implBundle3(inst)
js = inst.as_json()
self.assertEqual("Bundle", js["resourceType"])
inst2 = bundle.Bundle(js)
self.implBundle3(inst2)
def implBundle3(self, inst):
self.assertEqual(
inst.entry[0].fullUrl,
"http://fhir.healthintersections.com.au/open/Composition/180f219f-97a8-486d-99d9-ed631fe4fc57",
)
self.assertEqual(
inst.entry[0].resource.id, "180f219f-97a8-486d-99d9-ed631fe4fc57"
)
self.assertEqual(
inst.entry[0].resource.meta.lastUpdated.date,
FHIRDate("2013-05-28T22:12:21Z").date,
)
self.assertEqual(
inst.entry[0].resource.meta.lastUpdated.as_json(), "2013-05-28T22:12:21Z"
)
self.assertEqual(
inst.entry[1].fullUrl,
"http://fhir.healthintersections.com.au/open/Practitioner/example",
)
self.assertEqual(inst.entry[1].resource.id, "example")
self.assertEqual(
inst.entry[1].resource.meta.lastUpdated.date,
FHIRDate("2013-05-05T16:13:03Z").date,
)
self.assertEqual(
inst.entry[1].resource.meta.lastUpdated.as_json(), "2013-05-05T16:13:03Z"
)
self.assertEqual(
inst.entry[2].fullUrl,
"http://fhir.healthintersections.com.au/open/Patient/d1",
)
self.assertEqual(inst.entry[2].resource.id, "d1")
self.assertEqual(
inst.entry[3].fullUrl,
"http://fhir.healthintersections.com.au/open/Encounter/doc-example",
)
self.assertEqual(inst.entry[3].resource.id, "doc-example")
self.assertEqual(
inst.entry[3].resource.meta.lastUpdated.date,
FHIRDate("2013-05-05T16:13:03Z").date,
)
self.assertEqual(
inst.entry[3].resource.meta.lastUpdated.as_json(), "2013-05-05T16:13:03Z"
)
self.assertEqual(
inst.entry[4].fullUrl, "urn:uuid:541a72a8-df75-4484-ac89-ac4923f03b81"
)
self.assertEqual(
inst.entry[4].resource.meta.lastUpdated.date,
FHIRDate("2013-05-05T16:13:03Z").date,
)
self.assertEqual(
inst.entry[4].resource.meta.lastUpdated.as_json(), "2013-05-05T16:13:03Z"
)
self.assertEqual(
inst.entry[5].fullUrl, "urn:uuid:124a6916-5d84-4b8c-b250-10cefb8e6e86"
)
self.assertEqual(
inst.entry[5].resource.meta.lastUpdated.date,
FHIRDate("2013-05-05T16:13:03Z").date,
)
self.assertEqual(
inst.entry[5].resource.meta.lastUpdated.as_json(), "2013-05-05T16:13:03Z"
)
self.assertEqual(
inst.entry[6].fullUrl, "urn:uuid:673f8db5-0ffd-4395-9657-6da00420bbc1"
)
self.assertEqual(
inst.entry[7].fullUrl, "urn:uuid:47600e0f-b6b5-4308-84b5-5dec157f7637"
)
self.assertEqual(
inst.entry[7].resource.meta.lastUpdated.date,
FHIRDate("2013-05-05T16:13:03Z").date,
)
self.assertEqual(
inst.entry[7].resource.meta.lastUpdated.as_json(), "2013-05-05T16:13:03Z"
)
self.assertEqual(inst.id, "father")
self.assertEqual(
inst.meta.lastUpdated.date, FHIRDate("2013-05-28T22:12:21Z").date
)
self.assertEqual(inst.meta.lastUpdated.as_json(), "2013-05-28T22:12:21Z")
self.assertEqual(inst.signature.contentType, "image/jpg")
self.assertEqual(inst.signature.type[0].code, "1.2.840.10065.1.12.1.1")
self.assertEqual(inst.signature.type[0].display, "AuthorID")
self.assertEqual(
inst.signature.type[0].system, "http://hl7.org/fhir/valueset-signature-type"
)
self.assertEqual(
inst.signature.when.date, FHIRDate("2015-08-31T07:42:33+10:00").date
)
self.assertEqual(inst.signature.when.as_json(), "2015-08-31T07:42:33+10:00")
self.assertEqual(inst.type, "document")
def testBundle4(self):
inst = self.instantiate_from("diagnosticreport-examples-general.json")
self.assertIsNotNone(inst, "Must have instantiated a Bundle instance")
self.implBundle4(inst)
js = inst.as_json()
self.assertEqual("Bundle", js["resourceType"])
inst2 = bundle.Bundle(js)
self.implBundle4(inst2)
def implBundle4(self, inst):
self.assertEqual(
inst.entry[0].fullUrl, "http://hl7.org/fhir/DiagnosticReport/3"
)
self.assertEqual(inst.entry[0].resource.id, "3")
self.assertEqual(
inst.entry[0].resource.meta.lastUpdated.date,
FHIRDate("2012-04-14T10:35:23Z").date,
)
self.assertEqual(
inst.entry[0].resource.meta.lastUpdated.as_json(), "2012-04-14T10:35:23Z"
)
self.assertEqual(
inst.entry[1].fullUrl, "http://hl7.org/fhir/DiagnosticReport/4"
)
self.assertEqual(inst.entry[1].resource.id, "4")
self.assertEqual(
inst.entry[1].resource.meta.lastUpdated.date,
FHIRDate("2012-04-14T10:35:23Z").date,
)
self.assertEqual(
inst.entry[1].resource.meta.lastUpdated.as_json(), "2012-04-14T10:35:23Z"
)
self.assertEqual(
inst.entry[2].fullUrl, "http://hl7.org/fhir/DiagnosticReport/5"
)
self.assertEqual(inst.entry[2].resource.id, "5")
self.assertEqual(
inst.entry[2].resource.meta.lastUpdated.date,
FHIRDate("2012-04-14T10:35:23Z").date,
)
self.assertEqual(
inst.entry[2].resource.meta.lastUpdated.as_json(), "2012-04-14T10:35:23Z"
)
self.assertEqual(
inst.entry[3].fullUrl, "http://hl7.org/fhir/DiagnosticReport/6"
)
self.assertEqual(inst.entry[3].resource.id, "6")
self.assertEqual(
inst.entry[3].resource.meta.lastUpdated.date,
FHIRDate("2012-04-14T10:35:23Z").date,
)
self.assertEqual(
inst.entry[3].resource.meta.lastUpdated.as_json(), "2012-04-14T10:35:23Z"
)
self.assertEqual(
inst.entry[4].fullUrl, "http://hl7.org/fhir/DiagnosticReport/7"
)
self.assertEqual(inst.entry[4].resource.id, "7")
self.assertEqual(
inst.entry[4].resource.meta.lastUpdated.date,
FHIRDate("2012-04-14T10:35:23Z").date,
)
self.assertEqual(
inst.entry[4].resource.meta.lastUpdated.as_json(), "2012-04-14T10:35:23Z"
)
self.assertEqual(
inst.entry[5].fullUrl, "http://hl7.org/fhir/DiagnosticReport/8"
)
self.assertEqual(inst.entry[5].resource.id, "8")
self.assertEqual(
inst.entry[5].resource.meta.lastUpdated.date,
FHIRDate("2012-04-14T10:35:23Z").date,
)
self.assertEqual(
inst.entry[5].resource.meta.lastUpdated.as_json(), "2012-04-14T10:35:23Z"
)
self.assertEqual(
inst.entry[6].fullUrl, "http://hl7.org/fhir/DiagnosticReport/9"
)
self.assertEqual(inst.entry[6].resource.id, "9")
self.assertEqual(
inst.entry[6].resource.meta.lastUpdated.date,
FHIRDate("2012-04-14T10:35:23Z").date,
)
self.assertEqual(
inst.entry[6].resource.meta.lastUpdated.as_json(), "2012-04-14T10:35:23Z"
)
self.assertEqual(
inst.entry[7].fullUrl, "http://hl7.org/fhir/DiagnosticReport/15"
)
self.assertEqual(inst.entry[7].resource.id, "15")
self.assertEqual(
inst.entry[7].resource.meta.lastUpdated.date,
FHIRDate("2012-04-14T10:35:23Z").date,
)
self.assertEqual(
inst.entry[7].resource.meta.lastUpdated.as_json(), "2012-04-14T10:35:23Z"
)
self.assertEqual(
inst.entry[8].fullUrl, "http://hl7.org/fhir/DiagnosticReport/16"
)
self.assertEqual(inst.entry[8].resource.id, "16")
self.assertEqual(
inst.entry[8].resource.meta.lastUpdated.date,
FHIRDate("2012-04-14T10:35:23Z").date,
)
self.assertEqual(
inst.entry[8].resource.meta.lastUpdated.as_json(), "2012-04-14T10:35:23Z"
)
self.assertEqual(
inst.entry[9].fullUrl, "http://hl7.org/fhir/DiagnosticReport/17"
)
self.assertEqual(inst.entry[9].resource.id, "17")
self.assertEqual(
inst.entry[9].resource.meta.lastUpdated.date,
FHIRDate("2012-04-14T10:35:23Z").date,
)
self.assertEqual(
inst.entry[9].resource.meta.lastUpdated.as_json(), "2012-04-14T10:35:23Z"
)
self.assertEqual(inst.id, "72ac8493-52ac-41bd-8d5d-7258c289b5ea")
self.assertEqual(
inst.meta.lastUpdated.date, FHIRDate("2012-04-14T10:35:23Z").date
)
self.assertEqual(inst.meta.lastUpdated.as_json(), "2012-04-14T10:35:23Z")
self.assertEqual(inst.type, "collection")
def testBundle5(self):
inst = self.instantiate_from("xds-example.json")
self.assertIsNotNone(inst, "Must have instantiated a Bundle instance")
self.implBundle5(inst)
js = inst.as_json()
self.assertEqual("Bundle", js["resourceType"])
inst2 = bundle.Bundle(js)
self.implBundle5(inst2)
def implBundle5(self, inst):
self.assertEqual(
inst.entry[0].fullUrl, "urn:uuid:3fdc72f4-a11d-4a9d-9260-a9f745779e1d"
)
self.assertEqual(inst.entry[0].request.method, "POST")
self.assertEqual(inst.entry[0].request.url, "DocumentReference")
self.assertEqual(
inst.entry[0].resource.meta.lastUpdated.date,
FHIRDate("2013-07-01T13:11:33Z").date,
)
self.assertEqual(
inst.entry[0].resource.meta.lastUpdated.as_json(), "2013-07-01T13:11:33Z"
)
self.assertEqual(
inst.entry[1].fullUrl, "http://localhost:9556/svc/fhir/Patient/a2"
)
self.assertEqual(
inst.entry[1].request.ifNoneExist,
"Patient?identifier=http://acme.org/xds/patients!89765a87b",
)
self.assertEqual(inst.entry[1].request.method, "POST")
self.assertEqual(inst.entry[1].request.url, "Patient")
self.assertEqual(inst.entry[1].resource.id, "a2")
self.assertEqual(
inst.entry[1].resource.meta.lastUpdated.date,
FHIRDate("2013-07-01T13:11:33Z").date,
)
self.assertEqual(
inst.entry[1].resource.meta.lastUpdated.as_json(), "2013-07-01T13:11:33Z"
)
self.assertEqual(
inst.entry[2].fullUrl, "http://localhost:9556/svc/fhir/Practitioner/a3"
)
self.assertEqual(inst.entry[2].request.method, "POST")
self.assertEqual(inst.entry[2].request.url, "Practitioner")
self.assertEqual(inst.entry[2].resource.id, "a3")
self.assertEqual(
inst.entry[2].resource.meta.lastUpdated.date,
FHIRDate("2013-07-01T13:11:33Z").date,
)
self.assertEqual(
inst.entry[2].resource.meta.lastUpdated.as_json(), "2013-07-01T13:11:33Z"
)
self.assertEqual(
inst.entry[3].fullUrl, "http://localhost:9556/svc/fhir/Practitioner/a4"
)
self.assertEqual(inst.entry[3].request.method, "POST")
self.assertEqual(inst.entry[3].request.url, "Practitioner")
self.assertEqual(inst.entry[3].resource.id, "a4")
self.assertEqual(
inst.entry[3].resource.meta.lastUpdated.date,
FHIRDate("2013-07-01T13:11:33Z").date,
)
self.assertEqual(
inst.entry[3].resource.meta.lastUpdated.as_json(), "2013-07-01T13:11:33Z"
)
self.assertEqual(
inst.entry[4].fullUrl,
"http://localhost:9556/svc/fhir/Binary/1e404af3-077f-4bee-b7a6-a9be97e1ce32",
)
self.assertEqual(inst.entry[4].request.method, "POST")
self.assertEqual(inst.entry[4].request.url, "Binary")
self.assertEqual(
inst.entry[4].resource.id, "1e404af3-077f-4bee-b7a6-a9be97e1ce32"
)
self.assertEqual(
inst.entry[4].resource.meta.lastUpdated.date,
FHIRDate("2013-07-01T13:11:33Z").date,
)
self.assertEqual(
inst.entry[4].resource.meta.lastUpdated.as_json(), "2013-07-01T13:11:33Z"
)
self.assertEqual(inst.id, "xds")
self.assertEqual(
inst.meta.lastUpdated.date, FHIRDate("2013-07-01T13:11:33Z").date
)
self.assertEqual(inst.meta.lastUpdated.as_json(), "2013-07-01T13:11:33Z")
self.assertEqual(inst.type, "transaction")
def testBundle6(self):
inst = self.instantiate_from("patient-examples-cypress-template.json")
self.assertIsNotNone(inst, "Must have instantiated a Bundle instance")
self.implBundle6(inst)
js = inst.as_json()
self.assertEqual("Bundle", js["resourceType"])
inst2 = bundle.Bundle(js)
self.implBundle6(inst2)
def implBundle6(self, inst):
self.assertEqual(inst.entry[0].fullUrl, "http://hl7.org/fhir/Patient/71")
self.assertEqual(inst.entry[0].resource.id, "71")
self.assertEqual(
inst.entry[0].resource.meta.lastUpdated.date,
FHIRDate("2012-06-03T23:45:32Z").date,
)
self.assertEqual(
inst.entry[0].resource.meta.lastUpdated.as_json(), "2012-06-03T23:45:32Z"
)
self.assertEqual(inst.entry[1].fullUrl, "http://hl7.org/fhir/Patient/72")
self.assertEqual(inst.entry[1].resource.id, "72")
self.assertEqual(
inst.entry[1].resource.meta.lastUpdated.date,
FHIRDate("2012-06-03T23:45:32Z").date,
)
self.assertEqual(
inst.entry[1].resource.meta.lastUpdated.as_json(), "2012-06-03T23:45:32Z"
)
self.assertEqual(inst.entry[2].fullUrl, "http://hl7.org/fhir/Patient/73")
self.assertEqual(inst.entry[2].resource.id, "73")
self.assertEqual(
inst.entry[2].resource.meta.lastUpdated.date,
FHIRDate("2012-06-03T23:45:32Z").date,
)
self.assertEqual(
inst.entry[2].resource.meta.lastUpdated.as_json(), "2012-06-03T23:45:32Z"
)
self.assertEqual(inst.entry[3].fullUrl, "http://hl7.org/fhir/Patient/74")
self.assertEqual(inst.entry[3].resource.id, "74")
self.assertEqual(
inst.entry[3].resource.meta.lastUpdated.date,
FHIRDate("2012-06-03T23:45:32Z").date,
)
self.assertEqual(
inst.entry[3].resource.meta.lastUpdated.as_json(), "2012-06-03T23:45:32Z"
)
self.assertEqual(inst.entry[4].fullUrl, "http://hl7.org/fhir/Patient/75")
self.assertEqual(inst.entry[4].resource.id, "75")
self.assertEqual(
inst.entry[4].resource.meta.lastUpdated.date,
FHIRDate("2012-06-03T23:45:32Z").date,
)
self.assertEqual(
inst.entry[4].resource.meta.lastUpdated.as_json(), "2012-06-03T23:45:32Z"
)
self.assertEqual(inst.entry[5].fullUrl, "http://hl7.org/fhir/Patient/76")
self.assertEqual(inst.entry[5].resource.id, "76")
self.assertEqual(
inst.entry[5].resource.meta.lastUpdated.date,
FHIRDate("2012-06-03T23:45:32Z").date,
)
self.assertEqual(
inst.entry[5].resource.meta.lastUpdated.as_json(), "2012-06-03T23:45:32Z"
)
self.assertEqual(inst.entry[6].fullUrl, "http://hl7.org/fhir/Patient/77")
self.assertEqual(inst.entry[6].resource.id, "77")
self.assertEqual(
inst.entry[6].resource.meta.lastUpdated.date,
FHIRDate("2012-06-03T23:45:32Z").date,
)
self.assertEqual(
inst.entry[6].resource.meta.lastUpdated.as_json(), "2012-06-03T23:45:32Z"
)
self.assertEqual(inst.entry[7].fullUrl, "http://hl7.org/fhir/Patient/78")
self.assertEqual(inst.entry[7].resource.id, "78")
self.assertEqual(
inst.entry[7].resource.meta.lastUpdated.date,
FHIRDate("2012-06-03T23:45:32Z").date,
)
self.assertEqual(
inst.entry[7].resource.meta.lastUpdated.as_json(), "2012-06-03T23:45:32Z"
)
self.assertEqual(inst.entry[8].fullUrl, "http://hl7.org/fhir/Patient/79")
self.assertEqual(inst.entry[8].resource.id, "79")
self.assertEqual(
inst.entry[8].resource.meta.lastUpdated.date,
FHIRDate("2012-06-03T23:45:32Z").date,
)
self.assertEqual(
inst.entry[8].resource.meta.lastUpdated.as_json(), "2012-06-03T23:45:32Z"
)
self.assertEqual(inst.entry[9].fullUrl, "http://hl7.org/fhir/Patient/80")
self.assertEqual(inst.entry[9].resource.id, "80")
self.assertEqual(
inst.entry[9].resource.meta.lastUpdated.date,
FHIRDate("2012-06-03T23:45:32Z").date,
)
self.assertEqual(
inst.entry[9].resource.meta.lastUpdated.as_json(), "2012-06-03T23:45:32Z"
)
self.assertEqual(inst.id, "b0a5e4277-83c4-4adb-87e2-e3efe3369b6f")
self.assertEqual(
inst.meta.lastUpdated.date, FHIRDate("2012-05-29T23:45:32Z").date
)
self.assertEqual(inst.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z")
self.assertEqual(inst.type, "collection")
def testBundle7(self):
inst = self.instantiate_from("patient-examples-general.json")
self.assertIsNotNone(inst, "Must have instantiated a Bundle instance")
self.implBundle7(inst)
js = inst.as_json()
self.assertEqual("Bundle", js["resourceType"])
inst2 = bundle.Bundle(js)
self.implBundle7(inst2)
def implBundle7(self, inst):
self.assertEqual(inst.entry[0].fullUrl, "http://hl7.org/fhir/Patient/1")
self.assertEqual(inst.entry[0].resource.id, "1")
self.assertEqual(
inst.entry[0].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[0].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[1].fullUrl, "http://hl7.org/fhir/Patient/2")
self.assertEqual(inst.entry[1].resource.id, "2")
self.assertEqual(
inst.entry[1].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[1].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[2].fullUrl, "http://hl7.org/fhir/Patient/3")
self.assertEqual(inst.entry[2].resource.id, "3")
self.assertEqual(
inst.entry[2].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[2].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[3].fullUrl, "http://hl7.org/fhir/Patient/4")
self.assertEqual(inst.entry[3].resource.id, "4")
self.assertEqual(
inst.entry[3].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[3].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[4].fullUrl, "http://hl7.org/fhir/Patient/5")
self.assertEqual(inst.entry[4].resource.id, "5")
self.assertEqual(
inst.entry[4].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[4].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[5].fullUrl, "http://hl7.org/fhir/Patient/6")
self.assertEqual(inst.entry[5].resource.id, "6")
self.assertEqual(
inst.entry[5].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[5].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[6].fullUrl, "http://hl7.org/fhir/Patient/7")
self.assertEqual(inst.entry[6].resource.id, "7")
self.assertEqual(
inst.entry[6].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[6].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[7].fullUrl, "http://hl7.org/fhir/Patient/8")
self.assertEqual(inst.entry[7].resource.id, "8")
self.assertEqual(
inst.entry[7].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[7].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[8].fullUrl, "http://hl7.org/fhir/Patient/9")
self.assertEqual(inst.entry[8].resource.id, "9")
self.assertEqual(
inst.entry[8].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[8].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.entry[9].fullUrl, "http://hl7.org/fhir/Patient/10")
self.assertEqual(inst.entry[9].resource.id, "10")
self.assertEqual(
inst.entry[9].resource.meta.lastUpdated.date,
FHIRDate("2012-05-29T23:45:32Z").date,
)
self.assertEqual(
inst.entry[9].resource.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z"
)
self.assertEqual(inst.id, "b248b1b2-1686-4b94-9936-37d7a5f94b51")
self.assertEqual(
inst.meta.lastUpdated.date, FHIRDate("2012-05-29T23:45:32Z").date
)
self.assertEqual(inst.meta.lastUpdated.as_json(), "2012-05-29T23:45:32Z")
self.assertEqual(inst.type, "collection")
def testBundle8(self):
inst = self.instantiate_from("diagnosticreport-examples-lab-text.json")
self.assertIsNotNone(inst, "Must have instantiated a Bundle instance")
self.implBundle8(inst)
js = inst.as_json()
self.assertEqual("Bundle", js["resourceType"])
inst2 = bundle.Bundle(js)
self.implBundle8(inst2)
def implBundle8(self, inst):
    """Assert the expected content of the lab-text DiagnosticReport bundle.

    The fixture holds ten DiagnosticReport entries with consecutive ids
    103..112, all stamped with the same lastUpdated instant.
    """
    stamp = "2012-04-14T10:35:23Z"
    for index in range(10):
        entry = inst.entry[index]
        ident = str(103 + index)
        self.assertEqual(
            entry.fullUrl, "http://hl7.org/fhir/DiagnosticReport/" + ident
        )
        self.assertEqual(entry.resource.id, ident)
        self.assertEqual(
            entry.resource.meta.lastUpdated.date, FHIRDate(stamp).date
        )
        self.assertEqual(entry.resource.meta.lastUpdated.as_json(), stamp)
    # Bundle-level metadata.
    self.assertEqual(inst.id, "2763c3ea-6bce-4f15-bdc9-4b41aaceee03")
    self.assertEqual(inst.meta.lastUpdated.date, FHIRDate(stamp).date)
    self.assertEqual(inst.meta.lastUpdated.as_json(), stamp)
    self.assertEqual(inst.type, "collection")
| 41.67264
| 120
| 0.601577
| 4,160
| 34,880
| 5.022356
| 0.069712
| 0.224716
| 0.276456
| 0.299813
| 0.914948
| 0.896137
| 0.867803
| 0.726128
| 0.711339
| 0.709233
| 0
| 0.110188
| 0.243893
| 34,880
| 836
| 121
| 41.722488
| 0.682023
| 0.00324
| 0
| 0.477556
| 1
| 0.003741
| 0.201369
| 0.02264
| 0
| 0
| 0
| 0
| 0.400249
| 1
| 0.021197
| false
| 0
| 0.007481
| 0
| 0.031172
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
c7ca9c4689784620f798b227a961ba9754586a6c
| 1,797
|
py
|
Python
|
Moves/MoveLeft.py
|
rknowlto/PoMoCo
|
3b5f013965c1a06b288e9bbbb74f618027b53f44
|
[
"Unlicense",
"MIT"
] | 1
|
2017-08-26T23:44:14.000Z
|
2017-08-26T23:44:14.000Z
|
Moves/MoveLeft.py
|
rknowlto/PoMoCo
|
3b5f013965c1a06b288e9bbbb74f618027b53f44
|
[
"Unlicense",
"MIT"
] | null | null | null |
Moves/MoveLeft.py
|
rknowlto/PoMoCo
|
3b5f013965c1a06b288e9bbbb74f618027b53f44
|
[
"Unlicense",
"MIT"
] | null | null | null |
import time

# Hip swing (degrees); negative turns the body left.  NOTE(review): `hexy`
# is injected into this script's globals by the host application — it is
# presumably the hexapod controller; not defined here.
deg = -30
# Ankle tilt (degrees) applied outward on each side before a step.
ankleDeg = 15
# Pause between movement phases, in seconds.
sleep = 0.3


def _level_feet():
    """Tilt every left ankle one way and every right ankle the other."""
    hexy.LF.setFoot(footRx=ankleDeg)
    hexy.LM.setFoot(footRx=ankleDeg)
    hexy.LB.setFoot(footRx=ankleDeg)
    hexy.RF.setFoot(footRx=-ankleDeg)
    hexy.RM.setFoot(footRx=-ankleDeg)
    hexy.RB.setFoot(footRx=-ankleDeg)


def _swing(tilted):
    """Replant both tripods (LF/RM/LB first, then RF/LM/RB) with a pause
    between them.  When *tilted* is true the feet land with an ankle angle;
    the final cycle omits it so the feet finish flat."""
    if tilted:
        hexy.LF.replantFoot(-deg, endAnkleAngle=-ankleDeg, stepTime=0.3)
        hexy.RM.replantFoot(1, endAnkleAngle=ankleDeg, stepTime=0.3)
        hexy.LB.replantFoot(deg, endAnkleAngle=-ankleDeg, stepTime=0.3)
        time.sleep(sleep)
        hexy.RF.replantFoot(deg, endAnkleAngle=ankleDeg, stepTime=0.3)
        hexy.LM.replantFoot(1, endAnkleAngle=-ankleDeg, stepTime=0.3)
        hexy.RB.replantFoot(-deg, endAnkleAngle=ankleDeg, stepTime=0.3)
    else:
        hexy.LF.replantFoot(-deg, stepTime=0.3)
        hexy.RM.replantFoot(1, stepTime=0.3)
        hexy.LB.replantFoot(deg, stepTime=0.3)
        time.sleep(sleep)
        hexy.RF.replantFoot(deg, stepTime=0.3)
        hexy.LM.replantFoot(1, stepTime=0.3)
        hexy.RB.replantFoot(-deg, stepTime=0.3)


# Three full step cycles; the first two land tilted, the last lands flat.
# No pause is taken before the first cycle or after the final swing,
# matching the original call sequence exactly.
for cycle, tilted in enumerate((True, True, False)):
    if cycle:
        time.sleep(sleep)
    _level_feet()
    time.sleep(sleep)
    _swing(tilted)
| 27.646154
| 64
| 0.784641
| 273
| 1,797
| 5.164835
| 0.080586
| 0.02695
| 0.268085
| 0.265957
| 0.977305
| 0.970213
| 0.970213
| 0.887943
| 0.887943
| 0.887943
| 0
| 0.028235
| 0.053979
| 1,797
| 64
| 65
| 28.078125
| 0.801176
| 0
| 0
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.020833
| 0
| 0.020833
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1bfae95b4be51a97abe4c64941a892907f1efc77
| 118,543
|
py
|
Python
|
ZEROSELFBOT-(NOT-HACKING-TOOL)/zeroselfbot.py
|
sahraoui741/Zero-attacker
|
aeb8ceb444204090f9c2e3642da4f9a0612eb31d
|
[
"MIT"
] | 182
|
2021-10-08T19:55:24.000Z
|
2022-03-31T08:51:48.000Z
|
ZEROSELFBOT-(NOT-HACKING-TOOL)/zeroselfbot.py
|
sahraoui741/Zero-attacker
|
aeb8ceb444204090f9c2e3642da4f9a0612eb31d
|
[
"MIT"
] | 16
|
2021-10-08T17:46:27.000Z
|
2022-03-31T10:09:56.000Z
|
ZEROSELFBOT-(NOT-HACKING-TOOL)/zeroselfbot.py
|
sahraoui741/Zero-attacker
|
aeb8ceb444204090f9c2e3642da4f9a0612eb31d
|
[
"MIT"
] | 66
|
2021-10-09T01:35:56.000Z
|
2022-03-26T20:40:17.000Z
|
from pytransform import pyarmor_runtime
pyarmor_runtime()
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x09\x00\x61\x0d\x0d\x0a\x09\x30\xe0\x02\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\x6b\x73\x00\x00\x00\x00\x00\x18\x52\xe1\x58\x11\x75\x56\x32\x00\x4f\xb1\xef\x48\x77\x32\x01\x30\x00\x00\x00\x00\x00\x00\x00\x00\xff\xf6\xe9\x75\xaf\x3b\xad\x4d\xb3\x12\x70\x91\x13\x8d\xf6\x1f\x90\xe3\xa2\x8c\x8b\x15\x17\x57\xf2\x91\x04\xe5\x50\x90\x6c\x12\x5b\x14\x58\xe9\x2b\xdc\xf4\x8c\x45\x70\x82\xb2\xab\x32\xc5\x0b\x27\xd9\xdc\x8c\x06\x04\x88\x6b\x4b\xb6\xed\x51\xf9\xef\x45\xbe\x9c\x2a\x2d\x71\xed\x91\x06\x2b\xc0\xf8\x88\xcd\xd6\x6d\x9f\x25\x17\x72\xc0\xd7\x9d\xd1\x13\x75\xc7\xcd\x55\xfb\x89\x4a\x8f\xc3\x21\x09\xc6\x21\xf4\xe2\xb5\xdd\xef\x5b\xf0\x6a\x53\x70\x24\xb2\x86\xf3\xe7\xd9\x56\x76\x9e\xf5\x3b\xce\x25\x84\x1e\x49\xe0\x9e\x09\x77\x4b\xd6\x0f\x57\x8b\xa1\xe9\x8a\x60\x45\xe2\xfb\x6a\xe8\x8a\xff\xb4\xa3\x68\x9f\x88\x21\x73\x15\xab\x91\xed\x3a\x29\xd8\xfd\xaa\x7b\x35\xef\xb6\xb5\xc4\x39\xcb\x53\x18\x00\x90\xbb\x77\x4a\xe1\x00\x1e\xda\x8b\x21\xb2\x8e\x65\x07\x12\x19\x98\x89\x17\x83\x24\xb0\x98\xaa\x15\x41\x3c\x6f\xcb\x7d\xa3\x5d\xfc\xdf\x07\x02\x5b\xc4\xb1\xf2\xd3\x62\xba\x14\xc6\xcd\x40\x67\x55\xef\xa7\x96\x68\xe8\xd5\x7b\xc3\xdf\x15\x39\x27\x7f\x82\x9b\xc4\x54\x9c\x0b\x29\x18\x05\xc2\xe2\xfd\xde\x07\x2f\xba\x1e\xa2\x9c\x7d\xa4\xbb\x82\x60\x78\x19\x9e\x8c\x28\xad\x46\xdb\x0b\x42\xa3\x8d\xe0\xc2\x0b\xb1\xe5\x47\x9f\x31\x54\x58\xde\xb4\xc9\xc1\xb2\x9a\xce\xe3\x91\x48\xcf\x1b\xce\x41\xba\xcd\x7b\xec\x04\x04\x88\x51\x70\x82\x94\x7b\xba\x08\xba\x3c\xf9\xae\x0f\xb8\x79\x9a\x14\x82\x9e\xbd\xf0\x3a\x7d\xa0\xa9\xc7\x1f\x8c\x17\x07\xb5\xb6\x4c\x83\xc3\x72\xe9\x55\x33\xae\xb3\xc5\xd7\x1b\xf2\x30\x0f\x7e\xf0\x15\x60\x2a\x14\x94\xb9\xe6\x1b\x2c\x12\x0f\x33\x20\x59\x86\xd5\x76\xb5\x19\x1f\x9e\x1e\x53\x12\xc7\x58\xfa\x7e\xda\xc5\xef\x05\x5d\x29\xfa\x48\x7b\xfa\xff\xa9\xd7\x0a\x28\x94\x91\xb7\xe8\x54\x07\x0a\x81\x8c\x13\x2c\x0b\xa0\x8a\x66\x99\xf2\x2a\x3a\xc9\x01\x41\xa9\xdd\x5c\x27\xd9\x29\x14\x05\x47\xf4\x04\x40\x
5b\xba\xc1\xe4\x69\xd7\x3c\xc7\xe1\x7d\x0d\x51\xfd\xca\x41\xb1\x97\x53\x0b\xfb\x2b\x0d\x4a\x7b\x0c\x50\x7b\xcf\xcf\x66\x49\xc7\x08\x8b\xa4\x7a\x7c\x05\x73\x40\xeb\x91\xbc\xc8\x3c\x34\x44\x44\x0d\xd5\xd4\x2e\xf2\x2e\xd3\x24\x2a\xa0\x14\xee\x5d\x6d\x46\x9d\x7b\x6a\x62\x49\x8b\xe2\xd9\x5a\xcc\x1f\x5f\x8d\x24\x97\xcb\xa4\xf1\x43\xe8\x6a\xa9\x8c\x79\x4c\xbc\xcd\xf7\x2b\x54\x6f\xe7\x23\xb6\x1e\x45\x4a\x8b\x16\x92\x3e\x83\x44\x50\x6b\xe5\x5a\xac\x14\xa2\xcd\x17\x48\xd8\x8d\x83\x3f\xc1\xae\x94\x7c\x59\x0d\xf3\xcb\x8b\x09\x4c\xe1\xd0\xa2\x0f\xd1\x27\xfa\xa4\x0e\x01\x9e\x82\xcf\xe4\x58\xa2\x25\x0b\x71\xd6\xce\x96\x0e\xa9\xfa\xaf\xb2\xa5\x59\x88\x82\x63\x96\x15\x6c\xae\xd2\x45\x81\x4e\x9d\x24\x95\x0b\xcf\xc5\xef\xc2\x0e\xb3\x29\xc0\xf0\xcf\x47\xef\x4a\x27\xa2\x57\x1f\xa0\x24\x65\x80\xa3\xe0\xb2\xe2\xe9\xc9\x7e\x52\x1f\xd7\x54\x23\x98\xd4\xc3\x03\xd7\xf7\xa3\x9d\x55\xcb\xfa\x3f\x9c\xfa\x46\x44\x88\x56\x0e\x94\xa6\x31\x89\x26\xe6\x9a\xc8\x3f\x5d\x61\x50\x23\xad\xf8\x3c\xd9\xe9\xc4\x91\xdf\xf3\x2f\x4b\xf9\xd8\x34\x5f\xc4\xfb\x5a\x8f\xbb\x4d\x28\x5a\x55\xbd\xf4\x25\x4a\xa2\xe3\x90\x9b\x1e\x49\x0c\xeb\xd5\xc3\x42\x21\x1c\x42\x50\x09\xb4\x12\xef\x38\x50\xf0\xf8\x3a\x1f\x76\xc7\x65\x5d\xd1\x96\x73\x73\x1b\x17\xf4\xe6\xa9\x11\x1b\xf4\x9e\x26\x39\x6a\x96\xda\x53\x53\x8d\xf7\x50\xd6\xdc\xdb\x44\x00\xc2\x95\xa3\x27\xf3\x31\xd3\xc3\xa8\x39\x51\x5c\x6c\xa4\x93\xae\x54\x51\xb7\xce\x82\x43\x1b\x46\xc9\xe5\xb9\x59\xc4\x0f\x5d\xc0\x9b\x61\x4f\x5e\x33\x95\xa9\x73\xb3\x62\x54\x45\xd8\xf0\x95\x9d\x88\x05\x98\x6f\x86\x1d\xa6\xa9\x95\x22\xb0\x86\x8b\x38\x3f\xdf\x49\x64\xd0\x01\xd1\xff\x6b\x95\x6f\xe6\x39\xfe\x66\xaa\x6a\xf3\x62\x17\x22\x5d\xe4\x1f\x13\xa3\xd9\x1e\x5a\x6e\x22\x87\x32\xdb\xd6\x9d\xff\xc2\xe4\x55\x40\x57\xad\x6c\x4a\x89\x56\xea\x78\x23\x38\xc9\x00\x56\x92\xf2\x1f\xeb\xce\xfb\x26\xea\x21\x63\x59\xd9\x0f\xf0\x50\x11\x02\x6c\x60\x1a\x4f\xf3\xdb\x2e\x22\xb2\x98\xef\xbd\x24\x33\x5c\x8c\x76\x95\xbe\x95\x4b\xbe\xfc\x94\xf7\xb5\xe6\xf7\xc5\xc7\x0a\xb8\x56\x9c\xdb\xee\x32\xa7\x02\x26\x6a\x61\x
77\x33\x50\x9c\x93\x1f\xc4\x39\xda\xad\x6c\x12\xb8\x1b\x67\xcc\x7d\x64\xcb\xa5\xf4\x71\xf4\x16\x2e\x71\xec\xa4\x03\x53\xd3\x90\xd6\xf0\x0c\x4b\x53\xc8\x4c\x69\x93\xf8\x89\xb2\x6a\x48\xfa\x1b\xa2\xa3\x68\x0d\x85\x87\x16\x79\x67\x01\x0b\xb0\xa0\x89\xc3\x28\x63\x8c\x73\x99\x06\x0e\xf8\x06\x2e\xae\xed\x85\x40\x84\xc1\x79\x25\xff\x4f\x32\xd0\x99\x8a\xc0\x03\x10\xa8\xd0\x3a\xb9\x52\xfd\xec\x3c\xc5\x77\xd2\x91\x2b\xd0\xc8\xa9\xaf\xcf\x57\x5b\xb9\x62\x10\x45\x4f\x7d\x3f\x72\xde\x95\xb1\x2a\xc7\xba\xa1\xd5\xde\xf4\x48\xa6\x9e\x3c\xa7\x08\x30\x5b\xf6\x13\xa8\xa5\x28\xfa\x3d\x9c\x40\xfc\x26\x2e\x55\x6d\x4d\x71\xd0\xdc\x16\x1d\xc7\xb6\xf9\x4b\x8f\xb8\xdf\x32\x88\xf4\x9e\x00\x41\x59\x71\x64\xb9\xc9\x2e\x5c\x5a\x5e\xa7\x6e\x63\xf0\xa0\x25\x3b\xc4\xa9\xb7\x1e\xe7\x52\xf0\x62\x61\x9d\x43\x9f\x3c\x1e\xca\x8b\xe6\x25\x1f\xc3\xbc\x77\xfc\xa6\x67\xc7\x77\xb8\x41\x66\x52\xdc\x16\xf4\x4b\x26\xd6\xd7\x7b\xea\x82\x17\x45\xe4\x23\x81\x02\xa1\xb4\x4c\x01\x55\xa6\x1c\x95\xbf\x29\xae\x45\x53\x0b\x90\x2f\x9a\xed\xd7\x9e\x48\xda\x26\xb3\xc2\xc4\x45\xbd\x72\x10\x5d\x23\xf4\xba\x77\xd5\x41\x16\x5e\x1e\xb7\x47\xdb\xf3\x91\x13\x98\xae\x5b\x6e\x95\x41\x94\x88\x75\xfe\x31\x65\xf2\x65\x15\xfd\x4a\xfb\x06\x6b\x0b\xc7\x81\x5b\x63\x0f\x8d\x44\xfe\xf2\x3b\xc4\x7e\x25\x07\x87\x36\x40\x7a\xd2\x7b\xfb\x0d\x2b\x14\x40\xad\x00\x90\xc1\x53\xb5\xa2\x4b\x57\x24\x26\xba\xe6\x19\x44\x69\xae\xf6\x45\xc9\xaa\xc0\x99\x57\x78\x63\x75\xb7\x84\x18\xe0\x16\xff\xb9\xb5\x1c\x21\xd5\x10\x4e\x5f\xf5\x38\x04\x58\xe1\x39\x72\x0d\x96\x68\x34\x24\xcd\xfa\x01\x6b\x88\x40\x14\xff\xa4\xda\xc4\x5e\xb5\xb1\x8f\x35\xdc\x7c\xda\xac\x03\xbc\x20\xc7\xf7\x52\x6a\x08\x8e\x3a\xd1\xf5\xc6\xe1\xe3\xa2\x6e\x15\x5a\x38\x34\xbf\x90\xe5\x9c\xca\x3e\x21\x65\xef\x17\x17\xe1\x3b\x96\x3e\x0c\x5e\xe0\x81\x0e\x35\xb5\xa6\xf7\xaf\xd2\xb8\x90\xa6\xbf\x3f\xf4\x5a\x83\xf6\xde\x55\xc1\xbd\xc3\x47\x70\xee\x8c\x02\x47\xda\x0c\x98\x00\x94\x82\xc8\xc5\x78\xcc\x13\xe2\xb3\x82\x22\x5f\xb5\x80\xb0\x54\x51\x5e\xae\x43\xe1\x53\xe2\x12\xc6\x76\xf8\x17\x87\x7c\x06\x59\x
7a\x73\x24\xc9\xbc\x9c\x45\x35\xe1\x91\x35\x31\xf9\x2d\x1a\x50\xff\xcc\xf0\xa2\x6b\x3c\x62\xcf\x35\x4f\x61\xdb\xf2\x71\x5c\x03\xce\x0a\xfb\xe2\x0c\xe5\xfd\x38\x51\x64\xf6\x16\xb5\x76\xbf\x94\xdd\x8f\xc9\xcc\xd5\x92\xe8\xe1\x7b\x74\x12\x2a\xc4\x31\xe5\xf0\x35\x36\x6b\xc2\xe2\xef\x8f\x02\x2b\xc3\x6e\xb2\x84\xe9\xa9\x72\x68\x75\xc7\xe4\xd0\xa1\xba\x87\x43\x17\x59\x02\xdb\x56\xb8\x5a\x52\xdb\x26\x9d\x65\xd5\xe5\x6c\x22\x84\x23\x28\x73\x85\x66\x08\x4f\xd8\xfe\xca\x03\xfc\xc8\xa0\x9a\x3b\x2a\xc5\xbf\x24\x03\xb2\xd4\x38\x3c\x1e\xd5\x85\x2d\xa3\x54\xf8\x03\x93\x69\xdf\xe4\xf2\x26\xab\x6e\x42\x69\x1e\xfb\xa0\xc0\xc0\x9a\x87\x36\x87\x6c\x5c\xd0\xf9\x33\x14\x96\x82\xe3\x97\x13\xa7\x4d\x8a\xd0\xe6\x53\x3d\x4b\x8f\x09\xdc\x39\x25\x0b\x92\x08\x5b\x6a\xe8\x0e\x89\x2d\xe4\x49\x15\xa0\x75\x06\x97\xf2\xa6\x74\xb1\xa6\xf4\x7f\x7f\xc9\xf3\x40\x0c\x06\xa4\x8c\x32\x97\x9a\x72\x40\x69\x09\x24\x4b\xcd\x9e\x5c\xf8\xdf\xf9\x29\xa8\x02\x62\x6d\x8d\x5d\xc2\x8a\xe2\xcb\xdd\xd6\x5b\xff\x31\x3f\x0f\x9e\x18\x25\x42\x90\xe0\x5f\xbf\xfe\x06\xcc\xd2\x15\xf5\xf8\xfa\x7c\xfd\xfb\x37\x50\x26\x7b\x15\x64\x1c\x3e\x3c\x27\xfe\xd9\x32\x9c\x70\x63\x49\xef\x2a\xab\x83\x9f\x12\xc7\xdc\x46\x48\xf2\x22\x70\xb0\x80\x6d\xd0\x2c\x97\x12\x15\x25\xb3\x9d\x9f\x7e\xbb\xf2\x6d\xec\xac\x50\x1d\x87\x5a\xe6\x80\x48\xaa\x2a\xf6\xc6\xe7\xf7\x95\x03\xb4\xcf\xa9\xc1\x24\xfb\x8a\x82\xc6\x5e\xb9\x02\x54\xc3\x7e\x1e\xb8\xf5\xa6\xe3\x23\xeb\xb3\x7e\x27\x02\xc2\xb3\x54\x4d\xa2\x02\x31\xb3\x3f\x4a\x47\xce\xac\x89\xf1\x7f\x83\x83\x69\x58\x79\x94\x6b\xe6\xec\x9a\xba\x05\x4e\xfb\xa4\x92\x95\x31\x24\xeb\x0a\x1e\x65\x11\x9c\x80\x55\x3b\x0d\x4c\xa1\x96\x7e\x05\xbc\xbf\x0f\x3d\xa5\x40\x4e\x5f\x7f\x76\x91\xe0\xa3\x80\xb2\xa9\xcf\x14\xc4\x28\xc2\x4e\xf9\x1b\xef\x39\x4f\x95\x03\x88\x85\xba\xe4\x16\x2f\x10\x28\xcc\xa8\xf3\xc7\x9c\x3e\x80\x7c\x70\x8c\x78\xac\x17\x5c\xf4\x1b\x74\x92\xa3\x2b\xfa\x7c\x0e\x55\xcc\x15\x07\x96\x46\xaf\x5b\xa2\xf6\x24\xa4\xb6\xd2\x80\x4c\x22\xb3\xc2\x95\x97\xb7\xa3\x8a\xbb\x31\x97\xab\xcb\x48\x39\x4c\x8d\xab\x6b\xab\x
94\x31\x04\xc7\x5f\xae\xa0\xc0\x5d\x5d\x5a\xa8\x58\x68\xc8\x3c\x72\xa9\xa5\x70\xf5\xe5\x61\x73\x23\xd6\x17\x7e\xb6\x86\x49\xcb\x18\x32\x8e\x98\xc2\x90\x59\x70\xe3\xf5\x9f\x8e\x94\x1c\xbe\x1c\xd3\x16\xcd\xce\x17\x70\x7d\x1d\x96\xab\xbb\x6d\xaf\xd6\xb0\x31\x15\xdb\x52\x4d\x4f\x53\xf2\x50\x4a\x45\x4a\x36\xce\x5c\x7f\x26\xdc\xe0\xe4\x91\x07\x61\xcb\xf2\x47\xb3\xd3\x61\x0b\x6f\x55\xed\x39\x97\x39\x6e\x38\x36\x2c\xc1\xfb\x4c\xa5\x74\x2c\xfb\xe7\x1f\x03\x4b\x2b\x4f\x88\xcd\x12\xdc\x45\x94\x9f\xdc\x8b\x5f\xb0\xf5\x4b\x09\x1e\x91\xcb\x09\xdd\x73\x43\x7b\xa4\x1a\x01\x71\x98\x98\xcf\x16\xc8\xc9\x54\x44\xd0\x87\x1b\xb3\x36\x44\x43\x3c\xbf\xa2\xf4\x20\xf1\xe6\xf4\xce\xe8\x39\xb7\xcb\x61\x4e\xda\x1e\xbd\x4d\x77\x8b\x48\x5e\xea\x8e\x57\x9a\x30\x96\x2c\xf6\xb4\x95\x4f\xba\xc6\x50\x69\x81\x97\xce\x85\xaa\x39\x0c\x7d\x61\x96\x6f\xb7\xe4\x8f\x85\x9f\xa0\x2c\xff\x32\x09\xde\x29\x14\x02\xd5\xf1\xaa\x72\x88\xba\xfb\x6a\x1f\x18\xdd\x11\xfb\x47\xe5\xf2\x0b\x46\x4f\x06\xd2\x60\x23\xb1\x68\xd1\x9f\x56\xa5\x6b\xb2\x46\x87\xb3\x54\xfe\xb6\x15\xa8\x78\x1f\x7e\x01\x28\xd8\x99\x18\x07\x5b\xc4\x11\x5f\xc6\x1a\x2d\x03\xe6\x6e\x4d\xe6\x54\xbb\xcb\x94\x06\xca\xea\x43\x24\x10\x8c\xfe\xee\xe9\xe0\xf4\x4f\x98\xc9\xdd\x27\xfa\xbe\xac\x1f\x6f\x40\xa0\xe4\x67\xd8\x65\x21\xe1\x0d\x52\xbf\x31\xfa\x31\x1b\xd6\xb1\x80\x8e\x3f\x0d\xb0\x19\xc5\x04\x35\x71\xad\xcc\xb7\xd7\xd5\x97\x54\x79\x7a\x8c\xf4\x90\x86\x4c\xcc\x63\x4b\xd9\xde\xcf\x7c\x7a\x42\xc3\x47\x0a\xdb\x68\x41\xd4\x38\x89\x0e\xf0\x5e\x25\xbe\x2a\xf7\x2e\x30\x60\xc5\x7d\x5f\xe3\x3e\xd1\xb9\x8d\xbc\x49\x0e\x1d\xf3\x38\x92\xd7\xf7\x52\x10\xb9\xc3\xb5\x06\xfc\xdb\x54\x48\x8f\x10\xe0\xff\x4a\x5f\xfe\x06\x0f\x83\xb6\x88\x7d\x85\x78\x8a\xa1\x18\x4e\x6b\xcc\x14\xe5\x78\xd0\xad\xe0\xbf\xc6\xe3\x1d\x58\x30\xc4\x4a\xc4\x8b\x5b\x67\x53\x3e\x09\x34\x8a\x81\x20\x43\x2a\x9b\x95\x4c\x73\xa2\x5e\x13\xc3\x58\x92\x21\x4e\x9c\x99\x9d\x1d\xa6\x90\x5d\x80\x2a\x78\xc6\x1c\xaf\x46\x94\xb7\x7e\xf1\xef\x81\x10\x7e\x07\xa9\x60\x55\x80\xb5\x37\x3b\x8d\x67\xff\xfd\xbb\x32\x7b\x08\x
ef\x7c\xd8\x77\x9f\xbb\x56\xec\x9c\x5a\xa3\x2d\x43\x2c\x30\x41\x71\xa9\xea\x0a\xa3\xd6\x33\x62\xb6\x9b\xfb\x37\x4a\xe9\x20\x6b\x8f\xe4\xbb\xb6\x9c\xa9\x93\x59\x7b\xc4\x54\x33\x69\x9c\xd3\x10\x7b\x62\xcd\xe7\x57\x78\x37\x9c\xcf\x58\xb2\xc6\x33\x3a\x26\x9d\xcd\x8a\x09\x54\x52\x9b\xf2\xe2\xc4\xbd\x6e\x18\x38\x21\x76\xb0\x65\x2a\x1d\x09\x92\x6f\x13\x26\xe3\xef\x86\x4d\xc0\x22\x8b\xab\xb5\x26\xe9\xaa\x48\xe0\x9f\x45\x7b\xc0\xd0\xc8\x70\x07\x38\x66\xa8\x97\xd2\x05\x4c\x40\xbc\x6b\x23\xe5\xd9\x0e\x38\x71\xc0\x11\x1b\xd9\xea\x0e\x6a\xc0\x58\x8a\x97\x91\x11\xdc\xa6\x54\xe8\xb3\x95\x07\x94\x43\x9b\xbc\xd5\x89\x42\x72\xc0\xf3\xae\xa8\xe9\xca\x44\x1d\x8d\xef\xc5\x81\xb7\x80\x7e\x9c\xdd\xbf\x78\x39\x2c\x8c\xe6\x8a\x87\x9e\xff\x22\x2e\xe8\xb1\x0e\xec\x71\x46\x16\x65\x61\xe1\xd4\xc7\xd4\x3c\x86\xbe\x59\x7e\x27\x57\x49\xbf\x8c\xa3\x9f\xd9\x23\xfa\x2d\xab\xaa\xfb\x07\x82\x28\x5b\x3d\xaa\x3b\xcb\x78\x4e\x73\xfa\xb6\xd2\x09\x5c\x08\x0e\xe3\xaf\x19\xd3\x61\xde\xad\x56\x56\xe4\xd4\x19\xdc\xa0\x2f\x44\x9d\x4f\xbd\xe9\x7e\xb2\x76\x24\xac\x2c\x6f\x47\xa4\x46\x79\xf7\x87\x26\x10\xe9\x93\xdb\xd2\xdd\x69\xba\x7d\x9e\x2e\xd4\xf8\x87\xe9\x9b\xcc\x28\x20\xf6\x47\x98\x78\x19\xc5\xb6\xaa\x87\xf8\x23\xfc\x5d\xde\x67\xce\x01\xee\x3b\xbd\x6a\xe1\xfa\x1a\x74\xcd\xb2\x36\x86\x02\x87\x9e\x75\xfb\x20\x69\xf0\x16\xcf\x47\x31\x6e\xea\xc7\x5d\xc7\x3d\x99\xb8\xbb\x19\x3d\x14\x2e\xf2\x4a\xcc\xb9\xaa\x66\xa7\xd1\x58\x9c\x15\x31\xe5\x8d\x98\xf4\x26\x1c\x82\xc9\x71\xd3\xc6\xe7\xd3\xde\x95\xca\xc8\x6e\x89\xa8\xcb\xe6\xb2\xbe\x8b\xd6\xa6\xb8\x71\xcb\x29\x32\xce\x8c\xcb\x54\x2d\x85\xc7\x86\x1a\x94\xe8\xb1\x1d\x63\x56\x82\x06\x4b\xe1\xc8\xe9\x6f\x3b\x08\x00\xbe\x7a\x8f\x78\x8c\x22\x00\xcc\x26\xd2\x28\xc2\x44\x5a\xa1\xa5\x1f\x97\xe3\x35\xf3\xd7\x5e\x7e\xe7\x03\x5d\x1e\xa9\x34\xb1\x70\x43\x0c\x03\x60\x69\x09\xf8\xc8\xcb\xdb\x9f\x40\x55\x4b\x9d\xe5\x6b\xf6\x24\x5c\x88\x70\x2c\x8f\xb4\x32\x1a\x56\x22\xfa\xac\x03\xfa\x9e\x75\x2f\x72\x19\x40\x07\x90\x1e\x08\x47\x67\x01\xba\x3f\x17\xe6\x4e\xb9\x15\x2f\xfa\x3f\x31\x0f\xd3\x
ba\x4d\x4c\x01\x03\xbb\xd2\x3d\x64\x52\xb9\x98\x2c\xd3\x4e\x7b\x40\xc8\xf2\x88\x2b\x3d\xa3\xed\x57\x16\xc9\x93\x88\xbe\x73\xef\x46\xd6\x99\xaf\xa6\xa5\xb0\xfb\xd3\x7a\xb5\x93\x64\xfe\xd3\x40\x4e\xef\xd4\x35\xaa\x35\x47\xb7\x0d\x81\xe7\x6c\x72\xd7\x39\x97\xde\x83\xef\xa5\x30\xdc\xf3\x38\x13\x78\x67\xdc\x04\x75\xba\x3f\x33\xe3\xf2\x74\xce\x70\x63\xb8\xc5\xf3\xaf\x74\x65\xbf\x48\x86\x41\x2b\x5e\x2a\x0d\x78\xb4\x09\x99\x2e\x57\x7c\x0e\x7f\x22\x0e\x3b\xc3\x67\x3b\xb3\xfc\xc2\xa3\x57\xee\x0d\x21\xc4\xe6\x55\x2e\x17\xfa\x75\x92\x8a\x48\x67\x15\x66\x52\x7b\xfd\xe9\xeb\xd9\x9b\x0c\x91\x2f\x5f\xf2\xb0\x45\xf7\x4a\x69\xe1\x62\x35\x5a\x84\x0b\x90\x18\xeb\x3d\xf6\x20\xae\x14\xf6\xaa\x24\xae\x21\xe6\x21\x0f\x22\x5e\xfe\xa5\x5b\x01\xf1\x0f\xac\xf0\xeb\xd8\x16\xb3\x74\x79\x69\x60\x74\x5f\x94\xb5\x60\x6f\x9b\xda\x27\x6c\xf7\xa4\xc8\x68\x13\xf5\x0a\x02\xa9\x3c\x44\x93\xfb\x5d\x96\xab\x86\xb5\x63\x94\xd6\x83\x42\xf1\x66\xa7\xbb\xd7\xd3\xda\xec\xe5\xa2\x1f\x67\x3a\xc2\x0e\xe2\x88\x69\x29\xbf\xb5\x82\xa7\xda\x72\xd6\x00\x16\x75\x1b\x95\x32\xb2\xef\xa8\xbe\x12\xc4\x5a\x77\x8a\xae\x99\xfe\x90\x39\x99\x10\x03\x99\x10\xb0\xf4\x32\x72\x58\x8c\x0d\xda\xfe\xa3\xc5\xdb\x7b\xa4\x0a\x8b\x86\xa2\x46\xb0\x93\x3d\x32\x95\x7f\x0f\xca\xf2\x06\x19\xcd\x61\x3d\x3a\x29\x87\x32\xb3\xfe\x21\x40\x1c\x63\xcf\x9c\xcd\x91\x08\xb6\x6a\xef\x69\x93\x77\x95\x2a\x6d\x1e\xd7\x40\x90\x25\x49\xef\x26\x1f\x6c\xe7\xfd\x66\xa4\xec\x05\x22\x3d\xbd\x7d\xbb\xf3\x14\x9e\xab\x45\x69\x10\xfd\x16\x9a\x67\xee\xac\x07\x8b\x25\x1d\x64\x54\x46\x85\x4c\x33\xf4\x3d\x28\x42\x1f\xf7\x41\x7f\xba\xe3\xdb\xdf\x35\x57\x28\xa0\x83\xae\xd0\x58\x61\xb4\x2c\xe1\x09\xc2\x18\x13\x9f\x69\x9c\xee\xfd\x9b\x97\xe4\xad\x1d\x52\x0c\x83\x4d\x4a\x26\x6b\xdc\x58\xa5\x2c\xc4\x28\xf6\x41\xe1\x95\x86\x4b\xdc\x66\x36\x1b\x36\x87\x8d\x1e\xb4\xfe\xd1\x6d\x63\x6a\x54\x07\x78\x21\xf7\xf8\x8f\xcc\x8d\x2c\x5e\x2d\x01\x26\x3c\xb9\x12\x4c\xbb\x25\xd4\x6b\xf0\x83\x4d\x54\x40\x1b\x4b\x56\xcd\x88\xa8\x5c\x7a\xbf\xd9\x36\x68\x73\x4e\x3e\xe7\xf4\x93\x45\xda\x9f\xb3\xa0\x
23\xec\x6d\xbd\x10\x9d\xec\xbc\x98\xbc\x17\x74\x24\xce\xb7\x65\xeb\x5b\x0e\x84\x35\x0b\xf4\x4c\x62\xcd\x7f\x24\xd3\x91\xab\x07\x71\xf6\x55\x91\x27\xef\xa7\x7e\x37\xc8\xbf\xc0\x79\xd2\xb6\x0e\x94\x3e\x07\xd0\x28\x59\x50\x5a\xab\x25\x78\x69\xde\x3e\xbb\xf4\x86\x43\x25\x71\xcb\xad\xeb\x71\x87\x28\x1e\x58\x55\x38\xd6\x02\x48\xcc\xc1\x7c\x5f\xc8\x9f\x5f\xf4\xee\xfe\xe0\x38\xca\x9b\x35\xfc\xe2\x72\x52\x92\x14\x34\xfb\xe7\x58\x6c\x8f\xb3\x3b\xdb\x18\xeb\x6c\x32\xdb\x7e\x91\xee\x4b\xc8\xf0\x31\xb7\x25\xd6\xe1\x2f\xb8\xc2\x1d\x98\xae\xea\x6e\xc6\x4a\xf0\xd3\x53\x18\xd2\x1c\x6b\x80\x1a\xc8\x15\x27\x3b\xc9\x63\x56\x0a\xd9\xb9\x1e\x7b\x40\x04\xe0\xe9\xe8\xd2\xff\x5c\x6b\xe6\x9c\xa4\x2a\x7e\xea\x81\x92\x5e\x71\x2f\x57\x98\x1b\x93\xa6\x4d\x5b\xfd\xbd\x41\x62\xe9\x13\x82\x11\x85\x0b\xb7\xf4\xc3\x92\x40\xaa\xbc\xf7\x98\x29\x4d\x34\x3b\x3c\x99\x9c\x9e\x62\xe2\xc4\x79\xee\xb5\xdc\x94\x32\xb5\xd2\x09\x57\x09\x9e\xed\x9a\x62\x2d\x68\x74\xb3\x47\xb4\xee\xe6\x81\x46\xdf\x4c\x48\xca\xd3\xda\x03\x01\x27\x76\xec\x52\x27\xb3\x3d\x7b\x52\xb7\xb2\x19\xe6\x62\x34\xb6\xe2\x29\x5b\xe6\x4e\x72\x1a\xe6\x7f\xca\xf0\x9e\x23\x60\xd3\x8c\xf0\x24\x41\xba\xe0\xcb\x1a\x2d\xdc\x91\x48\xad\xbc\x50\x8e\x21\x09\x70\x09\x0e\xa7\x91\x85\xce\x81\xe0\x15\x0d\x8d\x45\x91\x1f\x39\x21\x96\x44\x3d\x96\xac\x9e\xaa\xa2\x78\xf0\xf4\xc2\xd5\xed\x80\xc9\x3b\x3e\x64\xe0\xdb\x2d\x0a\xc8\x3f\x3e\xa3\x82\xdd\xe5\x7a\xf4\xf0\x65\x28\xf2\xd8\x31\x88\x1f\x06\xa8\xd2\x25\xda\x84\x49\x45\x6b\x0a\x6a\x35\xe1\x44\x31\x96\x61\x1b\x4e\x6b\x2c\x8f\xd3\xa3\x82\xc3\xc7\xf1\x95\xa1\x6b\x63\x6d\xde\x62\x6e\x10\x7b\xfd\x77\x29\x54\x13\x81\x6a\xe4\xbd\x12\xbb\x0c\xe5\x65\x3c\xdd\x1c\xd9\x5a\xbb\xf6\x5d\x80\xf8\xfa\xd7\x39\x9c\xdb\xe5\x57\xd6\x30\x99\xc7\xcf\x6d\x54\xda\x3a\xab\x61\x9a\x3b\x36\xca\xb4\x86\xae\x8c\x42\x36\x3d\x3d\xa2\x28\x6b\xaa\x9a\xf6\x63\x36\xef\x0c\xa8\x23\xa7\x66\x0e\x40\xf2\xe0\x04\x9d\x76\xb9\xed\xfc\xfe\x8d\x89\x47\x2d\x4f\xa3\xae\xfe\xa6\xb2\xac\x84\xa2\x77\xe9\x83\xc2\x58\x34\xe4\x2c\xb8\x30\xcf\x48\xba\x8a\x2c\xa5\x
44\xc9\x36\xce\x8f\x33\x25\x05\x82\xcb\x34\xfc\x24\x0c\x35\x84\xcd\x41\x68\xc7\x65\xf9\x8b\x88\x92\xbf\x80\xac\x38\x29\xb2\x57\x22\x5a\x97\x23\x7e\xbc\x36\xc3\x71\xdf\x24\x92\x41\x06\xd9\x27\xf6\x90\x5a\x05\xad\x64\x05\xf4\xd4\x85\xc1\x7f\x44\x90\x1f\xaa\xcb\x84\xfd\xd2\x31\xec\x52\x3f\x15\x6b\xef\x39\xf2\xcb\xe5\x78\xb3\x99\xfa\x38\x66\x79\x1e\xa4\x5d\xda\x06\xab\x21\xbd\xd1\x61\x9f\x06\x6b\xe6\x42\xce\x89\xa2\x62\x69\x37\xd7\x07\xdb\x6f\x9a\xca\x72\xf4\x2d\x66\x55\x51\xed\x2d\x44\xcb\x48\xe2\xb5\x4e\x54\xda\x2e\xcf\x4e\x86\xa4\x13\x2f\x3c\x34\x34\x08\x1b\xee\x60\x8a\x01\x48\x32\x65\x0f\xb1\x0e\x96\x07\x20\x66\xe1\x64\xf7\xc1\xce\x92\x00\xaa\x54\x79\xbe\x8d\xdc\x9d\x6e\x01\xa7\x6e\x23\x32\x2d\x24\x05\x8d\xeb\x9c\x73\x81\x9b\x2a\xfe\x03\x08\x5b\x80\xd4\xb2\x79\x71\x01\xfc\xd9\x52\x3f\x62\xb1\x38\x9b\xa8\x43\x2e\x45\x27\x9f\x26\x76\x26\x8a\x2c\xe7\xed\x85\xe6\x8a\xe3\x2b\x97\x2b\x4a\x0f\x67\xf0\x40\x94\x7b\x16\x92\xe5\xa9\xa2\xd5\xe5\xe2\xba\x6f\x8a\x73\x00\x50\x6b\xa4\xdc\x7f\x49\x97\x5a\xdb\x16\xa5\x5b\xb7\x1d\x67\xcc\x34\xdf\x3f\xa0\x1f\xeb\x0b\x25\x47\x88\x57\x5a\xd9\x8d\xf4\xb2\x0a\x6b\xf9\x07\xb1\xb6\x0a\x63\x5e\xac\x8c\xe3\xaa\x19\xca\xe9\x93\x49\x8f\xe0\x36\x31\x5e\x9a\xcb\x5c\x2c\xd3\xea\x51\x3d\x60\x5e\x17\xf0\x79\x34\xa1\x6e\x8b\xbc\x0d\xc7\x5d\x69\xb3\x32\x11\x9e\xca\xa5\x0c\xc0\x3e\x92\xe0\x08\xfc\xfc\x57\x4b\xde\xe6\x24\xaf\x01\xec\x2e\xf2\xde\x3f\x07\x12\xe7\x20\x88\x00\x2f\x06\xe1\xc4\x47\xcd\xa0\x93\x75\x04\x62\xdb\x0a\xe9\xf8\xc9\xde\x9c\x62\xde\x6d\x39\x13\x43\x9c\x13\xd9\x22\xae\x29\x55\x12\x1b\x66\xbc\x6f\x5b\xe4\xcb\x7c\x40\x92\x76\x8e\xce\xf8\xc8\xb1\x20\x62\xd4\x5b\x10\x84\x47\xcd\x46\x1f\x1d\x1f\x22\x03\xc2\x2a\x2f\x4e\xac\x3b\x6c\x00\x9c\x54\x58\x33\x95\x06\x6d\x9f\x8d\xfa\x04\x60\x8e\xf0\x02\xe4\x5d\x84\xa2\xab\x3c\x6b\xf9\xcf\xf4\x83\x6d\xc8\x2e\x98\xbe\x3a\x9c\xcc\xba\x24\xd5\x56\x29\x05\x03\x94\x26\x00\x39\x99\xa0\xd0\xc0\xd0\x15\x87\xb6\x20\xfd\x71\xd9\xba\x6a\x5a\x40\x73\x62\x60\x95\x15\x08\x61\xf6\x67\x68\xb6\x17\x71\x64\xaf\xe7\x92\x
91\x75\x1c\x83\xb3\x17\x12\xe2\x30\x9a\x38\x18\x9c\x47\x39\xb7\xcc\x62\x28\xb7\x79\x0f\x7e\x14\x3b\x68\x86\x77\xa3\x37\x86\x08\x00\x8d\xc1\x4b\xcf\x28\x1f\xe4\xf7\x74\xe3\xb7\xe5\xd9\x73\x90\x0c\xeb\x2c\x29\x68\x0a\x69\x8e\xdf\xe9\x4d\xe1\x55\x12\xcf\x1d\xa2\x49\xe7\xeb\xc7\x8c\x43\x16\xd4\x93\x9f\xd9\xa9\x68\xa0\x4e\xa8\x7e\x9f\xeb\x3e\xab\x9d\x94\x3b\x70\x89\x10\x27\xef\xa5\x6a\x3e\xc8\xa8\xec\x74\xfc\x1d\xcb\x84\x5e\xd3\x58\x6b\xc2\xa3\xd4\x2f\x0a\x04\x92\x6b\x7a\x9c\xc8\xcd\x77\x64\x9d\x7e\x92\xb1\x7d\xa4\x1b\x8c\xb9\xec\x97\xaa\x79\x79\x6b\x2f\xea\xc6\x4f\x58\xf2\x57\x15\xa8\xe6\x45\x58\x79\xc5\x92\xfd\x9c\x24\x95\xe1\xfa\xbc\x96\x95\xfe\x44\xd7\xfe\x4d\x6e\xdd\x4a\x8d\x95\x6f\xf4\xe9\xf9\xf4\x8a\x41\xad\xb2\xa6\xa1\x9d\x93\x75\xf5\x8f\xf3\xad\x11\x2e\xd8\x30\xc0\x70\xfa\xe5\x58\x54\xfb\xf9\x88\xe9\x97\xb6\xad\x8f\xd6\xe8\x65\x24\xec\xfc\x13\x3c\xbb\x0c\xd4\xaa\xc2\xc7\x30\x44\x82\x29\xbf\x77\xbd\x79\xb9\x52\x57\x30\x20\x80\xe4\x90\x69\xd8\xb4\xb8\xf2\x52\xbf\x56\x18\x10\xed\xef\x8b\x50\xab\x8f\x62\x7b\x45\xce\x57\xe4\x4d\xcd\x52\x90\xf8\x17\xe1\x90\x4f\x9f\x2b\x45\x9e\x4a\x75\xcd\x21\x09\x10\x8e\xa9\x1e\x50\xf5\x45\x31\x10\xe5\x14\xe1\x1a\x42\x0e\x4d\x7a\xff\x44\x1f\x0d\x8b\x69\x90\xf2\x3c\x36\xac\xe5\x13\xa6\x52\xba\xab\xf2\xf6\xad\x98\x49\x49\x13\x13\xf4\xbe\x41\x38\xb0\xfd\xe8\x33\x35\x3c\xd3\x66\xd8\x4c\x2c\x50\x3b\x72\x44\xcb\x22\x00\xf0\x82\x6d\x45\x15\x86\x77\xce\xcf\xdc\xb5\x53\xfe\x0c\x79\x2d\x4a\x3f\xeb\xb2\x1e\x3a\xa9\xd3\x78\x5e\x04\x39\x03\xb4\x02\x0e\x7b\x87\x74\x70\x01\xd6\x46\xcc\x93\xaa\x9b\x93\x91\x89\x77\xf0\xae\x07\x48\x7e\xc4\x49\xb7\xc0\xe1\x29\xd6\xc5\xc4\x33\x9c\x6c\xc5\x35\x9c\xc3\xda\x2a\x5a\x70\xb9\xa3\x91\xdf\x65\xe8\xb2\x1e\x17\xb4\x50\xf0\x02\x40\xa8\xf1\x6c\x3a\xbb\x62\x1e\xa5\x14\x41\xe8\xb0\x17\x12\x22\xc6\xe6\x5d\xc2\xdf\x2b\x2a\x11\xe5\x24\x58\x43\xf1\xf9\xc6\x93\x02\xb5\xce\xa2\x57\x19\x8b\x36\xa7\x00\xc6\xb9\x91\xc4\x26\x1d\x43\x03\x26\xfb\xe2\x1c\x02\x23\xee\x2d\x5d\x51\x4e\x04\x4d\xab\x84\xcc\xc6\xbb\xfd\xee\x3f\xc6\x34\x
21\xa8\x13\x14\xac\x2c\xfd\x13\x70\x05\x19\xa6\xdc\x74\x2f\x5a\x32\x00\xc1\x28\x04\x0d\x81\x9f\xb5\x4c\x92\x65\xc1\x49\xab\xb3\xbb\xf1\x53\xa9\x45\xe8\xe7\x9e\x64\x19\xfc\x41\xf7\x06\x01\xf2\xbe\xcf\x9a\x13\xda\x98\x5f\x39\x78\xef\xf4\x40\xa3\x53\xde\x9d\x3e\xa9\x07\x54\xc1\x8d\x8c\x10\x86\x07\x75\xfe\xac\x67\x42\xc9\x91\x66\xd8\xf0\xbd\x46\x12\x7b\x90\xd5\xe2\xec\x27\x9b\x1c\x90\xb9\x0a\x1c\xa6\x70\x7c\xfa\xc1\xb8\xdc\xae\xae\x0d\xed\xf9\x8b\xc6\x37\xf3\x95\xb3\x5a\x96\x93\x15\xa4\x3a\xcf\x00\xef\xe9\x24\xdd\xb1\x34\x25\x84\xcb\x4f\x9c\xcd\x64\x97\xcf\xce\x04\x66\x41\xed\xc5\xa9\xf5\xc4\xe6\x65\x01\x8d\xc7\x4e\x9d\xfa\xe7\xd2\xb9\x20\x34\xd0\x91\xf0\x89\x42\xbc\x97\x00\x7e\xa2\x84\x6a\x11\x50\x7d\x55\xad\x03\xde\x63\x64\xb1\xa4\x11\xd7\x36\xb2\xf8\x3a\xe3\x98\x08\xbd\x15\x88\xdf\x00\xd4\xa7\xe7\xaf\xeb\x8c\xeb\xeb\x49\x95\xce\x06\xb0\x71\x6a\x65\xf2\xdf\x8e\x90\x90\x3a\xea\xd9\xea\x67\xe4\x0f\xd7\xd7\x07\x67\x37\xff\xf6\xeb\x8d\x7f\xac\xef\xb9\x24\x8a\x61\x34\x32\xaf\x69\xc1\x41\x3a\x46\x51\xb6\xbd\x4c\xbe\x42\x1b\xf5\x7d\xef\xb3\xda\x9f\xbf\x83\xad\x76\x2d\xf3\x4e\xd9\x6f\xdb\x17\xce\x2b\x0a\xe4\xe8\x9c\xbc\x0b\x99\xad\x01\x0c\xb7\x3b\x80\x80\x8a\x71\xcb\xf7\xaf\x1b\xf8\x36\x77\x58\x50\x7e\x40\xca\x1d\x33\x77\xdd\x09\x44\xb6\xcf\x83\x3d\x55\x7f\x0c\x87\x57\x52\x10\x0d\xe2\x41\xd7\xe1\xab\x05\x46\xf9\x38\xa7\x15\xae\x7a\x8c\x91\x6e\x7b\x37\x7a\x1f\xae\xde\xd7\x23\x0c\xc3\xc4\x48\xbf\xe6\xb3\x33\x89\x83\xd8\xf0\x4b\x5d\xbc\xf6\xfa\xc7\x15\x3e\x83\xca\x3b\x5e\xf9\x18\x0f\x56\x83\x6f\x8a\x54\xc3\xcc\x04\xd7\x76\x7e\xea\x5d\x73\x40\x22\x6a\x06\x20\x87\x24\x95\xc0\xd6\x56\xf6\x25\xb4\x12\x06\x4e\xaa\xd3\x50\x7b\x66\x30\xa2\xe6\x2e\xa5\x85\x25\x3b\xb8\x50\xee\x30\x0d\x1f\x1c\x3d\x1e\x7f\x53\xc9\xfc\x01\xeb\x8c\x21\x54\xac\x43\x78\x36\xc8\xa2\xe9\x1e\x6f\xe7\x82\x23\x09\x90\xc0\x25\x67\xda\xf1\xc8\x6c\x7f\x6d\x55\x49\xd6\x05\x1e\xeb\xc3\x93\x68\x9d\xae\x34\x47\x74\xd7\xd7\xe6\x9e\x9f\xbd\xc2\x73\xed\xf0\x98\x2a\xc9\xd1\xc2\xc7\x14\xe1\xe6\xcc\xda\x40\xc3\xf0\xef\x40\xea\x
bb\x80\x96\x2a\xf7\x3a\xee\x84\x1c\xdf\x5a\x6b\x2b\x12\xad\x8e\xae\x3e\x2e\x25\xa4\xa2\xe6\x14\xac\x6e\xee\x8b\x4e\x00\xe6\x78\x0b\xe6\x04\x53\x6d\x57\x48\x79\x61\x7b\x21\x94\x18\xfa\x80\xee\xf4\xa5\x0e\xc1\x2b\x9a\x8b\xc8\xa2\x49\x0b\xdd\x07\x7a\xf0\x8a\xff\x96\x52\xdd\x0d\x1e\x36\x69\x97\x5d\xe0\x72\xd5\x50\xe3\x97\x77\xde\x6c\x58\xd8\x7e\x51\x7e\xb7\x32\xf9\x10\xc2\x05\xa7\x0c\xb4\x97\x15\x83\x44\x56\xe8\x73\x19\xa0\x5f\x34\xb8\x10\xf3\x81\xb1\x8f\xc9\xaa\x4a\x10\xe2\x12\x55\x18\x5f\x13\x12\xc1\xcb\x02\x8c\x76\xf6\x19\x7a\xa5\x65\x10\x05\x0f\x5e\xe7\x70\x9a\x09\xa5\xce\xa9\x98\xaa\x97\xf0\x9b\x90\xaa\x6a\x5e\x96\x55\xb7\xd0\xa4\x28\x60\xea\xdd\x39\xbe\xf8\x0c\xa7\x2f\x28\x75\x57\x75\xf4\xeb\xcd\x2f\xd0\x42\x39\x62\x71\x61\x2b\x97\xb9\x81\x06\x1c\xcd\x16\x90\x75\x7d\x1f\x4c\xac\xf7\x8b\x11\x6e\xcb\x38\x0b\x85\xcb\x9f\x1f\xd0\x56\xcc\x2c\x5c\x92\xfe\x7a\xe3\xd9\xc5\x45\xdc\xbf\x63\x9f\x13\x64\x11\x81\xc7\x80\xf3\x03\x72\x5f\xeb\x8a\xb5\xa5\x96\x21\x53\x29\xcf\xe7\x28\x39\x74\x45\x01\x1a\xd6\x47\xf5\x40\x3a\x17\xb2\xf7\x39\x30\x38\x4d\xda\x57\x1c\x42\x2a\x89\x99\x33\x41\x0e\x67\xc7\xcf\xdd\x79\x9c\x2d\xf4\xcc\xf3\x77\x0e\x4e\x3c\x5d\x5c\xcd\xbd\x03\xc2\xf9\x78\xd3\x34\x5c\x24\x73\x52\x0f\x98\xce\xc9\xd3\x15\x40\xe3\x44\xd3\xeb\x52\x48\x5e\xfa\x91\x01\xba\x22\x98\x20\x3c\x8a\xed\x58\xbb\xc0\x75\x41\x16\xe5\x1b\xcf\x65\xcf\x40\x4e\x8f\x36\xc5\x7e\xfc\x6b\x75\xed\xdc\xcd\xdd\x45\x2b\x1c\x80\x5e\x11\x98\xb3\x8c\x7e\x8e\x94\x5a\x40\x5f\x83\xda\xab\x27\x6a\x6c\x5d\xc7\xa3\x72\xf3\x96\xd6\x24\xe1\x2b\x0f\x8d\xc7\x73\xe5\xdd\xe3\x7f\x74\xe0\xd5\x5c\x96\xe2\x1d\x8d\x53\xc4\x26\xaf\x90\xb7\xa0\x81\xb4\x97\x61\x30\xc8\x0a\x3f\x02\xdc\xb5\x57\x6d\x0b\x02\xe2\x1d\xb7\xdc\x34\x3b\xe5\x91\x94\xd9\x10\xcf\x7f\x86\x93\xfe\xa6\x1f\xb7\xb8\x30\x90\xa5\x5c\x72\x01\x02\xf8\x24\x17\x80\x6c\x0c\x52\x51\x1c\x4e\x9a\xc9\x9d\xa5\xa0\x85\x87\x87\x89\xaa\x75\x6e\xe3\xf8\x1f\x30\x8f\xf4\x35\xe7\x7b\x89\x69\x89\xe3\xc2\x9b\x70\x6e\x29\x8c\xde\x5e\x14\x30\x30\x2f\xd1\x63\x67\xc7\x34\xbc\x8b\x07\x
5e\x44\x55\x76\x63\x74\xfc\x4d\x3c\x57\xbc\x19\x4e\xfd\x6d\x0b\x21\xab\xdc\x1e\x6e\xc9\x6b\xe3\x32\x38\x1f\x28\xdf\x16\x66\x75\x5e\x5f\xc2\x09\xce\xc9\x55\xe1\x92\x8c\x4d\xac\x11\x0e\x7c\xcf\x47\xcd\x31\x1f\x50\x9f\x35\x40\xa2\xf5\xc8\x2a\x24\x02\xaa\xfd\x6a\xf6\xd7\x63\x29\x71\x40\x61\x49\xbe\x12\xbf\x19\x80\x53\xa6\x81\x46\xd6\x50\x53\xcc\x25\xf4\xb4\x0d\xca\x73\x60\xe1\x77\x8a\x10\xe4\x0d\x4f\x80\xae\x0e\xa2\xfd\xca\xad\x1d\xe3\xb5\xe8\xea\xdd\xe3\xbd\x96\x8d\x96\xd7\x00\x4f\x54\xda\x23\x47\x36\x45\x13\x82\x25\xfe\x3a\x9c\xa3\x55\x2d\x12\x34\xc4\xec\xc2\xab\xcb\x4c\xdf\x74\x2f\xc5\x5f\x0d\x60\x2c\x21\x65\xb0\xf6\xa1\xc0\x1d\x0f\xc5\xbf\x06\xb8\xd3\x06\x1a\xb3\x69\xd5\x16\xef\x8b\x1f\x70\x40\x14\xd0\xa9\x93\xe8\xd7\xeb\x1b\xa5\xd5\x16\xdd\x6a\xb9\x8c\xa9\x25\xa4\x0e\x92\x37\xdd\xb6\x59\x12\xc2\xe7\xe8\xbf\xcb\xf5\x22\x43\xe7\x2b\xd1\x0f\x68\x86\x9f\x86\xa7\xd7\x23\x8d\xd7\xb3\x24\x8e\x7b\x3f\xc7\x3c\xfd\xd5\x98\x12\x3c\xaa\xff\xc0\x10\x24\xf6\xbe\x87\xef\x94\x8c\x55\xfa\xa8\xe0\xc4\xa0\xb1\x40\x41\x8a\x4d\x90\x84\x96\xdc\xd0\x9b\x2b\x21\x56\xdd\x7e\xaf\xff\xf4\x0e\x73\xcd\x4e\xeb\x71\x00\x30\xd7\xdf\xb8\x3d\x0c\x51\x33\xc6\xf6\x2c\xeb\xf4\x7f\xff\x93\xe7\x21\x45\xa3\x8f\xa1\xc0\x03\xb4\x05\x1a\x50\xc8\xdd\x57\x5a\x52\xd0\x24\x13\x6b\x86\x48\x77\x15\x04\x03\x4c\xee\x3e\xf8\x6d\x11\x6c\xc3\x73\xf1\xdc\x8f\x37\xed\xf3\xb5\xe8\x8a\x34\x34\x1e\x63\xb7\x03\x29\x17\xd2\x02\xeb\x60\xc7\xdc\x3f\x2d\x95\x2a\xb1\x40\x94\xdd\x3d\x9a\xe4\x19\xdf\x92\x48\x34\xe6\x1e\x75\x90\x9a\x69\xc3\xe4\xf3\xac\x54\x03\x9c\x62\xb4\x81\x00\x68\xd0\xf3\x93\x32\x2b\x0b\xbc\xed\x83\x25\x58\x88\x1b\x7e\x72\xb6\xc7\x88\xb6\xdd\xb8\xd7\x5d\x95\x8c\x45\x58\x88\x0b\x0d\xc2\x33\x30\xbd\x19\xe7\xad\xb8\x5d\x62\xe0\x4b\x0a\x31\xdc\x5c\x4c\x62\x5d\x55\x27\xd2\xea\x5f\x9f\x3b\x3c\x8e\xdf\x64\x55\xd3\x7e\xa5\x40\x1e\x2f\x2e\x80\xc0\xba\xc7\x0a\x36\x14\x6e\x93\xd3\x16\xb7\xbf\x78\x32\x92\x15\x28\x94\xdf\xe7\xec\x85\xc2\x5e\x18\x0e\xb4\xfc\xbd\xb9\xe2\x28\x32\xda\x39\x4f\x38\xa7\x96\xe1\xa5\x79\xde\xaf\x4f\x
82\x92\x87\x10\xf7\xde\xd9\x80\xf6\x3e\xe6\xa4\xa8\xb8\xf1\x12\x97\xcb\x38\x77\x19\x55\xa7\x00\x5c\xbc\x20\x83\x34\x23\x65\x31\x1f\x43\xca\xc8\x4c\xed\xc6\xc7\xa3\x61\xc4\x79\x18\x39\x7b\x3f\x7a\x86\xab\x2a\x8b\x19\xab\xf0\x0e\xf2\x83\x05\xac\x9c\x86\xc9\xa3\x1e\x96\x43\xb5\x40\x30\x83\x3b\x80\x18\x89\x6c\xec\x54\x66\xba\x52\x7b\x12\x56\xd3\x79\x37\x4b\xc6\x4c\x17\x04\xdb\xb1\x68\x66\xc9\x41\x9c\x39\x93\x56\x68\x4e\xeb\x29\xbf\x69\xbb\x0e\xac\xc5\xc5\x63\x88\xa6\x03\x9e\x85\x28\xe2\x2e\x3e\x84\x2a\x78\x0f\xeb\x66\x88\x97\x23\x47\xb3\x33\xce\x37\x5d\x2a\x42\x87\x61\x7b\x2e\xa1\x8d\xce\xe7\xa0\x5f\xba\xab\x46\xaf\x60\xfb\xf3\x97\x4c\x23\x0a\x81\xab\xb2\xe7\x49\x9e\x85\x73\xcd\x6c\x93\xc4\x19\x91\x8c\x2c\x4f\x82\x46\xb5\xdb\xbd\xbe\xaa\xbd\x6f\x9f\x0b\x32\xd7\x40\xd6\xa3\x80\xe1\xbf\x7e\x4a\x32\x6b\x98\x07\xc2\x67\x24\x95\x18\xa3\xd6\x9c\x1f\xe0\x8a\xf6\x85\x11\xb9\x7f\xc5\xa8\x5f\x27\xc1\xb2\x88\x86\x8e\xfe\x45\x8c\x78\x92\x69\x57\xd7\x45\xe2\xff\xb9\x93\x6d\x8f\x35\x0a\xa9\xea\xa7\x30\xa8\x59\x97\x2b\xc8\x08\x6e\xa8\x91\x5f\x00\x19\xb9\xaa\x86\x6d\x76\x44\x9d\x9f\xed\x02\xf0\x7b\x85\x34\x32\x12\x90\xe7\xef\x16\x88\xa5\xd2\xe7\xe2\x51\x8e\xa4\xdf\xdc\xe2\xf2\xd7\x38\x13\xdc\x3a\x0a\xe6\xd3\x34\x78\x84\x03\x89\x65\x0c\x63\x79\x23\x62\x31\x8a\xf3\x63\x18\xf2\x5c\x8b\xed\x0e\x7a\xe9\x3b\xf7\x08\xc8\xe9\xc4\x33\xea\xf7\x5f\x21\x4a\xa7\xf8\x9f\x06\x33\x35\xa6\xd8\x00\x84\x27\xa2\x41\x80\xea\x25\xfb\x0c\xeb\x74\x7e\x9a\x5c\x96\xcd\xaa\x37\x80\xe0\xaa\xff\x89\x55\x7e\x91\xa5\xd1\x4b\x37\xf8\x05\x44\xd2\x96\xff\x44\xfb\x60\xef\xab\x35\x18\xb7\x0c\xb7\x77\xe0\xd6\xbe\x59\x79\x28\x75\x8b\xd5\xde\x7a\x53\x23\x3a\x8a\xd7\x07\x90\x8a\xd2\xcf\x6d\x82\x0e\xad\xcf\x68\x1c\xe4\xa4\x8e\x94\x1b\x0e\x66\x29\x72\xaf\x3e\xcc\x05\x43\xab\x41\x30\x61\x36\x11\x80\x0f\x2a\x8b\xe9\x1d\xdf\x28\x48\x96\xc5\x0b\x2d\x1c\xd5\xce\x0b\xea\xa8\x78\xa7\xae\xc9\xc1\xfe\x5e\xa9\xc5\x83\xa3\x77\x09\x18\x15\x40\x4f\x2c\x14\xa7\x97\x6e\x40\x9b\x4b\xf2\x2d\x84\xc0\xe4\xea\xfd\x0e\x9b\x65\xed\x9a\xbe\xd8\xae\x
37\x91\xc0\x92\x1d\x98\x62\x87\x4c\x6c\xde\x74\xd8\xea\xd1\x5d\xc3\xb2\x63\x9c\x31\x58\x09\x3f\xb0\x5a\xf2\x95\x6f\x47\xd0\x67\xe5\x47\x8c\x7a\x66\x35\x80\xe1\x75\x1f\x21\x5f\x29\x4f\xc8\x9a\x30\x6a\xcd\x81\xd8\x53\x29\xd4\xe8\x7d\xc8\xfa\xdd\x0c\xd0\x60\x43\xfa\x59\x61\x07\xce\x6a\x55\xfd\xf1\xfa\xb5\xda\x54\x1f\x03\x4d\x91\x67\x68\x41\xcb\xdf\x26\x73\x63\x96\xba\xb1\xa1\x3e\x63\xc4\xfd\xc5\x17\xab\x45\xee\x10\xf0\x68\xdb\x32\xe1\xcf\xbf\x32\x69\x19\x4e\xb2\xdc\x73\xe1\x8a\x42\x5b\xd8\x42\xd2\x05\x08\xde\x1e\x48\xb8\xeb\xad\x63\x1d\x74\xcc\x2b\xe7\x6c\x9f\x97\x6c\x98\x01\x64\x1e\xec\xc4\x1b\x7c\x20\xab\xbf\xed\x97\xfb\xda\x53\xd5\x4c\x89\xc3\xa6\x41\xb2\xd8\xf6\x00\x8e\x17\x93\x7b\x28\x92\x19\x90\xf7\x3a\xf3\xda\x3b\xd1\xb1\x2b\xdd\xe2\x8a\xa7\x1f\xd9\x73\xd7\xa4\x0b\x90\xe6\x28\x76\x2a\x81\x76\x3e\xa4\x61\xc7\xa6\x3d\x65\x86\xdd\x1f\xa6\x26\xab\x84\x70\x13\x51\xbd\x58\xa4\x5b\x8f\x09\x79\x91\x2b\x61\x2e\x8b\xe7\x70\x0b\x0b\x52\x4c\x4a\xf3\x68\x65\xd3\xf2\xde\xb8\xb0\x48\x33\x51\x28\xdd\x85\x12\xaf\xdb\x49\x76\xbf\x89\xe9\x74\x5e\x8c\xf8\xe2\x1c\x3a\xb9\x64\x53\xf2\x0c\x4a\x64\x6d\x58\xb4\xc8\x50\x76\xf9\xa2\xd6\x93\x18\x33\xfa\x22\xbc\x7c\x96\xe8\xc3\x69\xce\xf2\xbb\xe2\xe8\x7c\x27\x02\x46\x38\xa2\x0c\x45\x53\x5f\x71\x37\x7c\x5e\x2f\x1c\xc8\x1d\xd5\xa4\x07\xb2\xf0\x65\x99\xb6\xb2\xe9\x34\xc8\xe4\x3a\x25\x0b\xa1\x8a\x4c\x2b\x96\x56\x61\x53\xe5\x2b\xad\x54\x50\x51\x52\x9b\x89\x30\xb3\x2d\xd8\xd7\x99\xe5\x73\x37\x24\xd2\xc0\x0b\x40\xd9\x54\x48\x18\x13\x6f\x54\xa4\x2b\xce\xac\xaa\xe6\x74\x2d\xab\xfd\xd3\x59\x95\xc0\x87\x0b\x24\x6b\xcb\xda\x4d\xea\x03\xd9\xef\x0d\xa7\x11\x59\x3c\xd1\x3d\x65\xb8\x66\x38\xfc\x36\x0b\x90\xaf\xd0\x80\x34\xbc\xbc\xad\x0e\xca\xc4\x55\x7c\x29\xdc\x7b\x05\xb0\xfd\x56\xc7\xc9\xeb\xb8\x70\xfa\x58\x59\xea\x5b\xfa\x5f\xf3\x27\x69\xc5\x23\x3a\x10\x6b\x31\xda\x2a\xf7\x59\xd9\x33\xda\x79\xe2\xff\x58\x26\x3d\x1e\x2d\x74\x61\xf9\x29\xc3\x32\x4c\x51\x9e\x94\xf6\x92\x6b\x06\x2a\x26\xdc\x8f\x27\xf5\x35\x4b\xbc\xfa\xbd\x57\xb6\x41\xb1\x9b\x67\xe3\xcf\xf7\x
03\xfb\x72\xc1\xcf\xcb\x6e\x8d\x8b\x0e\x39\xe3\xb1\x81\x80\x58\xf3\xe4\xb3\x40\xb4\x42\x1b\x25\x92\x6d\x11\xbf\xcc\xe1\xb4\x66\xc5\xde\x70\xb7\x45\x03\xba\xb6\xa6\xaf\x73\x35\xbc\xfb\xaf\x54\xf0\x51\x0a\x0c\x49\x46\x36\xc1\xd7\x38\xb7\xde\x05\xb9\xa1\x7b\xe0\x08\x93\xd2\xa9\x9a\x3e\x58\xaf\x5a\xd5\x3a\x94\xa3\xc4\x4c\xd9\xc9\xf1\xe7\x4c\x96\x1c\x49\xab\x8c\x72\xf9\x5f\x67\x8f\x52\xe0\xa7\xb6\x6a\x5d\x43\x0b\xe5\x53\x95\x4d\xca\xe9\xbf\x5e\x6d\x26\xda\x3c\xa1\xc7\x27\x0f\xc3\xa6\x7d\x4a\x87\xa3\x0a\x15\x63\xb1\xb1\x49\xa8\x23\x4d\xdf\x5d\x23\x50\x3e\x23\xef\x8d\xc5\xd9\xde\xa1\xdb\x23\xe0\xee\x0d\xd2\x35\x5d\x9f\x70\xfb\xba\xe9\x16\x6e\x0e\xee\x22\x16\xa0\x1b\xe1\x85\x71\xcb\xc2\x80\x06\xf8\x76\x99\x93\x5b\x87\x54\x7f\x66\x69\x1d\xa0\x89\xb6\x53\xd1\x58\x45\x48\xff\xc3\xe0\xe0\xb2\xb2\x7c\x21\x12\xdc\x3e\xc6\x96\xbb\x21\xc6\xf9\xd9\xae\xd1\xbc\x38\x0b\x28\xe3\x88\x2d\xbb\x9d\x68\xb2\x25\x1e\xf1\xc8\xc8\x5f\xb5\x13\x70\xed\x84\x5f\x25\x15\x09\x73\x27\x3f\x89\x8a\xfe\xfc\x46\x53\x36\xb7\x5e\xf3\x69\xce\x6c\x1b\x56\x5a\xfa\xc2\xc1\x3f\xab\x54\xfe\x10\x3a\x23\x55\x88\x55\xdd\x65\x76\x2f\xf9\x90\x7f\x6c\x5e\x0c\x65\x1e\x66\xec\x64\x88\x8c\x75\x38\xf4\x13\xbd\x91\x36\x51\x90\x04\xbf\x43\x3d\x18\x68\xd1\x3b\x72\x8e\xa9\xdb\x69\x52\xa6\x46\x5c\x6b\x8a\x41\x7d\xcb\xd3\xfa\xac\x67\xd1\x3b\x08\xaf\x92\x48\xbc\xe7\x3f\x9c\x50\x9e\xf1\xea\xef\x30\x42\x72\xd1\xe6\x7d\x82\x8b\x59\x2d\x1a\x6c\xf1\x4c\x5e\x9b\x08\xff\x71\x80\x30\xa2\x48\xd7\x8b\x3d\x78\xbb\xda\x43\xdb\x5d\xba\xf0\x45\xdf\xf3\xb5\xa2\x43\x70\x8e\xb3\x96\x1f\xf5\x5b\x16\xae\x5c\xe3\xe5\x12\x0a\x6d\x1b\x4c\x77\x42\x6f\xea\x12\x7d\x9e\x0e\x0e\x2f\x53\x04\x5a\xe2\xbf\xac\xf4\x25\x4b\xeb\x04\x5e\x51\x53\x39\xa5\xf0\xb0\x59\x44\x6a\x0e\x42\x7c\x52\x56\x83\x42\x3e\x41\xf0\xdc\xe1\xc4\x37\xec\x6f\x7b\xf7\x59\x48\x43\x5f\x7c\x2d\x2e\x64\xff\x5e\xd0\xa0\x0c\xf0\x31\x61\x9f\x72\xc0\x71\xde\x00\x10\x8c\xce\x06\xc3\x50\x69\x76\x9a\x6e\xfe\x4a\x7e\x6c\x46\x7c\x4c\x5c\xc3\x90\x06\x1f\x3d\xb4\x21\x8b\x24\x6e\x44\xb8\x4e\x10\x90\x55\x
fd\xc9\x5f\x09\xec\xc5\x1d\xc9\xf2\xfc\x61\x3e\x61\xbb\xc2\x1a\x6d\xe4\x6a\x5c\xe3\xf1\x56\xf0\x72\x23\x75\x16\x55\x7c\xb1\x2f\x31\x42\x89\xac\x9e\xc5\xd1\x9d\x37\x9a\x09\xd4\xa6\x6c\x0a\xb1\x94\x56\xc3\xec\x73\x0c\x37\x57\x7e\xac\x0c\x15\xca\xba\x20\x8e\xa0\x8a\x3a\xd2\x45\xf6\xc0\xec\x4d\x8d\x2a\xf6\xc6\xa8\x09\x83\x73\x31\xa9\xf6\x45\x87\xa0\x55\x67\xb2\x3d\x58\x41\xfc\x8c\x8d\xd9\xae\x0d\x3c\xd1\x10\x3f\x44\xdb\x12\x7f\x14\x6e\xbc\x43\xab\x49\x76\xbc\x13\xe1\x48\x69\xc6\x2e\xe0\x7e\xed\x2f\x3e\x83\xdb\x9d\x6b\x8d\xa0\xc5\xe1\x49\x64\xfd\xa2\xe9\x04\xfb\xa6\x38\x27\x8d\xe9\xae\x9b\x31\xe6\x7a\xd3\x61\x9b\x15\xac\x0d\xad\xea\x55\x40\x87\x84\x6f\xf1\x15\x04\xe7\x9a\x6b\xa6\x11\x0e\xf8\x40\x04\x83\x7c\xe8\x16\x9a\x7f\x21\xc7\x7b\x82\x78\x79\xf8\x35\x12\x24\x85\xf3\x85\xe9\x16\xe1\x66\x76\xdc\x5c\x93\xbe\xc2\x1b\xcd\x7b\x32\x87\x7c\x73\xcb\x78\x14\xb5\xab\x59\x17\x6e\x20\x08\x6c\xa5\x02\x87\x14\xba\x58\xce\xea\xe8\x81\x30\x26\x1d\x6f\x61\xa8\x00\x1e\x18\x5f\x07\xe5\xcf\xc8\xa4\xb1\xa1\x87\x37\xfd\xf2\x25\x4e\x15\xa7\x38\x86\xc4\x98\x02\x48\xb3\x62\xc7\x04\xd5\x3b\xe8\x03\x60\x9d\xf8\x6d\x8b\x02\x61\xa4\x08\x1a\xad\xb1\x91\xaf\x68\x61\x33\x13\x52\xe2\x8c\x67\x25\x70\x78\x0a\xdf\x56\x81\xce\xf3\x08\x4f\x92\x78\x78\x42\x6e\x0a\xf6\xd1\xa4\x48\xd9\x8e\x3b\x9b\x5f\x53\x05\xb9\x7f\xc5\x61\x2b\xb5\x13\x7d\xab\x0b\x52\x90\x5c\x32\xde\xe4\xae\x18\x06\x10\x34\x04\xaf\x4d\x1e\x6d\xe4\xa1\xac\xce\x1d\x75\x9b\x94\xc2\x2a\xe8\x76\x5e\xfe\xae\xf3\x6c\x26\xe5\xdb\x84\xd1\xc6\x7e\x35\x47\x46\xac\x39\xf8\xb4\xe7\xfa\x19\x85\x0f\x0d\x84\x80\xeb\xa6\x01\x94\xaa\x05\x7b\x2b\x8e\x7c\xfd\x5b\xe2\x87\x5f\xed\xc0\x6b\xab\xf5\x19\x6d\xb5\xe0\x91\xed\xb3\xa8\xf9\x31\x8d\x73\x4c\x4d\x56\x74\x75\x17\x32\x27\x9e\x77\xe8\xce\xc2\x6b\x69\x55\x65\xf9\x21\x7b\xd7\x82\x89\x3c\x3b\xee\x07\x1b\x28\xc4\xfc\xca\x6b\x5f\xab\x62\x24\xb9\xf6\x68\xe1\xc0\xf1\xa7\xea\x8c\x36\x11\x25\x17\xd7\x67\xbc\x91\x06\x67\x15\x35\x67\x5f\x9a\x48\x8e\xa8\x73\xf4\xcd\x60\x50\x9d\x42\x1f\x48\x7f\xe8\xea\x0b\x26\xa1\xe6\x28\x
59\xe9\x8c\x3d\xf2\xa4\xeb\x46\xc0\xbc\x51\x97\x13\xea\xdc\x84\x19\xbd\x53\x15\x21\xc0\x28\xab\xe4\xa1\x7a\x5a\x9b\xe0\x78\x7d\xe6\xa0\x60\xd4\xc3\xfc\xe5\xcc\xe1\x26\xbd\x86\xf8\x0b\xc3\xc6\xdf\x25\x4f\x42\xf9\xd0\x6e\xea\xc6\xc4\x1d\x46\x1a\xde\x72\xe4\x1e\x8c\xa6\x6b\x2a\xf8\xce\x08\x52\xa9\x34\x36\x77\x0c\x85\x18\xee\x7d\xd7\x2b\xac\x72\x4b\xdd\x9d\x9c\x01\x61\x00\xcf\xce\xb7\x38\x2d\x96\x01\x93\x23\xee\x45\xba\x92\x28\xf4\x04\xc6\xa8\x76\xb2\x72\x5b\x24\xfc\xea\x37\x14\x31\xb3\x1b\x98\x0c\x06\xf2\x95\x56\x71\xc7\x59\x56\xb9\x4e\xd2\xc1\x46\x4e\x5d\x91\x0f\x03\x21\x11\x94\x43\x17\x08\x44\x5c\xaf\x00\x54\x10\x22\x0e\xb8\x01\xdf\x8d\x02\x46\x45\xda\x71\x3a\x68\xb7\x30\x48\x21\xe3\x44\xff\xc6\x4e\xc6\x1a\x2e\x03\x32\xf4\xcf\x66\xeb\x88\xf7\x6d\x75\x57\xf4\x83\xd3\xf5\x9e\xf4\x6a\x85\xd4\x9e\x4e\xd6\x63\x6d\xfd\x78\x8f\xa5\x7f\xcd\x32\x15\xf5\xaa\x3a\x01\xc3\x5f\x93\x86\x4c\x86\x19\xd5\xae\x56\x44\xcb\xc5\xf8\x4a\x53\x14\x90\x3f\x19\x6c\xa0\x1a\x5d\x40\x8f\x78\x29\xc6\x59\x80\x21\xed\x6d\x74\xc4\x1e\x1c\x78\xb8\x91\xa5\x55\xee\x82\x22\x9f\x59\x06\xd1\x85\x56\x8c\x1b\x4c\x61\xce\xba\x97\xe2\x2a\x0f\x95\xf8\x5b\xd0\xa7\x2a\x13\xdf\x3b\x36\xb2\xea\xa3\xa1\x78\xe7\x9c\x60\x44\xd7\x34\xa3\x72\xb6\x0b\xf5\xcc\xe3\x25\xc6\x3e\xd2\xa1\x7a\x55\x65\x07\xe0\x4e\x0a\xb7\xfe\xd2\x34\x42\xdb\x64\x7a\x47\x6d\x40\xfd\xf0\x03\x71\x4e\x96\xd2\xd9\x7f\x14\x51\xe5\xec\xad\x25\x86\xd9\xfb\x3d\xdc\x0f\x8c\x9a\xf7\x3e\xb6\x22\x63\x88\xaa\xe0\x9c\x7c\xeb\x6e\xcf\x18\x1f\xfa\xdc\xcb\x82\x30\xcf\xde\x64\x48\x0f\x3f\x98\x3c\x86\x32\x7d\xfd\x85\x46\xf0\xdb\x76\xa4\xbf\x01\x34\xd4\x06\xb9\x04\xdb\xea\x27\x5f\x48\x63\xa8\xd3\xb6\x4b\x23\xbb\x1d\x2c\x50\x85\x30\x43\xce\x49\xcf\xf8\x2f\xd8\x7a\x10\x45\xb3\xb3\xe9\xc9\x3f\xd2\xb9\x71\xba\xb9\xe8\xea\x12\x75\x2f\xee\x53\x0d\x42\x78\x1f\x6d\x6f\x4b\xe1\x8a\x00\x4b\xeb\x2d\xa1\x45\x66\x62\x98\xe8\xa4\x44\x7a\xd0\x96\xb3\xe4\x8a\x22\x17\x13\x2d\xe0\x18\xe5\x4d\x3b\x18\x58\x79\x84\xb5\xed\x38\x39\xaa\xeb\x40\xea\xa8\xd3\x0d\xf0\x7c\x7f\x22\x71\xc5\xc4\x
97\x22\x26\x0e\x8d\x58\x7c\x56\xe6\x1a\x80\xf1\x2a\xd2\xfa\xa3\x23\xda\xce\xb6\x6e\x00\x33\x49\xa4\x1c\xdd\x29\x40\x94\x7e\xbc\x08\x3c\xbc\xa7\x14\x52\x8c\x45\xc5\x0d\x6b\xdc\x56\xce\x46\x0d\xbb\xe0\xc3\x66\x27\xba\x29\xff\xab\x20\xce\x06\x6b\xa7\xeb\x8f\x0b\x25\xba\x98\xb6\x8d\xbc\x42\x26\x50\x00\x06\x1a\x8a\xd9\x30\x7c\xc5\x5f\xbf\xff\x9d\xb8\x92\x6c\x81\xf1\x15\xb3\x13\xa7\x40\x81\x00\x86\x46\xf0\x45\xac\xf4\xd1\x36\xc8\x8d\xef\x13\xc5\xd5\xed\x95\x83\x87\x2d\xa5\x24\xc6\x93\x3c\x87\x04\xf2\xa8\x92\xd2\xe6\x52\x96\x61\xac\x45\x75\x06\x6f\x11\x87\x5c\x1d\x83\x08\xba\xb3\xf7\x55\x87\xbd\x77\xee\x76\x77\x67\xa7\x6b\xa8\x2b\x83\x53\xe0\x08\xda\x98\x8d\x5e\x0a\x34\x24\x37\x55\xf1\x1a\xcd\x00\x53\x9b\x81\xb0\x1a\x07\x45\xc4\x70\xec\x4e\xb3\x06\xd6\x28\x25\xcf\x7e\xe1\x35\xd5\x02\x56\x6c\x4f\x7f\x90\x81\xd0\xaf\xdf\x57\x0b\x57\xe8\xeb\xb0\x5b\x13\x38\x27\x04\x7e\x34\xbb\x30\xc5\xd8\x57\x7e\x48\x71\x46\x69\x75\x85\xcc\xc0\x53\xfa\x07\xab\xfd\xf3\x37\xe2\x3e\xe5\xe9\x4a\xa3\x76\x22\x69\xf1\x97\x36\xc5\x62\xa9\xa5\x2f\x49\x33\x99\xda\xd1\x99\xb1\x04\x88\xb5\xe5\xa1\xd5\x6e\x70\xd7\x71\x42\x83\x21\x31\xa1\xd4\x11\x83\xd6\xd2\x10\x18\x85\xc8\x05\xd8\x36\xd7\x06\x6b\x6c\xf5\x19\x28\xcd\x43\x41\x6c\x7d\x4f\xe6\x12\x6a\x26\x68\xf9\x30\xb6\xac\xe3\x54\x95\x37\xd2\xba\x62\x7a\x68\x09\x94\xc7\x43\x77\xa0\x3f\x78\x95\x26\xdb\xd8\x3e\x06\x97\x52\x11\xe5\xce\x8c\x34\x9f\xb1\x74\x38\x8c\x55\x48\x58\xbb\x22\x2e\x41\x48\xf7\x66\xad\xf0\x00\x10\x34\xe5\xe1\x32\xc6\x16\xef\x32\x9d\xb9\x1c\xd8\x47\x85\x1e\xb0\xaa\xab\x37\xf6\x57\xcd\xf6\xe9\xe7\x7f\xdf\xef\xc9\x5c\x5d\x14\x3d\xea\x89\x50\x31\x12\x2e\x0c\x7a\x5c\x25\x93\xbe\x6f\x34\x4f\x7c\xaf\xb7\xbe\xe1\x56\x30\xcd\x11\x09\x06\x89\x6e\xb1\xab\x8b\x07\xa0\xb0\x7e\x95\x9f\x6a\x0d\xbd\x86\x75\x5e\xb2\x8d\x60\xa7\x75\x04\x2c\x9c\x86\x5b\x5a\xad\x7e\xb8\x1c\x6b\x9a\xe2\xb2\xa7\xf6\x4e\xb4\xd9\x0b\xcd\x7f\xa0\x0a\x7e\x20\xed\x99\x24\x44\x01\x4e\x1f\x42\x7e\x4c\x77\xef\xc6\x6f\x21\x4f\xc8\x88\x12\x0c\x55\xab\xe7\x88\xb1\xb9\xe0\xcc\xf2\x22\xe2\x
f8\x2b\xed\xc2\xf8\x73\xb0\x8b\xfc\x69\x93\x44\xdb\xcc\xa0\x1a\xeb\x30\x4e\xa9\xfa\x56\x94\x19\xb0\x7f\xc0\x70\x8a\x4c\x40\xe6\x13\xbb\x48\xff\xac\x46\xb1\x3e\x8d\x64\x67\x94\x71\xa9\x30\x59\x7a\x3d\x2a\xa5\xd2\x03\x3e\xbb\x03\xc5\x01\x9e\x91\xb4\x34\x3c\x65\x70\x2c\xab\x80\xc4\xe8\x88\x79\xc6\xb9\x6b\x76\xd1\xfa\xf7\x87\x0f\xfb\xb6\x3b\x93\x38\xd0\x26\x57\x48\x97\x89\x38\xcc\x95\xa9\x60\x91\x5b\xa0\x6b\x02\x44\x5b\x9a\xa3\xcd\x68\x52\xd5\xed\xa0\x1b\xd2\x11\x33\xcb\xc1\x7a\x22\x3a\x6b\x2e\x85\x02\xb3\x0b\x09\x52\x2a\x30\xb2\xfb\xab\x25\x1b\xdc\xac\xc3\x1b\x03\xc9\xa9\x8c\xb0\x3e\x53\x81\xdb\x94\x30\xe8\xe3\x1c\x25\xf8\xb5\x3a\xcd\x61\x56\x47\x0e\x68\xcf\xcf\x47\x31\x0a\x5f\x6a\x36\x05\x1a\xbc\x4d\x66\xbe\xa9\xef\x2a\x8c\x22\x12\x43\x7d\xe7\xcb\xd7\x26\x7d\x01\xcf\x4f\x1d\xc9\xa3\xeb\xa4\xdb\xc0\xdc\xee\xf9\xb4\x22\xcd\xcc\x64\xdc\x85\x50\x7e\x5f\x07\xfa\x84\x64\xb2\x66\x84\xc3\xe1\xa8\x39\x2d\xf7\x6a\x5a\x67\xb2\x41\xc2\xb9\x9e\xde\xd8\x93\x0a\xdb\x74\xb0\x60\x59\xf1\xaa\x54\xb2\x1a\xf9\x71\xf0\x63\x73\xd8\xd7\x20\x10\x80\x43\x13\x68\x9c\x9a\x61\xb7\x23\x14\x81\x9f\xc2\x00\x8a\x34\xb4\xe9\x7b\xd2\x79\xeb\x0e\xb3\x11\x80\x7b\x31\x54\xb5\x23\x03\x33\xff\x8e\x15\x72\x04\xeb\x91\xe5\x3b\x37\x1d\x81\x97\xd7\x74\xc9\x3a\x30\xc7\x92\xdd\x8b\x53\xbe\x37\x9d\x95\xd8\xb7\x13\x1c\xe2\xe0\xbe\x16\x28\xea\xd4\x44\x5a\xb8\x45\x26\x8a\xbc\xba\x8f\xf4\xf1\xba\x5d\x66\x2a\xd0\x40\x22\xbf\x22\x91\x68\x11\x28\x2a\x29\x5f\x0a\x24\xe3\xfb\x32\x92\x33\x0d\x2a\x3c\x9b\xcf\x24\x9c\x08\xdd\xbf\x6d\xb5\xd0\x61\xc2\x05\x67\x64\x22\x46\x75\x05\xae\x2d\xce\x48\xac\x81\x44\x9b\x87\xc7\x51\x25\x8f\x5e\x29\xe2\xad\x1c\xdc\x6e\xbb\xfd\xde\xa6\x9f\x46\x6e\x67\x73\x27\x09\x46\xbd\x4e\x33\xc5\x06\x85\x3d\x4c\x55\x85\x8d\xab\x26\xc9\xbb\x24\x85\xc0\x5a\xe5\xfd\x2e\x30\x1b\x81\xad\x87\xba\x98\x3b\x01\x20\xe7\xd4\xa4\x3f\x3d\xdd\x0a\xe4\x84\x9f\x8e\xf9\xd5\x11\xe1\x97\x15\x28\x43\x51\x6e\xaf\xe2\x7d\x84\xe0\x87\xf2\xd8\x22\x9a\x97\xfc\x5b\x7f\xe2\x7f\x63\x84\x6e\x18\x34\x6f\x6e\x1d\x29\x70\xe7\x2a\x5e\x
fc\x17\x5b\x7b\x90\x48\xe2\xd5\x52\xe2\x7c\xf3\xe9\x18\x8f\x87\x9a\x6c\x67\xba\x2c\x2a\x9b\x30\x2e\x41\xbe\x33\x70\xf8\x89\x4b\x85\xb6\xde\x51\x9e\x09\x95\x8e\x68\x7f\x8e\x94\xa3\xf4\xbd\x1e\x3e\x40\x36\x4c\x11\xd7\x04\xda\xe1\x5d\x1b\xf4\xe0\xd8\x29\xbe\x17\xf4\x2b\xc1\xc3\xf0\xd2\x3b\x9e\x1d\xe6\x43\xa9\xb1\x7b\x68\x49\xe9\xff\x8c\x5f\x0d\x3d\x37\xce\x16\x06\xba\x82\xa4\x6f\x9e\x75\x1c\x00\xfb\xc1\x66\x4f\xf4\xcb\xac\x14\xdf\xd9\x04\xeb\xa6\x7a\xd4\x32\xcb\x07\x92\xe6\x0d\x67\xfd\xfb\x3f\xa1\x5d\x4d\x5a\xa6\xce\x33\xd4\x45\x9f\x33\x8a\x24\x5d\x7f\x50\xb0\x9e\x28\xa6\x06\xf4\x80\xe5\xa9\x0e\xf6\x72\x81\xe3\x01\x3b\x5d\x84\x93\x37\xba\xbd\x41\xb9\x71\x49\x6e\x02\x34\x0e\x2e\x2b\x14\xec\x62\xf5\x68\x9b\x63\x5a\x1b\x7b\xfa\x7a\x87\x45\xef\x6e\xed\x11\x99\xb4\x5b\xd1\x97\xd3\x14\xd5\x6f\x34\xc9\x34\xe4\xa2\x15\xc8\xea\xba\xf3\x38\x4a\x28\x13\x36\xc4\x13\xc3\x58\x22\x6f\x41\x62\x38\x66\x49\xb6\xc2\x5d\x4a\xbf\x25\x79\x2d\xcb\x78\x8e\xcd\x53\xc4\xa3\xd3\xb7\xce\x91\x1f\xab\xc9\x7c\xf3\x8b\xe8\x74\xba\xc1\x4f\xc0\x80\xbe\x41\xfd\x8a\x21\x98\x05\xdf\x56\x61\xc7\xb4\x36\x21\x42\x62\xa3\xc2\x27\x6a\x12\x17\x17\x10\x7d\xd1\x4f\x34\xcd\x23\x7c\xf2\x03\xe1\x59\x7e\x35\xcf\x6d\x1f\x39\x46\x44\x02\x55\xf3\x1b\x77\x56\x01\xb2\xf2\x3f\xab\x40\x1d\x54\x92\x9a\x10\x5b\x10\x3b\x77\x95\xc2\xd5\x3a\xe8\xcf\x15\x2f\xc2\xa6\xd8\xa9\x78\xac\xc5\x38\x12\x22\xaf\x1f\xa0\x55\x38\xbb\x11\x4d\x05\xa7\xcf\xd5\xe0\x4b\xd3\x02\xf6\x6e\x47\x8d\x01\xd1\xa3\xa7\x40\xa6\x39\x2f\xe6\xec\x21\xd2\x3f\x93\x4d\xdb\x13\x91\x95\x37\x80\x7f\xcd\x3e\x47\x35\x1d\x68\xc1\xff\x81\xe8\x9a\x3d\x40\xcf\xb6\x7f\xae\x82\xea\xd6\x24\x30\x97\x2d\x38\x40\xe5\x66\x5a\x01\x70\xca\x13\xac\x6d\x4e\x76\x4c\xca\xbf\x2e\x0f\xe7\xb1\x6c\xa5\xc0\x6e\x3a\x33\xaa\x63\x56\x6b\x8f\xec\xb9\x92\x91\x0a\xe4\x98\xb8\x0b\xae\x02\xf3\x20\xb8\x57\x9c\x2d\x77\x77\x78\xa8\x77\xaa\x93\xc6\x38\xe8\xde\x1e\xe3\x21\xb7\x15\xb3\xd7\x56\xc9\xd3\xd0\xb9\x8c\xdb\x75\x24\x41\xf5\xf8\x21\x8c\xca\x24\x4b\x7f\xf1\x35\xd6\x00\x26\x44\x0c\xf0\x19\xf3\xb4\x
01\xda\x63\xdd\x69\xf5\x32\xa2\x95\x08\x47\xf2\x38\x00\xe2\xff\xd0\x0c\xd1\x3b\xd8\x13\xcf\xa0\x4b\x90\x7b\x11\xb2\x98\x56\x55\x48\xd0\x8e\x2f\xfc\xd7\x78\x18\x5c\xc2\xc5\xf5\x62\x8c\x43\xa8\xe2\x04\x37\x65\x24\x4c\xfb\x74\x0e\xd8\xd9\x67\xee\xb3\x81\x79\xee\x09\x94\x4a\x26\xcf\x4c\xf3\xcc\xb3\x28\x95\x14\xe1\xa1\xf9\x18\xc0\x72\xd1\x69\x81\x31\xab\xc7\x2e\x30\x88\x05\x50\x5b\x13\x1c\x6a\xcc\x9f\x87\xb3\x0a\x20\xb5\xef\xe9\x1b\x80\x2e\xe7\x05\xed\xb8\x0c\x8c\xb6\xfe\xc3\x6e\x4e\x1d\x31\x2a\x02\x5a\x78\x5c\xd8\xc3\xfa\xe7\xb3\x89\x49\x2d\xbd\x63\x44\x88\xff\xb9\x44\x85\x1f\x9b\x4e\x52\xf0\x52\x7d\xd2\xb5\x74\x83\xd1\x4b\xeb\xfa\xde\x94\x3c\x36\x42\x22\x65\x0d\x27\xfb\xf7\xca\xa6\x5f\xe7\x99\x93\x9d\xa9\x85\x5e\x21\x96\x9f\xee\xe1\x2e\xb5\x81\x62\xfa\x86\x01\xc4\x72\x5d\xa7\xe4\x13\xea\xea\xe1\xed\x54\x10\x9a\xdf\xad\xc1\x3c\xd9\x82\x18\x8e\xe6\x20\x74\xdd\x0b\x85\x44\xab\xa1\xc3\x0a\xb6\xf7\xad\xfd\x1c\x64\x7c\x62\x35\x95\xe3\xde\x07\x9c\xc8\x8f\xed\xbc\xb6\x53\xf7\x3c\x10\xec\x0d\x9d\x4a\xda\x0f\xf3\x5c\xa0\xd5\x05\x24\x9a\x09\xaf\x88\x15\x44\x9f\x55\xac\x23\x82\xae\xdf\x98\x1a\xa3\x1a\x4c\x32\x33\xfa\x71\xce\xf8\xf5\x9d\x45\xf3\xc1\xa5\x96\x17\x00\xcd\x49\xfe\x50\x37\x84\xd2\x72\xae\x97\x6f\x50\xa5\x5b\x5d\x02\xe2\xde\xe2\xba\x8a\x9f\x82\xab\x67\xdc\x33\x78\x91\x39\x31\xbf\x2a\xe3\xd7\xef\xde\x55\x46\x60\x68\x16\x49\x9c\x40\x49\xd1\x99\xe6\x12\xb1\x4f\x7c\xf9\xe6\x2d\x3f\x76\x94\xfd\x70\xb7\xba\xa6\x3b\xc9\xc7\x06\x01\x70\x5d\x94\xee\x32\x5e\xdd\x92\x17\x36\x02\x5c\x2b\x3e\x15\xaa\x6f\x06\x27\x4d\xd2\xc8\x7e\x35\x42\xeb\xad\x73\x79\xd4\xa5\x35\x0b\x08\x1f\x71\x17\xca\x8d\xa1\xcf\x25\xbf\x59\x33\xdd\x98\xf6\x78\xaf\x07\x99\x42\x8f\xa8\xfb\x75\xec\x8d\x83\x02\xf6\x95\x12\x6e\x8f\x1a\xd2\x35\x4f\x80\x58\x57\x85\x07\xeb\x48\xc0\x24\xc5\x79\x72\xf8\xb9\x00\x66\xb2\x74\x1d\xab\xdf\x13\x0e\x3b\xaa\xc4\xef\x06\x92\x9a\x1d\x2b\x1e\x18\xa3\xca\x9d\xe5\x64\xa9\xd6\x7c\x13\xc2\x22\xef\x15\x48\xd3\xc4\xaa\x3f\x39\x30\x23\x56\x0f\x04\x83\xdc\x66\x40\xba\xb7\x3e\x3c\xc0\xdd\x7a\x
de\x63\xc6\xef\x14\x05\x02\xe9\x8c\x8f\x11\x6d\x34\x14\x9b\x6c\x27\x9b\x7d\x99\x17\x05\xda\x7c\x61\xf5\xa9\x31\x36\xb5\xd0\x18\xec\x78\x71\x80\x36\xbf\x7f\x7c\xc6\x3f\x0d\x77\x5e\xc1\x2d\x6a\xf6\x98\x64\xa4\xc3\x2a\x88\xea\xdf\x27\xe9\x67\xe7\xb9\x33\xd5\x31\x3d\x88\x20\x85\xb4\xc9\x36\xf4\xfe\xdb\x90\x47\xb8\xd6\xc7\xad\xf8\x16\x74\xd2\x78\x59\x21\x92\x5d\x39\x15\xff\x50\xb6\x4c\x7f\x1e\x82\x1e\xcb\x92\x63\x32\x49\x30\xab\x52\xdd\x80\x63\xfa\x9f\x9a\xfa\xb9\x0e\xb4\xe9\xd9\xcc\x1e\x0a\x69\xca\x50\x71\xb6\x30\x8d\x25\x9e\x0f\x40\xe7\xf4\x6f\x3e\x18\x43\x9c\xf2\x66\xe9\xeb\xb0\x5d\x5a\x0f\x47\xf9\xc7\x16\xe0\x57\x4b\x68\x53\x2d\xed\x10\xb5\x38\x78\x37\xfd\xbd\xb7\xda\xf0\x1c\xdc\xec\x90\x68\x06\x8a\xb7\xf9\xb8\x73\x96\xf9\x3a\x6b\x39\xbe\x8f\xc0\x25\x89\xd7\xfb\xa3\x41\x13\xa2\xf6\xf0\x6c\x4f\xae\x93\x05\x46\x4b\xee\x74\x57\x8b\x4d\xb5\xad\xf0\x35\x88\x62\x87\x55\xa2\x46\xc2\x31\x8a\x04\x9c\xe2\xc9\xa0\x7b\x6a\x20\xd7\xd2\x2b\x2c\x26\x71\x6d\x14\xb3\x20\x26\x2f\x80\x9c\xd0\x51\xbb\x41\x70\xf7\x2b\x07\x35\xb1\x21\xa5\x3f\x63\xc2\x09\xad\x23\x70\x35\x93\x68\x1c\x3a\x2c\x28\xd4\x99\x7d\x0a\x8e\x21\x96\xb1\x44\x61\x04\x29\x6b\x67\x49\x54\x59\x9b\x7d\x88\x09\xc2\xf7\xfd\x6d\xa8\xa5\xa8\x6b\xb0\x3e\x60\x8d\xab\x4f\x26\x78\x5e\x81\x82\x85\xfd\x4f\xdf\xa8\xad\x84\x62\x07\x95\x8b\x8c\xae\x92\xcd\xdc\xfb\x96\x56\x5c\x7d\x3e\x07\xf9\xe6\x8f\x5f\x56\xf4\x4d\x7b\x40\x28\x54\x2d\x8c\xb2\x93\xeb\xff\xbe\xab\x84\xd7\xad\x37\x0a\xbb\x95\x40\x93\x80\x07\x81\xb3\xde\x12\x83\xa3\xc4\x4f\xc2\xe1\xfb\x56\xad\xaa\x40\xf7\x0e\x1c\x8e\x2f\x68\x5a\x1d\xdb\x2a\x40\x2c\xb5\x5c\x5d\x94\x4b\x92\xb3\x29\x38\x20\x92\x1f\x7b\x77\xf8\x99\xe4\x48\xc6\x77\x4a\xdd\xab\x60\x1f\x66\x8c\x80\x29\x8e\x45\xf5\xe9\x07\xa8\xdb\x32\x15\x54\xfc\x31\x25\xbb\x5b\xa4\x3a\x7f\xeb\xd5\xba\x2c\x04\x94\x01\x6d\x76\x58\xfe\x1b\xc1\x56\xb7\x69\xc3\xf6\xc5\x91\x12\x7a\xd4\xb1\xcf\x9b\x3e\x38\xcc\x32\xf3\x4f\x6d\x35\x5f\xba\x8b\x9c\xe6\xf8\xef\xa8\x2a\x31\x40\x4b\x61\x56\x2f\x0a\x3f\xe7\xb0\x77\x01\x82\x3b\xf2\xc7\xdd\x64\x
5f\x87\xe5\x8f\xdb\x07\xa7\xad\x0a\x52\x79\x01\xbe\xd5\x92\x32\xdd\x04\x89\x8d\xd3\x51\xc6\xd7\x35\x9e\xcf\x27\x74\x80\x69\x6b\x9c\x25\xf9\xb3\x28\x6c\xee\xf4\x71\xfa\x9a\xd6\xde\xf6\x5a\xfc\xf7\xe2\x7c\xc9\x8a\x1f\xfd\x58\x06\xb7\xb7\x4a\x86\x31\x3c\xd6\x3c\x40\x64\x8d\x1f\x98\xed\xd6\x95\xac\x80\x21\x01\xc7\xef\xf5\xf2\x7c\x83\x1c\x97\x5f\x02\x78\x16\x4e\x12\x7e\xea\x6a\x69\xdd\x61\x14\x05\x5c\x07\x2d\xb3\xa1\x3e\x61\x0e\x14\x59\x62\x3d\x62\xe8\xfb\x07\x02\x2b\xa1\x37\xeb\xb9\xef\x07\x97\x87\xd4\x82\x13\xbd\xd8\x14\x5b\xab\x1f\xaf\xd0\x6b\x8d\xb6\x65\x12\xad\xf2\x13\x88\x6c\x0a\x17\x24\x4c\x92\x5e\xae\xcf\xce\xf4\x7c\x90\x47\x10\x69\x94\xdb\x3c\x55\xfc\x99\x31\x5d\x01\x6f\x44\xb0\x99\xcb\x14\x79\x02\xb7\xbb\xde\x04\x12\x63\x68\x44\x23\xe9\xa9\xc3\x48\x4f\x1f\x6d\xce\x76\x22\x4e\x5d\xea\x32\x16\x84\x1f\x45\xff\x76\x4e\xfc\x75\x7c\xfc\x47\xa1\x60\x13\x9f\x67\x9b\x4f\x0d\x6b\x3f\x5f\x84\x21\x54\xd3\xa6\xa9\xa7\xaf\x53\xc5\x54\x63\x1d\x6e\x37\x59\x7d\x4e\x21\x10\xa8\xce\xd7\x30\x7f\x75\x0c\x0c\x70\x01\x3b\x07\x40\xfd\xe2\x4c\x92\x1c\xbd\x0a\x98\x3b\xe5\x5c\xb5\x06\x13\x2c\xe9\xd7\x69\xfd\xe8\x68\x7d\xae\x8e\x84\x38\xf9\x11\x9d\xda\xad\xaf\xca\x93\xec\x06\xe3\x37\xa2\x44\xfb\xb7\xf4\x92\xaf\x12\x9b\xa2\xfb\x1e\x4e\x24\x66\x8e\x00\xa3\x57\x8c\x6b\x07\x17\x3f\x91\x9b\xf5\x8e\x64\x3d\x83\x34\x9f\xcd\xbe\x3b\x5a\x83\xb6\xb7\x43\x41\x59\xf7\xb5\x00\x2d\xbc\xc3\x16\x9d\xc0\x9b\xdb\x80\xde\x9b\x1d\x7a\x14\xaa\x06\x54\x41\x13\x9e\x47\x6d\xc3\x50\xff\x9b\x29\x97\x27\x3a\xc9\x0f\xcb\x99\x79\x08\xcd\xe3\xa0\x25\x90\xbe\xd9\x0a\x85\x04\x66\x8c\x79\x49\xe9\x80\x04\xa3\x7f\x1a\x45\x82\x57\xbc\x1f\x38\xaa\xdd\x69\x04\xda\xc5\x8f\x4e\x80\x3f\xc4\x1a\x51\x74\x4d\xa7\x2d\x61\xdd\x19\xac\x51\x15\x92\x4e\xd2\x9f\xf5\x08\x5a\x76\x16\xbe\xcb\xbd\xad\x04\xc0\x88\x16\x71\x67\x95\xb4\xd6\xa3\x4e\x39\xa7\x27\x10\x9d\x3c\xeb\xc0\x30\x7d\x08\xed\x31\x7e\x32\xa9\xe8\x8e\x3f\x01\x0e\x9f\x10\x80\x2d\x2c\x5d\x84\x4b\x9a\xe2\xf2\xd2\x77\xb9\x7a\x90\x5f\x3a\xde\x61\x7d\x4c\xf6\x53\x53\x8a\x90\x12\x2a\x
2f\xaf\xf3\xc6\xb3\x1a\x65\xe8\x80\xcf\x31\xae\x5c\xf1\xd9\xe0\xb7\x8a\xf5\x74\x32\xb1\xa2\x86\x64\xd2\x35\xff\x4d\x01\xea\xb5\x25\x4e\xa0\xdf\x98\x70\xe7\x33\xf2\x35\x09\x51\x9f\x1e\x64\xda\xd0\x7d\x7f\xa0\x90\x54\x2e\xc8\x57\x9f\xd1\x5e\x48\x71\x9d\x7c\xb6\xb6\x8b\x29\xff\x97\x9b\xb3\xca\x59\x1c\xe6\x52\xa9\xc4\x29\x6b\x65\x06\xf0\xfe\x2b\x79\xd8\xe4\xeb\xa7\x9f\x06\x91\x62\xcc\xee\xff\x86\xc6\xa5\x46\xed\xe9\x83\xdf\x5b\x23\x1f\xf5\x4a\x7b\xaf\x84\x54\xda\x8a\x84\x16\x94\x91\xa5\x09\xad\x9d\x86\xb2\x23\x7d\x39\x95\xbd\xcc\x4d\x7b\xec\x8d\x12\x75\xe3\x9d\x18\x7f\x44\x11\x25\xbb\x8c\x7a\xd6\xa3\x69\xd0\x5e\x1b\xf3\x94\xaa\xf2\x2a\xa3\x9a\x93\xe5\xde\x41\x36\x1e\x4b\x4b\x52\x7e\x8f\x31\x16\x52\xa0\x95\xd8\x00\x90\x42\x53\x78\x84\x98\x4d\x42\x43\x95\x2b\xb1\x48\x56\x07\xaa\x11\x74\x60\xc8\x5f\x37\x15\x19\xb7\x9f\x66\xb8\x85\x50\x1e\x90\x7c\x6d\x52\x9e\xd0\x86\xee\xd7\x8d\x1c\x1a\xd9\x69\x09\xd3\x32\xe6\x20\x1a\xd9\xd8\x09\x19\x30\x66\x38\x62\x10\x35\x81\x2c\x21\xcf\xfc\x12\xb7\x2a\xac\x0e\x0d\xf7\x08\x32\xd0\xac\xa0\xb9\xb2\x26\x96\x41\xb5\x45\x02\x7f\xf7\x7c\x9f\xee\x6a\x14\x75\x09\xc2\xb9\x46\xc3\xec\xef\x18\x1e\x33\x8b\x2f\xed\x54\x28\xee\x05\x2d\x76\x57\xf9\x6c\x32\x65\xe0\xed\x59\x33\x6c\xa6\x0e\xc2\x63\xff\xc3\x74\x2e\x14\xe0\xbc\x07\x3c\xda\x7c\xfe\xcf\x89\xb6\x76\xf0\x65\x76\xa7\xb9\xf1\xbd\x26\x91\xd9\x3c\xa7\x53\x7f\xff\x0e\x3d\x2a\x14\x33\xa5\xcc\xdf\xa0\xe8\x82\x20\x28\x87\x98\x70\x02\x6a\x43\xad\x61\xef\xf2\x54\x78\xba\x58\x3a\x08\x6e\x74\x27\x30\x19\x9d\x2c\xe9\x34\x0e\x80\xc7\x12\xf4\x5c\x95\x8c\xeb\xe5\xa8\x61\x8c\xb2\xd9\x50\xdc\xce\x9b\x6e\x09\xcf\x10\xa5\x47\x1f\x46\x95\x2b\x63\x23\xe5\xea\x55\x9a\x04\x12\x82\x83\x4f\xd8\x56\x33\x63\xcb\x55\x49\x76\x59\x1e\xb4\x88\x87\x2a\x77\x02\xe5\x32\x69\x14\x67\x9e\xf5\xbd\x94\xd9\x83\x3a\x82\x2b\x03\x3f\x26\x01\x4f\xd1\xc5\x9b\x4d\x66\x15\x0f\xe5\x33\x3f\x41\xfb\x16\x3b\x2e\x4c\x1d\x64\x3d\x8c\x85\xe7\x22\xf1\xba\x95\xc4\x4d\xdb\x3f\xea\x3f\xba\xa8\x9c\x7b\x4d\x01\xc7\xd7\xfd\x32\xfb\x83\xfd\x91\xd5\x86\xdc\x
0d\x14\x67\x75\x58\xaa\xad\x28\x48\x1c\x76\x5b\x49\x94\xa2\x6a\xbb\x20\x1e\x63\xcf\x14\x57\xce\x32\xdc\x97\x56\x89\x29\x05\x72\x95\xcd\x5e\x13\x00\x8e\x36\x8c\x85\xb1\xf9\xc8\x68\xc8\xea\x0c\xcc\xb9\x8f\xdc\x59\xfb\xe5\xc4\x5a\x4f\xd4\xf0\xca\x27\x2f\xe5\x80\xc7\xf8\x93\xb5\x25\x89\x45\xa5\x5a\xdd\x29\x98\x41\xb9\x5e\xd8\x9d\xfa\x9d\xd1\xef\x8e\x6c\x7a\xde\xee\xeb\x92\x1b\x3d\xb1\x7b\xfb\x7b\xf2\x04\x1b\x99\x94\x48\x2b\xde\xa7\xac\x17\xbf\x89\xaa\x75\x10\x5a\xa7\x83\xf6\xa0\x13\x92\x42\xb4\x9c\xcc\xd1\x97\xa1\x96\xb2\xf8\x60\x82\xe9\xa8\xe2\x3d\xc3\x1e\x4a\xc8\x70\xad\xad\xba\xb7\x09\x49\xf9\xa9\xea\xe3\x71\xa5\x2c\xb9\x3c\xd9\xc0\xb1\x2e\xa4\x83\xf0\xb6\x72\xf4\x20\x08\xf3\x09\x44\x8f\xe5\x53\xb3\x97\xb0\x16\x69\x73\x4f\x5a\x89\xd0\xfd\xad\x01\x0e\xe7\xca\x2a\x3b\x35\xdc\x6a\x4c\x9d\x2e\xb8\x59\x01\x3d\x98\x67\x64\xd5\xdc\x12\xc0\x19\x10\xa1\x56\x97\x07\x62\x5c\xef\x95\x4f\xc4\xab\xf4\x2f\x13\x4e\x7c\x3e\x36\x41\xf0\x5b\x6d\xd0\x9d\x30\xff\x52\xe0\x48\xae\x4a\x9a\xec\xf2\x05\x98\xdc\x23\x16\x04\x28\xc3\x41\xe7\x94\x7c\xdd\x52\x4a\xc0\x85\x20\xe9\xcb\xff\x13\x3e\xea\x04\x27\x2d\x46\x61\xf4\xc8\xf7\x17\x01\x78\x7d\x77\xea\xfe\xed\x0a\xfa\x98\x90\x92\x1f\x6d\xe4\x4e\x16\x14\xfb\x3e\x2e\xf8\xbd\x52\xeb\x24\xb4\x6d\xd2\x56\xe1\x65\x78\x5e\x83\xc0\xe2\x54\x4f\xdb\x20\xec\x0d\x0a\x36\xfe\x83\x84\x66\xfd\x8e\x3c\xda\x42\xf7\xb5\x70\xd2\xf7\xfe\x12\x0e\x70\x63\x98\x25\x61\x49\x60\xf0\x36\x31\x07\x11\x99\xb4\x98\xf2\x14\x19\x3c\x9e\xe1\x8c\x8d\x65\x6b\x5b\xa3\x6b\xf3\xfc\xff\xf8\x29\xe8\x5a\x1f\x51\xdd\x02\x2a\xe0\x70\xb1\xe2\x01\xb0\x58\x04\x44\xf3\xf7\xf1\xcd\xda\xb4\x22\xc6\xa9\xe4\xf5\xce\xba\xd6\x1b\x33\xe7\x34\xe4\x26\xab\xf4\x36\x74\x82\xe3\x83\xaf\xa5\x84\x21\x75\x76\x56\xbe\x46\xc0\xaa\x6e\xf8\x41\x7e\x06\x1e\x04\x69\x05\xc2\x36\x1f\x09\xec\xbd\x40\x8b\x71\x66\x53\xcc\x8e\x24\x02\x8f\x1d\xbc\x83\xec\xc0\x3d\x25\xc1\x9a\xe2\x22\xbb\x0f\xb2\xd7\x88\xaa\x2c\x92\x90\x3a\x56\x47\x80\x27\x6e\x31\xae\xb2\xea\xce\x81\x98\xf2\x10\x23\xe4\x8b\xf3\x00\x93\x21\x79\xda\xb3\x20\x
37\xe3\x2e\x8f\x97\x01\xfb\x9c\x13\x09\x07\x88\xae\x87\xd9\x2a\x9f\x7f\xd8\x6b\x0e\xde\x99\x51\x5a\xb0\xde\x8c\xfe\xea\x84\x14\x56\x19\xeb\x67\x13\x42\x36\xef\x99\xe9\x2a\x4f\xb3\x58\x03\x10\x79\xd3\xe2\xe1\x4e\x96\x56\xc4\x07\x1e\x2d\x62\x9a\x42\xf0\xcd\xf2\x2b\x40\x6f\xb0\x25\xaf\x1c\x63\x55\x99\xbb\x3c\x4f\xc0\xfe\x6f\x77\x20\x0e\x48\x09\xad\x3c\x8b\xbe\xa6\xdb\xcc\xa2\x50\x3c\xf0\xf6\x66\x18\x98\xaf\xd6\xf2\x76\xde\xe2\x84\x4a\xea\xee\x36\xc2\x8f\x02\x01\x0c\xde\x21\x59\x8b\x1f\x00\x84\x26\x99\x11\x33\x74\x5f\x71\xb8\xd9\xb2\x65\xfd\x21\x2c\xc2\x25\xf7\x78\xc7\xc6\xa8\x60\xb9\x1f\x7d\x58\xf6\x1b\xb0\x5b\x9b\x0f\xcf\x46\xc6\x6c\x92\x88\xa7\xe3\x6b\x8c\x9a\x29\x1f\xcd\xa9\xe8\xaa\x45\xb2\x0c\x5d\xf1\xb7\x7c\x5d\xa1\x92\x33\x88\x95\x6b\xae\x3a\x24\x2d\x08\x9b\x16\x08\x88\xa3\x84\x16\x7e\xad\xe1\xa8\x57\x3e\x36\xc0\x69\x93\x09\x3c\x30\x30\x6c\xc2\xab\xe6\x6e\x03\x29\xa2\xe4\x27\xbb\xc0\x53\xe0\x23\x8e\xea\xad\x67\x9e\x70\x36\x04\x89\xce\x23\x24\x63\x24\xb9\xa3\x78\x8e\x9e\x59\x00\x0a\x1e\x36\xf1\xe8\x81\xa8\x2a\xc7\xec\x05\x63\x9b\x43\x69\x4c\x1f\x58\x46\xca\xa5\x2e\xf3\x86\x01\xa3\x56\xcf\xfe\x99\xad\x5f\x37\x89\x90\x45\x6d\x2b\x4e\x33\x33\x73\xdc\xe4\x98\x94\xae\x78\x3c\x83\xf3\x49\xee\xa6\x26\x0a\x94\x5f\x49\x63\x21\xf5\x6e\x08\x5e\xba\x3c\x6d\x68\xc2\x25\x8c\x27\x4c\xdc\x49\x36\x0a\x11\x37\x93\xba\x4a\x09\x16\x6c\xa3\x4a\xd0\x80\x9b\x40\x4f\xa1\x1f\x5f\xfb\x20\xb8\xc4\x34\xbf\x62\x1a\x96\xb1\x5d\xc6\xc7\x6e\x40\x96\xf8\xe8\x43\x29\x44\xba\xf1\x28\x13\xd6\x62\x29\xa1\xb1\x2f\x55\x29\x8d\x98\x56\x15\x56\x8b\x11\xae\x56\x11\x24\x13\xaf\x6c\x81\x85\xfe\xc4\x1a\xf4\x5d\xab\xe0\x10\x75\x7e\x1b\xbd\x9a\x72\xcd\x9f\x13\x5e\x57\x16\xbe\xef\xa3\x97\xac\x0b\x38\xee\x79\x6d\x2a\xfd\xb4\x2c\xae\x28\x0b\x5a\x20\xb0\x61\x89\xfe\x43\x6e\x36\x52\xb5\x88\x40\x27\x8a\x11\xa9\x76\xb9\x9a\x70\xb8\x99\x02\xe0\xb7\xab\x8f\x00\x7f\xb4\xd1\xd6\x70\xc3\xdc\xb3\x67\x17\xfb\x99\x61\xcc\x3c\x2d\xc8\xad\x3c\xbf\xfe\xdb\x4c\x22\x3e\x17\xfd\x56\x5a\x41\x04\xb1\x1b\x4f\xb4\xa3\xa3\x84\x97\xf6\x
15\xa2\x75\xf4\x0c\x2f\xbe\x87\x43\x90\x4b\x3d\xa6\x98\xf4\x42\xf6\xc8\x07\xe8\x69\xd3\x7b\x57\x36\x2e\x88\xbd\xf8\x63\xf6\x8b\xff\x88\xc7\x53\x3e\x70\xba\xfd\x3f\x7e\x08\x61\x3b\x50\xc7\x9e\xb9\xb3\x34\x8d\x24\x7e\x23\x97\x4c\xd7\x8b\xa0\x02\xed\xfe\x6a\xd7\x5d\x13\x0e\xb4\xd9\xb4\x7d\xd2\xf7\xc5\x62\x8f\x4a\xb9\x78\x82\x70\x4c\xcf\x5f\x72\xb6\x7d\xbf\xcf\xc7\x7a\x25\x4e\x61\x47\x2a\x26\xe2\xb1\x9b\xd3\x48\x0b\x4a\xee\xa9\x71\xaa\x7e\xd0\x1d\xdb\x49\x62\x38\x7a\xcd\x84\xdc\x7b\xa1\x78\x08\x35\xc9\x46\x81\x89\xe9\x0f\x48\x1a\x24\xcc\x1e\x62\xb2\x66\x98\x41\xce\xd6\x89\x4e\x6b\xf2\xa1\xc1\xd3\xe3\xd1\x76\x0d\xe0\x96\x99\x5b\xb6\xae\x87\xbf\x32\x64\x48\x32\x07\x44\xd4\x95\x6b\x19\xc1\x39\xf1\x27\x44\x0e\xc7\x83\xc1\xb2\xcb\x37\xb4\x84\x16\x3e\x8e\x4a\xa8\xd0\x84\x0c\x87\xa3\x0c\x0d\x3d\x32\x86\x9f\x95\xce\xdb\x69\xdb\xf6\x38\x41\x6b\xad\x30\x21\xad\x61\x7a\xa1\x17\x77\x74\x0c\xf0\x03\x66\x89\x38\x56\xce\x89\x6f\x01\x7f\xa1\x6e\xca\x6a\xf6\x3c\xff\x17\x9a\x36\xad\x97\x93\xcc\xc2\x6c\xc2\x10\xd1\x4c\x55\xba\x1e\xb1\x34\xf8\x54\xc5\x2c\x8e\x46\x96\xa4\x5f\x72\xf0\x5c\x0c\x1b\xa1\x16\x25\x05\xb9\x9a\xe0\x98\xa5\x27\xa9\x57\x99\xeb\x51\x33\x78\x92\x85\xef\xa0\xdf\xe2\x2e\x33\xe2\x2c\x62\xd1\x34\xa2\x22\xc7\x57\x03\xb7\x0b\x04\x44\xf1\xd6\xd5\x32\x41\x56\xcc\xd8\x77\x89\x10\xd0\x4d\xd6\x9e\xca\x22\xa8\x5d\x80\x90\x97\xb4\x59\x93\xf1\x59\x4d\x08\xab\x4c\x13\x51\x34\x64\xf4\x8f\xc7\x1c\xcf\x6b\xc0\xc3\x6f\xa8\xc3\x1b\xa1\x19\x7f\x80\xb0\x3b\x1a\x6b\xda\xa6\x4a\x4d\x97\x49\x2a\x3e\xa9\xb2\x3b\x38\xca\xe0\x81\xf6\x23\x43\xca\x4f\xf3\x5b\x29\x96\x63\xc8\xc2\xe1\xdd\x6d\x7f\x25\xcb\x2c\xc4\x37\x6f\xc6\x48\xc2\x3a\x1d\x70\x24\x96\x4a\x27\x10\x9e\x43\xef\x01\xeb\x69\x62\xd8\x4f\x9a\x2a\xa9\xa1\x21\xe3\xb1\x56\x65\x48\x7f\xe0\x65\x34\x9a\xf2\x52\x41\x6b\x01\x8c\x27\x04\x88\x2b\x80\x46\xb7\x93\xa6\x2d\x42\x6e\xe1\xf8\x5e\x6f\x6e\x89\xf3\x66\xbd\xe3\x48\x24\xbb\x18\x0f\xbc\x06\x93\xc0\x04\x8a\xe5\xef\xf8\xfd\x83\xf8\x89\x64\xaf\x8a\xae\xb6\xdc\x11\x6c\xd1\x2f\xa0\x54\x43\x33\xe8\x2f\x
6e\xc0\x45\xd1\x01\x69\x2c\xba\x75\x83\xe1\x54\xdb\xaa\x25\x9f\x36\x70\x6b\x58\x22\x78\x4a\xfe\x74\xf3\x30\xbe\xc9\xe3\xbb\x76\x85\xfe\xe6\xff\xc5\x19\xca\xc7\xb2\x65\x67\x83\xb9\xbf\x56\x1c\x35\x85\x1e\xb8\x23\x58\x54\x5a\x3e\x11\xb7\x52\x45\xe0\x63\x5d\x90\x6d\x1a\xfe\x1c\xff\xf2\xa3\x03\xda\x5d\x62\x43\x07\x91\x78\x63\x64\x1a\x21\xe5\x2f\x94\xbc\x06\x70\xda\xf7\x53\x0a\xc4\x4f\xb4\x34\xe9\xe3\xa9\x40\x12\x54\x75\x4e\xc2\x88\xc4\xef\xbd\x2b\x69\x1a\x30\x95\x95\x7b\x00\x95\x19\x05\xf3\x53\xf3\x6f\xb6\xc1\xdf\x30\x76\x02\xcf\x88\xb2\x3e\x87\xb8\xa4\x32\x54\x4b\x2c\x8e\x70\xd2\x28\xc6\xb4\xd5\x95\xf3\x05\x67\xb1\xf3\xa3\x07\xa8\xae\x69\xa9\xab\x14\x6b\xbe\x35\x11\xdb\x15\xd7\x0e\x1a\x14\xaf\x8f\x8b\xe8\x34\x67\xd8\x80\x68\xac\x1c\x03\x4f\xd2\x80\x98\x42\xf3\xf7\x3c\x85\x07\x8e\xd4\x78\x5b\xa9\x0c\xc1\xf4\x6e\xdb\x48\xd3\xee\x2e\xc0\x20\xeb\x17\xfd\xa2\xfc\xb6\x4f\x55\xc3\x2e\xa3\xd7\x39\x89\x30\x16\xcc\xb8\x98\xa6\xe7\xe9\x3b\x2e\xd1\xcd\x49\xa2\xc3\x0a\xe8\x65\xf2\xc3\xe3\xea\xa9\xa4\xf6\x53\xd7\xb9\x1e\xcc\x88\x54\xa7\xeb\x02\x94\x04\x34\x60\x7d\x59\xe5\xfc\xb4\xb5\xac\xd5\x44\xe2\xd5\x9a\x85\x79\xf9\x81\x66\xd0\xf2\x12\x70\x22\xd7\xeb\xff\x9c\x9c\xce\xf8\x1d\x6f\x8e\x99\x35\xdf\x0d\xf9\x41\x52\x66\xb2\x53\xf5\x33\x50\x40\x47\x22\xc0\x64\xae\xd0\xf3\xf5\xbb\xee\x1c\xab\x7b\xe2\x9c\x34\x0c\x8a\x10\x80\x80\xdb\x2b\xce\x0b\xfe\xfe\xe5\xa6\x31\x3b\x8d\x8b\x38\xd0\x56\x48\x28\xac\xce\x89\x80\xc9\x3c\x2b\x28\xd2\xa2\xcb\x63\xc0\xc4\xef\x7f\x8a\x7b\x9f\x76\x1d\x40\x63\xa0\x59\xdd\xa5\x25\x73\xf9\xd2\x22\x0b\x1f\x8a\x0c\xe2\xc0\x7b\x1b\xfd\xe1\xf7\xc9\x4d\xce\x10\x7d\xc4\x8a\x48\xda\x45\x57\x46\xac\xbc\xd9\x16\x54\xb8\xa0\x07\xa1\xe6\x1d\x19\x3f\x5e\x64\xbf\x6e\xae\x9d\xde\xaa\x73\x35\xe1\xcc\x90\x7a\xd8\xbe\x3a\x99\x55\xdd\xba\xbb\x40\x25\x67\x4c\x79\x7f\x0a\x37\xc6\x95\x55\x5f\x78\x3a\xa1\xc5\x27\xd1\x63\x60\x25\x8d\xca\x90\xbf\xe5\x4f\x34\x07\x4b\xaf\x81\x23\xcc\x18\xf5\xca\xb6\xdb\xc4\x0c\xc5\xaf\xd2\x54\xfd\x77\x19\xa1\xb4\x07\x76\x69\x34\xb0\x72\xdd\xdc\xdb\xea\xf0\x
dd\x05\x23\x18\xb5\x42\x32\xa3\xa5\xce\xe7\xeb\xa2\x3f\x27\xeb\x7d\x96\xf7\x06\xb0\x68\xd9\xb4\xca\x01\xbc\x84\x62\xba\xe7\x2d\x8c\xcf\xa6\x64\x3e\x7c\x94\x0b\xa1\x5e\x0d\x5a\xc1\x78\x46\x83\x57\x56\x44\x9c\xd5\x05\x72\x3f\x55\x34\x30\xe3\xc5\x25\x30\x4e\x65\x0a\x31\xe9\x56\x15\x2b\x4d\x26\x3d\xa2\x0a\x3e\x1f\x78\x36\x88\x5a\x81\x69\x79\x67\xea\x6e\x4a\xf0\x68\x8e\x70\x19\x23\xb5\x3f\x92\x6c\x02\xbd\xa5\xe6\x2e\x77\x28\xf5\x60\xce\x8c\xe9\xc2\x80\xb7\xe4\x65\x12\xea\x99\x10\x99\x64\x4e\xd9\x91\xa8\xe7\x25\xbf\x9f\x39\x74\x11\x25\xe8\x6d\x03\xe9\x7d\x03\xd6\xd4\x8b\x1b\xc8\x13\xe9\x6f\x79\x62\x86\xca\xdd\xc8\x1d\x23\x8a\x92\x1d\x61\x91\xd3\xa9\x61\xd8\x9b\x75\x81\x4a\x1a\x39\x42\xf4\x6c\xa2\x09\x2f\x0f\x6d\xe5\xe1\x37\xbc\x68\x97\x23\x0c\xe5\xf4\x5e\x92\x04\x8e\x88\x4e\x14\x99\xa3\xdf\x96\x94\x99\x72\xe7\x34\x7f\xeb\x10\xb5\xa4\xd6\xfa\xa2\x11\x28\xdf\x14\xf7\xee\x2f\x12\xdb\x2b\xcb\xa5\xab\xfa\xfd\xef\xc2\x70\x58\x01\x85\x01\x3b\x39\x5a\x3b\xcb\x45\xc4\x6f\xd6\x3a\x0d\x83\x97\x8e\x88\x5f\x47\x86\x6b\x2b\x5f\x43\x37\x81\x06\x15\x6f\x80\xde\xd4\x5c\x52\x0e\x4f\x97\x0b\x95\x65\x46\x0c\xd0\x46\x55\xc8\xd0\x9a\x06\x84\x2a\x73\xfc\x41\xef\x61\xda\x5c\xa2\xc1\x1d\x30\xf8\xa0\x06\x8e\x2a\x73\x7c\x03\xa2\x8b\x72\xf9\xa3\x32\xbe\x50\x50\x47\xb1\x4e\xda\xe8\x42\x74\xeb\xc1\x42\xe6\x11\x00\x0e\x73\xf5\xa3\x4f\x1d\x3c\x26\x88\x6e\xe8\xb9\xb6\x40\x24\xeb\xc1\x16\x58\xc2\xee\xf5\x29\xae\x18\x51\x37\x9f\xa7\xf5\xd7\x5b\xa9\x35\xcb\xa1\xf3\xac\x81\xe1\xc2\x02\x27\x9c\x90\x58\xcd\xa7\xc1\xa1\x5d\xba\x96\x6f\x46\x3f\x3e\x55\xb2\xc5\x40\x5c\xc7\x09\x93\x68\x86\x78\x50\x1f\x02\x88\x21\x18\x05\x1c\x48\x82\xe7\x3e\xde\xff\x8f\xeb\x57\x77\xaf\xdf\x2f\x5f\xc1\x59\x68\x5d\xed\x65\xf2\xa6\x4c\x7e\x17\x34\x31\xae\x2f\x5d\xc0\x32\xb8\x76\x36\xa6\x60\x51\x95\x40\x0a\xdb\x37\x3c\x5a\x8b\x0c\x84\x4e\xd2\x9f\xd3\x45\xc1\xbf\xba\xbc\x07\x4e\x89\x95\xc3\x2f\xfe\x26\x3e\x19\xf1\xab\xfe\x8e\xd6\x57\x9f\xd9\xe0\xa4\x49\xe5\xfc\x31\x55\x2a\x6d\xf4\x01\xf2\x15\xdd\xdf\xfa\x70\x41\x03\x55\x81\xd6\x6a\x4d\x
5a\x75\xa5\xa6\x3c\xea\x36\xba\x00\x14\x15\xc3\x4d\x93\xe6\xb4\x39\x52\x86\xa9\x20\x88\xcf\x2d\x73\x4c\xde\xb9\x78\xe0\x22\x32\x75\x74\x99\x8b\x04\x4e\x95\xce\xc5\x22\x16\x94\x3c\x51\x3c\xce\xb5\x4e\x7a\xfb\xd3\x05\x60\xef\x14\xbd\x06\x4a\xea\xf6\x0c\xc5\x81\x25\xf3\xe7\xcd\xe0\x45\x4e\x2c\xce\xd8\x5b\x45\xd5\xf9\x75\xfa\x91\x01\x63\xd8\x39\x68\x64\xe8\x80\xbb\x8c\x85\xff\xd0\x53\x94\x2c\x97\x78\x41\x42\x98\xb0\x9a\x3b\x8f\x4b\xd5\x60\x3b\xb2\x70\x86\x74\xe6\x54\x3a\x6f\x9e\x8b\xa8\x3c\x21\x6f\xfa\x33\x1e\x0f\x21\xc1\x2e\xea\x96\xbe\xcd\x02\xf7\x4f\xdc\xcf\x00\xe3\x8d\x52\xd5\x3b\xa4\xad\xed\x96\xf6\x09\x4c\xda\xde\x48\x0c\xaa\x7b\xe1\xa5\x44\x4f\xf5\x3f\xf9\x40\x88\xa7\xf5\xbf\x29\x90\x6f\x83\xb7\xdc\x3d\xc9\xe9\x3b\x63\x84\x89\xd5\x4b\x41\x0d\x52\xcd\xed\xd5\xeb\x42\x2d\x54\x87\xbd\x0f\x5d\x0a\x17\x14\xff\x92\x7a\x53\xf6\x1a\x34\x67\x7d\x73\x0a\x24\x21\x73\xe6\x6a\x4e\xd0\x39\xdf\xc8\x75\x5b\xe8\xa3\x01\x2e\x10\x20\x52\x89\x9a\x5e\x4b\xfe\x25\xde\xeb\x12\x47\x02\x6a\xbc\xbb\x36\x59\x74\x4a\x38\xda\xeb\xbe\x3f\x61\x64\x95\x68\x05\x4b\xfb\x2c\x3e\x69\xc5\xaa\x9d\x86\x30\x07\xf6\x34\xe4\x91\x33\x8a\xc4\x7a\xd3\xbb\x45\x71\xf7\x84\x0e\x14\x92\x44\xc7\x94\xe5\x91\x6c\x68\x8b\x46\xdf\x50\xdc\xad\xfb\xf5\x4a\xd3\x71\x76\x06\x84\x4d\x23\xb3\x47\xd4\x03\xe2\x35\xb7\xf7\xb0\x13\x13\x9b\x0a\xa7\x21\x22\x7b\xad\x6a\x0c\x68\xc9\x5a\x85\xbf\x68\x1a\x93\x47\xfb\x19\x19\xb4\x3f\x19\xaf\x7f\x01\xfa\x45\x6a\x7e\xba\x6c\x2a\x40\x48\xa7\x6b\x06\xdf\xfe\xd7\xb1\x57\x28\x07\x07\x3f\xf2\x07\x1c\xb5\x1f\x26\x2a\x2d\x2f\x70\x64\x57\x8d\x64\x54\xc2\x10\x3c\x99\x5a\xe0\x52\x8c\xf2\xb8\xd0\x5f\x61\x5d\xfa\x03\x34\xb2\x3e\x56\x2d\x5e\x23\x02\x11\x5b\x00\x40\x13\xfb\x43\x9b\xaa\xad\x71\x55\x1c\xa8\xc4\x32\x0c\xb0\xd2\xcc\x67\x2a\x55\xbc\x9b\xc1\x6d\xcd\x0a\xc1\x35\x30\x20\x12\xd8\x65\x1e\x27\xae\xc2\xca\x9f\xa2\xc4\xc1\xf9\x8b\x1d\x61\xf9\x1a\x07\xee\x1a\x3a\x3f\xa9\xf7\x2a\x6c\xe4\x97\x87\xe3\xa9\xa2\x66\x57\x7e\xae\x7f\xb2\x08\xbd\x0d\xe5\x07\x3d\x7f\x5e\x89\x9b\x40\x51\x13\x40\x1c\x46\xa4\x
15\xc1\x28\x45\xa7\x1a\xef\xa4\x97\xe7\x7d\x96\x9c\xd0\x15\xa6\x03\x71\x96\xfd\x71\xe0\x42\x0a\x2c\x4e\x6d\x47\x88\xbe\x89\x96\x98\x8f\xd6\xcb\xd7\xe1\x06\x45\x77\x7e\xb7\x4b\x54\x1a\xf4\x0d\xbb\xe7\xea\x7d\xd7\x12\x3c\xac\xcf\x38\x32\x30\x7d\x14\xb1\xb6\xaf\x6f\xcf\xb3\xfb\xfe\xac\x4d\xdc\x63\x05\xb5\xeb\x97\x8d\xe8\xb5\x93\xe9\xc2\xc8\xb6\xc8\xcb\x72\x43\xae\xc7\x4c\x0a\x21\xfa\xe4\x89\x36\xcf\x17\xb3\x8f\xd9\x8a\x7a\xce\x42\x83\x54\x58\xc8\x85\x0c\xb4\x03\xb6\xfc\x34\x87\xca\x5d\xef\x0f\xef\xfc\xac\x00\x7e\xd2\xec\x9d\x64\xfd\xd0\x57\x4c\x91\xcb\x08\x4a\x33\x7d\xf2\x00\x73\x07\xe6\x9b\x1c\xb9\x6f\x16\x3c\x98\xdf\xc2\x23\x03\x8f\xcb\xeb\xaa\x52\x41\x1b\xa8\x6d\xb7\xf0\x55\x85\xda\x5a\xd9\x00\x6a\xcf\x93\x24\x2f\x72\x01\xbf\xc4\x4f\xd8\x69\x87\xc6\x55\x4a\x66\x7e\x33\x5a\x88\x2f\xe5\x84\xca\xaa\x2c\x0e\x7e\x96\x60\x19\xe6\xe4\xf3\xa1\xa0\x09\x00\x51\xd2\xa3\x49\x7c\xd7\xde\x71\x9c\x6a\x6b\xc8\xe0\xd0\x85\x1e\x51\x00\xae\x53\xfb\x72\xd4\xfd\x88\xe2\x39\x86\x84\x1e\x09\x96\xe0\x34\x86\x52\x7f\xff\x84\xa7\x70\x95\x94\x31\xcc\x5e\x1c\xba\x2e\x23\x65\x60\xaf\xaf\xc5\x42\x1d\xe3\x12\xcf\x24\x3e\x64\x4c\x7d\x2e\xca\x5b\x22\x29\x02\xed\xf7\x8d\x93\x73\x7c\x18\x2f\x0c\x4f\xe3\x73\xe1\x57\x3b\xf5\xa1\x8e\xc7\xf2\x4b\x2c\x10\xcb\x5d\xdf\xac\x2e\xe7\xbc\x76\xd4\x5f\xfe\x4f\xc3\x63\xde\x23\xcb\x87\x08\xe1\x00\x7a\xa7\xd8\xfd\x3f\xef\x70\x90\xd3\x53\xb2\xee\xc2\x6a\x0b\xbd\x93\x2a\xd1\x35\x8f\x7d\x13\x8b\x90\x3c\x93\x94\x03\xf2\xd0\x91\x84\xdc\x24\x35\xa7\x0b\x9b\x61\x05\x16\x95\xe3\x6f\x94\xbe\xf7\xe3\x88\x9a\x88\xe8\x50\x96\xe2\x1b\xf2\x2e\xb9\xa7\x53\xc0\xa8\x8b\x73\x55\x31\x88\x8e\x9e\x3d\x1b\xd4\x6f\x43\x3d\x6a\x59\x40\x71\x1e\xee\xc8\x03\x84\x91\xf5\x4d\x4a\xa8\x98\x9d\x37\xd1\xc3\x3f\xfc\x89\x4c\x55\xdf\xb2\x29\xda\xeb\xbf\xe2\x96\xf8\x86\xc3\xc1\x1d\x2e\x59\x56\x3e\x00\x0e\xfe\xdc\x39\x84\x44\x98\x9b\xb4\x7e\x99\xe9\xa9\x66\x1a\x3a\x50\x81\xe2\x0a\x04\xc3\x5d\x8e\x9a\x62\xdd\x00\x23\xe2\xd6\xa5\xa5\xa0\x43\xa9\x1e\xc1\xbc\x91\xe6\x89\xcb\x82\x34\x51\xd0\x4e\xcd\x18\xa3\x
0d\xb5\xcc\x87\x88\xef\x69\x2d\x3a\x55\x0c\x13\xef\xc2\x2c\x12\x3e\x7d\x25\x16\xae\x4f\x95\xac\xfd\xc2\x0e\xa4\x2c\x3a\x30\x5e\x74\x4a\x95\xbc\xea\x50\xd7\xec\x79\xc6\x08\x41\x62\x08\x74\x6d\x83\x22\x69\x07\x0a\x5b\xb7\x58\x9c\x2e\xf4\x6b\xdc\x3e\x82\xe4\xaf\x3c\x86\x04\xe9\x07\xfb\xd2\x00\x76\x63\x9e\xc1\xec\xac\x50\xeb\xcc\x48\x93\x55\x58\x8b\x87\x1b\x7f\x28\x8e\xd7\x56\x62\xab\xc0\xdf\x9b\x2e\x64\x8e\x9c\xb9\x6a\x33\xb9\xf1\x72\x59\xcc\x62\xa7\x33\x95\xaa\x40\xad\x97\x7d\x93\x8a\xd6\x9d\x2c\xbd\xcc\xf2\xbc\x98\x08\xc0\x23\x13\x7b\x26\xb6\x0e\x6c\x94\x4c\x47\xd9\xf3\x39\xb5\x2c\xd6\xe3\x3e\x14\x4b\x98\xd0\xdf\x38\x98\x1f\xbe\xdb\xb7\xe1\xc2\x50\xb8\x0c\xa3\x04\x03\xdd\xb2\xfe\x87\x1f\xa4\x25\xc3\x6b\x6f\x5c\xd9\x9a\x3c\x41\x1c\xd5\x37\x6b\x6a\x99\x77\xc0\x18\xa6\x62\x43\xe3\x66\x9f\xa5\x0f\xfa\x8f\x91\xd7\x62\x6e\x6b\x1d\x55\x7a\x63\xe4\xab\xc0\xd1\xc5\x53\x87\x72\xba\x90\x47\x78\x78\x34\xc2\xb6\x98\x78\x8e\x97\x42\x4c\xe4\xb5\xb3\x2d\x5f\x8d\xca\x1d\x7f\x17\x0c\x87\xb8\x2d\x38\x4c\x03\x62\x56\xe8\xb0\xd9\x17\x2d\xae\x65\xf8\x47\x82\x3f\x77\xaa\xe1\x92\x6a\xae\xa0\x82\x4a\xf4\x9f\xca\x6a\x5c\x44\xf5\x92\x1d\x82\x0a\x6c\xae\x25\xaa\x80\x59\xcc\x24\xe5\xc4\xdb\xbe\x1d\x42\xd3\x86\xfc\x44\xb4\x10\x02\xcd\xb6\x1f\xc6\xd0\xab\x7c\xba\xf0\x7c\xe0\x2c\x66\x03\x9d\xc2\x4b\xac\xfd\x14\xc9\x36\x08\x4d\x7a\x84\xe4\xec\x21\x6e\xd1\xb1\x3a\x4f\xde\x5a\x5e\x93\x72\x93\x88\x32\x41\xeb\xd1\x06\x0a\x58\x62\xfd\x07\x2a\x65\x60\xc4\x52\xe8\xd1\x80\xab\x6a\x2c\xb1\x56\x85\xc1\xed\x8f\x11\x6b\xb1\x11\x81\x43\x41\xa7\x94\x6c\x41\x58\x15\xd7\x64\xf8\x5a\x8f\x10\xa8\xc6\x76\x76\xe3\x7c\x45\xdc\xb4\x55\xa9\xb2\xfa\x9a\xc5\x9e\xd3\xa0\xc4\x49\x4d\x55\x06\x3c\x80\x01\xb0\x11\x42\x78\x0b\x2b\x3e\xe8\xc1\x0d\x5d\x6b\x11\x5b\xa0\xc7\xe1\x67\xbe\xf3\x79\xe3\xf7\xd1\x0c\x95\x91\xd6\x54\x6f\x42\xd0\x08\x36\xb1\x8a\xc4\xb8\x97\x00\x9f\xa8\x16\x26\xa6\x00\x0d\x35\x80\xff\xdb\x10\x05\x07\x0f\x5a\xc1\xa5\x02\xbb\xcb\x36\x67\x20\x33\x10\x2b\x58\xf0\x7b\x26\xf0\xa6\x81\xf1\x3e\x67\x4e\xad\x0d\x14\x10\x
e4\x6d\xd2\xef\x7f\x22\x3a\xb9\x92\x7e\x89\x65\x5a\x8c\xca\xaa\xab\xba\xbb\x9f\x20\x48\x6c\xae\x9a\x95\x1f\xcf\xc2\x56\xb9\x72\x0c\x10\xd9\x50\xf8\x18\xfa\xd1\xb8\x16\x27\xd3\x06\xdd\xb7\x6a\x1f\xbf\xa8\x2e\x90\xf2\x0c\x51\x98\x62\xda\x89\x14\x49\xd7\x06\xcc\xad\x8b\x30\xe1\xb5\xa3\xd0\xfc\x96\x68\x9d\x2d\x6e\x94\xa4\x53\xda\xb0\x36\xb9\x3e\xb9\x8c\xf1\x02\xc7\x19\x3e\x37\x5a\x37\x30\x73\x99\xac\x68\xd6\x35\x5c\xa5\xa9\x6b\x42\x95\x88\x50\x0d\xd0\x68\x4c\x0a\x76\x9e\x93\x9b\x5c\xde\xb6\x5e\x4d\x35\xac\x70\x48\x6f\xa4\xb4\x4c\x00\x53\x86\x00\x4b\xe4\xd3\x59\xb6\xe2\xcd\xa0\x97\xf7\x5b\x94\x1e\xb2\x84\x46\x00\x27\x4e\xcb\x65\x1c\x5a\x18\x17\xd5\x33\x7e\x4f\x85\xa2\x07\x78\xa9\xe2\x7e\x4a\x24\xa9\x59\x2f\x14\x29\x9b\xd7\x1d\x70\xb7\xe4\x91\xc3\xd6\x9e\x60\x07\x5c\x3c\x52\x58\xda\x69\x9e\x49\x7f\xbe\xe8\x48\x63\xd8\x13\x23\xef\xe7\xbc\x6e\x3b\x7d\xf2\x63\xb5\xba\x46\xda\x5a\x66\x88\x7b\xf6\xb0\x2b\xa3\x88\x1b\xdd\x54\xfd\xcb\xd1\x37\x0a\xc6\x89\xe3\x38\x1f\x87\x49\x72\x93\x95\xee\xc4\xc5\x01\xbb\x4d\x16\xf9\x30\xbe\x4a\x88\x3b\x08\x57\x8f\xcf\xef\xd9\x99\xa8\x06\x33\x69\x11\xb1\x2a\x83\x61\x0f\xcb\x15\xa8\x5d\xe7\x93\x62\x5f\x52\xe8\x04\xbd\xdc\x10\x9e\x53\xc9\x26\xa0\x87\x05\x8b\x32\x9c\xe1\xad\x7d\xe3\x0e\xd6\x6e\xfe\x3e\xb8\x65\xbb\xb5\x4b\x81\xb8\xfa\xa0\xd1\xd2\x83\xcc\xbc\x82\x43\x72\xf0\xc9\x52\xd7\xeb\xea\xac\xe2\x85\x65\xd8\x3c\x2a\x0c\x0f\x81\x43\x96\x4d\x8c\xa5\x22\x2c\x4c\x80\xc0\xa9\x3d\x7e\x6f\xa9\xaf\xf6\x37\xdc\x4f\x46\x1a\xa0\x1b\x74\x5a\xec\xe8\xc0\x64\x91\x3c\xd2\x2d\xdf\xae\x90\xdb\x8e\xfe\xfa\x86\x74\x97\xc3\x4d\xe2\x17\x54\xf2\x2d\x77\xb6\x71\xd4\x45\x9c\x29\xb6\x00\x2a\xfc\x81\x2a\x16\x5e\x52\xe8\xc4\xd3\xf4\xe9\xa8\x44\x9b\x71\xe9\x85\x4f\x8c\xf4\x4a\x1d\x10\x76\xd3\xa5\x1f\x7c\x8c\x02\xa1\x48\x86\xa8\x48\x1b\x16\xda\x6a\x4a\xc8\xac\x5a\x4a\x99\x8f\x39\x93\x54\x5c\x53\x33\x18\x18\x8b\xd0\x9e\x37\xb1\x01\x86\x2a\xf5\x6c\x72\x27\x3a\xca\xf7\x17\x6b\x65\x51\xbe\x72\xba\xfe\x90\x2a\x88\xf7\x3d\x2f\x79\xac\xeb\xb6\xed\xb3\x23\xf2\x4a\x00\x6f\x44\xe4\x
bc\x7b\x4a\x0f\x35\xa4\x94\xbf\x69\xfc\x4a\xc2\x55\x50\xfb\xcb\xf4\x5e\xd9\x91\x9f\x91\x06\x84\xc1\xd8\xb4\x54\xb8\xd8\x23\xbf\x3f\xdb\xa5\x0f\x84\x8a\x9d\x8b\x7a\x48\x49\x4d\xa4\x57\x8f\x89\x01\x81\x0e\x53\x36\x2c\x30\xd3\x92\x4f\x40\x9e\x3e\xb0\x70\x47\x8a\x53\xeb\xa9\x0b\xb3\x9e\x11\x8f\x3b\xcf\x05\x9a\xeb\x05\x79\x06\x19\x1f\x61\xc5\x0c\x31\x5f\xff\x73\x4e\xbf\x53\x14\xef\x00\xac\xcd\x58\xc5\xf4\x45\xe4\xa1\x57\xb5\xf8\x7b\x40\x24\xee\x3e\xe1\x41\x24\x3a\xe3\xe6\x29\x96\xad\x38\xfd\x06\xbb\x6a\xd6\x94\xe0\xe6\x26\x60\x21\x1d\x6b\x22\xdf\x1b\xb9\x62\xbd\x2c\x11\x98\x13\xf6\x11\xd7\xb8\xa4\xc2\xe8\xa3\x0a\x3f\x56\xb7\x85\xd2\xe7\x68\x07\x54\xec\xf9\x2d\xd3\xca\x44\x9d\x98\xfc\x22\xc0\x6a\x7a\x20\x58\x78\x60\xaa\x5e\x02\xec\x86\x34\xda\x8f\x1a\x75\xcd\x87\xe0\xbd\x8f\x71\x9f\x5a\xd6\xbc\x41\xfd\x3a\xf6\x4c\x5e\x0b\x87\x5d\xc6\xb3\x68\x21\xe4\x75\x1c\x91\x01\x8e\xd8\xa3\xe2\x32\x70\x80\xba\xf0\x05\x02\x4a\x3a\xa1\x20\x90\xd0\xe8\xe6\xa7\xa5\xf6\x4d\x40\xc8\x59\x75\xc1\xb2\xf9\x5e\xf0\x9d\x5b\x84\xbd\x6f\x17\xec\x92\xcb\x01\x10\x29\x1e\x98\xe4\xe8\xb9\x60\x6f\x0c\x00\x00\xf0\x6d\x21\x78\xd4\xd1\xb2\x57\xcf\x6b\x2f\x6f\x95\xea\xd7\xf1\x58\x93\xfe\x7a\xfb\xbe\xd6\xbf\x69\x8f\xdc\xaa\x3a\x10\x94\x78\x0a\x1c\xec\xc9\x10\x6c\x04\x97\xe2\xf2\x17\x48\x4d\xc6\xdd\xc4\x3c\x94\xce\xc4\x74\xca\x22\xbb\xf1\xf1\xab\x94\x86\x03\x3d\x2b\x09\x5f\x27\x42\xf0\x9e\x37\xe4\x80\x1e\xe5\xc8\x78\xea\x61\xd9\x5a\xd6\x5b\x59\x8d\xd9\x47\x07\x3e\xc0\xd4\x49\x2d\x76\xbd\x14\x39\x1a\xe0\xcf\x0b\x72\xea\x12\x74\xe7\x2a\xba\x6f\xa6\x69\xdd\x08\x61\xda\x29\x6a\xed\x46\xd6\x9f\x21\x02\xa1\xa0\x08\x91\x51\x61\xef\xa7\xdf\xc6\x6d\x94\x39\x88\x9a\x5e\xec\x58\xa8\x7b\x91\x9d\xb2\x17\xf3\xf6\x91\xce\xa1\x68\x9c\x62\x53\x9d\xd6\x6a\xf4\xbd\x01\xa9\x73\xbf\xc6\x02\x52\x77\x68\x59\x67\xdb\x13\xc1\xf9\x0b\xfd\x00\x06\xe7\x3c\x10\xa1\x4b\x17\x97\xab\x0e\xd8\x92\x68\x77\xf1\xdd\x3b\x23\xac\x26\xb5\xc9\x46\x94\xcf\x59\xcb\x47\xdf\x23\xd5\x91\x55\x67\x87\xaf\x2f\x0c\x4d\xd2\x96\x8a\xf8\x93\x40\x0b\x1e\xe7\xab\x
a7\x90\xf9\xe1\x2a\x06\xc3\x61\x84\x64\x12\xb6\x16\xb4\xb4\x3a\xcf\x18\x18\xc5\xf4\x49\xcd\x91\x36\x0e\xcc\xc6\x26\x61\x9e\xb3\x6d\x01\xfb\xb2\x90\x62\xd5\xe3\xef\x1d\x86\x89\x22\xbf\x83\x5d\xc4\x5f\xc4\x61\xa8\xbd\x63\x90\x4e\x22\x11\xdd\xf2\xc5\x82\xf1\x51\x64\x14\xf1\xb5\x9c\xa3\x72\x70\x4e\x60\xee\xa4\x05\xe1\x37\x24\xb2\x37\x9a\xb9\x33\xd2\x1a\x72\x5b\x1c\xf8\xd0\x94\x81\x98\x54\x6b\x42\xc4\x41\xf5\x93\xa3\x68\x44\x76\x31\x97\x05\x96\x05\xd0\x9e\x30\x34\x5a\x6c\x91\xeb\xc3\xc5\xf7\x6b\x3d\xb7\x6e\xac\xc5\xd0\xa2\x6a\xe7\x85\xd7\x78\x58\x1e\xae\x51\xbe\xb6\xe0\xfd\x98\x78\xfc\x40\xcf\x92\x13\xcc\xcc\x35\xe7\x5a\xf1\x82\x01\x96\xca\xb5\xd4\xe7\x61\xa1\x1b\xe1\x6c\xee\x7f\xe2\x72\x3a\xdc\xb0\xa0\x67\x13\x82\xeb\x8e\x80\xd5\xbb\x14\xd0\x23\x83\xa2\x6e\x10\x4b\x41\x44\x54\x85\x38\xa4\xbd\xf3\xd7\x2a\x24\xc3\x10\x0f\x28\xd6\xc5\x6a\xd9\xdf\x0b\x4e\x94\xe2\xca\x9f\xe2\x7a\xb1\x82\xed\x2b\xae\x3e\xe4\xbc\xbe\xc4\x30\x24\x4d\x6d\x76\xce\xf4\x4b\x1a\xf4\xa9\x21\xa6\x19\x9a\xcf\xf6\x68\x08\xab\xa0\xf3\x72\xb8\xd1\xde\x74\x86\x4f\x89\x3e\x25\xe7\x04\x73\x6f\x17\x53\x6a\x09\x03\x8e\xbb\x15\x2b\xa1\x6c\xbb\x43\x19\x4a\x71\x8a\xab\x53\x4d\xcf\x87\x9d\xb3\x18\xb8\x76\x96\x8a\xd5\x1d\xdd\x71\x1c\xdd\x61\x0e\x60\x8f\xd5\xc5\x1f\x06\xb1\x57\x0d\xa3\x84\x90\xb3\xd4\xae\xd2\x59\x37\x7b\x55\xec\x30\x8b\x16\x56\xc4\x4e\xe9\x26\x4d\xa0\x78\xbc\xa4\x6f\xdb\xf7\x1b\xe7\xe3\x2d\x42\x09\x11\xcc\x59\x2d\x72\xa1\x4e\x20\xaa\x88\x1a\x2c\x1b\x97\xe0\x8e\x7c\x83\xb3\x3b\x7b\x0d\x82\x6a\x1f\x45\xd4\xa1\xc6\x33\x13\x84\xb1\x51\xa1\x27\xbb\x88\x5a\x02\x3a\xed\x65\x3a\x79\xcf\x0f\x50\x1f\x5a\x9a\xfe\xf5\x18\x89\xe4\xde\xd6\x63\xcb\xcb\x9f\x0d\x71\xb6\x4d\xf7\x15\xc0\xef\x52\xdf\x18\x39\xfb\x00\x3c\x63\xf9\xc9\xef\x82\xec\xad\xc6\x3d\x1a\xb3\xf0\x63\xf1\x80\x76\xa5\x13\x8a\xb1\x1e\x91\x1d\xf9\xdb\x49\xe0\x01\xb1\x81\x0e\x62\xa5\x28\x90\x78\x23\xcc\xba\xdd\xdc\x02\xb6\xf8\x5d\xa5\xd8\x1c\x90\xc4\x09\x53\x6f\x24\x13\x22\xb9\xbc\x82\xf5\xe4\x43\x0f\x6b\xd7\x01\x0a\xbf\xf8\x0a\x0a\xdc\x79\xe0\x37\x83\x
87\x92\x82\x3b\xaa\xe3\xbc\xee\x40\x6d\xf2\x82\x41\xe6\xf7\x8b\x55\x82\x16\x26\x4e\xd1\xa7\xd8\x25\x2a\x52\xe5\xdd\xdc\x14\xee\x66\x16\x93\x96\x17\xc1\x99\x81\x2f\x66\x22\x7a\xf9\x1e\x1b\x5a\x34\x6f\xf0\x43\xc2\x21\x2e\x06\x6c\xb0\x6a\x93\x6c\x8d\xa7\x83\x8c\x50\x7d\xb0\x97\x31\x1d\xad\xce\xe7\xbf\x25\x9b\xef\xff\xf9\x58\x79\xf7\x28\xaa\x5a\xa5\xf3\x61\x03\xa1\x20\x01\xdf\x2f\xe7\xa6\x79\xb3\x0b\x86\xd1\xe8\xe8\xff\xac\x3d\x42\x70\x86\xf4\x57\x9c\x78\x96\x9f\x03\x9e\x92\xaa\xe2\xf1\x9c\xd0\x02\x49\xb4\xba\x80\xa0\x42\xe3\x41\x5f\x18\x8b\x66\x67\xb6\x25\x9b\xec\xa9\x28\xf5\xce\xaa\x18\xda\x04\x27\xde\xe8\x70\x8d\x59\xbd\x24\x67\x15\x8e\xfa\xbb\x90\xb4\xd4\xf5\x85\xbf\x2c\x33\x1f\xb7\x47\x5c\x20\xd0\x16\x95\xd7\xcc\x7b\x6c\xa4\x16\xdf\x5f\xa6\x2a\x38\x9e\xf8\xf9\xb0\xf8\x88\xcf\x1f\xfc\x52\xff\x2f\x51\xb1\x64\xc5\x71\x53\xa7\x68\x5c\x7b\x0c\x0e\x28\xa4\x55\x90\x7e\x44\x54\x9f\x16\xd9\x30\x49\x08\x90\x6a\xff\xcb\x67\xa6\xe0\x97\xc1\x93\x2c\x4d\x1e\xcd\x79\x0c\x6c\xec\x1c\x28\x25\xad\x95\x4b\xb8\x1c\xbc\x96\x20\x36\xdb\xa1\x7b\xd2\xd3\x4e\x9f\xef\xdc\xb5\x50\xa1\x67\x97\x8f\x31\x9b\x35\x30\xd7\xf2\xf4\xb5\x28\x26\x66\x06\x11\x22\x9a\xfb\x69\x30\xd5\x46\x56\x7c\x31\x3e\x47\x32\xd7\x48\x26\x5a\xfc\x6c\x3c\x90\xff\x1c\xe4\x78\xf8\xf6\xc0\xc5\x03\x33\x8f\x4a\xc4\xd0\x95\x4a\xca\x95\x09\x89\x60\xe2\x04\xae\x94\xe7\xc2\xae\x4e\x58\x6f\x5e\x51\x2d\xaf\x65\x5f\x70\xbc\xa4\xff\x8c\xf5\x72\x5e\x84\x62\x8d\xc1\x5b\xff\x0d\x15\xc2\xb3\x73\x3b\x5b\xef\x7e\xe4\x29\x1f\x00\x63\x0e\xde\xce\x80\xfb\x02\x08\xe0\xb2\xd9\xf2\x2d\x53\x0f\x4d\xf7\xe2\x94\x52\x4b\x1a\xfb\x18\x95\xee\x80\x5d\x68\x9d\xc0\x73\x83\x08\xce\x9c\x4e\x99\xc7\x16\x3e\xb4\xff\x27\x7a\x89\x14\x7e\x03\x92\xb3\x60\x55\x36\x0a\x4c\x4f\x58\x00\x30\x91\x1f\xbe\x3a\xab\xc6\x6d\xa8\x7c\xf6\x6c\x16\x2d\xda\xf3\x08\x4b\x04\xff\x87\x5d\x4d\xed\x6a\x46\xe1\x02\x53\xbe\x60\x33\xe0\xaf\x9b\x3b\x72\x07\xdc\xb4\x2d\x1d\x92\x11\x1f\xcb\x3c\xb5\x45\x9a\xea\xc3\x8f\xbd\x2d\x1d\x26\x4f\xf2\xc2\xcd\xf8\xcf\xb7\x5d\x82\xbd\x51\x9f\xbd\xe1\x
4f\x41\xb9\x6c\x23\x76\x29\x3a\xad\x88\x13\xb9\xd7\x52\xda\xd3\xf5\x0c\xc3\x86\xb3\xa0\x3f\xa0\x0f\x9c\x92\xe0\xc8\x86\xbb\x33\xf7\x32\x82\xf7\x2c\x16\xd3\xd1\x73\xb1\x5b\xab\x93\xb1\xa6\x6e\xf8\x34\x07\x80\x7b\x29\xd4\x6b\xbe\x5c\x91\xa0\x52\xad\x64\x3b\x31\x88\x6b\x2f\x35\x79\xe4\xb6\x90\xb0\x1d\x7c\x71\x91\xb6\x6b\xad\xad\x08\xf7\x3b\xab\x78\x2c\xdd\xb7\xf6\x77\x89\xce\x24\x2b\x01\x93\x1b\x6a\x49\x23\x53\xcb\x9e\xe4\xfa\x0f\xfa\x1b\xd8\x9e\x20\xcf\x0b\x72\xfe\xab\x4a\xe2\x60\x8b\x24\x03\x62\x62\x43\xdb\x63\x9e\x61\x19\xf9\xd4\x73\x4b\x3c\x93\x60\x0e\x34\x71\x8e\x3c\xcc\x39\xd3\x74\x83\xa5\x99\x31\x62\x6a\x3b\x6e\x0e\x30\xce\x2e\x80\xc2\x6f\x1f\xfb\x40\xdd\xd2\xe9\xd2\xd4\xba\x4e\x9e\x8d\x86\x23\x50\xaa\x73\x60\xdd\x83\x0a\x8f\x30\x6c\x63\x14\x3f\x8f\x32\xb4\x21\x64\x5e\x05\x00\x5a\xbf\x54\xa7\xf8\xf6\x85\xaf\x45\xe9\xf0\x10\x64\xb4\x25\xfa\x96\x74\xdd\x52\x79\x30\x90\x03\x05\xd1\x91\x74\xf9\xca\x43\x85\x82\x0c\xe0\x4c\x9e\xef\x0f\x44\x7b\x9d\xa6\x11\x38\xc9\x95\x82\x5f\x41\x4f\xa5\xe6\x16\xb6\x9a\x44\x9d\xe1\x5e\x16\x47\x84\xc8\x9d\xf4\x5e\x04\x44\x9d\xea\x3b\xfe\x2f\x58\x09\xb9\x4c\xf9\x32\xfa\x07\x4f\xb8\x05\xd8\x79\x33\x0d\x02\xd6\xff\x00\x05\x7b\x34\xe5\xf6\xe8\x3d\x53\x3d\xcf\xb4\x68\xe9\xa8\xf5\xda\x1c\x3e\x0a\xff\x5b\xc9\xef\x49\x68\x60\x81\x1a\xdc\xb1\x97\x04\xac\xa1\x07\x74\x88\xe1\x2a\x6f\x88\x41\xd0\xb7\xc7\x2d\x24\x58\x50\x1c\x95\x44\xd8\x29\xc6\xfd\x08\x47\xfb\x92\x13\xff\xb3\xb4\xfd\x19\xfc\x76\xda\x32\x12\x72\x71\xeb\x43\xab\x36\x8c\x25\x6c\x54\xe3\xe0\x18\xb3\x71\xa9\xd0\x7a\x13\x80\x82\xc3\x23\x81\x1b\xf2\xac\xb6\x1e\xb2\x15\x8b\x18\x33\x1d\x9a\x23\x41\x38\x5b\x22\xe8\x99\x79\x2c\x7b\x1a\x1a\x1d\x43\x14\x51\x04\x78\x81\x8d\x4f\x48\x8e\xdb\x4b\x6a\x97\x13\x14\xce\x3d\xce\x46\x25\xd1\x41\x45\xc7\xe7\xa4\x63\x1a\x57\x3d\x7d\xdf\x93\x4f\xfd\x92\xe8\xa1\xa6\x63\xa4\xdd\x3b\x93\xbb\x7d\x84\xc3\xa0\x43\xca\x89\xb1\x7f\x4a\x88\xc7\x58\x1d\x5e\xbb\x57\x14\x28\x8c\x40\x4a\xa2\x6f\xca\x7b\xbf\x47\x3a\x27\x0b\xb9\x94\x77\x9f\xcc\x49\xc2\x50\x66\x05\x17\x11\x
90\xc7\x17\x20\x26\x1f\x04\x44\xd4\xa8\x66\x8d\x7e\xf4\xdd\xca\x85\xa5\x87\x31\xfb\x16\x75\x94\xe2\x29\x1c\xe4\x3d\x1e\x76\xa6\xa1\x7a\x17\x45\x15\xb2\x5e\xb1\xf1\x33\x3c\x7d\xbf\x0a\xe1\xd9\x67\x5b\x74\x2c\x67\xa1\x04\x2c\x72\xd3\x59\xf4\xb8\x03\x1f\xa9\x61\xc0\x3e\x5c\x24\x50\xfd\x33\x06\xf8\xa1\xd0\x53\xf1\x21\xf3\x7d\xf3\xc0\x3e\x8f\x48\x53\x32\x5d\x76\x21\x42\xb0\x29\x22\xb7\x24\x37\x45\x5e\x1d\xd5\x23\x7c\xfc\x74\x2f\xeb\x49\x2b\xd9\x8a\xc9\xa9\xc2\x67\xe2\xc8\x42\x14\x79\x2a\x3c\x17\x89\x32\xa2\xa6\x79\x82\x2b\xf5\xeb\xe2\x60\x40\x60\xdc\xb8\x49\x53\xe5\x55\xe4\xa6\x07\x40\xd5\x5a\x0f\xe8\x2e\x4d\xcb\xef\x71\x99\x94\x4f\xa6\xd2\xa8\xbb\xe6\x6d\x9a\x5d\x82\x7a\x02\xa0\x61\xed\x6b\xf6\x44\xf3\xeb\x8b\x60\xeb\xaa\x2b\xc2\x82\x91\xc1\xe5\xf7\x7a\xd1\x0e\x3d\x01\x4c\x20\x03\x0f\x54\xc1\xae\xd7\xae\xb0\x9f\xa1\x6b\x63\x9f\x91\x20\x26\xbe\x50\x3e\x09\xb6\x05\xc0\x29\xc7\xc4\xa8\x7f\xe1\x18\x4d\x60\xeb\x4a\x66\x3b\xbb\x62\x60\x42\x5d\x78\xa9\xae\x04\x15\x52\xd8\x3a\xf2\xa5\x7a\x85\x97\x4f\x6e\xa7\x5e\x5d\xe0\x8d\x93\x37\x48\x45\xe9\x3b\xfb\x5b\x9c\x3a\x4b\x36\x3f\x94\xdb\x10\x4e\x4c\x08\xa2\x88\x06\xa2\x0e\xda\x28\x1b\x8f\x4f\xb1\x4e\xaa\xa0\xae\xee\x08\xf5\x1d\xef\x06\x57\xf7\xb9\x26\x91\xb7\xa5\x0d\x6a\x7d\x3c\x09\xc4\x75\xc5\x39\xae\x35\xe9\x1f\x6b\xd9\xfc\xda\xa9\xaf\x1a\x0e\x66\x5b\xac\xa8\xa1\x3e\x06\xa3\x8c\xcd\x1f\x6e\x51\x55\x39\xdc\xfe\x68\xb0\x22\xba\x7c\x4f\xe5\xda\x5a\x54\x58\x22\xf0\xcb\xea\xd6\x1f\x24\x7e\x99\xc6\x47\xdd\xf4\x0d\x1c\x32\xd6\x40\x15\xcf\x73\xf3\x61\x85\x5b\xdc\xbd\x18\x3d\x8c\x34\xb8\x90\x87\xd4\x2e\xe3\xb1\x24\x0f\x55\x5e\xa4\x03\xb3\x31\xcf\x58\x3d\x9e\xac\xe2\x56\x1e\xaa\x63\x0a\xdb\x40\x50\x24\x8d\x05\xb6\x54\x80\x78\x01\xec\x56\x05\x03\x8e\x8c\x4b\xcb\xba\x9c\x12\xde\xf8\x73\xfa\x55\x2f\xc2\x2b\xdf\xa0\xeb\x79\xc4\x35\x5d\x8b\x09\x4a\x96\x56\x62\x63\x89\x97\x57\xc3\x55\xfd\x65\x04\xc9\x22\x29\xec\xca\xc0\xeb\x6d\x7c\x85\xac\xbf\x1e\x04\xd4\x46\xa0\xbf\xb1\x7d\x64\xc1\xc5\x51\x1d\xc1\x3f\xac\xa2\x96\x5d\x2d\xa5\x3a\x92\x57\x07\xd5\x
9f\x14\x7e\x4c\x35\xec\x8d\x15\xa8\xcc\x88\x73\x03\x12\x0f\x3c\x8d\xf1\x5c\x7f\x79\xb4\xc2\x32\x77\xbe\x87\x78\x13\x44\x0f\x34\xf1\x7f\x75\xcb\x05\x7a\x5a\x78\xe0\x64\xa4\x5e\xb5\xf6\xa3\x41\xa6\x66\x3a\xd9\x58\x7c\x91\x52\xf2\x35\x0b\xfa\xd9\xe1\x38\x3e\x0c\x9c\x98\xbf\x81\x80\x82\x05\xa1\x71\x38\x77\x3d\xb7\xeb\x37\x0f\xd7\xd2\x54\xcd\x32\x04\x07\x32\xf5\xfd\x7b\xc8\x7c\x29\x4d\x10\x8e\x24\x7b\x2e\x38\x8b\x28\xa9\x24\xc5\x8a\xb5\xcf\x58\x85\x00\xd8\xfc\x4a\x67\x53\x83\xb6\x05\x96\xf3\x3a\xed\x4c\x39\xf6\xd6\xf0\x7c\x18\x4c\x35\xaf\x67\xa4\x85\x29\x43\x91\xfd\x78\x07\x96\xc4\x02\x97\x0d\xc0\xf5\xf9\xfa\x97\x9e\xd6\x7c\x43\x50\xe7\xf1\x77\x4b\x33\x1c\xf8\x3c\x7e\xba\x8f\x85\xa8\x4c\x5a\x51\xb1\xed\xa7\x28\xe5\xe5\x51\x06\x0a\xa7\xc6\x23\xf2\x34\x9b\xa6\xca\x6b\x06\xf6\x7c\x0e\xc6\x84\xde\xeb\xe6\xab\x0a\x83\x3a\x37\xbf\x7d\x1b\xf8\xff\xee\xd1\x66\xb6\x87\x4a\x93\x74\x0d\x70\xf2\x1e\x2a\x2f\x63\x5e\xf3\xab\x37\x08\x9b\xda\x2a\xf2\xea\x3e\x29\xa1\x2b\xb4\x0b\x39\xce\x99\x64\xca\x69\xac\xcc\x35\x5a\x56\x14\x2e\xf2\x25\xc1\x50\x04\xa2\x30\xd4\x1e\x3f\xd6\xed\x80\x92\x64\x17\xb9\x78\x87\xb5\x5c\x99\x57\x95\x7a\xc1\x0a\xba\xbe\xdd\x03\x13\x89\xf6\x75\xc8\xe9\xe2\xba\xf5\x8f\xb9\x49\x13\x20\xde\x1e\x2a\x04\x45\xd1\x8e\x37\x63\x90\xf2\xb7\x63\x71\x21\x21\xd0\x1c\x57\x50\xbf\x5f\x5c\xfe\xeb\x21\x8d\x31\x12\xca\xd1\x64\x4d\xb3\xdb\xa0\x48\x33\x7f\x7d\xf1\x33\x58\x05\x4e\x8f\x8a\x0d\x55\xf9\xb5\x86\xb2\x13\x98\x4f\x75\xca\xfb\x7c\xc7\xdf\x98\xb3\xc1\x18\x35\xad\x10\xc0\x48\xba\x82\x42\x94\xbc\x3d\x59\xfc\x35\x51\x1d\xc2\xa2\x31\x84\xba\x55\xee\x0d\x85\xb6\x26\xdc\x7c\xab\xd0\x2b\xe3\xf2\x7c\x51\x04\xd1\x83\xfb\x98\x60\xd5\x46\xa1\x78\x10\xc5\x63\xc8\x4b\xb8\xb7\x36\xab\x9c\x0d\x43\x38\x04\x55\xf2\x3f\xae\xad\x00\x56\x2c\x10\xe8\xb7\x4a\x2b\xb9\xd6\x35\x7b\x00\xf9\x87\xf4\x8a\xbc\x8a\x2b\xdb\x15\xc5\xbd\xd8\x1a\x96\xe8\xe4\x5f\xf7\xa6\xde\xa0\x2e\x1b\x29\x15\xff\x83\x52\x33\x0e\x68\x2b\x4d\xe0\x30\xe5\x94\x9b\xea\x2b\x04\x92\x0f\x6f\x3a\x09\xac\xc9\x6d\x75\x2d\x58\x3d\x21\x58\x
5e\xe7\xd3\x16\xd4\x83\x18\x76\x6f\xc9\xde\x4c\x23\xe2\x25\xe7\x88\x2c\x7d\x24\x02\xf1\xc0\x9c\x85\x58\x57\x9e\x04\x70\xe9\xe4\xab\xcc\x07\x65\x24\xc6\x76\x92\x6d\x24\x73\x50\xfd\xa0\x02\x2f\xa3\x19\xc9\x51\xd2\x2d\x83\x7c\x3c\x1d\x52\xa1\x79\x03\xb1\x38\x24\xd5\xa3\xc1\x91\xbf\x45\xde\x08\x14\x28\xd0\x4c\x20\x0b\xbd\x2e\x4b\xfc\x91\x38\x1a\xa6\xe5\xd0\xfd\x0a\xab\x52\x33\xf7\xea\x03\x85\x9a\x82\xc6\x3b\xbf\xdb\xc3\xd8\x7c\x23\xa7\x1c\xde\x79\xbc\x41\xf1\xde\xe2\x79\x96\xd3\x1e\x5a\xfb\x4a\xa1\x15\x4d\x51\x9e\xc6\x71\xd0\x63\x69\x12\xad\x82\x1b\xf1\xf1\x75\x0c\xfc\xd6\x68\xca\xf8\xb6\xfe\xf7\xe5\xbb\x41\x3a\xaf\xaf\x33\x7d\x8d\x14\xd1\x67\x21\x85\x17\x49\x11\x17\x5b\x00\x20\x70\x7a\x5d\x8e\x0c\xfd\x98\xe2\x61\x9f\x83\x29\xc2\xc1\xd0\xd9\x2b\x1d\x54\xf7\xd3\x82\xc2\xd5\x85\x54\x5f\xf2\x25\x25\x8e\xc1\x46\xbf\x71\x99\x4c\x4d\x95\xd9\x4e\xac\xa4\xab\x00\x11\x6a\x98\x0d\x5b\xfe\x11\xa6\xe7\xcd\x41\xfe\x09\x30\xfa\x2f\xbe\xee\xa8\x04\x7f\xd5\x90\xbc\x55\xd7\x3b\x5e\x7d\x00\xe6\x25\xff\x49\xac\xec\x8b\x75\xc8\xc1\xd1\x2c\xc8\x83\x15\x57\x26\x68\x8d\x2b\x3b\x5f\x5c\xf3\x61\xc6\x61\x15\xcf\x2d\x65\x8e\x08\x40\xb4\xfb\xfb\x38\x69\xab\x10\xc7\x62\x5d\x44\x61\x05\xad\x2a\x5b\xd7\xe5\xdc\xec\xbf\xfc\xd4\xc1\x45\x1f\x9b\x80\xfb\x04\xeb\xae\x08\x3f\x11\xc7\x01\x5d\xa0\xb4\x35\x98\x83\xe6\x49\x92\x1f\xfe\xf0\x61\xd4\x4e\x3f\xbd\x13\x9e\x95\x86\x85\x44\x20\x02\xce\x7f\xce\xf5\x81\x30\xde\x25\x4d\x02\xc0\xd6\x15\x53\x92\x62\xeb\x3a\xb2\x23\x63\xdc\x9c\x83\x7d\x13\x98\xeb\x6a\x63\x0d\xfe\x60\xaf\x5f\x8a\xc6\x19\x1e\x1b\x0b\x36\x96\x2a\xb8\x85\xf9\x3d\x10\x05\xad\x44\x82\xfc\xc0\x72\x6a\xd9\x7e\xb0\xc4\x66\x8b\x02\x37\xbb\x2e\x27\x8c\x17\x49\x79\xa8\x38\x40\x96\x16\xba\x75\x1a\x1c\x21\xd6\xb8\xe2\x01\x87\xb0\x8a\x3f\x89\x8f\x8b\xf7\x39\xc2\x29\x15\xac\xac\x21\x66\x8e\x10\xa2\xa4\x19\x5f\xb2\x34\x0f\x4a\xa1\x11\xcd\xbf\x95\x1d\x97\xd0\xe7\x36\xb8\x32\x50\xb4\xa0\xe4\xae\xc2\x8d\x10\x31\x85\x88\x35\x2c\x39\xad\x8d\x5b\xc8\xb0\x23\x99\xb7\x5e\x5f\xc8\xea\x09\xb9\x76\x1f\xfe\x57\x00\x7e\x
9b\xdf\x24\x30\x00\x9f\xc6\x25\xc7\x4c\x3d\x8f\x90\x5f\x85\xf2\x9b\x4c\x40\xc1\x42\xe8\x43\x04\xcf\xb6\x88\x05\xc2\x87\xbe\x58\xb0\x0f\x2f\x89\x2f\x89\x13\x82\xde\x6e\xaa\x4b\x57\x79\x0c\x96\x62\xe5\x9a\x3f\x7d\xdd\x0d\x96\x34\x5a\x28\xe8\x9e\xeb\xbf\xb5\x8b\x66\x78\x70\xd0\xaf\x9a\x5b\x60\xfd\xe1\x93\xb4\x78\xee\x90\x73\x27\xe4\x87\xe7\xe4\x28\x6c\x6e\xb4\xcd\xc7\x1d\x36\x0d\xb9\x8b\x89\xfa\x8a\x72\x7c\xb2\xea\x64\x32\xb1\x33\x07\x9f\xfe\x08\x4a\xd0\x74\x35\x2f\xad\x08\x5f\x98\x90\xca\x80\x38\xce\x3b\x5d\x95\xed\x04\xe9\x2b\xaf\x3b\x10\x2c\x87\x3b\x7d\xd3\x13\xa9\x38\xf7\xce\xd8\xbe\x45\xdd\x01\x0f\xaa\x56\xf1\x22\xe8\x69\xb0\xa7\xf0\x4b\xe7\x08\x48\x2e\x7d\x9b\x8d\x86\x0c\xf3\xc2\x8d\x55\xf0\xdb\x68\x5c\xdf\xc4\xd1\xea\xda\xd4\xaf\xe6\x83\x94\xdc\x89\xf4\x07\x0e\x1c\x84\x46\x60\xca\x49\x09\x22\xab\xab\x96\xd6\x94\x8d\x7c\x1c\xff\x05\x27\x62\x07\x1d\xae\x12\x6d\x13\x38\x54\x7b\x58\xaa\xbe\x89\x50\xad\x7c\x1d\x73\x5e\x4b\xd8\xdd\xb5\xf2\x8c\xbc\xe7\x76\x51\x43\xee\xe3\x98\xb3\x3a\x0f\x7e\x32\x08\xa7\x7e\x24\x3e\x58\xe1\x26\x34\xb1\x00\x02\x9c\xd9\x73\xdd\x95\x6d\x73\xb4\xa9\xac\xdd\xeb\x37\x8f\x3e\x9d\x90\xf5\xf9\xbd\x82\x69\x59\xf2\xb3\x78\x0a\x21\xba\xb7\x70\x79\x97\x6a\x1a\x0e\xbe\x3f\xf9\x9b\x8e\x9d\x48\x3a\x58\x77\x8d\x07\x74\xca\xaa\x3c\xd0\x99\xf0\x59\xc2\xcc\x69\x71\xe2\x52\x62\x85\x8f\xaa\x84\x04\x75\xe0\xb7\x07\x44\x98\x9a\xc2\xb1\x46\x8b\xad\x44\xe4\x26\xc4\x26\x67\xc9\x31\x7b\x1f\xc6\x69\xb0\xb6\xa5\xa1\xa3\xea\x1f\x1d\x48\xb8\x03\x68\x1d\xe3\x8e\xae\xe7\x01\xd0\x8f\xd3\xb1\x3c\xed\x8e\xd3\x26\x49\x9c\x34\xa4\xb1\xe4\x31\xf6\x29\x0d\x89\x87\x0f\x88\xb7\xe4\xf3\x1d\xaa\xbb\x1a\x2b\xc8\x11\xd6\xc4\xa4\xf8\xbf\xb2\xd4\x6f\x24\x74\x8d\xf1\xed\xd2\x80\xc4\xda\x45\x19\x3a\x34\x8b\xd0\x12\x98\x6c\xdf\xa3\xf9\x11\x2e\xd9\x12\xe0\x59\x40\x45\xd2\x35\xc7\x8e\x87\xdb\xb7\x49\x40\x1f\x6a\x91\x28\x48\xe9\x88\x1a\xba\xc4\x7f\xc8\xa8\x34\x4d\x99\xb2\x2d\xd6\x12\x59\xff\xf2\x79\x7c\xd9\x7d\xa2\x62\xc0\x50\xad\xb1\xae\x77\xeb\x0a\xe2\x02\xd5\x62\x11\xf6\xf3\x18\x5b\x7d\x
95\x52\xd4\x1c\xfd\x92\xd3\xb4\x85\x5c\x6c\x8c\x41\x91\xb9\xe1\xff\x43\x31\xb5\x67\x71\x3e\x24\xc0\xa3\x60\x64\xe3\xbe\xbe\x6c\xbf\x7b\x3f\xae\xb4\xb5\x7a\xc4\x18\x9a\x7c\xa1\x2d\xdb\xef\x7a\xe7\x05\x17\x11\xfa\x24\xb3\xab\x22\x27\x23\x65\x06\x0d\x69\x43\xb8\xff\xe9\xee\xfd\xaa\xaa\x65\x4a\x79\x50\x36\x4e\x4f\x52\x38\x43\x70\x57\xd8\xfc\x60\x9f\xbb\x93\xa9\xb0\x27\x5b\x69\x91\xc4\xa6\xbc\x6f\x7b\x89\x6e\xb9\xec\xe8\x0e\x58\xd2\x00\x17\x7f\x49\xd6\x78\x4d\xcd\x3c\xd7\x75\xc3\x6a\x4f\x62\xdd\xca\xb0\x47\x00\x6f\x94\xbb\x6c\x1c\xab\x5d\x84\x5c\x5b\xf1\xb0\xbd\xe0\x5d\x7d\x6d\xf7\x54\x01\x4b\xaa\x88\x6e\x11\xd3\x93\xd7\x72\x44\xf3\x4d\xf9\x94\xd6\xad\xed\x99\x11\x6f\x91\x7f\xce\x5c\x5c\x95\x37\x3b\xbf\xd9\x79\x84\x08\xda\x9c\x8e\x8e\xb2\xe2\xb3\x52\xdf\x79\xa2\x12\x3d\xaf\x2b\x45\xf0\x6c\x90\x57\xe4\x53\xd8\xe9\x54\xff\x07\x21\xe4\x0c\x9b\x90\x6a\x4b\xe1\x55\x37\x07\xcf\xc5\x38\x7c\xf4\x0b\x95\x8f\x10\x9b\x39\x56\xf8\x81\xc6\x53\x1c\x01\x5a\x04\xc6\x65\xcf\x45\x38\x0b\xa1\x4d\x34\xf7\xd5\xb4\x3c\x9a\xbf\x05\x42\x71\xf7\xb8\xde\x5a\x06\xef\x29\x43\x13\x0c\x16\xc5\x9d\x54\xf1\x4a\x53\x46\x29\x74\xad\xf7\x8d\xd0\xda\xd3\x57\x4a\x6c\x3a\xb5\x19\x65\x84\x90\xff\x62\x3c\x6a\x4c\x9f\xdb\x5e\x00\xae\xdf\xae\xd9\xc5\x6d\x4b\x2d\xf8\x8a\x6d\x11\xcf\x96\x0d\x95\x27\xe7\x67\x95\xb8\xe4\xdb\x43\x3d\x98\x34\x47\xb5\x90\xdf\xf7\xcb\xcb\x9e\x18\x7b\x9b\x9e\x54\x99\x5f\x92\x64\x97\x11\xeb\x24\x50\xf6\xeb\xc9\x6a\xd1\xf2\x25\xf1\x73\xfc\xd6\x8e\x55\x93\x32\xc9\x28\xb9\xf4\x60\x68\xea\x0f\x7f\xd8\x1e\x9e\x49\xad\x1a\x28\xb3\x5d\xd9\x70\xb8\x71\xe9\xd1\x97\x9d\x12\x76\x6d\xa4\xa9\x04\xbe\x5d\xdd\x38\x78\x41\x5b\xb4\x19\x51\x12\x76\x03\xe3\x28\x37\x65\x75\x03\x03\xcd\x32\xa3\xc1\x5c\xdb\xe9\x6e\x01\x05\xb8\x32\xc7\x8b\x7d\x64\xc5\x43\x0d\x9b\x2f\x0f\xe0\x80\xcc\x41\xde\xa1\x4f\x4f\xb3\x8f\x8a\xbf\xc8\xb7\x37\xf3\xe1\xa0\x56\xea\xe1\x9c\x56\x18\x6e\x2f\x8d\x60\x42\x43\xee\xb9\x8f\x18\x51\x26\xa0\x7f\xad\x9a\xe6\xb8\x99\x0d\x79\x64\x3d\x39\xd8\x63\x1a\x77\x0e\x1d\xf6\x47\xbf\xc2\xef\x98\x00\x
89\xd4\xef\x74\x4a\x51\x43\x8e\xc4\xc0\x89\x39\xc3\xcf\x5b\x51\xae\x0d\xf1\x63\x40\x26\xe8\xb6\x4f\x31\xb2\x66\x73\x70\xdc\xfb\x7a\x98\x4b\x62\x0e\x41\xfc\x85\xf3\xd7\xbc\x42\x16\x59\x19\x40\x3a\x4d\xf9\x6e\xe1\x99\xf0\xfe\xc3\x44\x41\x94\xa3\x40\x00\x71\x24\x2f\xf4\xe9\x00\x57\x55\x2c\xed\xc5\xbd\xf6\xb2\xed\x7a\x7c\x03\x15\xf2\x74\x67\xc9\x07\xe6\xd8\x67\xa8\x9b\xce\x00\x3f\x85\x28\x3f\x0e\x10\x0c\xb6\x85\xdf\x05\x0c\x0a\x25\x67\xa1\x3a\x95\x45\x0c\x42\x61\x1f\x0d\xf6\x97\x1e\x95\x55\x8f\x18\x79\x46\xc5\xa0\xd1\x6f\x9e\x97\x9f\xe3\x71\xc9\x6d\x3f\x57\xd3\xae\xc9\x3e\x29\xcf\x9a\x05\x77\x65\x2a\xe5\x83\x0a\x61\xc6\x9b\xd0\x95\x36\x92\x84\x8d\x15\x49\xf0\xa0\x7f\x05\xd7\x54\x3d\x57\xe2\x52\xbe\xc2\x15\x7c\xfc\xe5\xb2\x7c\x23\x49\x75\x5e\x6d\xad\x04\xff\x18\xc2\x00\x61\x7f\x24\xac\x42\x8f\x4a\xf5\xef\x04\x13\xfd\xbb\x79\xe6\x73\xdc\x8e\xcf\x73\x14\x99\xe0\x32\x3b\x83\x72\xa5\x31\x03\x2e\xb4\xd4\x63\xa8\x24\xbc\x29\x10\x18\x39\xf6\x44\x69\x7c\x60\x0b\xf9\x39\xc2\x9c\x55\x43\x3d\x06\x72\x8a\xfa\xf2\x19\x9d\xf6\x70\x06\x99\x92\x69\xfb\xa8\xc0\xc4\xd8\x87\x89\xf6\xb5\x37\x46\xfd\x47\x03\x99\x53\x41\x8a\x1c\x16\x21\xa3\x9e\x9a\x61\x8f\xb8\xb5\x09\xe5\x7a\x0d\x83\xed\xf0\xad\x00\xcb\x2e\xac\xda\x2c\xcf\xe0\x4b\xf0\x66\x26\x83\x69\x1f\x9a\xdc\x8b\x4e\xc1\xba\xfe\x36\xc4\xaa\x72\x49\x72\xef\xed\xe7\xea\x43\xaa\xdf\xac\x3f\x74\x41\xa9\x03\xce\x2a\x12\x86\x03\x0b\x6a\xea\x71\xf0\xb7\x30\x7b\xe5\x5b\xc2\x83\x66\xca\xea\xd1\xd1\xb2\x15\x6e\xb5\xad\xd2\x1a\x2e\x73\x63\xe2\xf1\xe1\xba\x22\x45\x91\xaf\x99\xaa\x12\x1f\x6a\x2e\x13\x9c\xd0\xf0\x54\xfe\x9e\x39\x26\x88\xf2\x9b\xa5\x6c\xe2\x59\x1b\x37\x23\x29\xec\xf7\x44\x1b\x63\x84\x41\x32\xf1\x6f\xb4\xa0\x36\x8b\x39\x8f\xa6\x07\xe1\xb4\x6f\xf5\x6d\x20\x2d\x04\xc4\x1b\x10\xc0\xc8\xc8\xd9\x38\x1f\xe2\xfa\x55\x26\x64\x27\x88\xb6\xa6\x7a\x1c\x72\x21\x96\x56\x62\x40\xa9\x66\xd1\xa2\x7e\x5a\x0b\xa4\x06\x7a\x6d\x2e\x5b\x1f\x66\x7a\xc2\x02\x4d\x52\x8f\xab\x6d\x97\xde\x07\x93\xa9\x29\x8f\x1b\x16\x62\x88\x14\x16\xd1\x4c\x0f\x58\x3e\x27\x35\x94\x
55\x13\x5b\x70\x53\xab\x48\x99\x42\xd4\x06\x90\xe0\x0b\xc6\x69\x50\xd6\x67\x90\x90\x30\xa4\x07\x83\x93\x25\x69\x8f\x53\x28\x0f\x67\x9f\x45\xee\x31\x05\x00\x54\xaf\x34\x05\x65\x25\x16\xc4\x27\xc6\xf3\xf5\xfb\xe7\x55\x0c\xd8\xf8\xc0\xb8\xfa\x02\xa2\x45\x33\xf3\x08\x16\x07\xfd\xa5\xbe\xdc\xad\xaa\xc0\xfd\x4d\xbf\xce\x97\x68\x5a\x97\x24\xf1\x3c\x5e\xe6\x9b\xf8\x7b\x12\x4a\x11\x50\xee\xda\xe7\x00\x0b\x50\x8f\x1f\x41\x82\x6e\xdd\xfc\xe4\x00\xda\x11\x14\x6c\x31\x91\x01\x31\xe6\xe8\xa1\xcc\x44\xe2\x68\x66\x3e\x82\xde\xfa\x94\xcc\x5e\xf5\x3b\x64\x79\xb2\x24\xf5\xf8\x18\x4b\x2e\xd3\xa4\xe9\xca\xd4\xf3\x59\x70\x42\x2f\x8e\xcb\xc7\x23\x74\xd2\x91\xf3\x53\x5f\xd2\x6a\x85\xdd\x69\xfa\x6b\x49\xfb\x6d\xb1\x6c\x46\x9f\x65\x7a\xf0\x6c\x64\x8b\x31\x05\x1c\xd4\x4c\xcd\xd1\x0b\x74\xcc\x34\x7d\xbe\x1d\x89\x55\x9f\x70\x04\x26\xc0\x77\x99\x2e\xac\xb0\x0a\x77\x96\xe3\x4a\x42\x9d\x84\xd3\x71\xfe\xf8\x3f\xdf\x35\x2f\xa7\x5e\x3b\xe6\x55\x9b\x58\xa4\x57\x18\xc8\xd2\xfb\x75\x0b\xdd\x2b\x86\xb8\xa8\x4d\xb1\xec\xa0\x05\x0f\xce\x17\xc6\x24\x71\x5a\x4d\x40\xc9\x89\x31\x4f\x12\x97\xe8\x34\xb7\x94\xd5\x67\xd8\x20\xca\x48\xd8\xfc\x7e\x7a\x15\x59\xa8\xad\xf0\xb6\x23\x99\x4f\x3b\xf2\x7e\xd5\x87\x79\x75\x10\x1c\x0b\x8e\x5a\x29\xb8\xce\xcc\x5e\x02\x52\x37\x4c\xcd\xeb\xfe\xf5\x78\x9e\x90\xa5\x48\x9d\x5e\xbe\xdf\x48\x19\x2d\x91\x1b\xbb\x11\x39\xe2\x92\x04\xf1\xef\x20\x42\x8f\x92\xcc\x3f\x4b\x84\x99\xd7\x6d\xcb\xc8\x35\xa0\xde\x52\x45\x02\x7a\xc3\xb1\x89\xe1\xc0\xce\x67\xcd\x66\x98\x77\xea\xf1\x18\x6e\x5c\x49\x27\x63\x19\x05\xe7\x95\x9b\x1f\xf6\xe4\x9a\x04\x17\x32\xab\x39\xc1\xc4\xff\xb1\xff\x6c\x82\x85\x29\xa5\xb7\x2f\x30\x21\x7b\xed\x8c\xd1\xe4\x86\xcf\x2e\x1a\xf9\x60\x35\x28\x98\xd3\xdd\x29\x50\xd1\xc4\xa0\x7b\x2e\x89\xbf\xee\xd4\x56\xa2\x70\x1d\xb1\xa2\x4e\x84\x33\xfd\x62\x85\x89\x6b\xe2\xd1\x41\x74\xa5\x8c\x8b\xb5\xf7\xab\xd6\xf5\xec\xd6\x98\xe0\x5b\x0f\x9e\x00\xcc\xb6\x80\x5f\xd3\xc5\xc1\xe6\xf8\x81\xea\x38\xb0\x30\x97\xd9\x4b\x5a\xc2\x92\x0c\x48\xa2\xb7\xb2\xc4\x40\x56\x76\x07\x4d\x43\xdf\x52\x27\xc5\x
46\x2a\x29\x7c\x34\x3f\x49\x58\x55\x80\xa6\xc6\x18\x11\xa9\x17\x29\xa1\x8e\x8e\xd8\x9a\xa9\x8d\x14\x91\x85\x27\xd6\x1d\x4c\x83\x01\xe2\x0c\x7e\xc5\xee\x29\x0e\xe7\x54\x71\x5b\xbb\xff\x41\x40\x17\xfe\x9a\x0e\xe3\xbc\xe3\x09\xf8\x52\x55\xbc\xb8\x1b\x2b\x2f\xe7\xd6\xf6\xe9\x19\x7f\xbb\xbe\xb3\x09\x80\x42\xea\xc0\x5e\x90\x6a\x43\x02\x7a\x1d\xfa\x3a\xf0\x6e\x7a\xae\x51\x87\x0b\x17\x49\x2a\xa1\x43\x9a\x6d\x65\x53\x5c\xff\x04\x20\x8e\x41\xc6\x6f\x25\x70\x57\x33\x72\x50\xaa\xb1\xa7\xa7\x0b\x82\x44\x43\x03\x1d\xc0\x6b\x7c\x72\xd4\xa2\xb6\x68\x78\x9a\xd8\xdc\x22\x32\x02\x18\xf9\x88\x38\x8c\x32\x97\x53\x7c\xf1\x6b\xfd\xb7\xfd\x0e\xad\x67\xfc\xbf\x56\x38\xb1\xa8\x1d\x48\xb6\x35\x2d\x68\xea\x63\xe0\x55\x5c\xd3\x57\x75\x6a\x91\x58\xe0\x3e\x17\x3b\xee\x9a\x9f\x5a\xf2\xc1\x5f\x58\x52\xa0\xc0\xa2\xa8\x98\xbe\x71\x5a\x5f\x7d\x59\x58\xa6\x42\xf8\x37\x9a\xb1\x86\x35\x40\x03\xca\x16\xf7\x45\x90\x1d\xad\x98\xdc\x9c\x32\x27\xb2\x96\x4d\xc0\x4a\x2c\x94\x8d\x4e\x59\xd5\x44\x4e\xa8\x75\x0e\xb2\xa8\x8c\x91\x38\x73\xa5\x19\xeb\x20\x9b\xee\x10\x6e\x57\x23\x7c\xb2\xc0\x94\x29\xd0\xac\xdb\xd9\xca\xfc\xdf\x84\x58\xc3\x88\x8d\x25\x2b\xc9\x2d\x33\x66\xa0\xe4\x4e\x21\xb2\xde\x6b\xf1\x05\xb5\xa1\x26\x5b\x69\xa0\xac\x84\xda\xec\x1e\xa5\xb6\x56\x3c\x0d\x0e\x46\xb4\x1f\x41\xd3\x93\x48\x67\x63\xe8\x98\x25\x2d\x61\x83\xcd\x41\x65\x31\x6f\x5b\x48\xae\x6d\xe7\xa4\x17\xd0\x68\xdb\xb9\x11\x91\x09\x87\xa0\x02\xf2\xdf\x53\x75\x87\x80\x79\x4b\x73\xf2\xa5\x7d\x61\x83\x50\xf1\x59\x84\xe6\x5d\x75\x3b\xe2\xa7\xcb\xe3\x37\x89\x13\xad\xb5\x32\x05\x9d\xd3\x15\x5b\x91\xa4\x49\x94\x3c\x3c\x37\x83\x5c\xac\x08\x40\xb4\x0c\xfc\xb0\x52\xc9\x4b\xe2\xb0\xb7\x4f\xe5\xa3\x6c\xa0\xd8\xd8\x75\xb5\x24\x0a\xe2\xef\x2a\x29\xcf\x47\xdc\x4f\xde\x4f\xa0\x88\xa7\x54\x59\x59\x71\x4b\xf0\xba\x32\xca\x8b\x32\xd8\xca\x61\xb6\x02\x9c\x88\xa2\xf5\x70\xf6\x48\x5b\xe9\x0c\x04\x0d\x87\xad\xe9\x94\xfb\xce\xdc\xb7\xa6\xa5\x61\xb8\x26\xa4\x88\x30\x3d\x66\xb8\x4c\xfd\xcf\xb6\x2a\xca\xf0\x2f\x9f\x5b\x5e\xc7\xde\x74\x8b\x3b\xdb\xa8\x58\xe4\x96\xc6\x45\x
38\x9d\x37\x11\x90\x09\xa5\x97\xcc\xc6\xce\xcb\x1e\x9c\x76\x73\xca\x70\x17\xd1\x2a\x0c\x17\xdc\x5c\xa6\x72\x46\x3e\x87\x65\x6f\x9f\x5d\x9c\x1e\xa0\xcb\x76\x5a\x45\x5d\xba\x72\x4c\xca\x7b\x3b\x42\x26\xa9\x51\x52\xb8\xaf\xaf\xaf\xe7\x57\x3d\xc7\xc1\x2a\xc8\x26\x1d\x48\xe4\xd8\x1d\x5d\xcb\x54\x22\xec\x58\x4a\xa6\x2a\xee\x1f\x70\xfb\x2e\x7e\x1d\x97\x2d\xf6\xb4\x61\xaf\xc8\xcd\xd8\xa7\x84\x86\xde\x51\x8b\xc6\xdc\x84\x7b\x08\xc3\x34\xb2\x5c\xa6\x26\xf5\x11\x67\x1e\xb8\x1b\xb7\x7f\x7c\x22\x1b\x11\xfd\xd6\x49\x84\x66\x8c\x8b\xea\xc9\x85\x54\x76\xa9\x49\x19\xf7\x59\xe9\x29\xd6\x7a\xf5\x48\x0c\x68\x8c\xf8\x69\xae\x6b\x22\xca\x9f\x47\xf1\xad\xed\x21\xdf\xf9\xfc\xbc\x03\x50\xe6\x7e\xe4\x1c\x64\x02\xc1\x87\x58\xa9\x50\xc1\x40\x5e\xdf\x48\x4c\x45\x3b\xc2\x71\x00\x1d\xa7\x14\x0b\x52\xde\x99\xee\x14\x0b\xca\x37\xf6\x15\x5c\x56\x88\x6d\x40\x12\xe4\x98\xf9\xd4\xa5\xda\x40\x5c\xfa\xb4\x15\xae\x19\xd2\xae\x85\xd9\x29\x3b\x75\x62\x79\x82\x27\xe5\x0b\x5b\x7b\xee\x25\x47\xf7\x60\x61\x79\xea\xe2\xb3\x89\xd2\x40\x93\xff\x81\xdc\x15\xa4\x78\x17\x3d\x48\x8e\x48\x4e\xb0\x6d\x8c\x56\xc9\x29\x8a\xec\x74\x93\x97\xd2\xa0\x2b\x48\x91\xee\x0d\xbb\xbb\xaf\xd3\x28\x3a\x10\x81\xd6\xe9\xbf\x1c\x92\xb1\xce\xd9\x54\x8d\x87\x25\xfd\x73\xce\x08\x65\x64\xd1\x18\xad\xc0\x6c\x4c\x40\x69\x2d\xbb\x54\xa9\xa8\x45\x55\x1a\x37\xdd\x15\x4c\x1c\x79\xab\x56\x0f\xb4\x6d\x57\x42\x9b\xfd\xcb\x1e\x04\x54\x4e\xf6\xa7\x67\x59\x8c\x95\xc6\xa7\x12\x4b\x64\x62\xc4\xd8\xdf\x71\x78\x06\x92\xb6\xc9\x79\x0f\x26\x0e\x84\xf6\xdd\x80\xff\xa7\x5c\x75\x6c\x97\x1d\x8a\xd2\x06\xe5\xdd\x1f\x3b\x58\xbd\x52\xda\xed\x3f\x8e\xb3\xe9\x26\xb4\x83\x5e\x1f\x0c\xb2\xf6\x59\xad\xa9\x3b\x44\x98\xc6\xa8\x29\x66\x18\x20\x1d\x44\xf6\x98\x20\xf1\xa4\xf4\xad\xf0\x6b\x17\x6c\x27\x9f\x9e\x41\xe0\x94\xa1\x96\x00\xcd\xfb\x84\x85\x11\x33\xa4\x82\x6f\x93\xa6\x19\x01\x2d\xed\x53\xc6\xa8\xb2\xd6\x74\xcc\xd1\x47\xe7\xc8\x9a\xf5\x68\x50\x49\x1b\xa7\xf1\xcb\xaf\x3a\x87\xfe\xa7\xf7\xba\x62\x74\xd1\x65\x72\x79\x51\x46\x43\xb0\x90\xf4\xaa\x43\x5a\xdb\x6a\x49\xbc\xab\x
f3\x15\x2b\xae\x06\x68\x8a\x53\x95\xe1\x31\xe7\xb1\xa8\x03\xcf\xd2\x70\x12\xb1\x45\xa3\x2d\xf5\x09\xa6\x14\x88\xf6\x13\xc7\x8b\x6c\xab\xce\xf0\xdf\x98\xb6\xd7\xbd\x9e\x03\x29\x51\x0e\xef\x99\xa6\x1c\x92\x7a\x92\x76\x49\x02\xc7\xe1\x2e\xe8\x50\x97\xa4\x90\x4d\x50\xdc\x63\x7d\x9f\xfe\x3e\x9d\x68\x0a\x93\xba\x17\xcb\x56\x7a\x99\x5d\xcc\xaf\x84\xef\xa7\x1b\x5c\xfd\x55\xf0\xe2\xad\x29\x7d\xdd\x5b\x9c\x88\xb4\xba\xb6\xa2\x83\x2f\xe0\x7b\xa5\x0c\x62\x23\xc5\x9c\x32\xea\x19\x30\xc7\x7c\x52\x0f\x21\x92\xc9\x19\xa8\xb8\x64\x6c\x6a\xe1\xbb\x5c\x8d\xb8\xfd\xb6\x9a\xcf\x77\xdc\x93\xe1\xb0\x51\x04\x64\x83\x7a\x13\xe7\xb9\x55\x6a\xf1\x83\x0a\x33\xa8\x24\xab\xf9\x0c\x22\x29\xc6\x48\xab\x0a\xea\x9f\xce\xf1\x36\xae\x71\xe6\x1d\xe8\x97\x70\x16\x3e\x54\x54\x01\xb0\xb6\x5f\x9d\x6a\x67\x04\x3f\xcd\x30\x81\x3d\xf6\x27\x6b\xf2\x52\x82\x33\xb8\x81\x66\x3d\xd8\x5f\x4a\x4b\xe4\x2b\xda\x40\x3d\xd2\xe6\xcf\x1a\x92\xd3\x68\xed\x38\x6b\x53\xe8\xd8\x17\x62\xea\x6c\x79\xb0\x8d\xe9\xbd\xa0\x91\xbc\xd1\xda\x83\xb3\x7f\x6d\x13\xd0\x51\xa9\x60\xb7\x59\xf7\x47\x85\xd2\x44\x91\xbf\x63\x70\xeb\xf9\x06\xb2\x7d\xd9\x76\x63\x4b\x3d\x03\xd6\x95\x35\x9a\x03\xc2\x98\x45\x04\xe1\xea\x14\xd8\xef\xfd\x3d\x09\x18\x5f\x49\x5b\xb4\xd7\x9e\x56\xfc\x14\x35\x55\xc4\x17\x1f\x94\x31\xe0\x88\x59\x62\x8f\x74\xc2\xa4\x94\x79\x46\x36\xdb\x7f\x22\x50\xa0\xed\xc5\xcb\xb9\x7f\x07\x3b\x01\x4e\x71\xa6\xc8\x59\xf1\x46\xb6\x95\x53\xd7\x42\xb0\xbe\x81\x50\x8a\x64\xc4\x31\x34\xc2\xac\xc6\x8c\x4f\x1b\x7e\x70\x1d\xb6\x61\x4c\xa3\x47\xad\x2c\x51\x09\x27\x05\x8e\x66\x8a\x6a\xd3\xeb\x5b\x59\xca\x6b\xa3\x5c\x5f\x14\x02\x7d\xe6\xda\x27\x12\x03\x82\xae\xfc\x03\x17\xfd\xfd\x6b\x33\xe4\x42\x79\x38\xcb\x64\xaa\xf6\x8d\x8d\xf0\xdc\x77\x0c\x84\xb1\x12\xbf\x91\xf1\xae\x7d\xda\xb2\xb6\xd6\xd6\x67\x4e\xb3\x98\x07\x94\x61\x36\x2d\xeb\xd9\xef\x16\xf6\x65\xff\xcc\xd2\x3e\xe5\x73\x86\xf1\x80\x57\x76\x3e\xe6\x58\x4e\xbd\x61\x9b\xeb\x91\xbc\xd1\x96\x96\x64\x9a\x66\x4b\x9b\xa1\xc1\xca\xa7\x2f\xe5\x26\x86\x00\xdc\x81\x09\xf1\xd2\xcf\xf6\x4c\x84\xb2\xa1\xce\x
ea\x0d\x5c\xb1\x34\xd1\xf3\x3d\x11\x6d\x21\xf4\x29\x52\x86\x29\x85\xae\xce\x8b\x39\xbd\x2f\xb7\xb2\xbc\x2f\xbf\x52\x37\x3f\xd5\x4c\x1a\x9d\x36\x41\x68\x83\x56\x9f\x10\xf8\x37\x3e\x03\x92\x56\xfc\xa7\x12\x3c\xbf\xf0\xef\x59\xc0\xda\xf7\x88\xec\x7a\xbd\xa8\x0d\x64\x54\xf5\xb2\x10\x24\xb3\xf0\x5e\x90\xbe\x9e\x39\x15\x36\xae\x11\xc9\x63\x66\x77\xf4\x7d\x47\x0a\xc7\x45\x96\x2f\x1d\x49\x33\xe5\xeb\xdc\xcd\xfe\x32\x1c\x18\x29\xdc\x5c\xb0\xc0\x8d\x7a\xbd\xcf\x8c\xd9\xa3\xd1\x69\x72\xa4\x08\x01\xb4\x97\xcc\x34\xf3\x2c\x0d\xc0\x1e\xb3\x87\x8a\x5e\x88\x7d\xe6\x81\xac\xaf\x35\x45\xf8\x89\xfc\x99\x90\x70\xc5\xe7\xf8\x66\xfc\xcc\xe4\xb2\xc6\xf4\xa4\x71\x16\x72\xa4\x1a\xe9\x64\xf5\x22\xe8\x63\x2f\xce\x08\x94\x53\x50\xb6\xbc\xbf\x5d\x76\xde\xa0\x95\x09\x3b\x77\xf2\x1f\x78\x35\xeb\x7e\x70\x1b\xe2\x3e\x93\x26\x93\x7e\x4b\xe3\x3f\xeb\x3a\x0d\x9d\xe0\x0d\x10\xe8\x25\x35\x63\x31\x8c\x8c\x2a\x15\xf4\xc0\xdd\x9a\xd3\x26\x7e\x1f\x2e\xe0\x14\x0e\xd9\x10\xca\xe7\x3d\x15\x7c\x8b\x75\x51\xe7\x31\xbc\xe9\x0a\xe9\xd4\xcd\x6d\x81\x6f\x9c\x70\x97\xef\xde\xd8\x67\xf8\xc9\xc6\x23\xbc\xb4\x34\xbf\xf6\x79\xde\xb9\xb5\x9a\x47\x29\x2e\xfb\xa3\x95\xe9\xf1\x76\xf9\xe1\x7c\xc6\xd0\x70\xbc\x27\x65\x90\x5c\x1d\xec\x0b\xee\xdf\xdf\x04\x3a\x54\x58\x76\x74\xcd\x29\x78\xa9\xe7\xcd\xaf\x4c\xb2\x16\xb2\x41\x0b\xf7\xc8\xe9\xb3\x28\xb5\x70\x68\x46\xe7\xbb\x1a\x69\x5c\xd5\x11\x5d\x04\xf1\x38\x7d\xd2\xfc\x51\x0d\x09\xa5\x88\x47\xce\x1b\xa3\x3e\xeb\xe6\x58\xff\xb6\x93\x5b\x0e\xf8\xd6\x4a\x91\x7e\x72\x6a\xa2\x11\x2c\xe2\x2c\x3e\x6d\x2b\xc8\xad\xd1\x29\x64\x55\x4b\xf8\x25\xc5\x3c\xc1\xa9\x7b\x7b\xf4\x99\x66\xe9\x06\x99\x18\x90\x29\x83\x39\xfa\x41\xea\xef\xa2\x83\x33\x91\x65\x3f\xc1\xbf\xe5\xed\x21\xd5\x7c\x5a\xd3\x42\x9e\xde\xdf\x82\xe6\x33\xce\x6a\x33\x7d\xf5\x86\xa1\x0b\xc8\x01\x53\x1a\x7a\xaa\x37\xfa\x57\x54\xb4\x4b\xf6\x6f\x5d\x96\xb3\x25\x38\xeb\x66\x36\x0a\x50\xfa\x66\xc6\xd0\xb7\xca\x7d\x81\x7e\xf6\x4a\x1d\x71\xad\x0a\x30\x6b\x11\xe5\xa4\xa3\xfb\x28\x61\xb6\x28\xcf\x47\xbf\x2d\x76\x36\x0c\x21\xe1\x34\xed\x76\x3b\x
44\x00\x95\xb3\x1f\xad\xda\x94\xc2\x36\x2c\xe3\x63\xe0\x96\xde\x27\x3d\x61\x83\x92\x50\xb3\x50\xfc\xa2\x49\x7e\xef\xdf\x25\x74\x58\x13\x17\x35\x52\xea\xe6\x8b\x83\xfa\xfa\x7a\x86\xe1\x39\xed\xc9\x26\xca\xd8\x54\xd2\x26\x51\x8e\xc2\x24\x7e\xd7\xb4\x25\x86\xb0\xbd\xf0\x51\x99\x23\xb0\xd3\xe3\x8e\xd2\xa4\xf6\xe1\x5d\xc6\x26\x3d\xec\xf3\x95\x86\xab\xfb\x2c\x5c\x1c\x2d\xba\x7f\x21\x72\xcb\x13\xb9\x33\x61\x15\xbd\x98\xf4\x0f\x9b\xb3\x2b\x01\xa0\x5a\x56\xef\x33\x19\xda\xc6\xf8\xc0\x2f\x63\x5f\x75\x7b\x44\xa0\x61\xbb\x28\x67\x40\xf4\x1d\x2d\xdf\xd6\x70\x26\xe9\xe9\xdb\x21\xe3\x44\xb0\x41\x86\x66\x13\x0e\x7f\xc8\x46\x46\xd0\xff\xd5\x6e\x30\x30\xac\x32\x46\x66\xe6\xb2\xfa\x80\xc1\xd2\x5b\x9e\x54\xc4\xc0\x93\x3f\x6a\xf0\x2e\x0e\x5c\x9a\x54\xfb\xc6\x57\x81\x2a\x89\x3e\x17\x58\xdc\x93\x68\xe6\xe0\x3f\xa3\x85\xf8\x55\x3c\x7f\x86\xa2\xdc\x91\xc7\xb2\xd5\x69\x86\x5e\x18\x5e\x07\xcc\xfc\x65\x53\x51\x9b\x45\x05\x1c\xc0\x54\x08\xc2\xb3\xb8\xb5\xd8\xdf\x9c\x6b\x93\x2e\x3d\x0c\xa2\x86\xa0\x44\xfe\xcb\xdd\xbf\xcc\x53\x51\x54\xf6\x98\x39\xd0\x84\xb5\xd9\x72\x0e\x62\x88\x82\x6d\x02\xda\x5f\xb2\x8d\x5c\x18\x18\x9a\xed\x8d\x56\x7f\xd8\xe4\x71\xaf\xdf\x74\xa5\xf4\x03\xd9\x7b\xf8\x5f\xe3\x0f\x6b\xc9\x76\xf1\xe8\xff\xb9\x09\x22\x41\xe8\x53\x8f\x19\x9b\x84\x04\x00\xce\xad\x33\x4f\xc5\x74\xb2\x9a\x3f\x51\xc0\xd7\x9f\xa3\x96\x6a\x83\x93\x85\xaf\x80\x61\x65\x42\x6a\x0e\x74\x3e\x85\x53\x50\xc1\x49\xea\x2c\x07\x99\x07\x33\x12\x5a\x59\x5f\x03\xff\xfc\x8f\x75\x9b\x02\xf7\x5e\xd9\x59\x29\x89\x66\x86\xd3\x32\xc5\x6b\x25\x1c\x50\x29\x9e\xd1\x94\xff\xd6\x57\x44\x98\xf9\x95\x64\xd6\x59\xb4\x9f\x93\x5d\x8a\x1e\xbc\xcd\x5e\x31\x02\x71\xd2\x43\x8a\x93\x60\xb4\x8e\x3f\xea\x15\x3f\x52\x01\x1f\xca\x08\xa3\x3d\xbd\xdf\x37\xdf\x17\x19\xd0\xa3\x34\xe4\xf1\xe4\x69\xa7\xfb\x66\x53\x07\x9f\x86\xd4\x14\xd1\x30\x9f\x5b\x27\x2e\x31\xa1\x71\x3d\x1d\xd3\x32\x21\x84\xd8\xb0\x1e\xea\x34\x9c\xa9\xa8\xc9\x56\xd2\xab\xf0\x5b\x65\x16\x33\x47\x6a\x1b\xd1\x1a\x07\x63\x5d\x35\xc4\xbc\x61\x78\x63\x0d\xe5\x5a\x91\xd5\x83\xc0\x5f\x77\x
b1\xc8\xa9\xf7\x4f\x42\x71\x48\x94\x4d\xcd\xdf\x00\xa6\xbb\x5e\x87\x04\x96\x53\x2e\xf8\x13\xfa\xe8\x7e\xc1\xb3\x73\x49\x74\x84\x8f\x3c\x48\x76\xdd\x1e\xcb\x31\xd3\x0d\xbc\xff\xb2\xa2\x96\xa6\x16\x57\x5f\xb9\x0a\xe1\xde\xb3\xbe\xfe\xcc\x6d\x7f\x1e\x6e\x45\xe6\x67\x58\xc5\xfe\x3c\xdd\x24\xd2\x64\x46\x75\xfb\x0f\xce\xc9\xc2\x5d\x5e\x11\xc8\x81\x05\x02\xea\x96\xac\x0c\x87\x74\x47\x0b\xf2\x33\x53\xf5\x64\x23\x9e\x04\x85\xc5\xf7\x01\x68\xe9\x48\x5a\x4e\x64\x13\x48\x6b\xfc\xb2\xa1\xf2\xe1\xca\xe7\x98\x93\x6b\x25\xf0\xd7\xfa\x1b\x1c\x8c\x7f\x7c\x4d\xf0\x1f\x1d\xb5\x4b\xf6\xb4\x2a\x78\x04\x4d\x91\x58\xd1\xc6\x70\x52\x54\xde\xdf\x19\x48\xd4\xb4\xfc\x0e\xfe\xe4\x11\x33\xbe\x05\xee\x3f\x8a\x5c\x77\x6d\x51\xbe\xd1\xf6\xc8\x9f\xc7\x69\x3f\x35\xb1\xdc\xa8\x6b\x30\x7f\xeb\x86\x0e\x88\x70\x1a\x42\x2d\x5e\x18\x25\x5b\x2e\xa0\x17\x83\x63\x57\xc1\xfd\x6c\xbd\x50\x9b\x7a\xe1\x4c\x38\xbd\x78\x15\xc9\xb2\xe7\x59\x1c\xf5\xb7\xa2\x1f\x21\x1d\x50\xcc\x09\xe3\xe4\x6d\x58\xce\x16\x8b\x3d\xf6\x0a\x7f\x03\x8f\xb7\xc6\xe4\x19\xbe\x15\x83\x99\x3f\xd7\xd9\x76\x62\xd4\x4a\x53\x2f\x62\x1c\xec\xa3\x72\x99\xa6\x45\xae\xf0\x62\x42\x96\x0f\x18\xa7\xa1\x71\xf0\x59\xf2\x4b\xc0\x80\xe1\x83\x5b\x6d\xaa\x23\x75\xa6\xc5\xba\xa1\x4e\x8f\xcd\x4c\x18\x6c\x97\x12\x32\x14\x67\x75\xae\xad\x6c\x26\x11\x29\x59\x4e\x5b\xea\xba\xd8\xe8\xc2\xf4\x08\x91\x47\x2b\x29\xab\x9a\xaf\xc9\xde\xac\x1f\xff\x42\x30\x7a\xf0\xcf\x83\x20\xb4\x62\x02\xa4\xc2\x5f\x3f\x26\x38\xcc\xbe\x59\x5e\x9c\xdf\xd8\x98\x59\xd2\x0d\x5d\xf2\x7b\xc1\x14\x23\x8c\xea\x1d\x86\xb6\x3a\x88\x87\xfe\xaf\xa4\x13\x26\x6f\x04\x02\x17\x8e\x1a\x52\x42\x80\xce\x8b\x10\x87\xa7\xf6\x52\x11\x2e\xd9\x2e\xf8\xf6\x4f\xa8\x38\x08\x50\x04\x6f\x5d\x08\xd1\xac\xc5\xa4\x7d\x2a\x6b\x4b\xa0\xd5\x4a\x87\xaf\x98\x3d\x37\xfc\x61\xac\x1c\xae\xd0\xfc\xaa\xb9\x82\x36\x45\x45\x9f\x8d\x1c\x0e\xd7\xba\x59\x0f\x08\xa7\x1e\xb9\x25\x88\x2d\xf8\xe8\x2b\x89\x27\xa0\x21\x03\xea\xb9\xcf\x5e\x50\x27\x1c\xa3\xbf\x2a\xbf\xc9\x74\xb2\xf6\xc7\x80\x2e\xa4\x58\x46\x27\x4e\x8c\x34\x36\xe3\xe5\xe1\xd8\x
e8\x2b\x94\xb4\xf5\xd7\x21\xfc\xe2\x72\x2f\x71\xf4\x65\x0b\x2c\x19\x87\xb7\x04\x81\xe5\x9e\x77\xfe\x5b\x19\xc6\xeb\x75\xee\x53\x77\x8c\x3d\x38\x37\x8c\x03\x21\xa5\x43\xda\x09\xc4\xb4\xbe\x37\xc0\x31\x93\xcd\x6c\xc0\xbd\x8f\x26\x67\xb2\x38\x87\x8f\x27\x32\xdc\x39\x58\x24\xfd\xae\x22\x8f\xe9\x32\xa9\x6a\x77\xda\x12\x06\x28\xd7\x80\xdd\xba\x8d\x10\x17\xdf\x44\x9c\xb8\x23\x51\x48\xbe\x4e\x16\x32\x67\xd7\xaa\x8a\x00\x89\x9e\xd9\x09\x03\xa9\xf5\xa7\x31\xd5\xa7\xf8\xf2\x31\x82\x19\xea\xe2\x6e\x5c\x62\x9b\x34\x02\x4c\xc1\xfa\x06\x32\xe6\x1f\x81\xc3\x88\xb8\x65\xf6\xf5\xf2\x52\x72\x64\x35\x32\x6e\x07\x98\x8f\xa1\x2b\x30\x2b\xe2\xf5\xf1\xfc\x98\x89\x3c\x61\x76\xac\x9b\xd0\x75\x47\xc5\xd3\xff\x3d\xd3\x80\xb4\xb2\xf2\xbd\x65\xc1\xf7\x66\x16\xe2\xe7\x0d\xfa\xd5\x31\xa1\x7e\x49\xb0\x10\x3e\x0e\xf8\x17\x9d\x22\x2d\x4d\x6f\x2b\x33\x10\x02\x4f\x81\x8f\xb8\x8e\xf0\xac\xbb\xd0\xf1\x6c\x32\xf4\xf8\x33\x21\xe4\x1a\x8d\x7f\x53\x84\x51\x12\x88\x2d\x44\x03\xfe\x9d\x95\x46\x9a\x29\xf8\x50\xfb\x98\x0a\x1e\x24\x22\x78\x8b\xc1\xf0\x25\x88\x7a\xc2\xe8\x28\x61\xd3\x47\xaa\x2e\xc9\xce\x21\xba\x05\x6c\x95\x25\xe1\x41\x9b\xac\x72\xcd\x75\xd8\xba\xe3\xd8\x80\x7f\xb4\x64\xff\xa9\x3e\xfc\x2a\x7e\x78\xa7\x51\xda\x4f\xd7\x21\x9b\x42\xae\xfb\x24\xdb\x49\xb0\x4e\x18\x5d\x6f\xa3\xfe\x19\x65\x92\x84\x99\xef\xdf\x03\x9e\x5f\xfb\x45\x0e\x4d\x00\x4c\x68\x1d\x86\x71\x09\x6c\x0f\x4f\x7e\xd6\x8c\x18\xd0\x89\x59\xc9\x1f\x57\xf6\xb5\x2a\xd0\xfd\x5c\x4c\xe7\x0c\x99\xd7\x13\x8a\xdf\x72\x8c\x07\x72\xee\xc6\x60\xa5\xd9\xeb\x62\x51\x8f\xef\xd3\xfa\x0d\x87\x69\xa5\x2b\xc9\x12\xd9\x4c\x68\x97\x38\x9b\xd5\x4e\xac\x73\x16\x8e\x3e\xc4\x31\x19\xf9\x1c\x10\x0a\xa9\xbf\xb1\x33\x77\x8e\xbf\x17\x77\x76\x1c\x95\x36\xb2\x0d\x22\x7a\x92\x5c\x6c\x62\x5e\xbd\x34\x03\x12\xd3\xab\x0a\xa2\xb6\x49\x5a\x74\x5f\xd8\x12\x31\x86\x6a\x19\x0b\xd2\xf4\xf4\x30\xf9\x37\xb3\xb9\xe7\x40\x01\x26\x2a\xf9\x8a\x06\x6e\x80\x4c\xa1\x86\x2e\x5b\xbf\x6b\x30\x2e\xeb\x3c\x1b\x96\xcd\x44\x13\x81\x5b\xa8\x96\x64\x0a\x61\x14\xdf\x5d\x52\x3a\x28\x82\xeb\x4a\x24\x
a0\xa9\xa0\x65\x37\x75\x61\x6d\xc6\x88\x4f\xd2\x80\xf8\xc8\x81\xa5\x40\xbe\xa8\x2e\x97\x2e\x78\xa3\xa0\xde\xdf\xbd\xb2\x16\xcc\x14\xf7\xe4\x60\xa2\xf6\xdb\x32\xef\xf8\x7c\x93\xdc\x6e\x5e\x4a\x10\x72\x3d\x27\x29\xae\x90\xd9\xc4\xbd\xd0\x8c\xf3\x3d\xc6\xbd\x97\xde\x47\x88\xc2\xbc\x0d\x1b\x12\xc5\xe1\x30\xa4\x79\x6f\xee\xe3\x63\x24\x81\x55\x59\xf4\x07\x4b\x96\x6e\xc5\x30\x47\x6b\x98\xb0\x57\x74\xd0\x2b\x12\x0c\xe1\xd3\xe6\xe5\x9d\xec\x75\xd8\x96\x36\xc3\xdf\xe0\xe9\xcc\xad\xce\x61\x6c\x9b\xa4\x44\x0f\xd3\x01\x03\x6d\x30\xaa\xe5\x35\x32\x0f\x11\x11\x62\xb2\xd4\x5b\x6e\x50\x37\x16\x3a\x28\x0e\x86\x44\x72\x5a\xbc\xe8\xe8\x9f\xfc\xe9\x90\xf8\xfe\xaa\xfa\x46\xbb\x39\x7b\x70\xd9\x11\xbf\xf5\xa3\x43\x3d\xb2\x9e\x8f\xb4\xe9\x44\xc4\x5d\x4a\xa8\x98\x73\xff\xc6\x4e\x8a\xef\x68\x9d\xb5\x8e\x35\x3d\x69\x64\x3a\xc0\xfa\xc8\x25\xe6\xf8\xd7\x10\x13\xe8\xfe\x88\xe2\x8a\x1a\x4b\x1b\xa6\x3a\xcc\xc9\xef\xad\x65\x68\xae\xda\xa4\xaf\xae\x2a\xfc\x06\x8f\xd5\xa8\x28\x67\x83\x85\x2e\x9e\x19\x8e\x49\x85\xd0\xa7\xf0\xc4\x12\x7f\x1e\x04\x6e\xa5\xa7\x6c\x60\xf6\xbb\xcc\x78\xd3\xfe\x44\x4c\x4f\x79\xf2\x98\x14\x32\x3e\x84\xed\xa8\xda\x54\xd8\x6d\x06\x20\x19\x14\xfb\x08\x3d\x64\x1f\xa5\xd0\x66\x8e\x1c\x04\x90\x1a\xbd\x2c\xec\x41\x9b\xa9\xad\x71\x38\x41\x23\x68\xab\x44\x04\x0d\x86\x2e\x61\x73\xc6\x29\x78\x31\x30\xfc\x06\x38\x24\x83\x49\x4c\x02\x5a\xe7\x41\xef\xd1\x0f\xa5\x6d\x8d\xc3\x8b\x45\x63\x3b\x93\xc5\xbb\x5e\xd6\x17\x98\x1f\xf8\x9f\xfd\x41\xd6\x7f\xc0\xde\xae\xf1\xc0\x06\xf2\xa9\x4d\x4e\x0c\x87\xf6\xae\xa6\xfc\x03\xbc\xf5\x0b\x37\x8e\x38\x21\xf2\xb3\x8b\x78\x2c\x95\x7d\x04\x26\x9f\xeb\xbf\x14\xc8\x2d\xb5\x54\x25\xc9\xab\xfa\x9c\xa7\x34\x8b\x92\xb9\xec\xfc\xfa\x23\xbb\x4c\xad\xb5\x0f\xe1\xfa\xec\x75\xda\xa4\x7d\xd9\xc4\xdd\x4a\x30\x6f\x87\x52\x8a\xdb\x2e\x08\x49\xfa\xe0\xb1\xdb\x4b\x5a\x7b\xcd\xeb\xb2\x61\x05\x9b\x81\x29\x13\x34\x28\xf0\x8a\x67\x94\x9d\x41\xa4\x45\x3a\x79\xd7\xef\xd1\xe4\x19\xfb\xda\x25\xa4\x49\xc3\xbb\xb1\x86\x36\x4c\xc5\x82\xbc\xd4\x4e\x15\x7a\x53\x1b\x2f\x37\xc1\xfc\x88\x70\x
a2\xad\xb4\x41\x11\xca\x64\xf0\x98\xc8\xdc\x16\xb9\x2f\xae\xc3\x98\xd4\x19\x12\x14\xfe\x8c\x95\x9d\x9a\xd2\xb5\x0f\xd2\xaa\x19\xe4\xff\x67\x7b\xbf\x68\x1e\x78\xba\x2c\x88\xde\x3b\x55\xfa\x91\x8a\xfc\x52\x4b\xb3\x34\xe0\x34\x49\x41\x2c\xb9\x42\xc0\xfc\x0e\xb6\x0b\xe7\x20\xc7\xe0\xc1\xd3\x0b\xe7\xbf\x72\xd0\xb8\xf5\xc8\x3a\xd7\x16\xf8\xdd\xa3\xa6\x23\xbe\x2c\x24\x1c\xe6\xa3\xda\xab\x80\x6b\x7c\x87\x05\x63\x23\xe7\x8e\x81\x5d\x87\xb3\x06\x0f\x9e\xa4\xb9\x37\x46\xea\x7e\xef\xdb\x3a\xd0\x34\x22\x65\x88\xff\x03\x95\xa2\x6c\x92\x6a\x91\xb9\x97\xbf\xd5\x44\xf0\x36\x69\x69\xc6\x06\xfd\x03\x1e\x34\x72\x5c\xe3\x79\x9f\x86\xb5\x36\xaa\xfc\x41\xf2\xdd\x12\xca\xdb\xd6\xbc\x58\xc7\x6f\x5c\x39\x1c\x8c\x58\x24\x6c\x69\xba\x97\x2b\x08\x89\x58\x4a\x2a\x98\x3a\x3e\xa8\xd3\xfc\x3a\x87\xf5\xcb\xde\xf4\x63\x61\x5d\xe3\x3f\xa0\xa1\x42\x1f\x17\xc5\xcb\x4a\x0d\x04\x30\x12\x2a\x32\xb8\x69\xfa\xe4\xf7\x09\xda\x09\x49\xe6\xc9\x7a\xc0\xd4\xb1\x46\xa9\x43\x68\x6c\x8a\x0e\xfa\xaf\x4e\x99\x51\x5d\x52\x63\x4c\xc7\x95\x1f\x1b\x77\x45\x9c\xc4\x44\x35\x0d\x1c\x63\x2a\xb5\x89\x94\x74\xae\x9a\x2d\xf9\xed\x29\x50\x13\xdd\x64\x5c\x66\xdd\x8a\x0a\x11\xbb\x2b\x22\x97\xaf\xa4\x81\xd6\x57\xb8\x5c\xd0\x5d\xcb\x29\xb0\x34\xa2\x54\x10\xae\x5e\xf2\x21\x68\x3b\x8a\x9c\x82\xdf\x32\x94\x7c\x9e\x6a\xb3\xf1\xd2\xe1\x6c\xe7\x94\x17\x81\x27\x08\xc7\x21\xc0\x05\x67\xf0\x55\x99\x2e\xf0\xbf\x4e\x6e\x2e\x31\xf3\xe0\x87\xcb\x01\x6c\xbf\x64\x8c\xc6\xb5\x38\x44\x8c\x46\x12\xa4\x80\x97\x17\xed\xfc\x65\xbd\x93\x7a\x31\x37\xbb\xd6\x76\x40\xdd\x82\x0f\x5f\xca\xb9\x40\x81\xa2\x04\xe1\x97\x99\x3a\x44\xcb\x3a\x30\x4c\x3c\x68\x7c\x55\x92\x55\x7e\xce\x22\x93\x8d\x58\x2e\xef\x15\x9c\xe2\xeb\xba\x88\xdc\x64\x90\x41\x2c\x04\xfa\x61\xba\x44\xee\x47\x05\x60\xef\xd2\xce\xe0\xd8\xb7\xf8\x9d\xea\x72\x25\xd0\x67\x00\xf8\xcc\xcf\x3d\x6b\xa2\x1f\x3c\x3b\x21\xb3\x6d\xb5\x9e\x35\x04\xbb\x6c\xa4\x5e\xb9\x32\x53\xdf\x5b\x86\x17\xa0\x62\xbc\x3d\x49\x51\x51\x82\xb3\x0c\x47\x0c\x33\xf1\xca\x4e\xd4\x68\x40\x8f\x50\xad\xfc\xa5\x9d\x2a\x9c\x7d\x1b\x07\xd7\x
18\x59\x2a\xf8\xc7\x65\x06\xef\xe6\x69\x76\x1b\x5a\xfb\x1d\x4a\x7f\xb9\x61\x8c\x37\xa9\xae\xf7\xb7\x9b\x5d\x7e\xf7\x06\xa5\xf0\x72\x70\xef\x4e\x02\x22\x48\xf7\x82\x7c\xc8\x00\x38\xe8\x0e\x51\x88\x89\xc4\x4a\x4e\x8f\x9d\xd9\xdd\xf0\x75\x23\xba\x87\x4b\x40\xf6\xf9\x10\xe3\xe3\xb8\x5c\x77\x87\x36\x39\x38\xcd\x4e\x1d\xb8\x79\xf4\x06\x1a\x40\xe5\x84\xac\xc7\x47\xc7\xd7\x54\x91\x3a\xc1\xd4\x3a\xd1\xb4\xb0\xe0\x40\x54\x90\x5a\x84\x34\xc8\xe0\x19\x2b\x48\x68\xd4\xf2\x7f\x83\x05\x08\xfe\x7b\xa1\xad\xb2\xf8\x36\x19\xee\x12\x1a\x53\x77\xd9\xd2\xf7\x0d\xbd\x18\x72\x59\xd4\x94\x41\x46\x01\x1c\x6b\xdd\x2c\x80\xae\xb6\x9d\x76\xa2\xca\xa7\xf4\x71\xcc\xbb\xe3\x26\xe2\x5d\x82\xa8\x70\xf9\xec\x25\xa1\xb2\x4a\x33\x47\xc6\x68\x72\xa3\x42\x4c\x7a\x5d\xbe\xda\x7b\xce\x9d\x01\xb0\x52\x22\x35\xf9\x3a\x59\x59\x83\xf1\x61\xd8\xa5\xce\xfd\x3c\x80\xd8\x34\xef\x11\x29\xd3\x7e\x44\x79\xc8\x50\x07\xa0\x7f\x28\x7f\x8d\x47\xa8\xb3\x5b\x39\xbc\xc4\x13\xab\xcb\x0f\x41\x77\x30\xe0\x08\xf7\x7e\x2d\xbf\xd7\xea\x51\x18\x61\x78\x25\x93\x63\x3b\x2f\x3d\xbe\xb4\x50\xb2\xdb\x66\x1d\x81\x1d\xa4\x6b\x2c\x78\xef\x3c\x91\xe4\x6f\xff\x34\x9b\x60\x9b\x57\x04\x58\xc0\xa9\xcf\x5a\x8b\x35\x83\x18\x8d\xc8\xbb\xa4\xa1\x8b\x91\xea\x2e\xc3\x88\x5c\x81\x2d\x92\x24\xb5\x62\x6f\xf3\xa3\x70\xcf\x9b\x4c\x4e\x38\xe7\x55\x37\x5b\x75\xce\xc8\x7a\x77\x2b\x0a\xb5\xa8\xc8\x60\xeb\x81\x45\x54\xf2\xe1\x93\xeb\x9d\x3a\x93\x75\xfc\xe6\x3f\xde\x0f\xe4\x9c\x0d\x56\x70\xca\xf9\x60\xb6\xdd\xab\x84\x95\xd1\x93\xb4\xd4\xbc\x3b\x2b\x2a\x77\x31\xd1\x6e\x74\xa7\xe4\x94\x05\xe5\xe0\x21\x1c\x0a\x8a\x89\x3a\x6d\xf3\x08\xba\x21\xb0\xfd\xee\xcb\x06\x8d\xf7\x70\xe2\xed\x6f\xcf\x36\x8e\x6a\x34\xe7\x85\xbb\x1e\x34\x22\x88\x1c\x16\xe9\x90\x3c\xbf\xc8\x90\x24\x8c\x70\x98\x7e\xdd\x7f\x97\x4e\xed\x4a\x3e\x2a\xeb\x9b\x90\x05\x19\xe3\xf9\xe3\x19\x7c\x9e\x42\xdc\x42\xf4\x0a\x1c\xe5\x7b\x5c\x7d\xfc\x08\x5b\x7c\x70\xab\xad\xb9\x11\x87\x61\x8b\x75\xc6\xdb\xb9\xc6\x96\xd5\x2a\x0a\x4d\x42\xa1\x9f\xe3\x8c\xcb\x0d\xf6\x03\x0f\x79\x87\xbe\x75\xe3\x7b\x53\x8a\x53\x31\x
b1\x7f\xff\xe9\xea\x40\xfc\x4c\x3e\x62\x40\xbd\x3e\xa3\x3b\xeb\xda\x94\x0f\xe1\xab\x93\xf3\xc9\x8d\x47\xbd\xbf\xe0\xbc\xe7\x6e\xe9\x6d\xe9\x6f\xca\xd1\xc1\xf7\xf5\x89\x24\xd2\x8e\xb3\xe0\xf8\x78\x79\x2c\x17\xa8\x3b\x5c\xdc\xbb\x39\x7b\x0d\xe2\x82\x81\x0e\x84\x84\xb7\xfc\x9e\x0c\x40\xa7\xa3\x72\x5d\x09\xd4\x37\x4a\x4e\x50\x6f\xc9\xd4\xb3\xce\xf0\x3b\xa7\x7e\xe7\x3b\x3d\xe9\xdb\x33\x76\x2a\xef\x7b\xc2\x29\x27\x5c\x3e\x95\x0e\xa9\x0b\x17\x10\xff\xe6\x17\x79\x86\x19\xc7\x17\x56\xb0\x1c\xf2\xc6\x53\xe0\x1b\x03\x01\x3e\x7b\x60\x3a\xd1\xf4\x0f\xb7\x5e\x9b\x8a\x49\xd8\x45\x35\x6c\x8c\xf3\x50\x5a\x59\x08\x51\x8d\xd9\xeb\x18\xcf\xe6\x61\x03\x4e\xf0\x68\x97\xa3\x9c\x3a\xd8\x15\xc1\xe3\x46\xc9\x2f\x29\xd0\x6f\x44\x9b\xa3\xd9\x0a\xef\x44\x4b\x39\x06\x18\x02\xd6\x0c\xf3\x68\x13\x12\x9c\x06\x4f\xd6\xdb\x12\x7d\xa7\xe4\x77\xdc\x7b\x66\x1c\x00\x12\x77\xc7\x74\x20\x09\x4a\x57\x63\xf0\x5b\x6b\xc3\x5f\xb8\x2b\x52\xf6\x51\xf1\x24\x30\x78\x1b\xdf\x1d\x94\x42\x74\xe3\x13\x85\x0a\xff\xe3\x14\xc4\x75\x52\x47\xaa\xb3\x6a\x34\x62\x7e\xe7\x72\x92\x21\x66\xc7\x26\x11\x0e\xb6\x5b\xcf\x3c\x75\x21\xb5\x69\x4d\xde\x80\xdf\xcb\x82\xcf\x0d\x8b\x38\x73\xc5\xe7\xcb\x64\xf8\xe1\x03\x25\xcf\x8c\xa1\x8a\x5c\xf9\xbd\xf9\xd4\xd2\x3d\x32\x78\xf0\x76\x65\xd9\xe7\x5a\x28\xba\x26\x59\xc3\x46\x5f\x12\x33\x2c\x5e\xf3\x5f\x5c\x67\x2c\xdb\x29\x7c\x94\x14\xd0\x9a\x48\xfb\xe0\xc3\x12\x15\x65\x51\xb1\xff\xe1\xaf\xe9\x1a\x1e\x25\x8f\x02\x91\xb9\xca\x0f\x23\x00\x0e\xbc\x77\xca\x9b\x08\xee\x68\x23\xb2\x29\x5d\x93\x6e\x4d\x80\x39\xe6\x2d\x63\x6c\x4c\x79\x97\x03\x91\xce\xbc\xa1\x48\xdd\xb9\x1d\x53\xe9\x6b\x3f\x12\xca\x7a\x68\xd7\x74\x42\x31\x88\x55\x37\xa4\x6d\x29\xe5\x7c\x57\x4c\x30\x53\x96\x8b\xb3\x1d\x7f\x62\x7c\x68\x06\x91\xa4\xbc\x01\x57\x47\x85\x22\x40\x7c\xb2\xa8\xdb\xba\xf7\xb9\x45\xa7\x56\x53\xad\x25\x29\xf6\xc9\x48\x63\xa8\x87\x48\x2d\x44\x26\x3d\x30\xb6\xd8\x1d\x61\x31\x9e\x24\x44\x01\x59\xde\xb2\x33\x78\x63\xde\x71\xc4\x34\x27\xea\xa1\xb5\xf8\xc9\xcd\x58\xd6\x01\x77\x3a\x5c\xa3\x32\x09\x33\x46\xe6\xc7\xa9\x77\x
b2\xb0\x7b\x30\x91\xa1\x85\xa0\x09\xa6\xf7\x18\xbd\xc5\x42\xca\x4a\x44\x8e\x65\x56\x50\x32\x05\x9e\x60\x71\xfd\xda\xe0\x3d\x94\x75\xec\x0a\x24\xa4\xb9\x63\x00\xd7\x5f\x9b\x84\x2a\x86\x19\xd3\x19\x88\x41\xa7\x1c\xeb\xc7\x59\xf1\x32\x1d\xdb\x65\x87\xf3\x03\x1b\x3a\xa8\xa6\x7d\xb3\x95\x5b\x73\x0a\x5b\x23\x62\xd5\xa6\xe0\x2d\xe2\xef\x35\xd4\x86\x75\x65\xf5\x82\x52\x98\xbf\xbb\x46\xf2\xe7\x25\x33\x94\xd8\x6d\x74\xd8\xde\x38\x2e\x02\xd3\x8d\xed\xfe\x8d\xe9\xf8\x69\x73\x3c\xf6\x0a\xd6\x92\x4d\x99\xfe\x6b\xe4\x6b\x24\xf0\x2a\x16\xd7\x3b\x88\x7d\x4f\x66\x8d\xaa\x4c\x80\x5c\xba\x94\x36\xda\x7d\x51\x75\x04\x7e\x89\x55\x92\xb1\x8f\x58\xb0\x65\x65\xaf\x88\xae\x9c\x69\x1b\x3d\x77\x4e\x14\xb3\x05\x4c\x0b\xbf\x08\x54\x2f\x04\x23\xe3\x38\x5a\xbe\x2c\x49\x3c\x1e\xe6\xbc\x52\x35\xb0\xd6\xed\x8d\x6c\x49\x4f\x2a\x5a\x5b\xed\x35\x70\x68\x30\x71\x81\xc4\xfb\xb5\x93\xa3\x17\x88\xe2\x10\x2c\x86\xd2\x87\x15\x31\xdc\x63\x01\x38\x52\xb1\x8b\x4a\x17\xac\xba\x8d\x4a\x6c\x8d\x76\x8a\xa3\xb1\x0a\xf9\xad\x21\x94\x4e\x42\x65\xd4\x2a\x30\x2e\x61\xda\x83\x64\x1b\x86\x81\xcb\x41\x90\xcc\x19\x93\xda\x98\x9d\x57\xe1\x3f\x3b\x89\xdf\xa2\x51\xf1\x78\x4f\xea\x2c\x23\x06\x99\x5f\x1f\x79\x36\x98\x0f\x49\xc3\x6c\x7d\xcf\xb6\x38\x3b\x1d\x2a\xe6\xa6\x3f\x7d\x31\x53\x52\xfe\x67\xef\x73\xed\x95\x6d\x48\x91\x74\x17\xf0\x56\x6c\x13\x45\xc7\xa9\x0a\x0e\xd1\x19\x76\x2f\x14\x17\xfe\x7a\x89\x85\x18\x87\x0a\x4e\x0c\xc4\x7b\x4a\xe9\x66\xda\x4a\xe7\x3e\xcd\xe3\x1e\x52\xc7\x35\x37\x67\x80\x98\x00\xad\x3b\xad\x03\x46\x8f\x8f\x0a\x0e\xfd\xa0\x06\x74\xc0\x40\x50\xb9\xdf\xba\x3d\x87\x45\x1d\x52\xb3\x57\x8d\x70\xc7\xbd\xc3\x41\xc7\xd9\x3f\xde\xd6\x38\xe5\x2c\x0c\x0a\x45\xff\xe1\xcf\xb8\x12\x73\xda\xce\x1a\x3c\x39\x42\xda\xa4\x35\xc8\x34\x05\xbd\x4a\xff\x83\x3f\x53\x08\xf9\x81\x33\x19\x4d\x4c\x77\xad\xd4\x42\xfb\x4d\xee\xef\x7e\x26\x56\xfe\x64\xf8\x67\xe5\xda\x49\xf0\x9a\x14\x12\xf4\xbf\xd6\xcc\xa2\xec\x21\x50\x36\xfb\x32\x79\x58\xa2\x8a\x78\x41\x66\xaa\x90\x95\xdc\xf7\x91\x39\x79\x69\xd6\x2e\x4e\x27\xb8\x5f\xbd\x38\xdb\x00\x80\x
3f\x0d\x08\x29\x6a\x26\xf6\x2f\x4e\x1d\x5b\x02\x97\xcb\x5e\xb1\x0f\xf5\x4f\x13\x5c\xb6\x07\xd2\x5c\x2b\x52\xb7\xf3\x75\xd4\x95\xc3\x17\x91\x76\x13\x2f\x92\xba\x47\x8a\x94\x41\xe8\x54\xe8\x0c\x7f\xd2\x08\x53\x23\x02\x64\x7f\x33\x00\x1e\x44\x77\x10\x50\x09\x1c\x71\x63\x65\xdc\x02\x9a\xab\xec\xea\x3b\xcf\xee\x63\x87\xa0\x99\xc2\xea\xd6\xa3\xf6\xa3\xa7\xa1\xa3\x4a\x46\x8f\x6f\x69\xa6\x68\x82\xfd\xda\xd8\x62\xec\x47\x3a\x22\x79\xd9\xf9\x0f\x71\x23\x80\x5a\xde\x71\x63\xab\x55\x3b\x79\xfe\x9c\x5f\xfb\x5a\xf9\x83\x6d\x6f\x96\x8a\x88\x7b\x6e\xe4\x53\xd1\x8b\x3a\x76\x8d\x5b\x10\x23\x82\x55\x43\xfb\x30\x3d\x6a\x37\x2b\x7b\xd2\xfa\x08\x7d\x37\x31\xcd\x63\xd0\x48\xe3\x38\x3e\xad\xd6\xce\x85\xb1\x8f\x8f\xe6\x28\x9d\x15\x23\x64\x20\x51\xd5\x6f\x24\x3d\xc4\x86\x3f\x6a\x69\x8f\xfe\xce\xc0\xcd\xd2\x11\x1d\x27\x10\xab\xfc\x89\x51\x99\x8c\x2d\x96\x2f\x5a\xc0\x14\x10\xdb\xef\xa8\xf9\x75\x68\x3d\x7b\xd4\x00\xbf\xad\x85\x7b\x46\x9d\x33\xf0\xab\x61\xab\xe8\x61\xb7\x56\x27\xb0\x7e\x8d\xeb\xfd\x39\xd7\x6a\x41\x37\xc9\xcd\xa7\x94\xb5\x10\xd9\x77\xcb\xed\x2d\x5d\x9f\x2b\x84\x81\x05\xa7\xa3\xa2\xea\x6c\x14\x84\x0a\x02\xbc\x0f\xd7\xe8\x70\x88\x8c\x43\x24\x3a\x8e\xb2\xfb\xc8\xbd\xab\xf9\x9c\x43\xe9\x33\x34\x0e\x4f\xae\x3c\x0a\xe8\xb8\xbc\xb2\x61\x8b\xec\x09\x5d\xf9\x92\x3d\xdc\x16\x05\x80\x24\x21\x60\x3c\x0e\xda\x97\xb7\xc0\x0d\x7f\xf8\x90\x3c\xf2\x8f\xc9\x07\x63\x3e\x41\x27\x28\x95\x3a\x8e\x36\xe9\x7f\xc5\xac\xb5\x13\x26\xb4\xb4\xcd\x45\xf8\xa9\xd0\x17\x88\x0f\xc9\x1e\xe6\x12\xc4\xd7\x50\xcd\x1c\x45\x9c\x9a\x27\x93\xfa\x84\x5a\xf4\xdc\xb1\x70\x14\x0e\x10\x3b\x15\x36\x85\xc6\x4a\xf2\x00\xaf\x1b\x9a\x37\x08\xbd\x62\x90\xd3\xe7\x5c\xee\x20\x62\x6d\x5e\x71\xa6\x0a\xf6\x9d\x60\x48\xfc\x8d\xf0\x63\x19\xc1\x24\x50\xcb\x8d\x4b\xa1\x1e\x9e\x52\xf3\xb8\x64\x1d\xe2\x76\xd0\x8d\xb1\x64\x5f\x9c\xaf\xb6\x05\xc0\xff\xb6\xd6\x51\x67\x34\xe3\xfe\x41\xcc\x5b\xd0\xf7\x45\x88\x30\x1f\x50\xab\x50\x0f\xa3\x1c\xa6\xf4\x6c\xf3\x54\x34\x6d\x42\x23\xbd\x01\x94\x52\xed\x0f\x42\x7c\xd3\xdb\x1f\xf6\x8a\x2e\xa7\x8a\xad\x51\x
5f\xbf\xb7\x53\x58\xd1\x3c\x1c\xcf\xf1\x7a\x28\xe4\x19\xc2\xf5\x8e\xa7\x5c\x6f\x77\xef\x4f\xea\xdf\xec\x58\xdb\xca\x47\x7a\x3d\xbc\xa5\x5f\x3c\x2a\x95\xc9\x1d\x03\xf5\x96\x9b\x01\x5e\x6c\xfd\x98\x83\x0f\xe2\x42\x50\x1c\x57\x21\xea\x28\x52\xd0\x9d\x62\xbc\x53\x9b\x8a\x82\xbd\x2e\x25\x59\x43\xb8\x0e\x2c\x4a\x8d\xd8\xc3\xec\x2c\xd8\x4e\x68\xa7\x49\x96\x00\x9b\x62\x82\xcd\x62\xeb\x07\x6b\x4e\xa4\x41\x4d\xce\x69\xf9\xd3\x40\xbb\x0a\xe0\xca\x71\xac\x39\xa9\x1a\xb5\x08\x0a\x2e\xe3\x5c\x0e\xef\xee\xa6\xb8\xd9\x0c\x09\x81\x0f\x4b\xa1\xdc\x58\xe4\x2e\x82\x23\xe6\x17\x40\x21\xa7\x69\xb5\xf2\x19\x47\x36\xdd\x66\xc2\xc8\xff\xb5\x03\xdd\x31\xa4\xa6\x67\xa2\xf1\x7e\xe3\x3f\x20\xa8\x4a\x28\x83\xde\x91\x14\x55\x32\xe8\xdc\xf8\x86\x25\x35\x79\x91\x6f\xda\xf0\x5d\x28\x8e\xc3\xb6\xcf\xd0\x8f\xff\xca\x2e\xe9\xec\xd4\xa3\x52\x23\xb2\xdc\x91\x85\xb4\x75\x09\x74\x7a\x0b\x7c\x2c\x09\xf7\xf1\xe5\x2e\x44\xc3\x5f\x11\x56\x62\xc3\x4c\x78\x6d\x44\x61\x28\x6b\x05\xe3\x5f\xb4\xeb\xc6\xf4\x5e\x9e\x23\xba\xc3\x3f\x6c\x16\x03\x56\xe4\xb8\xc5\x68\x02\xf4\x4e\xa9\xcc\x0d\xe4\x2f\x76\x42\x58\xbe\x09\x95\xf0\xe0\xd1\x8c\x43\x4d\xec\xe8\x99\x45\x12\x42\x3c\x5c\x50\xb9\x7c\x28\x6d\x2f\x27\x89\x92\xf2\x1f\x55\xb6\xac\xd4\xf9\x2a\xbd\x9d\x28\x1b\x77\xb5\xd5\x5a\xfc\x70\x2f\xab\x81\xcf\xe5\x0f\x63\xf9\x9d\xfb\xc4\x6d\x08\x57\x83\x85\x29\xc0\xeb\xed\xf3\x37\x51\xd7\xb7\xad\x26\x6e\x78\xab\x41\xf9\x93\x46\x6b\x14\xbc\x79\xbd\x8b\x39\x31\xe3\xdb\xfc\xc2\xaa\xef\xdb\x84\x88\xe9\xb7\x85\xc3\x23\xf3\x96\x8c\x37\x8c\xc5\xb5\x9f\x48\x2a\x23\xc5\x4c\x3b\x92\xe9\x58\x50\xe0\x23\x0b\xe0\x46\x7f\xaa\x0d\xc4\xf9\x2c\x24\x5c\x3f\x92\x71\x68\x71\x5e\xcc\xe8\x06\x15\x07\xd2\xdb\x00\x53\x10\xe2\x2b\x61\xc4\x47\xdd\x9b\x1b\x38\x6c\xdf\x2a\x4a\x18\x74\xf2\x7a\x3f\x8a\xc7\xf5\xe4\x8f\xa6\xfa\xf7\x44\xe6\xb6\x64\x38\x2d\x01\xa8\x15\x0e\xbb\xc3\x0e\xae\x06\x96\x79\x86\xd8\x3e\xc9\x9e\x7a\x51\x50\x67\x2f\x5c\x10\xaa\xcb\xff\x30\x34\x6a\x00\xae\xdb\x82\x38\x23\xa3\x93\x50\xf8\x7c\x9f\x8a\x22\xea\x29\xbb\x67\xaa\xb2\x30\xed\x7f\xa6\x
f1\x1e\x5f\x3a\xc8\xe3\x57\xdb\x9f\xd5\x0b\xb5\x4a\x4c\x01\x83\xd8\x0f\x89\xef\x27\xe9\xf3\x9d\x9d\x11\xd8\x33\x11\x36\x56\xb9\x97\x32\xe6\x8a\x10\xaf\x61\x2d\x8f\x94\xdf\x5f\xfb\xec\xd6\x7c\x17\x55\xea\xff\xcc\x30\x45\xd2\x62\x5a\xd7\xae\xe2\x36\x61\xfb\x56\xa9\x50\xc0\x48\xee\x2d\x31\x4d\x19\x79\x76\xe9\x9c\x16\x39\xe6\xf1\xd4\xcb\xa4\x8c\x0e\x7c\xe4\x1f\xc3\x33\xe6\x4a\xb7\x68\x2a\x37\x8d\x30\xd5\x4f\xf0\x43\x8d\x2e\x44\x77\x72\x05\x40\x6c\x20\x0f\x5f\x69\xc8\xd8\xa4\x47', 2)
| 39,514.333333
| 118,483
| 0.750049
| 29,623
| 118,543
| 3.001013
| 0.008946
| 0.00135
| 0.001316
| 0.00108
| 0.000439
| 0.00027
| 0.00027
| 0
| 0
| 0
| 0
| 0.313921
| 0.000084
| 118,543
| 3
| 118,483
| 39,514.333333
| 0.436073
| 0
| 0
| 0
| 0
| 0.333333
| 0.999173
| 0.999173
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
40532bfda262955b21e09ca583401cd37a717869
| 1,914
|
py
|
Python
|
design_patterns/builder/car/car_builder.py
|
amommendes/python-fluente
|
04b3b257802c368acd313f93ef42aee8c4564d9c
|
[
"Apache-2.0"
] | null | null | null |
design_patterns/builder/car/car_builder.py
|
amommendes/python-fluente
|
04b3b257802c368acd313f93ef42aee8c4564d9c
|
[
"Apache-2.0"
] | 2
|
2020-04-02T06:03:34.000Z
|
2021-08-23T20:40:32.000Z
|
design_patterns/builder/car/car_builder.py
|
amommendes/python-fluente
|
04b3b257802c368acd313f93ef42aee8c4564d9c
|
[
"Apache-2.0"
] | null | null | null |
from design_patterns.builder.builder import GenericBuilder
class Car:
    """Product of the builder pattern: a car with configurable feature slots."""

    def __init__(self):
        # Every feature starts unset; a builder fills them in step by step.
        self.seats = None
        self.engine = None
        self.trip_computer = None
        self.gps = None

    def describe(self):
        """Print a one-line-per-feature summary of the current configuration."""
        summary = (
            f"Seats: {self.seats}\n"
            f"Engine: {self.engine}\n"
            f"Has Trip Computer: {self.trip_computer}\n"
            f"Has GPS: {self.gps}"
        )
        print(summary)
class CarBuilder(GenericBuilder):
    """Concrete builder that assembles ``Car`` products one feature at a time."""

    def __init__(self):
        self.car = None
        self.reset()

    @property
    def product(self) -> Car:
        """Hand back the finished car and immediately start a fresh one."""
        finished = self.car
        self.reset()
        return finished

    def reset(self) -> None:
        # Discard any work in progress and begin an empty product.
        self.car = Car()

    def setSeats(self, number) -> None:
        """Record the number of seats on the car under construction."""
        self.car.seats = number

    def setEngine(self, engine):
        """Record the engine on the car under construction."""
        self.car.engine = engine

    def setGPS(self, has_gps) -> None:
        """Record whether the car has GPS."""
        self.car.gps = has_gps

    def setTripComputer(self, has_trip_computer) -> None:
        """Record whether the car has a trip computer."""
        self.car.trip_computer = has_trip_computer
class Manual():
    """Product of the builder pattern: the car's user manual."""

    def __init__(self):
        # Mirrors Car's feature slots; a ManualBuilder documents each one.
        self.seats = None
        self.engine = None
        self.trip_computer = None
        self.gps = None

    def describe(self):
        """Print the manual header followed by the documented features."""
        header = "This your manual. Please read carefully.\n"
        body = (
            f"Seats: {self.seats}\n"
            f"Engine: {self.engine}\n"
            f"Has Trip Computer: {self.trip_computer}\n"
            f"Has GPS: {self.gps}"
        )
        print(header + body)
class ManualBuilder(GenericBuilder):
    """Concrete builder that assembles ``Manual`` products.

    Deliberately keeps the same attribute name (``car``) as ``CarBuilder`` so
    the two builders are interchangeable behind ``GenericBuilder``.
    """

    def __init__(self):
        self.car = None
        self.reset()

    @property
    def product(self) -> Manual:
        """Hand back the finished manual and immediately start a fresh one."""
        finished = self.car
        self.reset()
        return finished

    def reset(self) -> None:
        # Discard any work in progress and begin an empty manual.
        self.car = Manual()

    def setSeats(self, number) -> None:
        """Document the number of seats."""
        self.car.seats = number

    def setEngine(self, engine):
        """Document the engine."""
        self.car.engine = engine

    def setGPS(self, has_gps) -> None:
        """Document whether the car has GPS."""
        self.car.gps = has_gps

    def setTripComputer(self, has_trip_computer) -> None:
        """Document whether the car has a trip computer."""
        self.car.trip_computer = has_trip_computer
| 23.341463
| 126
| 0.605016
| 237
| 1,914
| 4.738397
| 0.172996
| 0.11398
| 0.078362
| 0.053428
| 0.869991
| 0.869991
| 0.869991
| 0.869991
| 0.869991
| 0.869991
| 0
| 0
| 0.275862
| 1,914
| 81
| 127
| 23.62963
| 0.810245
| 0
| 0
| 0.785714
| 0
| 0.035714
| 0.130685
| 0.048092
| 0
| 0
| 0
| 0
| 0
| 1
| 0.321429
| false
| 0
| 0.017857
| 0
| 0.446429
| 0.035714
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
405aa680c1de6c0b4849737fa056666ef4701897
| 57
|
py
|
Python
|
src/eve_utils/skel/api/log_trace/__init__.py
|
pointw-dev/eve-utils
|
2af7d28232e0d22975cc8141fda27a3870694223
|
[
"MIT"
] | null | null | null |
src/eve_utils/skel/api/log_trace/__init__.py
|
pointw-dev/eve-utils
|
2af7d28232e0d22975cc8141fda27a3870694223
|
[
"MIT"
] | null | null | null |
src/eve_utils/skel/api/log_trace/__init__.py
|
pointw-dev/eve-utils
|
2af7d28232e0d22975cc8141fda27a3870694223
|
[
"MIT"
] | null | null | null |
import log_trace.trace_level
import log_trace.decorators
| 19
| 28
| 0.894737
| 9
| 57
| 5.333333
| 0.555556
| 0.375
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 57
| 2
| 29
| 28.5
| 0.90566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
405b1e05e30665caf1b56d799edb993551a9f5b1
| 217
|
py
|
Python
|
thirdfile.py
|
1frenchfrog1/testgithub
|
7191e44d75ba50438d9c2fe8f0fcf9fcf3a2a991
|
[
"MIT"
] | null | null | null |
thirdfile.py
|
1frenchfrog1/testgithub
|
7191e44d75ba50438d9c2fe8f0fcf9fcf3a2a991
|
[
"MIT"
] | null | null | null |
thirdfile.py
|
1frenchfrog1/testgithub
|
7191e44d75ba50438d9c2fe8f0fcf9fcf3a2a991
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
def printme3(str):  # noqa: parameter name shadows the builtin, kept for caller compatibility
    """Print the passed string to stdout; returns None."""
    print(str)
def printme3too(str):  # noqa: parameter name shadows the builtin, kept for caller compatibility
    """Print the passed string to stdout; returns None."""
    print(str)
| 18.083333
| 51
| 0.686636
| 31
| 217
| 4.806452
| 0.516129
| 0.09396
| 0.174497
| 0.187919
| 0.751678
| 0.751678
| 0.751678
| 0.751678
| 0.751678
| 0.751678
| 0
| 0.011905
| 0.225806
| 217
| 11
| 52
| 19.727273
| 0.875
| 0.506912
| 0
| 0.75
| 0
| 0
| 0.46
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0
| 0
| 0.5
| 0.75
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
|
0
| 10
|
40b2cc030d6a30c0712c0c57fa3c66ef5a1d7839
| 61,832
|
py
|
Python
|
ESMF/src/addon/ESMPy/src/ESMF/util/mesh_utilities.py
|
joeylamcy/gchp
|
0e1676300fc91000ecb43539cabf1f342d718fb3
|
[
"NCSA",
"Apache-2.0",
"MIT"
] | 1
|
2018-07-05T16:48:58.000Z
|
2018-07-05T16:48:58.000Z
|
ESMF/src/addon/ESMPy/src/ESMF/util/mesh_utilities.py
|
joeylamcy/gchp
|
0e1676300fc91000ecb43539cabf1f342d718fb3
|
[
"NCSA",
"Apache-2.0",
"MIT"
] | 1
|
2022-03-04T16:12:02.000Z
|
2022-03-04T16:12:02.000Z
|
ESMF/src/addon/ESMPy/src/ESMF/util/mesh_utilities.py
|
joeylamcy/gchp
|
0e1676300fc91000ecb43539cabf1f342d718fb3
|
[
"NCSA",
"Apache-2.0",
"MIT"
] | null | null | null |
"""
Utilities for regridding with Meshes
"""
import sys

# Both numpy and ESMF are hard requirements.  Catch only ImportError so that
# unrelated failures (syntax errors inside the package, KeyboardInterrupt,
# etc.) propagate untouched, and chain the original exception so the real
# cause is visible in the traceback.
try:
    import numpy as np
except ImportError as exc:
    raise ImportError('The Numpy library cannot be found!') from exc

try:
    import ESMF
except ImportError as exc:
    raise ImportError('The ESMF library cannot be found!') from exc
def mesh_create_5_pentahexa(coord_sys=None):
    '''
    PRECONDITIONS: None
    POSTCONDITIONS: A 5 element Mesh has been created.
    RETURN VALUES: \n Mesh :: mesh \n

      2.5      8    10 --------11
              / \   /           |
      2.1    7   9 12           |
             |   |   5         /
             |  4  |          /
             |     |         /
      1.0    4 ------- 5 ------- 6
             |       | \    3    |
             |   1   |   \       |
             |       | 2   \     |
     -0.1    1 ------- 2 ------- 3

            -0.1     1.0     2.1 2.5

          Node Ids at corners
          Element Ids in centers

    Note: This mesh is not parallel, it can only be used in serial
    '''
    # Two parametric dimensions, and two spatial dimensions
    mesh = ESMF.Mesh(parametric_dim=2, spatial_dim=2, coord_sys=coord_sys)

    num_node = 12
    num_elem = 5

    nodeId = np.array([1,2,3,4,5,6,7,8,9,10,11,12])
    # Flat (x, y) pairs, one pair per node id.
    nodeCoord = np.array([-0.1,-0.1,  #node id 1
                          1.0,-0.1,  #node id 2
                          2.1,-0.1,  #node id 3
                          0.1, 1.0,  #node id 4
                          1.0, 1.0,  #node id 5
                          2.1, 1.0,  #node id 6
                          0.1, 2.1,  #node id 7
                          0.5, 2.5,  #node id 8
                          1.0, 2.1,  #node id 9
                          1.5, 2.5,  #node id 10
                          2.5, 2.5,  #node id 11
                          2.5, 2.1]) #node id 12
    # All nodes owned by PET 0 (this mesh is serial-only).
    nodeOwner = np.zeros(num_node)

    elemId = np.array([1,2,3,4,5])
    # The bare integers 5 and 6 are corner counts: a pentagon and a hexagon.
    elemType=np.array([ESMF.MeshElemType.QUAD,
                       ESMF.MeshElemType.TRI,
                       ESMF.MeshElemType.TRI, 5, 6])
    # I believe python connections are 0-based
    # elemConn=np.array([1,2,5,4,         # elem id 1
    #                    2,3,5,           # elem id 2
    #                    3,6,5,           # elem id 3
    #                    4,5,9,8,7,       # elem id 4
    #                    5,6,12,11,10,9]) # elem id 5
    # 0-based node indices, concatenated per element.
    elemConn=np.array([0,1,4,3,         # elem id 1
                       1,2,4,           # elem id 2
                       2,5,4,           # elem id 3
                       3,4,8,7,6,       # elem id 4
                       4,5,11,10,9,8])  # elem id 5

    mesh.add_nodes(num_node,nodeId,nodeCoord,nodeOwner)
    mesh.add_elements(num_elem,elemId,elemType,elemConn)

    return mesh, nodeCoord, nodeOwner, elemType, elemConn
def mesh_create_4_ngons():
    '''
    PRECONDITIONS: None
    POSTCONDITIONS: A 4 element Mesh has been created.
    RETURN VALUES: \n Mesh :: mesh \n

      2.25   6 ------ 7 ----- 8 ------ 9
             |  \             /       |
             |    \     4   /         |
             |      \     /           |
             |        \ /             |
      1.00   |         5              |
             |        / \             |
             |  1   /  2  \     3     |
             |    /         \         |
      0.25   1 ------ 2 ----- 3 ------ 4

            0.25    0.75  1.0  1.25  1.75

          Node Ids at corners
          Element Ids in centers

    Note: This mesh is not parallel, it can only be used in serial
    '''
    # Two parametric dimensions, and two spatial dimensions
    mesh = ESMF.Mesh(parametric_dim=2, spatial_dim=2)

    num_node = 9
    num_elem = 4

    nodeId = np.array([1,2,3,4,5,6,7,8,9])
    # Flat (x, y) pairs, one pair per node id 1..9.
    nodeCoord = np.array([0.25, 0.25,
                          0.25, 0.75,
                          0.25, 1.25,
                          0.25, 1.75,
                          1.0, 1.0,
                          2.25, 0.25,
                          2.25, 0.75,
                          2.25, 1.25,
                          2.25, 1.75])
    # All nodes owned by PET 0 (this mesh is serial-only).
    nodeOwner = np.zeros(num_node)
    elemId = np.array([1,2,3,4])
    # Corner counts per element: pentagon, triangle, pentagon, triangle.
    elemType=np.array([5,3,5,3])
    # 0-based node indices, concatenated per element.
    # NOTE(review): elemType (5+3+5+3) implies 16 connectivity entries but 19
    # are listed below -- verify against the upstream ESMPy mesh utilities.
    elemConn=np.array([0,1,4,6,5,
                       1,2,4,
                       2,5,4,
                       2,3,8,7,4,
                       4,7,6])
    mesh.add_nodes(num_node,nodeId,nodeCoord,nodeOwner)
    mesh.add_elements(num_elem,elemId,elemType,elemConn)

    return mesh, nodeCoord, nodeOwner, elemType, elemConn
def mesh_create_5():
    '''
    PRECONDITIONS: None
    POSTCONDITIONS: A 5 element Mesh has been created.
    RETURN VALUES: \n Mesh :: mesh \n

      4.0   31 ------ 32 ------ 33
            |         | 22  /   |
            |    21   |   /     |
            |         | /    23 |
      2.0   21 ------ 22 ------ 23
            |         |         |
            |    11   |    12   |
            |         |         |
      0.0   11 ------ 12 ------ 13

           0.0       2.0       4.0

          Node Ids at corners
          Element Ids in centers

    Note: This mesh is not parallel, it can only be used in serial
    '''
    # Two parametric dimensions, and two spatial dimensions
    mesh = ESMF.Mesh(parametric_dim=2, spatial_dim=2)

    num_node = 9
    num_elem = 5

    nodeId = np.array([11,12,13,21,22,23,31,32,33])
    # Flat (x, y) pairs, one pair per node id.
    nodeCoord = np.array([0.0,0.0,  # node 11
                          2.0,0.0,  # node 12
                          4.0,0.0,  # node 13
                          0.0,2.0,  # node 21
                          2.0,2.0,  # node 22
                          4.0,2.0,  # node 23
                          0.0,4.0,  # node 31
                          2.0,4.0,  # node 32
                          4.0,4.0]) # node 33
    # All nodes owned by PET 0 (this mesh is serial-only).
    nodeOwner = np.zeros(num_node)

    elemId = np.array([11,12,21,22,23])
    # Three quads in the lower-left, two triangles in the upper-right corner.
    elemType=np.array([ESMF.MeshElemType.QUAD,
                       ESMF.MeshElemType.QUAD,
                       ESMF.MeshElemType.QUAD,
                       ESMF.MeshElemType.TRI,
                       ESMF.MeshElemType.TRI])
    # 0-based node indices, concatenated per element.
    elemConn=np.array([0,1,4,3,  # element 11
                       1,2,5,4,  # element 12
                       3,4,7,6,  # element 21
                       4,8,7,    # element 22
                       4,5,8])   # element 23
    # Flat (x, y) center coordinates, one pair per element id above.
    elemCoord = np.array([1.0, 1.0,
                          3.0, 1.0,
                          1.0, 3.0,
                          2.5, 3.5,
                          3.5, 2.5])

    mesh.add_nodes(num_node,nodeId,nodeCoord,nodeOwner)
    mesh.add_elements(num_elem,elemId,elemType,elemConn, element_coords=elemCoord)

    return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemCoord
def mesh_create_10():
    '''
    PRECONDITIONS: None
    POSTCONDITIONS: A 10 element Mesh has been created.
    RETURN VALUES: \n Mesh :: mesh \n

      4.0   41 ------ 42 ------- 43 ------ 44
            |         |          |  33  /  |
            |    31   |    32    |    /    |
            |         |          |  / 34   |
      2.5   31 ------ 32 ------- 33 ------ 34
            |         |          |         |
            |    21   |    22    |   23    |
            |         |          |         |
      1.5   21 ------ 22 ------- 23 ------ 24
            |         |          |         |
            |    11   |    12    |   13    |
            |         |          |         |
      0.0   11 ------ 12 ------- 13 ------ 14

           0.0       1.5        2.5       4.0

          Node Ids at corners
          Element Ids in centers

    Note: This mesh is not parallel, it can only be used in serial
    '''
    # Two parametric dimensions, and two spatial dimensions
    mesh = ESMF.Mesh(parametric_dim=2, spatial_dim=2)

    num_node = 16
    num_elem = 10
    # 4x4 node grid; ids use a row-column scheme (row tens digit, column ones digit).
    nodeId = np.array([11,12,13,14,21,22,23,24,31,32,33,34,41,42,43,44])
    # Interleaved (x, y) pairs, one pair per node, in nodeId order.
    nodeCoord = np.array([0.0,0.0, 1.5,0.0, 2.5,0.0, 4.0,0.0,
                          0.0,1.5, 1.5,1.5, 2.5,1.5, 4.0,1.5,
                          0.0,2.5, 1.5,2.5, 2.5,2.5, 4.0,2.5,
                          0.0,4.0, 1.5,4.0, 2.5,4.0, 4.0,4.0])
    # Serial mesh: every node is owned by PET 0.
    nodeOwner = np.zeros(num_node)
    elemId = np.array([11,12,13,21,22,23,31,32,33,34])
    # Eight quads; the top-right cell is split into two triangles (33, 34).
    elemType = np.array([ESMF.MeshElemType.QUAD,
                         ESMF.MeshElemType.QUAD,
                         ESMF.MeshElemType.QUAD,
                         ESMF.MeshElemType.QUAD,
                         ESMF.MeshElemType.QUAD,
                         ESMF.MeshElemType.QUAD,
                         ESMF.MeshElemType.QUAD,
                         ESMF.MeshElemType.QUAD,
                         ESMF.MeshElemType.TRI,
                         ESMF.MeshElemType.TRI])
    # Connectivity uses 0-based positions into nodeId, counterclockwise per element.
    elemConn = np.array([0,1,5,4,
                         1,2,6,5,
                         2,3,7,6,
                         4,5,9,8,
                         5,6,10,9,
                         6,7,11,10,
                         8,9,13,12,
                         9,10,14,13,
                         10,15,14,
                         10,11,15])
    # Element center (x, y) pairs, in elemId order.
    elemCoord = np.array([0.75, 0.75, 2.0, 0.75, 3.25, 0.75,
                          0.75, 2.0, 2.0, 2.0, 3.25, 2.0,
                          0.75, 3.25, 2.0, 3.25, 3.0, 3.5, 3.5, 3.0])

    mesh.add_nodes(num_node,nodeId,nodeCoord,nodeOwner)

    mesh.add_elements(num_elem,elemId,elemType,elemConn, element_coords=elemCoord)

    return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemCoord
def mesh_create_50(domask=False, doarea=False):
    '''
    PRECONDITIONS: None
    POSTCONDITIONS: A 50 element Mesh has been created.
    RETURN VALUES: \n Mesh :: mesh \n

      3.75  81 ------ 82 ----- 83 ------ 84 ------ 85 ------ 86 ------ 87 ------ 88
            |         |        |         |         |         |         |  77  /  |
            |    71   |   72   |    73   |    74   |    75   |    76   |    /    |
            |         |        |         |         |         |         |  / 78   |
      3.25  71 ------ 72 ----- 73 ------ 74 ------ 75 ------ 76 ------ 77 ------ 78
            |         |        |         |         |         |         |         |
            |    61   |   62   |    63   |    64   |    65   |    66   |    67   |
            |         |        |         |         |         |         |         |
      2.75  61 ------ 62 ----- 63 ------ 64 ------ 65 ------ 66 ------ 67 ------ 68
            |         |        |         |         |         |         |         |
            |    51   |   52   |    53   |    54   |    55   |    56   |    57   |
            |         |        |         |         |         |         |         |
      2.25  51 ------ 52 ----- 53 ------ 54 ------ 55 ------ 56 ------ 57 ------ 58
            |         |        |         |         |         |         |         |
            |    41   |   42   |    43   |    44   |    45   |    46   |    47   |
            |         |        |         |         |         |         |         |
      1.75  41 ------ 42 ----- 43 ------ 44 ------ 45 ------ 46 ------ 47 ------ 48
            |         |        |         |         |         |         |         |
            |    31   |   32   |    33   |    34   |    35   |    36   |    37   |
            |         |        |         |         |         |         |         |
      1.25  31 ------ 32 ----- 33 ------ 34 ------ 35 ------ 36 ------ 37 ------ 38
            |         |        |         |         |         |         |         |
            |    21   |   22   |    23   |    24   |    25   |    26   |    27   |
            |         |        |         |         |         |         |         |
      0.75  21 ------ 22 ----- 23 ------ 24 ------ 25 ------ 26 ------ 27 ------ 28
            |         |        |         |         |         |         |         |
            |    11   |   12   |    13   |    14   |    15   |    16   |    17   |
            |         |        |         |         |         |         |         |
      0.25  11 ------ 12 ----- 13 ------ 14 ------ 15 ------ 16 ------ 17 ------ 18

           0.25      0.75     1.25      1.75      2.25      2.75      3.25      3.75

          Node Ids at corners
          Element Ids in centers

    Note: This mesh is not parallel, it can only be used in serial
    '''
    # Two parametric dimensions, and two spatial dimensions
    mesh = ESMF.Mesh(parametric_dim=2, spatial_dim=2)

    num_node = 64
    num_elem = 50
    # 8x8 node grid; ids use a row-column scheme (row tens digit, column ones digit).
    nodeId = np.array([11,12,13,14,15,16,17,18,
                       21,22,23,24,25,26,27,28,
                       31,32,33,34,35,36,37,38,
                       41,42,43,44,45,46,47,48,
                       51,52,53,54,55,56,57,58,
                       61,62,63,64,65,66,67,68,
                       71,72,73,74,75,76,77,78,
                       81,82,83,84,85,86,87,88])
    # Interleaved (x, y) pairs, one pair per node, in nodeId order.
    nodeCoord = np.array([0.25,0.25, 0.25,0.75, 0.25,1.25, 0.25,1.75, 0.25,2.25, 0.25,2.75, 0.25,3.25, 0.25,3.75,
                          0.75,0.25, 0.75,0.75, 0.75,1.25, 0.75,1.75, 0.75,2.25, 0.75,2.75, 0.75,3.25, 0.75,3.75,
                          1.25,0.25, 1.25,0.75, 1.25,1.25, 1.25,1.75, 1.25,2.25, 1.25,2.75, 1.25,3.25, 1.25,3.75,
                          1.75,0.25, 1.75,0.75, 1.75,1.25, 1.75,1.75, 1.75,2.25, 1.75,2.75, 1.75,3.25, 1.75,3.75,
                          2.25,0.25, 2.25,0.75, 2.25,1.25, 2.25,1.75, 2.25,2.25, 2.25,2.75, 2.25,3.25, 2.25,3.75,
                          2.75,0.25, 2.75,0.75, 2.75,1.25, 2.75,1.75, 2.75,2.25, 2.75,2.75, 2.75,3.25, 2.75,3.75,
                          3.25,0.25, 3.25,0.75, 3.25,1.25, 3.25,1.75, 3.25,2.25, 3.25,2.75, 3.25,3.25, 3.25,3.75,
                          3.75,0.25, 3.75,0.75, 3.75,1.25, 3.75,1.75, 3.75,2.25, 3.75,2.75, 3.75,3.25, 3.75,3.75])
    # Serial mesh: every node is owned by PET 0.
    nodeOwner = np.zeros(num_node)
    elemId = np.array([11,12,13,14,15,16,17,
                       21,22,23,24,25,26,27,
                       31,32,33,34,35,36,37,
                       41,42,43,44,45,46,47,
                       51,52,53,54,55,56,57,
                       61,62,63,64,65,66,67,
                       71,72,73,74,75,76,77,78])
    # 48 quads plus two triangles (77, 78) splitting the top-right cell.
    elemType = np.ones(num_elem-2)*ESMF.MeshElemType.QUAD
    elemType = np.append(elemType, [ESMF.MeshElemType.TRI, ESMF.MeshElemType.TRI])
    # Connectivity expressed in node *ids*, translated to 0-based positions below.
    elemConn = np.array([11,12,22,21,12,13,23,22,13,14,24,23,14,15,25,24,15,16,26,25,16,17,27,26,17,18,28,27,
                         21,22,32,31,22,23,33,32,23,24,34,33,24,25,35,34,25,26,36,35,26,27,37,36,27,28,38,37,
                         31,32,42,41,32,33,43,42,33,34,44,43,34,35,45,44,35,36,46,45,36,37,47,46,37,38,48,47,
                         41,42,52,51,42,43,53,52,43,44,54,53,44,45,55,54,45,46,56,55,46,47,57,56,47,48,58,57,
                         51,52,62,61,52,53,63,62,53,54,64,63,54,55,65,64,55,56,66,65,56,57,67,66,57,58,68,67,
                         61,62,72,71,62,63,73,72,63,64,74,73,64,65,75,74,65,66,76,75,66,67,77,76,67,68,78,77,
                         71,72,82,81,72,73,83,82,73,74,84,83,74,75,85,84,75,76,86,85,76,77,87,86,
                         77,88,87,
                         77,78,88])
    # Map each node id to its index in nodeId (add_elements expects 0-based indices).
    elemConn = np.array([np.where(a==nodeId) for a in elemConn]).flatten()
    # Element center (x, y) pairs, in elemId order (triangle centers offset).
    elemCoord = np.array(
        [0.5, 0.5, 1.0, 0.5, 1.5, 0.5, 2.0, 0.5, 2.5, 0.5, 3.0, 0.5, 3.5, 0.5,
         0.5, 1.0, 1.0, 1.0, 1.5, 1.0, 2.0, 1.0, 2.5, 1.0, 3.0, 1.0, 3.5, 1.0,
         0.5, 1.5, 1.0, 1.5, 1.5, 1.5, 2.0, 1.5, 2.5, 1.5, 3.0, 1.5, 3.5, 1.5,
         0.5, 2.0, 1.0, 2.0, 1.5, 2.0, 2.0, 2.0, 2.5, 2.0, 3.0, 2.0, 3.5, 2.0,
         0.5, 2.5, 1.0, 2.5, 1.5, 2.5, 2.0, 2.5, 2.5, 2.5, 3.0, 2.5, 3.5, 2.5,
         0.5, 3.0, 1.0, 3.0, 1.5, 3.0, 2.0, 3.0, 2.5, 3.0, 3.0, 3.0, 3.5, 3.0,
         0.5, 3.5, 1.0, 3.5, 1.5, 3.5, 2.0, 3.5, 2.5, 3.5, 3.0, 3.5, 3.375, 3.625, 3.625, 3.375])

    elemMask = None
    if domask:
        # Mask out the second element only; sized from num_elem for consistency
        # with the other mesh_create_* fixtures (was a hard-coded 50).
        elemMask = np.ones(num_elem)
        elemMask[1] = 0
    elemArea = None
    if doarea:
        # Quads have area 5, the two triangles 2.5 each; sized from num_elem
        # for consistency with the other fixtures (was a hard-coded 48).
        elemArea = np.ones(num_elem-2)*5
        elemArea = np.append(elemArea, [2.5, 2.5])

    mesh.add_nodes(num_node,nodeId,nodeCoord,nodeOwner)

    mesh.add_elements(num_elem,elemId,elemType,elemConn,
                      element_mask=elemMask, element_area=elemArea, element_coords=elemCoord)

    # TODO: clean this up!  NOTE: unlike the other variants, the default branch
    # also returns elemCoord -- callers depend on that, so it is preserved.
    if domask and doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemMask, elemArea
    elif domask and not doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemMask
    elif not domask and doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemArea
    else:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemCoord
def mesh_create_50_ngons(domask=False, doarea=False):
    '''
    PRECONDITIONS: None
    POSTCONDITIONS: A 50 element Mesh has been created.
    RETURN VALUES: \n Mesh :: mesh \n

      3.75  81 ------ 82 ----- 83 ------ 84 ------ 85 ------ 86 ------ 87 ------ 88
            |         |        |         |         |         |         |         |
            |    71   |   72   |    73   |    74   |    75   |    76   |    77   |
            |         |        |         |         |         |         |         |
      3.25  71 ------ 72 ----- 73 ------ 74 ------ 75 ------ 76 ------ 77 ------ 78
            |  \           /   |         |         |         |         |         |
            |   \    64   /    |         |         |         |         |         |
            |    \       /     |         |         |         |         |         |
            |     \     /      |         |         |         |         |         |
      3.00  |      69           |         |         |         |         |        |
            |     /     \      |         |         |         |         |         |
            | 61 /  62   \  63 |    65   |    66   |    67   |    68   |         |
            |   /         \    |         |         |         |         |         |
      2.75  61 ------ 62 ----- 63 ------ 64 ------ 65 ------ 66 ------ 67 ------ 68
            |         |        |         |         |         |         |         |
            |    51   |   52   |    53   |    54   |    55   |    56   |    57   |
            |         |        |         |         |         |         |         |
      2.25  51 ------ 52 ----- 53 ------ 54 ------ 55 ------ 56 ------ 57 ------ 58
            |         |        |         |         |         |         |         |
            |    41   |   42   |    43   |    44   |    45   |    46   |    47   |
            |         |        |         |         |         |         |         |
      1.75  41 ------ 42 ----- 43 ------ 44 ------ 45 ------ 46 ------ 47 ------ 48
            |         |        |         |         |         |         |         |
            |    31   |   32   |    33   |    34   |    35   |    36   |    37   |
            |         |        |         |         |         |         |         |
      1.25  31 ------ 32 ----- 33 ------ 34 ------ 35 ------ 36 ------ 37 ------ 38
            |         |        |         |         |         |         |         |
            |    21   |   22   |    23   |    24   |    25   |    26   |    27   |
            |         |        |         |         |         |         |         |
      0.75  21 ------ 22 ----- 23 ------ 24 ------ 25 ------ 26 ------ 27 ------ 28
            |         |        |         |         |         |         |         |
            |    11   |   12   |    13   |    14   |    15   |    16   |    17   |
            |         |        |         |         |         |         |         |
      0.25  11 ------ 12 ----- 13 ------ 14 ------ 15 ------ 16 ------ 17 ------ 18

           0.25      0.75 1.0 1.25      1.75      2.25      2.75      3.25      3.75

          Node Ids at corners
          Element Ids in centers

    Note: This mesh is not parallel, it can only be used in serial
    '''
    # Two parametric dimensions, and two spatial dimensions
    mesh = ESMF.Mesh(parametric_dim=2, spatial_dim=2)

    num_node = 65
    num_elem = 50
    # 8x8 node grid plus one extra interior node (69) used by the ngon elements.
    nodeId = np.array([11,12,13,14,15,16,17,18,
                       21,22,23,24,25,26,27,28,
                       31,32,33,34,35,36,37,38,
                       41,42,43,44,45,46,47,48,
                       51,52,53,54,55,56,57,58,
                       61,62,63,64,65,66,67,68,69,
                       71,72,73,74,75,76,77,78,
                       81,82,83,84,85,86,87,88])
    # Interleaved (x, y) pairs, one pair per node, in nodeId order; node 69 is
    # the pentagon/triangle apex at (1.0, 3.0).
    nodeCoord = np.array([0.25,0.25, 0.75,0.25, 1.25,0.25, 1.75,0.25, 2.25,0.25, 2.75,0.25, 3.25,0.25, 3.75,0.25,
                          0.25,0.75, 0.75,0.75, 1.25,0.75, 1.75,0.75, 2.25,0.75, 2.75,0.75, 3.25,0.75, 3.75,0.75,
                          0.25,1.25, 0.75,1.25, 1.25,1.25, 1.75,1.25, 2.25,1.25, 2.75,1.25, 3.25,1.25, 3.75,1.25,
                          0.25,1.75, 0.75,1.75, 1.25,1.75, 1.75,1.75, 2.25,1.75, 2.75,1.75, 3.25,1.75, 3.75,1.75,
                          0.25,2.25, 0.75,2.25, 1.25,2.25, 1.75,2.25, 2.25,2.25, 2.75,2.25, 3.25,2.25, 3.75,2.25,
                          0.25,2.75, 0.75,2.75, 1.25,2.75, 1.75,2.75, 2.25,2.75, 2.75,2.75, 3.25,2.75, 3.75,2.75, 1.0,3.0,
                          0.25,3.25, 0.75,3.25, 1.25,3.25, 1.75,3.25, 2.25,3.25, 2.75,3.25, 3.25,3.25, 3.75,3.25,
                          0.25,3.75, 0.75,3.75, 1.25,3.75, 1.75,3.75, 2.25,3.75, 2.75,3.75, 3.25,3.75, 3.75,3.75,])
    # Serial mesh: every node is owned by PET 0.
    nodeOwner = np.zeros(num_node)
    elemId = np.array([11,12,13,14,15,16,17,
                       21,22,23,24,25,26,27,
                       31,32,33,34,35,36,37,
                       41,42,43,44,45,46,47,
                       51,52,53,54,55,56,57,
                       61,62,63,64,65,66,67,68,
                       71,72,73,74,75,76,77])
    # Mostly quads; elements 61-64 (indices 35-38) form two pentagons (5 nodes)
    # and two triangles around node 69.
    elemType = np.ones(num_elem)*ESMF.MeshElemType.QUAD
    elemType[35] = 5
    elemType[36] = ESMF.MeshElemType.TRI
    elemType[37] = 5
    elemType[38] = ESMF.MeshElemType.TRI
    # Connectivity expressed in node *ids*, translated to 0-based positions below.
    elemConn = np.array([11,12,22,21,12,13,23,22,13,14,24,23,14,15,25,24,15,16,26,25,16,17,27,26,17,18,28,27,
                         21,22,32,31,22,23,33,32,23,24,34,33,24,25,35,34,25,26,36,35,26,27,37,36,27,28,38,37,
                         31,32,42,41,32,33,43,42,33,34,44,43,34,35,45,44,35,36,46,45,36,37,47,46,37,38,48,47,
                         41,42,52,51,42,43,53,52,43,44,54,53,44,45,55,54,45,46,56,55,46,47,57,56,47,48,58,57,
                         51,52,62,61,52,53,63,62,53,54,64,63,54,55,65,64,55,56,66,65,56,57,67,66,57,58,68,67,
                         61, 62, 69, 72, 71, 62, 63, 69, 63, 64, 74, 73, 69, 69, 73, 72, 64, 65, 75, 74, 65, 66, 76, 75,
                         66, 67, 77, 76, 67, 68, 78, 77,
                         71,72,82,81,72,73,83,82,73,74,84,83,74,75,85,84,75,76,86,85,76,77,87,86,77,78,88,87])
    # Map each node id to its index in nodeId (add_elements expects 0-based indices).
    elemConn = np.array([np.where(a==nodeId) for a in elemConn]).flatten()
    # TODO: element coordinates is not supported for meshes containing ngons
    #       (computed here but deliberately NOT passed to add_elements below).
    elemCoord = np.array(
        [0.5, 0.5, 1.0, 0.5, 1.5, 0.5, 2.0, 0.5, 2.5, 0.5, 3.0, 0.5, 3.5, 0.5,
         0.5, 1.0, 1.0, 1.0, 1.5, 1.0, 2.0, 1.0, 2.5, 1.0, 3.0, 1.0, 3.5, 1.0,
         0.5, 1.5, 1.0, 1.5, 1.5, 1.5, 2.0, 1.5, 2.5, 1.5, 3.0, 1.5, 3.5, 1.5,
         0.5, 2.0, 1.0, 2.0, 1.5, 2.0, 2.0, 2.0, 2.5, 2.0, 3.0, 2.0, 3.5, 2.0,
         0.5, 2.5, 1.0, 2.5, 1.5, 2.5, 2.0, 2.5, 2.5, 2.5, 3.0, 2.5, 3.5, 2.5,
         0.5, 3.0, 1.0, 2.875, 1.5, 3.0, 1.0, 3.12, 2.0, 3.0, 2.5, 3.0, 3.0, 3.0, 3.5, 3.0,
         0.5, 3.5, 1.0, 3.5, 1.5, 3.5, 2.0, 3.5, 2.5, 3.5, 3.0, 3.5, 3.5, 3.5])

    elemMask = None
    if domask:
        # Mask out the second element only.
        elemMask = np.ones(num_elem)
        elemMask[1] = 0
    elemArea = None
    if doarea:
        # Quads have area 5; pentagons 6.25 and triangles 1.25 (indices 35-38).
        elemArea = np.ones(num_elem)*5
        elemArea[35] = 6.25
        elemArea[36] = 1.25
        elemArea[37] = 6.25
        elemArea[38] = 1.25

    mesh.add_nodes(num_node,nodeId,nodeCoord,nodeOwner)

    mesh.add_elements(num_elem,elemId,elemType,elemConn, element_area=elemArea, element_mask=elemMask)

    if domask and doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemMask, elemArea
    elif domask and not doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemMask
    elif not domask and doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemArea
    else:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn
def mesh_create_4_ngons(domask=False, doarea=False):
    '''
    PRECONDITIONS: None
    POSTCONDITIONS: A 4 element Mesh has been created.
    RETURN VALUES: \n Mesh :: mesh \n

      3.25  71 ------ 72 ----- 73 ------ 74
            |  \           /   |
            |   \    64   /    |
            |    \       /     |
            |     \     /      |
      3.00  |      69          |
            |     /     \      |
            | 61 /  62   \  63 |
            |   /         \    |
      2.75  61 ------ 62 ----- 63 ------ 64

           0.25      0.75 1.0 1.25      1.75

          Node Ids at corners
          Element Ids in centers

    Note: This mesh is not parallel, it can only be used in serial
    '''
    # Two parametric dimensions, and two spatial dimensions
    mesh = ESMF.Mesh(parametric_dim=2, spatial_dim=2)

    num_node = 9
    num_elem = 4
    # 2x4 node grid plus an interior apex node (69) shared by all four ngons.
    nodeId = np.array([61,62,63,64,69,71,72,73,74,])
    # Interleaved (x, y) pairs, one pair per node, in nodeId order.
    nodeCoord = np.array([0.25,2.75, 0.75,2.75, 1.25,2.75, 1.75,2.75,
                          1.,3.,
                          0.25, 3.25, 0.75, 3.25, 1.25, 3.25, 1.75, 3.25])
    # Serial mesh: every node is owned by PET 0.
    nodeOwner = np.zeros(num_node)
    elemId = np.array([61,62,63,64])
    # Two pentagons (5 nodes) and two triangles, alternating.
    elemType = np.ones(num_elem)
    elemType[0] = 5
    elemType[1] = ESMF.MeshElemType.TRI
    elemType[2] = 5
    elemType[3] = ESMF.MeshElemType.TRI
    # Connectivity expressed in node ids, then translated to 0-based positions.
    elemConn = np.array([61, 62, 69, 72, 71, 62, 63, 69, 63, 64, 74, 73, 69, 69, 73, 72])
    elemConn = np.array([np.where(a==nodeId) for a in elemConn]).flatten()

    elemMask = None
    if domask:
        # Mask out the second element only.
        elemMask = np.ones(num_elem)
        elemMask[1] = 0
    elemArea = None
    if doarea:
        # Pentagons have area 6.25, triangles 1.25 (matches the same ngons in
        # mesh_create_50_ngons).  BUGFIX: the indices were copy-pasted from the
        # 50-element mesh (35, 36, 37, 42) and raised IndexError on this
        # 4-element array; they must address elements 0-3.
        elemArea = np.ones(num_elem)*5
        elemArea[0] = 6.25
        elemArea[1] = 1.25
        elemArea[2] = 6.25
        elemArea[3] = 1.25

    mesh.add_nodes(num_node,nodeId,nodeCoord,nodeOwner)

    mesh.add_elements(num_elem,elemId,elemType,elemConn,
                      element_mask=elemMask, element_area=elemArea)

    if domask and doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemMask, elemArea
    elif domask and not doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemMask
    elif not domask and doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemArea
    else:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn
def mesh_create_5_parallel ():
    '''
    PRECONDITIONS: None
    POSTCONDITIONS: A 5 element Mesh has been created in parallel.
    RETURN VALUES: \n Mesh :: mesh \n

    #  4.0   31 ------ 32       [32] ----- 33
    #        |         |         | 22  /   |
    #        |    21   |         |   /     |
    #        |         |         | /   23  |
    #  2.0  [21] ---- [22]      [22] ---- [23]
    #
    #       0.0       2.0       2.0       4.0
    #
    #           PET 2               PET 3
    #
    #
    #  2.0   21 ------ 22       [22] ----- 23
    #        |         |         |         |
    #        |    11   |         |    12   |
    #        |         |         |         |
    #  0.0   11 ------ 12       [12] ----- 13
    #
    #       0.0       2.0       2.0       4.0
    #
    #           PET 0               PET 1
    #
    # Node Id labels at corners
    # Element Id labels in centers
    #
    # Bracketed ids are nodes owned by a different PET (ghost copies).
    '''
    # Two parametric dimensions, and two spatial dimensions
    mesh = ESMF.Mesh(parametric_dim=2, spatial_dim=2)

    # This decomposition is hard-coded for exactly 4 PETs (or serial fallback).
    if ESMF.pet_count() > 1:
        if ESMF.pet_count() != 4:
            raise NameError('MPI rank must be 4 to build this mesh!')

    if (ESMF.local_pet() == 0):
        # Lower-left quad; owns all of its nodes.
        num_node=4
        num_elem=1

        nodeId=np.array([11,12,21,22])
        nodeCoord=np.array([0.0,0.0,
                            2.0,0.0,
                            0.0,2.0,
                            2.0,2.0 ])
        nodeOwner=np.zeros(num_node)

        elemId=np.array([11])
        elemType=np.array([ESMF.MeshElemType.QUAD])
        elemConn=np.array([0,1,3,2])

    elif (ESMF.local_pet() == 1):
        # Lower-right quad; shares its left edge nodes (owned by PET 0).
        num_node=4
        num_elem=1

        nodeId=np.array([12,13,22,23])
        nodeCoord=np.array([2.0,0.0,
                            4.0,0.0,
                            2.0,2.0,
                            4.0,2.0 ])
        nodeOwner=np.array([0,
                            1,
                            0,
                            1])

        elemId=np.array([12])
        elemType=np.array([ESMF.MeshElemType.QUAD])
        elemConn=np.array([0,1,3,2])

    elif (ESMF.local_pet() == 2):
        # Upper-left quad; shares its bottom edge nodes (owned by PET 0).
        num_node=4
        num_elem=1

        nodeId=np.array([21,22,31,32])
        nodeCoord=np.array([0.0,2.0,
                            2.0,2.0,
                            0.0,4.0,
                            2.0,4.0 ])
        nodeOwner=np.array([0,
                            0,
                            2,
                            2])

        elemId=np.array([21])
        elemType=np.array([ESMF.MeshElemType.QUAD])
        elemConn=np.array([0,1,3,2])

    elif (ESMF.local_pet() == 3):
        # Upper-right cell split into two triangles; corner nodes come from
        # three other PETs.
        num_node=4
        num_elem=2

        nodeId=np.array([22,23,32,33])
        nodeCoord=np.array([2.0,2.0,
                            4.0,2.0,
                            2.0,4.0,
                            4.0,4.0 ])
        nodeOwner=np.array([0,
                            1,
                            2,
                            3])

        elemId=np.array([22,23])
        elemType=np.array([ESMF.MeshElemType.TRI,
                           ESMF.MeshElemType.TRI])
        elemConn=np.array([0,3,2,
                           0,1,3])

    # Add nodes and elements to the Mesh
    mesh.add_nodes(num_node,nodeId,nodeCoord,nodeOwner)
    mesh.add_elements(num_elem,elemId,elemType,elemConn)

    return mesh, nodeCoord, nodeOwner, elemType, elemConn
def mesh_create_5_pentahexa_parallel ():
    '''
    PRECONDITIONS: None
    POSTCONDITIONS: A 5 element Mesh has been created in parallel.
    RETURN VALUES: \n Mesh :: mesh \n

    #  2.1        8        10 --------11
    #           /   \        \         |
    #          7     9       [9]       12
    #          |     |        |  5   /
    #          |  4  |        |    /
    #          |     |        |  /
    #  1.0    [4] ----- [5]  [5] ----- [6]
    #
    #        -0.1      1.0   1.0      2.1  2.5
    #
    #             PET 2           PET 3
    #
    #
    #  1.0    4 ------- 5    [5] ------- 6
    #         |         |     |  \    3  |
    #         |    1    |     |    \     |
    #         |         |     |  2   \   |
    # -0.1    1 ------- 2    [2] ------- 3
    #
    #        -0.1      1.0   1.0   2.1  2.5
    #
    #             PET 0           PET 1
    #
    # Node Id labels at corners
    # Element Id labels in centers
    #
    # Bracketed ids are nodes owned by a different PET (ghost copies).
    # Element 4 is a pentagon and element 5 a hexagon (ngon element types).
    '''
    # Two parametric dimensions, and two spatial dimensions
    mesh = ESMF.Mesh(parametric_dim=2, spatial_dim=2)

    # This decomposition is hard-coded for exactly 4 PETs (or serial fallback).
    if ESMF.pet_count() > 1:
        if ESMF.pet_count() != 4:
            raise NameError('MPI rank must be 4 to build this mesh!')

    if (ESMF.local_pet() == 0):
        # Lower-left quad (element 1); owns all of its nodes.
        num_node=4
        num_elem=1

        nodeId=np.array([1, 2, 4, 5])
        nodeCoord=np.array([-0.1, -0.1,
                            1.0, -0.1,
                            - 0.1, 1.0,
                            1.0, 1.0 ])
        nodeOwner=np.zeros(num_node)

        elemId=np.array([1])
        elemType=np.array([ESMF.MeshElemType.QUAD])
        elemConn=np.array([0, 1, 3, 2 ])

    elif (ESMF.local_pet() == 1):
        # Lower-right cell split into two triangles (elements 2 and 3).
        num_node=4
        num_elem=2

        nodeId=np.array([2, 3, 5, 6])
        nodeCoord=np.array([1.0, -0.1,
                            2.1, -0.1,
                            1.0, 1.0,
                            2.1, 1.0 ])
        nodeOwner=np.array([0,
                            1,
                            0,
                            1])

        elemId=np.array([2, 3])
        elemType=np.array([ESMF.MeshElemType.TRI, ESMF.MeshElemType.TRI])
        elemConn=np.array([0, 1, 2,
                           1, 3, 2])

    elif (ESMF.local_pet() == 2):
        # Upper-left pentagon (element 4, 5-noded ngon: elemType value 5).
        num_node=5
        num_elem=1

        nodeId=np.array([4, 5, 7, 8, 9])
        nodeCoord=np.array([-0.1, 1.0,
                            1.0, 1.0,
                            -0.1, 2.1,
                            0.5, 2.5,
                            1.0, 2.1 ])
        nodeOwner=np.array([0,
                            0,
                            2,
                            2,
                            2])

        elemId=np.array([4])
        elemType=np.array([5])
        elemConn=np.array([0, 1, 4, 3, 2])

    elif (ESMF.local_pet() == 3):
        # Upper-right hexagon (element 5, 6-noded ngon: elemType value 6).
        num_node=6
        num_elem=1

        nodeId=np.array([5, 6, 9, 10, 11, 12])
        nodeCoord=np.array([1.0, 1.0,
                            2.1, 1.0,
                            1.0, 2.1,
                            1.5, 2.5,
                            2.5, 2.5,
                            2.5, 2.1 ])
        nodeOwner=np.array([0,
                            1,
                            2,
                            3,
                            3,
                            3])

        elemId=np.array([5])
        elemType=np.array([6])
        elemConn=np.array([0, 1, 5, 4, 3, 2])

    # Add nodes and elements to the Mesh
    mesh.add_nodes(num_node,nodeId,nodeCoord,nodeOwner)
    mesh.add_elements(num_elem,elemId,elemType,elemConn)

    return mesh, nodeCoord, nodeOwner, elemType, elemConn
def mesh_create_10_parallel ():
    '''
    PRECONDITIONS: None
    POSTCONDITIONS: A 10 element Mesh has been created in parallel.
    RETURN VALUES: \n Mesh :: mesh \n

    #  4.0   41 ------ 42 ------ 43      [43] ---------- 44
    #        |         |         |        |         33 /  |
    #        |         |         |        |          /    |
    #        |    31   |    32   |        |        /      |
    #        |         |         |        |      /   34   |
    #        |         |         |        |    /          |
    #  2.5  [31] ----- [32] ---- [33]    [33] ---------- [34]
    #
    #       0.0       1.5       2.5      2.5             4.0
    #
    #                PET 2                      PET 3
    #
    #
    #  2.5   31 ------ 32 ------ 33      [33] ----------- 34
    #        |         |         |        |               |
    #        |    21   |    22   |        |       23      |
    #        |         |         |        |               |
    #  1.5   21 ------ 22 ------ 23      [23] ---------- 24
    #        |         |         |        |               |
    #        |    11   |    12   |        |       13      |
    #        |         |         |        |               |
    #  0.0   11 ------ 12 ------ 13      [13] ----------- 14
    #
    #       0.0       1.5       2.5      2.5             4.0
    #
    #                PET 0                      PET 1
    #
    # Node Id labels at corners
    # Element Id labels in centers
    #
    # Bracketed ids are nodes owned by a different PET (ghost copies).
    '''
    # This decomposition is hard-coded for exactly 4 PETs (or serial fallback).
    if ESMF.pet_count() > 1:
        if ESMF.pet_count() != 4:
            raise NameError('MPI rank must be 4 to build this mesh!')

    # Two parametric dimensions, and two spatial dimensions
    mesh = ESMF.Mesh(parametric_dim=2, spatial_dim=2)

    if (ESMF.local_pet() == 0):
        # Lower-left 2x2 block of quads; owns all of its nodes.
        num_node=9
        num_elem=4

        nodeId=np.array([11,12,13,21,22,23,31,32,33])
        nodeCoord=np.array([0.0,0.0,
                            1.5,0.0,
                            2.5,0.0,
                            0.0,1.5,
                            1.5,1.5,
                            2.5,1.5,
                            0.0,2.5,
                            1.5,2.5,
                            2.5,2.5])
        nodeOwner=np.zeros(num_node)

        elemId=np.array([11,12,21,22])
        elemType=np.array([ESMF.MeshElemType.QUAD,
                           ESMF.MeshElemType.QUAD,
                           ESMF.MeshElemType.QUAD,
                           ESMF.MeshElemType.QUAD])
        elemConn=np.array([0,1,4,3,
                           1,2,5,4,
                           3,4,7,6,
                           4,5,8,7])

    elif (ESMF.local_pet() == 1):
        # Lower-right column of two quads; left-edge nodes owned by PET 0.
        num_node=6
        num_elem=2

        nodeId=np.array([13,14,23,24,33,34])
        nodeCoord=np.array([2.5,0.0,
                            4.0,0.0,
                            2.5,1.5,
                            4.0,1.5,
                            2.5,2.5,
                            4.0,2.5 ])
        nodeOwner=np.array([0,1,0,1,0,1])

        elemId=np.array([13,23])
        elemType=np.array([ESMF.MeshElemType.QUAD,
                           ESMF.MeshElemType.QUAD])
        elemConn=np.array([0,1,3,2,
                           2,3,5,4])

    elif (ESMF.local_pet() == 2):
        # Upper-left row of two quads; bottom-edge nodes owned by PET 0.
        num_node=6
        num_elem=2

        nodeId=np.array([31,32,33,41,42,43])
        nodeCoord=np.array([0.0,2.5,
                            1.5,2.5,
                            2.5,2.5,
                            0.0,4.0,
                            1.5,4.0,
                            2.5,4.0 ])
        nodeOwner=np.array([0,0,0,2,2,2])

        elemId=np.array([31,32])
        elemType=np.array([ESMF.MeshElemType.QUAD,
                           ESMF.MeshElemType.QUAD])
        elemConn=np.array([0,1,4,3,
                           1,2,5,4])

    elif (ESMF.local_pet() == 3):
        # Upper-right cell split into two triangles; corner nodes come from
        # three other PETs.
        num_node=4
        num_elem=2

        nodeId=np.array([33,34,43,44])
        nodeCoord=np.array([2.5,2.5,
                            4.0,2.5,
                            2.5,4.0,
                            4.0,4.0 ])
        nodeOwner=np.array([0,1,2,3])

        elemId=np.array([33,34])
        elemType=np.array([ESMF.MeshElemType.TRI,
                           ESMF.MeshElemType.TRI])
        elemConn=np.array([0,3,2,
                           0,1,3])

    # Add nodes and elements to the Mesh
    mesh.add_nodes(num_node,nodeId,nodeCoord,nodeOwner)
    mesh.add_elements(num_elem,elemId,elemType,elemConn)

    return mesh, nodeCoord, nodeOwner, elemType, elemConn
def mesh_create_50_parallel(domask=False, doarea=False):
    '''
    PRECONDITIONS: None
    POSTCONDITIONS: A 50 element Mesh has been created in parallel.
    RETURN VALUES: \n Mesh :: mesh \n

      3.75  81 ------ 82 ----- 83 ------ 84   [84] ----- 85 ------ 86 ------ 87 ------ 88
            |         |        |         |     |         |         |         |  77  /  |
            |    71   |   72   |    73   |     |    74   |    75   |    76   |    /    |
            |         |        |         |     |         |         |         |  / 78   |
      3.25  71 ------ 72 ----- 73 ------ 74   [74] ----- 75 ------ 76 ------ 77 ------ 78
            |         |        |         |     |         |         |         |         |
            |    61   |   62   |    63   |     |    64   |    65   |    66   |    67   |
            |         |        |         |     |         |         |         |         |
      2.75  61 ------ 62 ----- 63 ------ 64   [64] ----- 65 ------ 66 ------ 67 ------ 68
            |         |        |         |     |         |         |         |         |
            |    51   |   52   |    53   |     |    54   |    55   |    56   |    57   |
            |         |        |         |     |         |         |         |         |
      2.25  51 ------ 52 ----- 53 ------ 54   [54] ----- 55 ------ 56 ------ 57 ------ 58
            |         |        |         |     |         |         |         |         |
            |    41   |   42   |    43   |     |    44   |    45   |    46   |    47   |
            |         |        |         |     |         |         |         |         |
      1.75 [41] ---- [42] --- [43] ---- [44]  [44] ---- [45] ---- [46] ---- [47] ---- [48]

                      PET 2                                    PET 3

      1.75  41 ------ 42 ----- 43 ------ 44   [44] ----- 45 ------ 46 ------ 47 ------ 48
            |         |        |         |     |         |         |         |         |
            |    31   |   32   |    33   |     |    34   |    35   |    36   |    37   |
            |         |        |         |     |         |         |         |         |
      1.25  31 ------ 32 ----- 33 ------ 34   [34] ----- 35 ------ 36 ------ 37 ------ 38
            |         |        |         |     |         |         |         |         |
            |    21   |   22   |    23   |     |    24   |    25   |    26   |    27   |
            |         |        |         |     |         |         |         |         |
      0.75  21 ------ 22 ----- 23 ------ 24   [24] ----- 25 ------ 26 ------ 27 ------ 28
            |         |        |         |     |         |         |         |         |
            |    11   |   12   |    13   |     |    14   |    15   |    16   |    17   |
            |         |        |         |     |         |         |         |         |
      0.25  11 ------ 12 ----- 13 ------ 14   [14] ----- 15 ------ 16 ------ 17 ------ 18

           0.25      0.75     1.25      1.75  1.75      2.25      2.75      3.25      3.75

                      PET 0                                    PET 1

          Node Ids at corners
          Element Ids in centers

    Bracketed ids are nodes owned by a different PET (ghost copies).
    '''
    # This decomposition is hard-coded for exactly 4 PETs (or serial fallback).
    if ESMF.pet_count() > 1:
        if ESMF.pet_count() != 4:
            raise NameError('MPI rank must be 4 to build this mesh!')

    # Two parametric dimensions, and two spatial dimensions
    mesh = ESMF.Mesh(parametric_dim=2, spatial_dim=2)

    if ESMF.local_pet() == 0:
        # Lower-left 3x3 block of quads; owns all of its nodes.
        num_node = 16
        num_elem = 9

        nodeId = np.array([11,12,13,14,
                           21,22,23,24,
                           31,32,33,34,
                           41,42,43,44])
        nodeCoord = np.array([0.25,0.25, 0.25,0.75, 0.25,1.25, 0.25,1.75,
                              0.75,0.25, 0.75,0.75, 0.75,1.25, 0.75,1.75,
                              1.25,0.25, 1.25,0.75, 1.25,1.25, 1.25,1.75,
                              1.75,0.25, 1.75,0.75, 1.75,1.25, 1.75,1.75])
        nodeOwner = np.zeros(num_node)
        elemId = np.array([11,12,13,
                           21,22,23,
                           31,32,33])
        elemType = np.ones(num_elem)*ESMF.MeshElemType.QUAD
        # Connectivity in node ids, translated to 0-based positions below.
        elemConn = np.array([11,12,22,21,12,13,23,22,13,14,24,23,
                             21,22,32,31,22,23,33,32,23,24,34,33,
                             31,32,42,41,32,33,43,42,33,34,44,43])
        elemConn = np.array([np.where(a==nodeId) for a in elemConn]).flatten()
        elemMask = None
        if domask:
            # The globally-masked element (id 12) lives on this PET.
            elemMask = np.ones(num_elem)
            elemMask[1] = 0
        elemArea = None
        if doarea:
            elemArea = np.ones(num_elem)*5

    elif ESMF.local_pet() == 1:
        # Lower-right 3x4 block of quads; left-column nodes owned by PET 0.
        num_node = 20
        num_elem = 12

        nodeId = np.array([14,15,16,17,18,
                           24,25,26,27,28,
                           34,35,36,37,38,
                           44,45,46,47,48])
        nodeCoord = np.array([0.25,1.75, 0.25,2.25, 0.25,2.75, 0.25,3.25, 0.25,3.75,
                              0.75,1.75, 0.75,2.25, 0.75,2.75, 0.75,3.25, 0.75,3.75,
                              1.25,1.75, 1.25,2.25, 1.25,2.75, 1.25,3.25, 1.25,3.75,
                              1.75,1.75, 1.75,2.25, 1.75,2.75, 1.75,3.25, 1.75,3.75])
        nodeOwner = np.array([0,1,1,1,1,0,1,1,1,1,0,1,1,1,1,0,1,1,1,1])
        elemId = np.array([14,15,16,17,
                           24,25,26,27,
                           34,35,36,37])
        elemType = np.ones(num_elem)*ESMF.MeshElemType.QUAD
        elemConn = np.array([14,15,25,24,15,16,26,25,16,17,27,26,17,18,28,27,
                             24,25,35,34,25,26,36,35,26,27,37,36,27,28,38,37,
                             34,35,45,44,35,36,46,45,36,37,47,46,37,38,48,47])
        elemConn = np.array([np.where(a==nodeId) for a in elemConn]).flatten()
        elemMask = None
        if domask:
            elemMask = np.ones(num_elem)
        elemArea = None
        if doarea:
            elemArea = np.ones(num_elem)*5

    elif ESMF.local_pet() == 2:
        # Upper-left 4x3 block of quads; bottom-row nodes owned by PET 0.
        num_node = 20
        num_elem = 12

        nodeId = np.array([41,42,43,44,
                           51,52,53,54,
                           61,62,63,64,
                           71,72,73,74,
                           81,82,83,84])
        nodeCoord = np.array([1.75,0.25, 1.75,0.75, 1.75,1.25, 1.75,1.75,
                              2.25,0.25, 2.25,0.75, 2.25,1.25, 2.25,1.75,
                              2.75,0.25, 2.75,0.75, 2.75,1.25, 2.75,1.75,
                              3.25,0.25, 3.25,0.75, 3.25,1.25, 3.25,1.75,
                              3.75,0.25, 3.75,0.75, 3.75,1.25, 3.75,1.75])
        nodeOwner = np.array([0,0,0,0,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2])
        elemId = np.array([41,42,43,
                           51,52,53,
                           61,62,63,
                           71,72,73])
        elemType = np.ones(num_elem)*ESMF.MeshElemType.QUAD
        elemConn = np.array([41,42,52,51,42,43,53,52,43,44,54,53,
                             51,52,62,61,52,53,63,62,53,54,64,63,
                             61,62,72,71,62,63,73,72,63,64,74,73,
                             71,72,82,81,72,73,83,82,73,74,84,83])
        elemConn = np.array([np.where(a==nodeId) for a in elemConn]).flatten()
        elemMask = None
        if domask:
            elemMask = np.ones(num_elem)
        elemArea = None
        if doarea:
            elemArea = np.ones(num_elem)*5

    elif ESMF.local_pet() == 3:
        # Upper-right 4x4 block: 15 quads plus the two triangles (77, 78)
        # splitting the top-right cell; edge nodes owned by PETs 0-2.
        num_node = 25
        num_elem = 17

        nodeId = np.array([44,45,46,47,48,
                           54,55,56,57,58,
                           64,65,66,67,68,
                           74,75,76,77,78,
                           84,85,86,87,88])
        nodeCoord = np.array([1.75,1.75, 1.75,2.25, 1.75,2.75, 1.75,3.25, 1.75,3.75,
                              2.25,1.75, 2.25,2.25, 2.25,2.75, 2.25,3.25, 2.25,3.75,
                              2.75,1.75, 2.75,2.25, 2.75,2.75, 2.75,3.25, 2.75,3.75,
                              3.25,1.75, 3.25,2.25, 3.25,2.75, 3.25,3.25, 3.25,3.75,
                              3.75,1.75, 3.75,2.25, 3.75,2.75, 3.75,3.25, 3.75,3.75])
        nodeOwner = np.array([0,1,1,1,1,2,3,3,3,3,2,3,3,3,3,2,3,3,3,3,2,3,3,3,3])
        elemId = np.array([44,45,46,47,
                           54,55,56,57,
                           64,65,66,67,
                           74,75,76,77,78])
        elemType = np.ones(num_elem-2)*ESMF.MeshElemType.QUAD
        elemType = np.append(elemType, [ESMF.MeshElemType.TRI, ESMF.MeshElemType.TRI])
        elemConn = np.array([44,45,55,54,45,46,56,55,46,47,57,56,47,48,58,57,
                             54,55,65,64,55,56,66,65,56,57,67,66,57,58,68,67,
                             64,65,75,74,65,66,76,75,66,67,77,76,67,68,78,77,
                             74,75,85,84,75,76,86,85,76,77,87,86,
                             77,88,87,
                             77,78,88])
        elemConn = np.array([np.where(a==nodeId) for a in elemConn]).flatten()
        elemMask = None
        if domask:
            elemMask = np.ones(num_elem)
        elemArea = None
        if doarea:
            # Quads have area 5, the two triangles 2.5 each.
            elemArea = np.ones(num_elem-2)*5
            elemArea = np.append(elemArea, [2.5, 2.5])

    mesh.add_nodes(num_node,nodeId,nodeCoord,nodeOwner)

    mesh.add_elements(num_elem,elemId,elemType,elemConn,
                      element_mask=elemMask, element_area=elemArea)

    if domask and doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemMask, elemArea
    elif domask and not doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemMask
    elif not domask and doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemArea
    else:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn
def mesh_create_50_ngons_parallel(domask=False, doarea=False):
    '''
    PRECONDITIONS: None
    POSTCONDITIONS: A 50 element Mesh has been created in parallel.
    RETURN VALUES: \n Mesh :: mesh \n

      3.75  81 ------ 82 ----- 83 ------ 84   [84] ----- 85 ------ 86 ------ 87 ------ 88
            |         |        |         |     |         |         |         |         |
            |    71   |   72   |    73   |     |    74   |    75   |    76   |    77   |
            |         |        |         |     |         |         |         |         |
      3.25  71 ------ 72 ----- 73 ------ 74   [74] ----- 75 ------ 76 ------ 77 ------ 78
            |  \           /   |         |     |         |         |         |         |
            |   \    68   /    |         |     |         |         |         |         |
            |    \       /     |         |     |         |         |         |         |
            |     \     /      |         |     |         |         |         |         |
      3.00  |      69          |         |     |         |         |         |         |
            |     /     \      |         |     |         |         |         |         |
            | 61 /  62   \  63 |         |     |    64   |    65   |    66   |    67   |
            |   /         \    |         |     |         |         |         |         |
      2.75  61 ------ 62 ----- 63 ------ 64   [64] ----- 65 ------ 66 ------ 67 ------ 68
            |         |        |         |     |         |         |         |         |
            |    51   |   52   |    53   |     |    54   |    55   |    56   |    57   |
            |         |        |         |     |         |         |         |         |
      2.25  51 ------ 52 ----- 53 ------ 54   [54] ----- 55 ------ 56 ------ 57 ------ 58
            |         |        |         |     |         |         |         |         |
            |    41   |   42   |    43   |     |    44   |    45   |    46   |    47   |
            |         |        |         |     |         |         |         |         |
      1.75 [41] ---- [42] --- [43] ---- [44]  [44] ---- [45] ---- [46] ---- [47] ---- [48]

                      PET 2                                    PET 3

      1.75  41 ------ 42 ----- 43 ------ 44   [44] ----- 45 ------ 46 ------ 47 ------ 48
            |         |        |         |     |         |         |         |         |
            |    31   |   32   |    33   |     |    34   |    35   |    36   |    37   |
            |         |        |         |     |         |         |         |         |
      1.25  31 ------ 32 ----- 33 ------ 34   [34] ----- 35 ------ 36 ------ 37 ------ 38
            |         |        |         |     |         |         |         |         |
            |    21   |   22   |    23   |     |    24   |    25   |    26   |    27   |
            |         |        |         |     |         |         |         |         |
      0.75  21 ------ 22 ----- 23 ------ 24   [24] ----- 25 ------ 26 ------ 27 ------ 28
            |         |        |         |     |         |         |         |         |
            |    11   |   12   |    13   |     |    14   |    15   |    16   |    17   |
            |         |        |         |     |         |         |         |         |
      0.25  11 ------ 12 ----- 13 ------ 14   [14] ----- 15 ------ 16 ------ 17 ------ 18

           0.25      0.75 1.0 1.25      1.75  1.75      2.25      2.75      3.25      3.75

                      PET 0                                    PET 1

          Node Ids at corners
          Element Ids in centers

    Bracketed ids are nodes owned by a different PET (ghost copies).  PET 2
    holds the ngon elements (two pentagons and two triangles around node 69).
    '''
    # This decomposition is hard-coded for exactly 4 PETs (or serial fallback).
    if ESMF.pet_count() > 1:
        if ESMF.pet_count() != 4:
            raise NameError('MPI rank must be 4 to build this mesh!')

    # Two parametric dimensions, and two spatial dimensions
    mesh = ESMF.Mesh(parametric_dim=2, spatial_dim=2)

    if ESMF.local_pet() == 0:
        # Lower-left 3x3 block of quads; owns all of its nodes.
        num_node = 16
        num_elem = 9

        nodeId = np.array([11,12,13,14,
                           21,22,23,24,
                           31,32,33,34,
                           41,42,43,44])
        nodeCoord = np.array([0.25,0.25, 0.75,0.25, 1.25,0.25, 1.75,0.25,
                              0.25,0.75, 0.75,0.75, 1.25,0.75, 1.75,0.75,
                              0.25,1.25, 0.75,1.25, 1.25,1.25, 1.75,1.25,
                              0.25,1.75, 0.75,1.75, 1.25,1.75, 1.75,1.75])
        nodeOwner = np.zeros(num_node)
        elemId = np.array([11,12,13,
                           21,22,23,
                           31,32,33])
        elemType = np.ones(num_elem)*ESMF.MeshElemType.QUAD
        # Connectivity in node ids, translated to 0-based positions below.
        elemConn = np.array([11,12,22,21,12,13,23,22,13,14,24,23,
                             21,22,32,31,22,23,33,32,23,24,34,33,
                             31,32,42,41,32,33,43,42,33,34,44,43])
        elemConn = np.array([np.where(a==nodeId) for a in elemConn]).flatten()
        elemMask = None
        if domask:
            # The globally-masked element (id 12) lives on this PET.
            elemMask = np.ones(num_elem)
            elemMask[1] = 0
        elemArea = None
        if doarea:
            elemArea = np.ones(num_elem)*5

    elif ESMF.local_pet() == 1:
        # Lower-right 3x4 block of quads; left-column nodes owned by PET 0.
        num_node = 20
        num_elem = 12

        nodeId = np.array([14,15,16,17,18,
                           24,25,26,27,28,
                           34,35,36,37,38,
                           44,45,46,47,48])
        nodeCoord = np.array([1.75,0.25, 2.25,0.25, 2.75,0.25, 3.25,0.25, 3.75,0.25,
                              1.75,0.75, 2.25,0.75, 2.75,0.75, 3.25,0.75, 3.75,0.75,
                              1.75,1.25, 2.25,1.25, 2.75,1.25, 3.25,1.25, 3.75,1.25,
                              1.75,1.75, 2.25,1.75, 2.75,1.75, 3.25,1.75, 3.75,1.75])
        nodeOwner = np.array([0,1,1,1,1,0,1,1,1,1,0,1,1,1,1,0,1,1,1,1])
        elemId = np.array([14,15,16,17,
                           24,25,26,27,
                           34,35,36,37])
        elemType = np.ones(num_elem)*ESMF.MeshElemType.QUAD
        elemConn = np.array([14,15,25,24,15,16,26,25,16,17,27,26,17,18,28,27,
                             24,25,35,34,25,26,36,35,26,27,37,36,27,28,38,37,
                             34,35,45,44,35,36,46,45,36,37,47,46,37,38,48,47])
        elemConn = np.array([np.where(a==nodeId) for a in elemConn]).flatten()
        elemMask = None
        if domask:
            elemMask = np.ones(num_elem)
        elemArea = None
        if doarea:
            elemArea = np.ones(num_elem)*5

    elif ESMF.local_pet() == 2:
        # Upper-left block holding the ngons: 9 quads, two pentagons and two
        # triangles (indices 6-9) around the extra apex node 69.
        num_node = 21
        num_elem = 13

        nodeId = np.array([41,42,43,44,
                           51,52,53,54,
                           61,62,63,64,69,
                           71,72,73,74,
                           81,82,83,84])
        nodeCoord = np.array([0.25,1.75, 0.75,1.75, 1.25,1.75, 1.75,1.75,
                              0.25,2.25, 0.75,2.25, 1.25,2.25, 1.75,2.25,
                              0.25,2.75, 0.75,2.75, 1.25,2.75, 1.75,2.75, 1.0,3.0,
                              0.25,3.25, 0.75,3.25, 1.25,3.25, 1.75,3.25,
                              0.25,3.75, 0.75,3.75, 1.25,3.75, 1.75,3.75])
        nodeOwner = np.array([0,0,0,0,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2])
        elemId = np.array([41,42,43,
                           51,52,53,
                           61,62,63,68,
                           71,72,73])
        elemType = np.ones(num_elem)*ESMF.MeshElemType.QUAD
        elemType[6] = 5
        elemType[7] = ESMF.MeshElemType.TRI
        elemType[8] = 5
        elemType[9] = ESMF.MeshElemType.TRI
        elemConn = np.array([41,42,52,51,42,43,53,52,43,44,54,53,
                             51,52,62,61,52,53,63,62,53,54,64,63,
                             61,62,69,72,71,62,63,69,63,64,74,73,69,69,73,72,
                             71,72,82,81,72,73,83,82,73,74,84,83])
        elemConn = np.array([np.where(a==nodeId) for a in elemConn]).flatten()
        elemMask = None
        if domask:
            elemMask = np.ones(num_elem)
        elemArea = None
        if doarea:
            # Quads 5; pentagons 6.25; triangles 1.25.
            elemArea = np.ones(num_elem)*5
            elemArea[6] = 6.25
            elemArea[7] = 1.25
            elemArea[8] = 6.25
            elemArea[9] = 1.25

    elif ESMF.local_pet() == 3:
        # Upper-right 4x4 block of quads; edge nodes owned by PETs 0-2.
        num_node = 25
        num_elem = 16

        nodeId = np.array([44,45,46,47,48,
                           54,55,56,57,58,
                           64,65,66,67,68,
                           74,75,76,77,78,
                           84,85,86,87,88])
        nodeCoord = np.array([1.75,1.75, 2.25,1.75, 2.75,1.75, 3.25,1.75, 3.75,1.75,
                              1.75,2.25, 2.25,2.25, 2.75,2.25, 3.25,2.25, 3.75,2.25,
                              1.75,2.75, 2.25,2.75, 2.75,2.75, 3.25,2.75, 3.75,2.75,
                              1.75,3.25, 2.25,3.25, 2.75,3.25, 3.25,3.25, 3.75,3.25,
                              1.75,3.75, 2.25,3.75, 2.75,3.75, 3.25,3.75, 3.75,3.75])
        nodeOwner = np.array([0,1,1,1,1,2,3,3,3,3,2,3,3,3,3,2,3,3,3,3,2,3,3,3,3])
        elemId = np.array([44,45,46,47,
                           54,55,56,57,
                           64,65,66,67,
                           74,75,76,77])
        elemType = np.ones(num_elem)*ESMF.MeshElemType.QUAD
        elemConn = np.array([44,45,55,54,45,46,56,55,46,47,57,56,47,48,58,57,
                             54,55,65,64,55,56,66,65,56,57,67,66,57,58,68,67,
                             64,65,75,74,65,66,76,75,66,67,77,76,67,68,78,77,
                             74,75,85,84,75,76,86,85,76,77,87,86,77,78,88,87])
        elemConn = np.array([np.where(a==nodeId) for a in elemConn]).flatten()
        elemMask = None
        if domask:
            elemMask = np.ones(num_elem)
        elemArea = None
        if doarea:
            elemArea = np.ones(num_elem)*5

    mesh.add_nodes(num_node,nodeId,nodeCoord,nodeOwner)

    mesh.add_elements(num_elem,elemId,elemType,elemConn,
                      element_mask=elemMask, element_area=elemArea)

    if domask and doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemMask, elemArea
    elif domask and not doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemMask
    elif not domask and doarea:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn, elemArea
    else:
        return mesh, nodeCoord, nodeOwner, elemType, elemConn
def initialize_field_mesh(field, nodeCoord, nodeOwner, elemType, elemConn,
                          domask=False, elemMask=None):
    '''
    Initialize a Mesh-based Field to the analytic function
    20.0 + x**2 + x*y + y**2, optionally zeroing masked entries.

    PRECONDITIONS: A Field has been created on the elements of a Mesh.
    POSTCONDITIONS: The Field has been initialized to an analytic
                    field.
    RETURN VALUES: \n Field :: field \n
    '''
    # Stagger-location ids used by this helper: 0 = node, 1 = element.
    [node, element] = [0,1]
    if field.staggerloc == element:
        # Element-located field: evaluate the analytic function at each
        # element center.  'offset' walks the flat connectivity array,
        # advancing by 3 per triangle and 4 per quad.
        offset = 0
        for i in range(field.grid.size_owned[element]):
            if (elemType[i] == ESMF.MeshElemType.TRI):
                # nodeCoord is flat [x0,y0, x1,y1, ...], hence the *2 stride.
                # Triangle center = average of the three corner coordinates.
                x1 = nodeCoord[(elemConn[offset])*2]
                x2 = nodeCoord[(elemConn[offset+1])*2]
                x3 = nodeCoord[(elemConn[offset+2])*2]
                y1 = nodeCoord[(elemConn[offset])*2+1]
                y2 = nodeCoord[(elemConn[offset+1])*2+1]
                y3 = nodeCoord[(elemConn[offset+2])*2+1]
                x = (x1 + x2 + x3) / 3.0
                y = (y1 + y2 + y3) / 3.0
                offset = offset + 3
            elif (elemType[i] == ESMF.MeshElemType.QUAD):
                # Quad center from two opposite corners (assumes an
                # axis-aligned rectangle -- true for the meshes built above).
                x1 = nodeCoord[(elemConn[offset])*2]
                x2 = nodeCoord[(elemConn[offset+1])*2]
                y1 = nodeCoord[(elemConn[offset+1])*2+1]
                y2 = nodeCoord[(elemConn[offset+3])*2+1]
                x = (x1 + x2) / 2.0
                y = (y1 + y2) / 2.0
                offset = offset + 4
            else:
                raise ValueError("Elem type is not supported.")
            #print ('[{0},{1}] = {2}'.format(x,y,field.data[i]))
            field.data[i] = 20.0 + x**2 +x*y + y**2
            if domask:
                # calculate field
                if (elemMask[i] == 0):
                    field.data[i] = 0
    elif field.staggerloc == node:
        # Node-located field: only locally-owned nodes carry data, so a
        # separate index 'ind' tracks the position in field.data.
        ind = 0
        for i in range(field.grid.size[node]):
            x = nodeCoord[i*2]
            y = nodeCoord[i*2+1]
            if (nodeOwner[i] == ESMF.local_pet()):
                if ind > field.grid.size_owned[node]:
                    raise ValueError("Overstepped the mesh bounds!")
                field.data[ind] = 20.0 + x**2 +x*y + y**2
                #print ('[{0},{1}] = {2}'.format(x,y,field.data[ind]))
                ind += 1
            if domask:
                # calculate field
                # NOTE(review): this masks a *node* field with elemMask[i]
                # (an element mask) and writes field.data[i] rather than the
                # owned-node index 'ind' -- looks wrong for the node case;
                # nesting reconstructed from a whitespace-stripped dump.
                # TODO confirm against upstream ESMPy mesh utilities.
                if (elemMask[i] == 0):
                    field.data[i] = 0
    else:
        raise ValueError("Field staggerloc is not supported")
    return field
def compute_mass_mesh(valuefield, dofrac=False, fracfield=None,
                      uninitval=422397696.):
    '''
    Compute the mass of a Mesh-based Field as sum(area * value), skipping
    entries still holding the sentinel 'uninitval'.

    PRECONDITIONS: 'fracfield' contains the fractions of each cell
                   which contributed to a regridding operation involving
                   'valuefield. 'dofrac' is a boolean value that gives
                   the option to not use the 'fracfield'.\n
    POSTCONDITIONS: The mass of the data field is computed.\n
    RETURN VALUES: float :: mass \n
    '''
    # Cell areas must be retrieved on the element stagger location.
    areafield = ESMF.Field(valuefield.grid, name='areafield',
                           meshloc=ESMF.MeshLoc.ELEMENT)
    areafield.get_area()
    # Restrict the sum to cells that actually received data.
    valid = np.where(valuefield.data != uninitval)[0]
    contrib = areafield.data[valid] * valuefield.data[valid]
    if dofrac:
        contrib = contrib * fracfield.data[valid]
    # np.sum over an empty selection is 0.0, matching the original's default.
    return np.sum(contrib)
| 43.29972
| 122
| 0.376294
| 8,209
| 61,832
| 2.802899
| 0.031063
| 0.047155
| 0.012386
| 0.017515
| 0.898083
| 0.879091
| 0.861315
| 0.84367
| 0.828067
| 0.808901
| 0
| 0.243742
| 0.462414
| 61,832
| 1,427
| 123
| 43.330063
| 0.448466
| 0.346811
| 0
| 0.65771
| 0
| 0
| 0.008896
| 0
| 0
| 0
| 0
| 0.001402
| 0
| 1
| 0.016355
| false
| 0
| 0.005841
| 0
| 0.056075
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dc0d1184e1242e724cdf316f494a157898696c70
| 6,506
|
py
|
Python
|
RecoParticleFlow/PFClusterProducer/python/particleFlowClusterHBHETimeSelected_cfi.py
|
nistefan/cmssw
|
ea13af97f7f2117a4f590a5e654e06ecd9825a5b
|
[
"Apache-2.0"
] | 3
|
2018-08-24T19:10:26.000Z
|
2019-02-19T11:45:32.000Z
|
RecoParticleFlow/PFClusterProducer/python/particleFlowClusterHBHETimeSelected_cfi.py
|
nistefan/cmssw
|
ea13af97f7f2117a4f590a5e654e06ecd9825a5b
|
[
"Apache-2.0"
] | 3
|
2018-08-23T13:40:24.000Z
|
2019-12-05T21:16:03.000Z
|
RecoParticleFlow/PFClusterProducer/python/particleFlowClusterHBHETimeSelected_cfi.py
|
nistefan/cmssw
|
ea13af97f7f2117a4f590a5e654e06ecd9825a5b
|
[
"Apache-2.0"
] | 5
|
2018-08-21T16:37:52.000Z
|
2020-01-09T13:33:17.000Z
|
import FWCore.ParameterSet.Config as cms


def _timing_cut(depth, min_energy, max_energy, endcap, min_time, max_time):
    """Build one PSet describing the accepted time window for clusters of a
    given depth / energy bin / detector region."""
    return cms.PSet(
        depth=cms.double(depth),
        minEnergy=cms.double(min_energy),
        maxEnergy=cms.double(max_energy),
        endcap=cms.bool(endcap),
        minTime=cms.double(min_time),
        maxTime=cms.double(max_time)
    )


# (depth, minEnergy, maxEnergy, endcap, minTime, maxTime), one row per cut of
# the original configuration, in the original order.
# NOTE(review): the (depth=1, endcap=True) row of the 1-2 GeV bin uses
# maxTime=16 while every sibling uses 15, and the depth=1 rows of the 2-5 GeV
# bin use minTime=-20 while depths 2-3 use -15; both reproduced verbatim from
# the original -- confirm whether they are intentional.
_CUTS = [
    # 0-1 GeV
    (1.0, 0.0, 1.0, False, -30., 30.),
    (1.0, 0.0, 1.0, True,  -30., 30.),
    (2.0, 0.0, 1.0, False, -30., 30.),
    (2.0, 0.0, 1.0, True,  -30., 30.),
    (3.0, 0.0, 1.0, False, -30., 30.),
    (3.0, 0.0, 1.0, True,  -30., 30.),
    # 1-2 GeV
    (1.0, 1.0, 2.0, False, -20., 15.),
    (1.0, 1.0, 2.0, True,  -20., 16.),
    (2.0, 1.0, 2.0, False, -20., 15.),
    (2.0, 1.0, 2.0, True,  -20., 15.),
    (3.0, 1.0, 2.0, False, -20., 15.),
    (3.0, 1.0, 2.0, True,  -20., 15.),
    # 2-5 GeV
    (1.0, 2.0, 5.0, False, -20., 25.),
    (1.0, 2.0, 5.0, True,  -20., 25.),
    (2.0, 2.0, 5.0, False, -15., 25.),
    (2.0, 2.0, 5.0, True,  -15., 25.),
    (3.0, 2.0, 5.0, False, -15., 25.),
    (3.0, 2.0, 5.0, True,  -15., 25.),
    # above 5 GeV
    (1.0, 5.0, 9999999.0, False, -5., 20.),
    (1.0, 5.0, 9999999.0, True,  -5., 20.),
    (2.0, 5.0, 9999999.0, False, -5., 20.),
    (2.0, 5.0, 9999999.0, True,  -5., 20.),
    (3.0, 5.0, 9999999.0, False, -5., 20.),
    (3.0, 5.0, 9999999.0, True,  -5., 20.),
]

particleFlowClusterHBHETimeSelected = cms.EDProducer(
    "PFClusterTimeSelector",
    src=cms.InputTag('particleFlowClusterHBHE'),
    cuts=cms.VPSet(*(_timing_cut(*row) for row in _CUTS))
)
| 31.892157
| 53
| 0.456963
| 707
| 6,506
| 4.205092
| 0.050919
| 0.363269
| 0.096872
| 0.12109
| 0.947864
| 0.947864
| 0.947864
| 0.947864
| 0.947864
| 0.947864
| 0
| 0.067313
| 0.397172
| 6,506
| 203
| 54
| 32.049261
| 0.690719
| 0
| 0
| 0.954774
| 0
| 0
| 0.006763
| 0.006763
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005025
| 0
| 0.005025
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9080a3709d8f2c22a3f8dfed3e48c26f39cfefe5
| 7,695
|
py
|
Python
|
survae/tests/nn/nets/autoregressive/made.py
|
alisiahkoohi/survae_flows
|
e1747b05524c7ab540a211ed360ab3e67bc3e96d
|
[
"MIT"
] | 262
|
2020-07-05T20:57:44.000Z
|
2022-03-28T02:24:43.000Z
|
survae/tests/nn/nets/autoregressive/made.py
|
alisiahkoohi/survae_flows
|
e1747b05524c7ab540a211ed360ab3e67bc3e96d
|
[
"MIT"
] | 17
|
2020-08-15T05:43:34.000Z
|
2022-01-31T12:24:21.000Z
|
survae/tests/nn/nets/autoregressive/made.py
|
alisiahkoohi/survae_flows
|
e1747b05524c7ab540a211ed360ab3e67bc3e96d
|
[
"MIT"
] | 35
|
2020-08-24T06:55:37.000Z
|
2022-02-11T05:17:58.000Z
|
import torch
import torchtestcase
import unittest
import copy
from survae.nn.layers.autoregressive import MaskedLinear
from survae.nn.nets.autoregressive import MADE, AgnosticMADE
class MADETest(torchtestcase.TorchTestCase):
    """Unit tests for the MADE network (sequential and random masks)."""

    @staticmethod
    def _composed_mask(model):
        """Product of all MaskedLinear masks in order, binarised to 0/1."""
        composed = None
        for layer in model.modules():
            if not isinstance(layer, MaskedLinear):
                continue
            composed = layer.mask if composed is None else layer.mask @ composed
        return (composed > 0).float()

    def test_shape(self):
        batch_size, features, num_params = 16, 10, 3
        hidden_features = [50] * 5
        inputs = torch.randn(batch_size, features)
        for random_order, random_mask in [(False, False), (False, True),
                                          (True, False), (True, True)]:
            with self.subTest(random_order=random_order,
                              random_mask=random_mask):
                model = MADE(
                    features=features,
                    num_params=num_params,
                    hidden_features=hidden_features,
                    random_order=random_order,
                    random_mask=random_mask,
                )
                outputs = model(inputs)
                # Output is (batch, features, params).
                self.assertEqual(outputs.dim(), 3)
                self.assertEqual(outputs.shape[0], batch_size)
                self.assertEqual(outputs.shape[1], features)
                self.assertEqual(outputs.shape[2], num_params)

    def test_total_mask_sequential(self):
        features = 10
        model = MADE(
            features=features,
            num_params=1,
            hidden_features=[50] * 5,
            random_order=False,
            random_mask=False,
        )
        # Sequential ordering must give exactly the strict lower triangle.
        reference = torch.tril(torch.ones([features, features]), -1)
        self.assertEqual(self._composed_mask(model), reference)

    def test_total_mask_random(self):
        features = 10
        model = MADE(
            features=features,
            num_params=1,
            hidden_features=[50] * 5,
            random_order=False,
            random_mask=True,
        )
        # Random masks may drop connections, but the upper triangle
        # (including the diagonal) must stay empty.
        total_mask = self._composed_mask(model)
        self.assertEqual(torch.triu(total_mask),
                         torch.zeros([features, features]))

    def test_autoregressive_type_A(self):
        batch_size, features, num_params = 16, 10, 3
        x = torch.randn(batch_size, features)
        x_altered = copy.deepcopy(x)
        x_altered[:,2] += 100.0  # Alter feature number 2
        for random_mask in [True, False]:
            with self.subTest(random_mask=random_mask):
                module = MADE(
                    features=features,
                    num_params=num_params,
                    hidden_features=[50] * 2,
                    random_order=False,
                    random_mask=random_mask,
                )
                y = module(x)
                y_altered = module(x_altered)
                # Outputs up to (and including) feature 2 must be untouched...
                self.assertEqual(y[:,:3], y_altered[:,:3])
                # ...while every later output must change.
                self.assertFalse((y[:,3:] == y_altered[:,3:]).view(-1).all())
class AgnosticMADETest(torchtestcase.TorchTestCase):
    """Unit tests for the order/connectivity-agnostic MADE variant."""

    def test_shape(self):
        batch_size, features, num_params = 16, 10, 3
        inputs = torch.randn(batch_size, features)
        for order_agnostic, connect_agnostic in [(False, False), (False, True),
                                                 (True, False), (True, True)]:
            with self.subTest(order_agnostic=order_agnostic,
                              connect_agnostic=connect_agnostic):
                model = AgnosticMADE(
                    features=features,
                    num_params=num_params,
                    hidden_features=[50] * 5,
                    order_agnostic=order_agnostic,
                    connect_agnostic=connect_agnostic,
                    num_masks=16,
                )
                outputs = model(inputs)
                # Output is (batch, features, params).
                self.assertEqual(outputs.dim(), 3)
                self.assertEqual(outputs.shape[0], batch_size)
                self.assertEqual(outputs.shape[1], features)
                self.assertEqual(outputs.shape[2], num_params)

    def test_autoregressive_type_A(self):
        batch_size, features, num_params = 16, 10, 3
        x = torch.randn(batch_size, features)
        x_altered = copy.deepcopy(x)
        x_altered[:,2] += 100.0  # Alter feature number 2
        for connect_agnostic in [True, False]:
            with self.subTest(connect_agnostic=connect_agnostic):
                module = AgnosticMADE(
                    features=features,
                    num_params=num_params,
                    hidden_features=[50] * 2,
                    order_agnostic=False,
                    connect_agnostic=connect_agnostic,
                    num_masks=2,
                )
                # The module cycles its masks on every call, so an extra call
                # is needed to realign mask 0 for the altered input.
                y = module(x)           # Call with mask 0, mask updated to 1
                _ = module(x)           # Call with mask 1, mask updated to 0
                y_altered = module(x_altered)  # Call with mask 0 again
                # Outputs up to (and including) feature 2 must be untouched...
                self.assertEqual(y[:,:3], y_altered[:,:3])
                # ...while every later output must change.
                self.assertFalse((y[:,3:] == y_altered[:,3:]).view(-1).all())

    def test_connect_agnostic(self):
        batch_size, features, num_params = 16, 10, 3
        x = torch.randn(batch_size, features)
        x_altered = copy.deepcopy(x)
        x_altered[:,2] += 100.0  # Alter feature number 2
        for order_agnostic, connect_agnostic in [(False, True), (True, False),
                                                 (True, True)]:
            with self.subTest(order_agnostic=order_agnostic,
                              connect_agnostic=connect_agnostic):
                module = AgnosticMADE(
                    features=features,
                    num_params=num_params,
                    hidden_features=[50] * 2,
                    order_agnostic=order_agnostic,
                    connect_agnostic=connect_agnostic,
                    num_masks=2,
                )
                y = module(x)        # Call with mask 0, mask updated to 1
                y_mask1 = module(x)  # Call with mask 1, mask updated to 0
                y_mask0 = module(x)  # Call with mask 0, mask updated to 1
                # Same mask => identical outputs.
                self.assertTrue((y == y_mask0).view(-1).all())
                # Different mask => at least some outputs differ.
                self.assertTrue((y != y_mask1).view(-1).any())
# Allow running this test module directly (python made.py).
if __name__ == '__main__':
    unittest.main()
| 37.173913
| 83
| 0.523327
| 786
| 7,695
| 4.914758
| 0.127226
| 0.053585
| 0.071447
| 0.04349
| 0.84028
| 0.818794
| 0.805333
| 0.790318
| 0.771421
| 0.762879
| 0
| 0.025134
| 0.395062
| 7,695
| 206
| 84
| 37.354369
| 0.804726
| 0.073164
| 0
| 0.741176
| 0
| 0
| 0.001124
| 0
| 0
| 0
| 0
| 0
| 0.094118
| 1
| 0.041176
| false
| 0
| 0.035294
| 0
| 0.088235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
90a93c91f25a50f10124820acbe6f5b2f55ad203
| 31
|
py
|
Python
|
print_h5/__init__.py
|
gmloose/print_h5
|
5951f8e758faa9b0bc97469b93373bcc189676cf
|
[
"MIT"
] | null | null | null |
print_h5/__init__.py
|
gmloose/print_h5
|
5951f8e758faa9b0bc97469b93373bcc189676cf
|
[
"MIT"
] | null | null | null |
print_h5/__init__.py
|
gmloose/print_h5
|
5951f8e758faa9b0bc97469b93373bcc189676cf
|
[
"MIT"
] | null | null | null |
from .print_h5 import print_h5
| 15.5
| 30
| 0.83871
| 6
| 31
| 4
| 0.666667
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 0.129032
| 31
| 1
| 31
| 31
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
90b21184da6a12bb59f68592f072248642874f4a
| 91
|
py
|
Python
|
tests/bytecode/mp-tests/list2.py
|
LabAixBidouille/micropython
|
11aa6ba456287d6c80598a7ebbebd2887ce8f5a2
|
[
"MIT"
] | 303
|
2015-07-11T17:12:55.000Z
|
2018-01-08T03:02:37.000Z
|
tests/bytecode/mp-tests/list2.py
|
LabAixBidouille/micropython
|
11aa6ba456287d6c80598a7ebbebd2887ce8f5a2
|
[
"MIT"
] | 13
|
2016-05-12T16:51:22.000Z
|
2018-01-10T22:33:25.000Z
|
tests/bytecode/mp-tests/list2.py
|
LabAixBidouille/micropython
|
11aa6ba456287d6c80598a7ebbebd2887ce8f5a2
|
[
"MIT"
] | 26
|
2018-01-18T09:15:33.000Z
|
2022-02-07T13:09:14.000Z
|
# MicroPython bytecode-test fixture: list displays wrapping parenthesised
# names and tuples (note `[(a)]` is a one-element list while `[(a,)]` holds a
# tuple).  Names a, b, c are intentionally unbound -- presumably this file is
# only compiled, never executed, by the test harness (it lives under
# tests/bytecode/); verify before running it standalone.
x = [()]
x = [(a)]
x = [(a,)]
x = [(a)]
x = [(a,)]
x = [a, b]
x = [(a, b)]
x = [(a, b, c)]
| 10.111111
| 15
| 0.208791
| 19
| 91
| 1
| 0.210526
| 0.736842
| 0.631579
| 0.842105
| 0.894737
| 0.894737
| 0.526316
| 0.526316
| 0
| 0
| 0
| 0
| 0.307692
| 91
| 8
| 16
| 11.375
| 0.301587
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
296a47c62a5aec25a932d0aa16dd57b46e89ebb2
| 12,668
|
py
|
Python
|
PROJECT/CS698R-Project-Foraging-in-Replenishing-Patches-7-main/agents/MAB/baselinesForgetfulness.py
|
ShivenTripathi/CS698-Deep-Reinforcement-Learning
|
184f7887cea3065d2bfa4ba05bfb249838c3dab4
|
[
"MIT"
] | null | null | null |
PROJECT/CS698R-Project-Foraging-in-Replenishing-Patches-7-main/agents/MAB/baselinesForgetfulness.py
|
ShivenTripathi/CS698-Deep-Reinforcement-Learning
|
184f7887cea3065d2bfa4ba05bfb249838c3dab4
|
[
"MIT"
] | null | null | null |
PROJECT/CS698R-Project-Foraging-in-Replenishing-Patches-7-main/agents/MAB/baselinesForgetfulness.py
|
ShivenTripathi/CS698-Deep-Reinforcement-Learning
|
184f7887cea3065d2bfa4ba05bfb249838c3dab4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8

# Notebook export: multi-armed-bandit baselines with forgetfulness for the
# foraging-replenishing-patches environment.
# NOTE(review): indentation reconstructed from a whitespace-stripped dump.

# In[2]:

# Requires IPython -- get_ipython() is undefined under plain `python`.
get_ipython().system('pip install --upgrade --editable ../gym-env')

# In[1]:

import gym
import gym_env
import matplotlib.pyplot as plt
from tqdm import tqdm
import numpy as np

# Smoke-test the environment with random actions until done or 300 steps.
env = gym.make('foraging-replenishing-patches-v0')
env.reset()
for i in range(300):
    action = np.random.randint(9)
    state, reward, done, _ = env.step(action)
    print(action, state, reward, done)
    if done:
        break

# In[2]:
def pureGreedy(env, maxEpisodes, maxTime=300, optimistic=True, forgetDecay=0.5):
    """Greedy bandit agent with forgetting on the foraging environment.

    Always pulls an argmax arm of Q (ties broken uniformly at random).  After
    each pull the whole Q vector is decayed by (1-forgetDecay) except the arm
    just pulled.  Action 8 is the env's "harvest" step taken after each arm
    choice.

    BUG FIX: 'optimistic' was previously dead code -- Q was unconditionally
    reset to zeros right after the optimistic initialisation.  Now the
    10**4 optimistic prior is actually used when optimistic=True.

    Parameters:
        env: gym environment with action_space, reset(), step(), time_elapsed.
        maxEpisodes (int): number of episodes to run.
        maxTime (int): per-episode time budget (env.time_elapsed units).
        optimistic (bool): start Q at 10**4 instead of 0 to force exploration.
        forgetDecay (float): forgetting rate in [0, 1).
    Returns:
        (Q_est, rewards): per-episode Q snapshots (maxEpisodes x n_arms) and
        the final reward of each episode.
    """
    n_arms = env.action_space.n - 1
    Q_est = np.zeros((maxEpisodes, n_arms))
    rewards = []
    if optimistic:
        Q = np.ones(n_arms) * 10**4
    else:
        Q = np.zeros(n_arms)
    N = np.zeros(n_arms)
    for i in range(maxEpisodes):
        env.reset()
        reward = 0
        while env.time_elapsed < maxTime:
            # Greedy arm; random tie-break among near-maximal entries.
            a = np.random.choice(np.flatnonzero(np.isclose(Q, np.max(Q))))
            s, r, terminal, info = env.step(a)
            s, r, terminal, info = env.step(8)
            N[a] += 1
            # Incremental mean of the reward *increment* (r - previous reward).
            Q[a] += (r - reward - Q[a]) / N[a]
            # Decay every arm except the one just pulled.
            Q *= (1 - forgetDecay)
            Q[a] /= (1 - forgetDecay)
            reward = r
        rewards.append(reward)
        Q_est[i] = Q
    return Q_est, rewards
# In[11]:

# Single optimistic pure-greedy run.
genv = gym.make('foraging-replenishing-patches-v0')
genv.reset()
_,rs = pureGreedy(env=genv, maxEpisodes=50, optimistic=True)
print(rs)

# In[12]:

# Same, without optimistic initialisation.
genv = gym.make('foraging-replenishing-patches-v0')
genv.reset()
_,rs = pureGreedy(env=genv, maxEpisodes=50, optimistic=False)
print(rs)

# In[15]:

# Sweep forgetDecay over {0.0, 0.2, ..., 0.8}, averaging 5 runs per setting.
# NOTE(review): loop nesting reconstructed from a whitespace-stripped dump;
# per-setting plt.plot assumed inside the sweep loop, legend/show after it.
for i in tqdm(range(0,10,2)):
    RS = []
    for _ in range(5):
        genv = gym.make('foraging-replenishing-patches-v0')
        genv.reset()
        _,rs = pureGreedy(env=genv, maxEpisodes=200, optimistic=False, forgetDecay=i/10)
        RS.append(rs)
    plt.rcParams["figure.figsize"] = (20,10)
    plt.title("Rewards across Time Steps for Agents in Multi Armed Bandits on varying forget decay")
    plt.xlabel('Episodes')
    plt.ylabel('Reward')
    plt.plot(np.arange(200), np.mean(RS,axis=0), label=str(i/10))
plt.legend()
plt.show()

# In[3]:
def pureExplore(env, maxEpisodes, maxTime=300, forgetDecay=0.5):
    """Uniform-random bandit agent with forgetting.

    Picks an arm uniformly at random each step, follows it with the env's
    harvest action (8), and maintains a forgetting Q estimate identical in
    form to the greedy agent's.  Returns per-episode Q snapshots and the
    final reward of each episode.
    """
    n_arms = env.action_space.n - 1
    Q_est = np.zeros((maxEpisodes, n_arms))
    Q = np.zeros(n_arms)
    N = np.zeros(n_arms)
    rewards = []
    for episode in range(maxEpisodes):
        env.reset()
        last_reward = 0
        while env.time_elapsed < maxTime:
            arm = np.random.randint(n_arms)
            s, r, terminal, info = env.step(arm)
            s, r, terminal, info = env.step(8)
            N[arm] += 1
            # Incremental mean of the reward increment since the last step.
            Q[arm] += (r - last_reward - Q[arm]) / N[arm]
            # Decay every arm except the one just pulled.
            Q *= (1 - forgetDecay)
            Q[arm] /= (1 - forgetDecay)
            last_reward = r
        rewards.append(last_reward)
        Q_est[episode] = Q
    return Q_est, rewards
# In[17]:

# Single pure-explore run.
genv = gym.make('foraging-replenishing-patches-v0')
genv.reset()
_,rs = pureExplore(env=genv, maxEpisodes=50)
print(rs)

# In[18]:

# forgetDecay sweep for the pure-explore agent (5 runs per setting).
# NOTE(review): loop nesting reconstructed from a whitespace-stripped dump.
for i in tqdm(range(0,10,2)):
    RS = []
    for _ in range(5):
        genv = gym.make('foraging-replenishing-patches-v0')
        genv.reset()
        _,rs = pureExplore(env=genv, maxEpisodes=200, maxTime=300, forgetDecay=i/10)
        RS.append(rs)
    plt.rcParams["figure.figsize"] = (20,10)
    plt.title("Rewards across Time Steps for Agents in Multi Armed Bandits on varying forget decay")
    plt.xlabel('Episodes')
    plt.ylabel('Reward')
    plt.plot(np.arange(200), np.mean(RS,axis=0), label=str(i/10))
plt.legend()
plt.show()

# In[4]:
def epsGreedy(env, eps, maxEpisodes, maxTime=300, forgetDecay=0.5):
    """Fixed-epsilon greedy bandit agent with forgetting.

    With probability eps pulls a uniformly random arm, otherwise a greedy
    argmax arm (ties broken uniformly).  Each pull is followed by the env's
    harvest action (8).  Returns per-episode Q snapshots and the final
    reward of each episode.
    """
    n_arms = env.action_space.n - 1
    Q_est = np.zeros((maxEpisodes, n_arms))
    Q = np.zeros(n_arms)
    N = np.zeros(n_arms)
    rewards = []
    for episode in range(maxEpisodes):
        env.reset()
        last_reward = 0
        while env.time_elapsed < maxTime:
            # Explore with probability eps, otherwise exploit.
            if np.random.rand(1) < eps:
                arm = np.random.randint(n_arms)
            else:
                arm = np.random.choice(np.flatnonzero(np.isclose(Q, np.max(Q))))
            s, r, terminal, info = env.step(arm)
            s, r, terminal, info = env.step(8)
            N[arm] += 1
            # Incremental mean of the reward increment since the last step.
            Q[arm] += (r - last_reward - Q[arm]) / N[arm]
            # Decay every arm except the one just pulled.
            Q *= (1 - forgetDecay)
            Q[arm] /= (1 - forgetDecay)
            last_reward = r
        rewards.append(last_reward)
        Q_est[episode] = Q
    return Q_est, rewards
# In[20]:

# Single epsilon-greedy run with eps=0.3.
genv = gym.make('foraging-replenishing-patches-v0')
genv.reset()
_,rs = epsGreedy(env=genv, eps=0.3, maxEpisodes=50)
print(rs)

# In[3]:
def decEpsGreedy(env, maxEpisodes, eps_start, eps_end, decayType='exponential',
                 maxTime=300, decayTill=200, forgetDecay=0.5):
    """Epsilon-greedy bandit agent with a decaying epsilon schedule.

    Epsilon decays from eps_start to eps_end over the first 'decayTill'
    episodes (linearly or exponentially) and stays constant afterwards.
    Each arm pull is followed by the env's harvest action (8); Q uses the
    same forgetting update as the sibling agents.

    BUG FIX: the forgetting renormalisation previously divided Q[s] --
    indexed by the environment *state* returned from step() -- instead of
    Q[a], the arm just pulled; every sibling agent uses Q[a].

    Parameters:
        env: gym environment with action_space, reset(), step(), time_elapsed.
        maxEpisodes (int): number of episodes.
        eps_start, eps_end (float): schedule endpoints.
        decayType (str): 'linear' or anything else for exponential.
        maxTime (int): per-episode time budget.
        decayTill (int): episode index at which the schedule bottoms out.
        forgetDecay (float): forgetting rate in [0, 1].
    Returns:
        (Q_est, rewards): per-episode Q snapshots and final episode rewards.
    """
    n_arms = env.action_space.n - 1
    Q_est = np.zeros((maxEpisodes, n_arms))
    rewards = []
    Q = np.zeros(n_arms)
    N = np.zeros(n_arms)
    for i in range(maxEpisodes):
        env.reset()
        reward = 0
        # eps depends only on the episode index -- hoisted out of the
        # step loop (was recomputed every step).
        if decayType == 'linear':
            eps = eps_start + min(decayTill - 1, i) * (eps_end - eps_start) / (decayTill - 1)
        else:
            eps = eps_start * ((eps_end / eps_start) ** (min(decayTill - 1, i) / (decayTill - 1)))
        while env.time_elapsed < maxTime:
            if np.random.rand(1) < eps:
                a = np.random.randint(n_arms)
            else:
                a = np.random.choice(np.flatnonzero(np.isclose(Q, np.max(Q))))
            s, r, terminal, info = env.step(a)
            s, r, terminal, info = env.step(8)
            N[a] += 1
            Q[a] += (r - reward - Q[a]) / N[a]
            # 1e-9 guards against division by zero when forgetDecay == 1.
            Q *= (1 - forgetDecay + 1e-9)
            Q[a] /= (1 - forgetDecay + 1e-9)  # was Q[s]: state-indexed bug
            reward = r
        rewards.append(reward)
        Q_est[i] = Q
    return Q_est, rewards
# In[22]:

# Linear epsilon decay, 1 -> 0.1 over 25 episodes.
genv = gym.make('foraging-replenishing-patches-v0')
genv.reset()
_,rs = decEpsGreedy(env=genv, maxEpisodes=50, eps_start=1, eps_end=0.1, decayType='linear', decayTill=25)
print(rs)

# In[23]:

# Exponential epsilon decay, 1 -> 0.1 over 10 episodes.
genv = gym.make('foraging-replenishing-patches-v0')
genv.reset()
_,rs = decEpsGreedy(env=genv, maxEpisodes=50, eps_start=1, eps_end=0.1, decayType='exponential', decayTill=10)
print(rs)

# In[6]:

# forgetDecay sweep on block_type=3; the i==0 curve keeps its raw label while
# the others are labelled 1 - i/10.
# NOTE(review): loop nesting reconstructed from a whitespace-stripped dump.
for i in tqdm(range(0,10,2)):
    RS = []
    for _ in range(5):
        genv = gym.make('foraging-replenishing-patches-v0', block_type=3)
        genv.reset()
        _,rs = decEpsGreedy(env=genv, maxEpisodes=250, eps_start=1, eps_end=0.1, decayType='exponential', maxTime=300, decayTill=125, forgetDecay=(i/10))
        RS.append(rs)
    plt.rcParams["figure.figsize"] = (20,10)
    plt.title("Rewards across Time Steps for Agents in Multi Armed Bandits on varying forget decay")
    plt.xlabel('Episodes')
    plt.ylabel('Reward')
    if i==0:
        plt.plot(np.arange(250), np.mean(RS,axis=0), label=str(i/10))
    else:
        plt.plot(np.arange(250), np.mean(RS,axis=0), label=str(np.round(1-i/10,1)))
plt.legend()
plt.show()

# In[25]:
def UCB(env, maxEpisodes, c, maxTime=300, forgetDecay=0.5):
    """Upper-confidence-bound bandit agent with forgetting.

    Within each episode the first n_arms pulls play every arm once; after
    that the arm maximising Q + c*sqrt(log(episode+1)/N) is chosen, ties
    broken uniformly.  Each pull is followed by the env's harvest action (8).
    Returns per-episode Q snapshots and final episode rewards.
    """
    n_arms = env.action_space.n - 1
    Q_est = np.zeros((maxEpisodes, n_arms))
    Q = np.zeros(n_arms)
    N = np.zeros(n_arms)
    rewards = []
    for episode in range(maxEpisodes):
        env.reset()
        last_reward = 0
        pulls = 0
        while env.time_elapsed < maxTime:
            if pulls < n_arms:
                # Warm start: play each arm once before trusting the bound.
                arm = pulls
            else:
                bound = Q + c * np.sqrt(np.log(episode + 1) / N)
                arm = np.random.choice(
                    np.flatnonzero(np.isclose(bound, np.max(bound))))
            s, r, terminal, info = env.step(arm)
            pulls += 1
            s, r, terminal, info = env.step(8)
            N[arm] += 1
            # Incremental mean of the reward increment since the last step.
            Q[arm] += (r - last_reward - Q[arm]) / N[arm]
            # Decay every arm except the one just pulled.
            Q *= (1 - forgetDecay)
            Q[arm] /= (1 - forgetDecay)
            last_reward = r
        rewards.append(last_reward)
        Q_est[episode] = Q
    return Q_est, rewards
# In[26]:

# Single UCB run with strong forgetting.
genv = gym.make('foraging-replenishing-patches-v0')
genv.reset()
_,rs = UCB(env=genv, maxEpisodes=50, c=0.3, forgetDecay=0.7)
print(rs)

# In[27]:

# forgetDecay sweep for UCB on block_type=3 (5 runs per setting).
# NOTE(review): loop nesting reconstructed from a whitespace-stripped dump.
for i in tqdm(range(0,10,2)):
    RS = []
    for _ in range(5):
        genv = gym.make('foraging-replenishing-patches-v0', block_type=3)
        genv.reset()
        _,rs = UCB(env=genv, maxEpisodes=200, c=0.3, maxTime=300, forgetDecay=i/10)
        RS.append(rs)
    plt.rcParams["figure.figsize"] = (20,10)
    plt.title("Rewards across Time Steps for Agents in Multi Armed Bandits on varying forget decay")
    plt.xlabel('Episodes')
    plt.ylabel('Reward')
    plt.plot(np.arange(200), np.mean(RS,axis=0), label=str(i/10))
plt.legend()
plt.show()

# In[29]:
def AvgRewardsNGaussianBandits(N=50, episodes=10**3, block_type=3, decayTill=200):
    """Run each baseline agent N times and plot its mean per-episode reward.

    NOTE(review): the name says "Gaussian Bandits" but the environment used
    is foraging-replenishing-patches -- presumably copied from an earlier
    notebook; confirm.  'skipFirstN' is always 0, so nothing is skipped.
    """
    skipFirstN = 0
    pureGreedy_R = []
    pureExplore_R = []
    epsGreedy_R = []
    decExpEpsGreedy_R = []
    decLinEpsGreedy_R = []
    UCB_R = []
    softMax_R = []
    for i in tqdm(range(N)):
        # One shared environment instance per repetition; each agent runs
        # 'episodes' episodes on it in sequence.
        genv = gym.make('foraging-replenishing-patches-v0', block_type=block_type)
        _,rs = pureGreedy(env=genv, maxEpisodes = episodes)
        pureGreedy_R.append(rs)
        _,rs = pureExplore(env=genv, maxEpisodes=episodes)
        pureExplore_R.append(rs)
        _,rs = epsGreedy(env=genv, maxEpisodes=episodes, eps=0.3)
        epsGreedy_R.append(rs)
        _,rs = decEpsGreedy(env=genv, maxEpisodes=episodes, eps_start=1, eps_end=0.1, decayType='exponential', decayTill=decayTill)
        decExpEpsGreedy_R.append(rs)
        _,rs = decEpsGreedy(env=genv, maxEpisodes=episodes, eps_start=1, eps_end=0.1, decayType='linear', decayTill=decayTill)
        decLinEpsGreedy_R.append(rs)
        _,rs = UCB(env=genv, maxEpisodes=episodes, c=0.3)
        UCB_R.append(rs)
        # _,rs = softMax(env=genv, maxEpisodes=episodes, temp_start=10**2, temp_end=0.01)
        # softMax_R.append(rs)
    # Plot the across-repetition mean reward curve of every agent.
    plt.rcParams["figure.figsize"] = (20,10)
    plt.title("Average Rewards across Time Steps for Agents in Multi Armed Bandits")
    plt.xlabel('Episodes')
    plt.ylabel('Average Reward')
    plt.plot(np.arange(episodes-skipFirstN), np.mean(pureGreedy_R,axis=0), label='Pure Greedy')
    plt.plot(np.arange(episodes-skipFirstN), np.mean(pureExplore_R,axis=0), label='Pure Explore')
    plt.plot(np.arange(episodes-skipFirstN), np.mean(epsGreedy_R,axis=0), label='Eps=0.3 Greedy')
    plt.plot(np.arange(episodes-skipFirstN), np.mean(decExpEpsGreedy_R,axis=0), label='Decaying (1->0.1) Exp Eps Greedy')
    plt.plot(np.arange(episodes-skipFirstN), np.mean(decLinEpsGreedy_R,axis=0), label='Decaying (1->0.1) Linear Eps Greedy')
    plt.plot(np.arange(episodes-skipFirstN), np.mean(UCB_R,axis=0), label='UCB')
    # plt.plot(np.arange(episodes), np.mean(softMax_R,axis=0), label='Soft Max')
    plt.legend()
    plt.show()
# In[27]:

# decaying epsilon greedy, but decays till 200 episodes and constant afterwards
AvgRewardsNGaussianBandits(N=1, episodes=1000, block_type=1)

# In[32]:

#decaying epsilon greedy, but decays to final value of 0.1 till the last episode
AvgRewardsNGaussianBandits(N=1, episodes=1000, block_type=1, decayTill=1000)

# In[33]:

#decaying epsilon greedy, but decays to final value of 0.1 till the last episode
#also averaging
AvgRewardsNGaussianBandits(N=25, episodes=200, block_type=1, decayTill=200)

# In[12]:

#decaying epsilon greedy, but decays to final value of 0.1 till the last episode
#also averaging
AvgRewardsNGaussianBandits(N=25, episodes=200, block_type=2)

# In[13]:

AvgRewardsNGaussianBandits(N=25, episodes=200, block_type=3)

# In[24]:
def RegretNGaussianBandits(N=50, episodes=10**3):
    """Run each baseline agent N times and plot cumulative reward curves.

    NOTE(review): despite the name and axis labels, what is plotted is the
    mean *cumulative reward* (np.cumsum of rewards), not regret relative to
    an optimal arm -- confirm intent.  Environment is
    foraging-replenishing-patches, not Gaussian bandits.
    """
    pureGreedy_R = []
    pureExplore_R = []
    epsGreedy_R = []
    decEpsGreedy_R = []
    UCB_R = []
    softMax_R = []
    for i in tqdm(range(N)):
        # One shared environment instance per repetition.
        genv = gym.make('foraging-replenishing-patches-v0')
        _,rs = pureGreedy(env=genv, maxEpisodes = episodes)
        pureGreedy_R.append(rs)
        _,rs = pureExplore(env=genv, maxEpisodes=episodes)
        pureExplore_R.append(rs)
        _,rs = epsGreedy(env=genv, maxEpisodes=episodes, eps=0.3)
        epsGreedy_R.append(rs)
        _,rs = decEpsGreedy(env=genv, maxEpisodes=episodes, eps_start=1, eps_end=0.0)
        decEpsGreedy_R.append(rs)
        _,rs = UCB(env=genv, maxEpisodes=episodes, c=0.5)
        UCB_R.append(rs)
        # _,rs = softMax(env=genv, maxEpisodes=episodes, temp_start=10**2, temp_end=0.01)
        # softMax_R.append(rs)
    plt.rcParams["figure.figsize"] = (20,10)
    plt.title("Regret across Time Steps for Agents in Gaussian Bandits")
    plt.xlabel('Episodes')
    plt.ylabel('Cumulative Regret')
    plt.plot(np.arange(episodes), np.mean(np.cumsum(pureGreedy_R,axis=1),axis=0), label='Pure Greedy')
    plt.plot(np.arange(episodes), np.mean(np.cumsum(pureExplore_R,axis=1),axis=0), label='Pure Explore')
    plt.plot(np.arange(episodes), np.mean(np.cumsum(epsGreedy_R,axis=1),axis=0), label='Eps Greedy')
    plt.plot(np.arange(episodes), np.mean(np.cumsum(decEpsGreedy_R,axis=1),axis=0), label='Decaying Eps Greedy')
    plt.plot(np.arange(episodes), np.mean(np.cumsum(UCB_R,axis=1),axis=0), label='UCB')
    # plt.plot(np.arange(episodes), np.mean(np.cumsum(softMax_R,axis=1),axis=0), label='Soft Max')
    plt.legend()
    plt.show()
# Cumulative-reward comparison over 10 repetitions of 200 episodes.
RegretNGaussianBandits(N=10, episodes=200)

# In[ ]:
| 27.903084
| 149
| 0.64809
| 1,944
| 12,668
| 4.149177
| 0.097737
| 0.020828
| 0.051327
| 0.037193
| 0.83883
| 0.812918
| 0.778825
| 0.755889
| 0.731217
| 0.69452
| 0
| 0.040767
| 0.188664
| 12,668
| 453
| 150
| 27.96468
| 0.744016
| 0.074755
| 0
| 0.72069
| 0
| 0
| 0.116126
| 0.038366
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024138
| false
| 0
| 0.017241
| 0
| 0.058621
| 0.027586
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
297ef3a68fd3aa18348a3056dd5c9595cdc1c7b3
| 174
|
py
|
Python
|
__init__.py
|
JiaZhou-PU/TEAL
|
d3e08d213ecea6b2f9738b746da224d36fbe41c8
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
JiaZhou-PU/TEAL
|
d3e08d213ecea6b2f9738b746da224d36fbe41c8
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
JiaZhou-PU/TEAL
|
d3e08d213ecea6b2f9738b746da224d36fbe41c8
|
[
"Apache-2.0"
] | null | null | null |
from TEAL.src import Amortization
from TEAL.src import CashFlow_ExtMod
from TEAL.src import CashFlows
from TEAL.src import CashFlowUser
from TEAL.src import main as CashFlow
| 29
| 37
| 0.844828
| 28
| 174
| 5.214286
| 0.392857
| 0.273973
| 0.376712
| 0.582192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126437
| 174
| 5
| 38
| 34.8
| 0.960526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
46249cf61de2285121d07fa8b0f6ad1cff0ba4a9
| 59,462
|
py
|
Python
|
ciscomeapi/_me_api.py
|
anuzellig/ciscomeapi
|
78961ce6ac8ef9333fb2956fc229ec2f47928520
|
[
"MIT"
] | 1
|
2021-10-20T16:07:26.000Z
|
2021-10-20T16:07:26.000Z
|
ciscomeapi/_me_api.py
|
anuzellig/cisco-mobility-express-python-api
|
78961ce6ac8ef9333fb2956fc229ec2f47928520
|
[
"MIT"
] | 5
|
2021-06-02T00:39:48.000Z
|
2021-10-19T13:40:15.000Z
|
ciscomeapi/_me_api.py
|
anuzellig/cisco-mobility-express-python-api
|
78961ce6ac8ef9333fb2956fc229ec2f47928520
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
_me_api
~~~~~~~~~~~~~~~~~~~

Autogenerated, do not edit.

Thin wrappers that forward to :func:`helpers.request` with a fixed endpoint
path.  Each wrapper shares the same shape: (me, method, extra_params) ->
decoded response dict.
"""
from .helpers import request

# NOTE(review): every wrapper uses a shared mutable default `extra_params={}`.
# It is only passed through here, never mutated locally, so the usual aliasing
# hazard depends on what `request` does with it -- worth confirming upstream,
# but left unchanged since this file is autogenerated.


def _acldata(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/screens/webui/resource/acldata.json", extra_params=extra_params)


def _aclnamelist(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/screens/webui/resource/aclnamelist.json", extra_params=extra_params)


def _add_ap_group(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/screens/webui/resource/add_ap_group.json", extra_params=extra_params)


def _add_mdns_profile(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/screens/webui/resource/add_mdns_profile.json", extra_params=extra_params)


def _add_mdns_service(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/screens/webui/resource/add_mdns_service.json", extra_params=extra_params)


def _addMeshEthrIntfVlan(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/screens/webui/resource/addMeshEthrIntfVlan.json", extra_params=extra_params)


def _addMeshRadiusAuthentication(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/screens/webui/resource/addMeshRadiusAuthentication.json", extra_params=extra_params)


def _addSubscription(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/screens/webui/resource/addSubscription.json", extra_params=extra_params)


def _adminaccountcreate(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/screens/webui/resource/adminaccountcreate.jsp", extra_params=extra_params)


def _adminaccountdata(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/screens/webui/resource/adminaccountdata.json", extra_params=extra_params)


def _adminaccountdelete(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/screens/webui/resource/adminaccountdelete.jsp", extra_params=extra_params)


def _adminaccountupdate(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/screens/webui/resource/adminaccountupdate.jsp", extra_params=extra_params)


def _ap_attributes_slot0(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/data/ap-attributes-slot0.html", extra_params=extra_params)


def _ap_attributes_slot1(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/data/ap-attributes-slot1.html", extra_params=extra_params)


def _ap_chanwidth_distribution_slot0(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/data/ap_chanwidth_distribution_slot0.html", extra_params=extra_params)


def _ap_chanwidth_distribution_slot1(me, method: str, extra_params={}) -> dict:
    return request(me, method, "/data/ap_chanwidth_distribution_slot1.html", extra_params=extra_params)
def _ap_product_id0(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/ap_product_id0.html", extra_params=extra_params)
def _ap_product_id1(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/ap_product_id1.html", extra_params=extra_params)
def _ap_slot0_chanutil_distrib(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/ap_slot0_chanutil_distrib.html", extra_params=extra_params)
def _ap_slot0_confrate_distrib(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/ap_slot0_confrate_distrib.html", extra_params=extra_params)
def _ap_slot1_chanutil_distrib(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/ap_slot1_chanutil_distrib.html", extra_params=extra_params)
def _ap_slot1_confrate_distrib(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/ap_slot1_confrate_distrib.html", extra_params=extra_params)
def _apdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/apdata.json", extra_params=extra_params)
def _apgroupdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/apgroupdata.json", extra_params=extra_params)
def _apJoinStats(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/apJoinStats.json", extra_params=extra_params)
def _applicationdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/applicationdata.json", extra_params=extra_params)
def _appruledata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/appruledata.json", extra_params=extra_params)
def _apps(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/apps.html", extra_params=extra_params)
def _aps(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/aps.html", extra_params=extra_params)
def _avc_profile_status(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/avc_profile_status.html", extra_params=extra_params)
def _avcList(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/avcList.json", extra_params=extra_params)
def _avcProfile(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/avcProfile.json", extra_params=extra_params)
def _avcProfileConf(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/avcProfileConf.json", extra_params=extra_params)
def _caleadataget(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/caleadataget.json", extra_params=extra_params)
def _caleadataset(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/caleadataset.json", extra_params=extra_params)
def _capwappost(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/capwappost.json", extra_params=extra_params)
def _cellularlistget(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/cellularlistget.json", extra_params=extra_params)
def _channelnumber(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/channelnumber.json", extra_params=extra_params)
def _client_conn_debug_data(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/client_conn_debug_data.html", extra_params=extra_params)
def _client_conn_debug_start(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/client_conn_debug_start.html", extra_params=extra_params)
def _client_conn_debug_status(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/client_conn_debug_status.html", extra_params=extra_params)
def _client_conn_debug_stop(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/client_conn_debug_stop.html", extra_params=extra_params)
def _client_connected_protocols(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/client_connected_protocols.html", extra_params=extra_params)
def _client_connection_score(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/client_connection_score.html", extra_params=extra_params)
def _client_connection_speed(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/client_connection_speed.html", extra_params=extra_params)
def _client_mac(me, method: str, _deviceIPv4Address=None, extra_params={}) -> dict:
parameter1 = {"deviceIPv4Address": _deviceIPv4Address} if _deviceIPv4Address else {}
return request(me, method, "/data/client_mac.html", parameter1, extra_params=extra_params)
def _client_proto_distribution(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/client_proto_distribution.html", extra_params=extra_params)
def _client_ss_distribution(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/client_ss_distribution.html", extra_params=extra_params)
def _client_table(me, method: str, _columns=None, extra_params={}) -> dict:
parameter1 = {"columns": _columns} if _columns else {}
return request(me, method, "/data/client-table.html", parameter1, extra_params=extra_params)
def _clientDetails(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/clientDetails.json", extra_params=extra_params)
def _clients(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/clients.html", extra_params=extra_params)
def _clrCounters(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/clrCounters.json", extra_params=extra_params)
def _cmxdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/cmxdata.json", extra_params=extra_params)
def _configpolldata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/configpolldata.json", extra_params=extra_params)
def _convertcapwap(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/convertcapwap.json", extra_params=extra_params)
def _convertme(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/convertme.json", extra_params=extra_params)
def _crashfiledata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/crashfiledata.json", extra_params=extra_params)
def _crashfilestatus(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/crashfilestatus.json", extra_params=extra_params)
def _createacl(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/createacl.json", extra_params=extra_params)
def _createRadiusAccounting(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/createRadiusAccounting.json", extra_params=extra_params)
def _createRadiusAuthentication(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/createRadiusAuthentication.json", extra_params=extra_params)
def _createTacAccounting(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/createTacAccounting.json", extra_params=extra_params)
def _createTacAuthentication(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/createTacAuthentication.json", extra_params=extra_params)
def _createTacAuthurization(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/createTacAuthurization.json", extra_params=extra_params)
def _createTLSRoute(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/createTLSRoute.json", extra_params=extra_params)
def _createVlanMapData(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/createVlanMapData.json", extra_params=extra_params)
def _createWlanRadiusAcct(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/createWlanRadiusAcct.json", extra_params=extra_params)
def _createWlanRadiusAuth(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/createWlanRadiusAuth.json", extra_params=extra_params)
def _dashboard_view_settings(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/dashboard_view_settings.html", extra_params=extra_params)
def _delete_ap_group(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/delete_ap_group.json", extra_params=extra_params)
def _delete_mdns_profile(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/delete_mdns_profile.json", extra_params=extra_params)
def _delete_mdns_service(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/delete_mdns_service.json", extra_params=extra_params)
def _delete_mdns_service_from_profile(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/delete_mdns_service_from_profile.json", extra_params=extra_params)
def _deleteacl(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteacl.json", extra_params=extra_params)
def _deleteApPskUrl(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteApPskUrl.json", extra_params=extra_params)
def _deleteauthsurviveclients(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteauthsurviveclients.json", extra_params=extra_params)
def _deleteConfigParams(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteConfigParams.json", extra_params=extra_params)
def _deletelogging(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deletelogging.json", extra_params=extra_params)
def _deleteMeshRadiusAuthentication(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteMeshRadiusAuthentication.json", extra_params=extra_params)
def _deleteopendnsprofile(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteopendnsprofile.json", extra_params=extra_params)
def _deleteRadiusAccounting(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteRadiusAccounting.json", extra_params=extra_params)
def _deleteRadiusAuthentication(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteRadiusAuthentication.json", extra_params=extra_params)
def _deleteSubscription(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteSubscription.json", extra_params=extra_params)
def _deleteTacAccounting(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteTacAccounting.json", extra_params=extra_params)
def _deleteTacAuthentication(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteTacAuthentication.json", extra_params=extra_params)
def _deleteTacAuthurization(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteTacAuthurization.json", extra_params=extra_params)
def _deleteTLSRoute(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteTLSRoute.json", extra_params=extra_params)
def _deleteVlanMapData(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/deleteVlanMapData.json", extra_params=extra_params)
def _delMeshEthrIntfVlan(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/delMeshEthrIntfVlan.json", extra_params=extra_params)
def _dhcpconfigdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/dhcpconfigdata.json", extra_params=extra_params)
def _dhcpcreate(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/dhcpcreate.jsp", extra_params=extra_params)
def _dhcpdelete(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/dhcpdelete.jsp", extra_params=extra_params)
def _dhcpdetails(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/dhcpdetails.json", extra_params=extra_params)
def _dhcpdetailsdelete(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/dhcpdetailsdelete.jsp", extra_params=extra_params)
def _dhcpdetailspost(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/dhcpdetailspost.jsp", extra_params=extra_params)
def _dhcpupdate(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/dhcpupdate.jsp", extra_params=extra_params)
def _dnsserverconfigureurl(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/dnsserverconfigureurl.json", extra_params=extra_params)
def _domainlistget(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/domainlistget.json", extra_params=extra_params)
def _dotllu_list_create(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/dotllu_list_create.json", extra_params=extra_params)
def _edit_ap_group(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/edit_ap_group.json", extra_params=extra_params)
def _export(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/export.json", extra_params=extra_params)
def _exportconfiguration(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/exportconfiguration.json", extra_params=extra_params)
def _exportfilemode(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/exportfilemode.json", extra_params=extra_params)
def _exportstatus(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/exportstatus.json", extra_params=extra_params)
def _fetchacldetails(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchacldetails.json", extra_params=extra_params)
def _fetchglobalAuthdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchglobalAuthdata.json", extra_params=extra_params)
def _fetchglobalparams(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchglobalparams.json", extra_params=extra_params)
def _fetchMeshRadiusAuthenticationdetails(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchMeshRadiusAuthenticationdetails.json", extra_params=extra_params)
def _fetchRadiusAccountingdetails(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchRadiusAccountingdetails.json", extra_params=extra_params)
def _fetchRadiusAuthenticationdetails(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchRadiusAuthenticationdetails.json", extra_params=extra_params)
def _fetchTacAccountingdetails(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchTacAccountingdetails.json", extra_params=extra_params)
def _fetchTacAuthenticationdetails(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchTacAuthenticationdetails.json", extra_params=extra_params)
def _fetchTacAuthorizationdetails(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchTacAuthorizationdetails.json", extra_params=extra_params)
def _fetchUmbrellawlandata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchUmbrellawlandata.json", extra_params=extra_params)
def _fetchVlanMapData(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchVlanMapData.json", extra_params=extra_params)
def _fetchWlanRadiusAcctDetails(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchWlanRadiusAcctDetails.json", extra_params=extra_params)
def _fetchWlanRadiusAuthDetails(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/fetchWlanRadiusAuthDetails.json", extra_params=extra_params)
def _get_ap_group_details(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/get_ap_group_details.json", extra_params=extra_params)
def _get_mdns_global_settings(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/get_mdns_global_settings.json", extra_params=extra_params)
def _get_mdns_profile_details(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/get_mdns_profile_details.json", extra_params=extra_params)
def _get_mdns_profiles_list(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/get_mdns_profiles_list.json", extra_params=extra_params)
def _get_mdns_profiles_summary_list(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/get_mdns_profiles_summary_list.json", extra_params=extra_params)
def _get_mdns_service_details(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/get_mdns_service_details.json", extra_params=extra_params)
def _get_mdns_service_list(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/get_mdns_service_list.json", extra_params=extra_params)
def _get_umbrella_profile(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/get_umbrella_profile.json", extra_params=extra_params)
def _get_wlan_preauth_acl_details(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/get_wlan_preauth_acl_details.json", extra_params=extra_params)
def _getAuthcallMacdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getAuthcallMacdata.json", extra_params=extra_params)
def _getauthsurviveclientdetails(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getauthsurviveclientdetails.json", extra_params=extra_params)
def _getauthsurviveclientlist(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getauthsurviveclientlist.json", extra_params=extra_params)
def _getConfigParams(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getConfigParams.json", extra_params=extra_params)
def _getdnsserverip(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getdnsserverip.json", extra_params=extra_params)
def _getFCSubscription(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getFCSubscription.json", extra_params=extra_params)
def _getmc2ucdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getmc2ucdata.json", extra_params=extra_params)
def _getmc2ucrrcdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getmc2ucrrcdata.json", extra_params=extra_params)
def _getMeshEthrInterfaces(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getMeshEthrInterfaces.json", extra_params=extra_params)
def _getMeshEthrIntfVlan(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getMeshEthrIntfVlan.json", extra_params=extra_params)
def _getserverpriority(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getserverpriority.json", extra_params=extra_params)
def _getSubscription(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getSubscription.json", extra_params=extra_params)
def _getTLSRoute(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getTLSRoute.json", extra_params=extra_params)
def _getTLSSecureTunnel(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getTLSSecureTunnel.json", extra_params=extra_params)
def _getumbrella(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getumbrella.json", extra_params=extra_params)
def _getWebhookSubscription(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/getWebhookSubscription.json", extra_params=extra_params)
def _globalAuthdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/globalAuthdata.json", extra_params=extra_params)
def _globalInterval(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/globalInterval.json", extra_params=extra_params)
def _globalOnChangeMode(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/globalOnChangeMode.json", extra_params=extra_params)
def _guestwlandata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/guestwlandata.json", extra_params=extra_params)
def _httpproxyget(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/httpproxyget.json", extra_params=extra_params)
def _httpproxypost(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/httpproxypost.json", extra_params=extra_params)
def _icapGetApRfSpectrumData(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/icapGetApRfSpectrumData.json", extra_params=extra_params)
def _icapGetApSubList(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/icapGetApSubList.json", extra_params=extra_params)
def _icapGetClientSubList(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/icapGetClientSubList.json", extra_params=extra_params)
def _icapGetGlobalConfig(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/icapGetGlobalConfig.json", extra_params=extra_params)
def _icapGetMacData(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/icapGetMacData.json", extra_params=extra_params)
def _icapGetPtlFilterList(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/icapGetPtlFilterList.json", extra_params=extra_params)
def _icapSetGlobalConfig(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/icapSetGlobalConfig.json", extra_params=extra_params)
def _icapUpdateApSub(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/icapUpdateApSub.json", extra_params=extra_params)
def _icapUpdateClientSub(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/icapUpdateClientSub.json", extra_params=extra_params)
def _importconfiguration(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/importconfiguration.json", extra_params=extra_params)
def _interferer_table_settings(me, method: str, extra_params={}) -> dict:
return request(me, method, "/data/interferer_table_settings.html", extra_params=extra_params)
def _interferers(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/interferers.json", extra_params=extra_params)
def _ipv6acldata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/ipv6acldata.json", extra_params=extra_params)
def _loggingdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/loggingdata.json", extra_params=extra_params)
def _macfiltercreate(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/macfiltercreate.json", extra_params=extra_params)
def _macfilterdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/macfilterdata.json", extra_params=extra_params)
def _macfilterdelete(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/macfilterdelete.json", extra_params=extra_params)
def _macfilterexport(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/macfilterexport.json", extra_params=extra_params)
def _macfilterfail(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/macfilterfail.json", extra_params=extra_params)
def _macfilterupdate(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/macfilterupdate.json", extra_params=extra_params)
def _macfilterwlanprofiles(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/macfilterwlanprofiles.json", extra_params=extra_params)
def _makeme(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/makeme.json", extra_params=extra_params)
def _maxratelimit(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/maxratelimit.json", extra_params=extra_params)
def _mc2ucclients(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/mc2ucclients.json", extra_params=extra_params)
def _mc2uccreate(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/mc2uccreate.json", extra_params=extra_params)
def _mc2ucdata(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/mc2ucdata.json", extra_params=extra_params)
def _mc2ucdelete(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/mc2ucdelete.json", extra_params=extra_params)
def _mc2ucupdate(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/mc2ucupdate.json", extra_params=extra_params)
def _mdnsBrowserAddService(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/mdnsBrowserAddService.json", extra_params=extra_params)
def _mdnsBrowserList(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/mdnsBrowserList.json", extra_params=extra_params)
def _mdnsCreatePolicy(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/mdnsCreatePolicy.json", extra_params=extra_params)
def _mdnsDeletePolicy(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/mdnsDeletePolicy.json", extra_params=extra_params)
def _mdnsDomainNamesList(me, method: str, extra_params={}) -> dict:
return request(me, method, "/screens/webui/resource/mdnsDomainNamesList.json", extra_params=extra_params)
# ---------------------------------------------------------------------------
# Generated endpoint wrappers.
#
# Every wrapper below forwards one HTTP call to a fixed controller URL via
# the `request` helper defined earlier in this file, with the signature
#     _name(me, method, extra_params=None) -> dict
# The generated originals repeated a two-line body per endpoint and used the
# mutable-default-argument anti-pattern (extra_params={}), sharing one dict
# object across every call of every wrapper.  The factory below removes the
# duplication and builds a fresh dict per call, while keeping every public
# name and call signature backward-compatible.
# ---------------------------------------------------------------------------

def _make_endpoint(path):
    """Return a wrapper that issues `request` against the fixed *path*.

    The returned callable matches the generated originals:
    ``wrapper(me, method, extra_params=None) -> dict``.
    """
    def _endpoint(me, method: str, extra_params=None) -> dict:
        # Fresh dict per call: never share one default dict between calls.
        return request(me, method, path,
                       extra_params={} if extra_params is None else extra_params)
    return _endpoint

_mdnsPolicyList = _make_endpoint("/screens/webui/resource/mdnsPolicyList.json")
_mdnsPolicyServiceInstanceList = _make_endpoint("/screens/webui/resource/mdnsPolicyServiceInstanceList.json")
_mdnsServiceArray = _make_endpoint("/screens/webui/resource/mdnsServiceArray.json")
_mdnsUpdatePolicy = _make_endpoint("/screens/webui/resource/mdnsUpdatePolicy.json")
_meshConvergence = _make_endpoint("/screens/webui/resource/meshConvergence.json")
_meshEthrBridging = _make_endpoint("/screens/webui/resource/meshEthrBridging.json")
_meshGeneral = _make_endpoint("/screens/webui/resource/meshGeneral.json")
_meshRapBackhaul = _make_endpoint("/screens/webui/resource/meshRapBackhaul.json")
_meshSecurity = _make_endpoint("/screens/webui/resource/meshSecurity.json")
_mgmtdata = _make_endpoint("/screens/webui/resource/mgmtdata.json")
_ntpdeletedata = _make_endpoint("/screens/webui/resource/ntpdeletedata.json")
_ntpfetchserverdata = _make_endpoint("/screens/webui/resource/ntpfetchserverdata.json")
_ntpserverdata = _make_endpoint("/screens/webui/resource/ntpserverdata.json")
_operatordataget = _make_endpoint("/screens/webui/resource/operatordataget.json")
_oss = _make_endpoint("/data/oss.html")
_osudataget = _make_endpoint("/screens/webui/resource/osudataget.json")
_ouilistget = _make_endpoint("/screens/webui/resource/ouilistget.json")
_pingtest = _make_endpoint("/screens/webui/resource/pingtest.json")
_portdataget = _make_endpoint("/screens/webui/resource/portdataget.json")
_postaclselected = _make_endpoint("/screens/webui/resource/postaclselected.json")
_postumbrella = _make_endpoint("/screens/webui/resource/postumbrella.json")
_provision_list_delete = _make_endpoint("/screens/webui/resource/provision_list_delete.json")
_provisioncreate = _make_endpoint("/screens/webui/resource/provisioncreate.json")
_provisiondataget = _make_endpoint("/screens/webui/resource/provisiondataget.json")
_provisionlistdel = _make_endpoint("/screens/webui/resource/provisionlistdel.json")
_provisionupdate = _make_endpoint("/screens/webui/resource/provisionupdate.json")
_radiusauth = _make_endpoint("/screens/webui/resource/radiusauth.json")
_radiusdatapost = _make_endpoint("/screens/webui/resource/radiusdatapost.json")
_radiuspost = _make_endpoint("/screens/webui/resource/radiuspost.json")
_realmlistget = _make_endpoint("/screens/webui/resource/realmlistget.json")
_refreshumbrella = _make_endpoint("/screens/webui/resource/refreshumbrella.json")
_removecrashfiledata = _make_endpoint("/screens/webui/resource/removecrashfiledata.json")
_resetfactorydata = _make_endpoint("/screens/webui/resource/resetfactorydata.json")
_resetSubscriptions = _make_endpoint("/screens/webui/resource/resetSubscriptions.json")
_rf_ap_perf_settings = _make_endpoint("/data/rf_ap_perf_settings.html")
_rf_ap_table_settings = _make_endpoint("/data/rf_ap_table_settings.html")
_rf_client_perf_settings = _make_endpoint("/data/rf_client_perf_settings.html")
_rf_client_table_settings = _make_endpoint("/data/rf_client_table_settings.html")
_rf_wireless_db_settings = _make_endpoint("/data/rf_wireless_db_settings.html")
_rfchannel = _make_endpoint("/screens/webui/resource/rfchannel.json")
_rfdashboard_apview_clientdist_by_datarate_24ghz = _make_endpoint("/data/rfdashboard/apview_clientdist_by_datarate_24ghz.html")
_rfdashboard_apview_clientdist_by_datarate_5ghz = _make_endpoint("/data/rfdashboard/apview_clientdist_by_datarate_5ghz.html")
_rfdashboard_apview_clientsdetails = _make_endpoint("/data/rfdashboard/apview_clientsdetails.html")

def _rfdashboard_apview_general(me, method: str, _deviceMacAddress=None, extra_params=None) -> dict:
    """AP-view 'general' panel, optionally filtered to one AP by MAC address."""
    # Only send the filter parameter when a MAC address was supplied.
    parameter1 = {"deviceMacAddress": _deviceMacAddress} if _deviceMacAddress else {}
    return request(me, method, "/data/rfdashboard/apview_general.html", parameter1,
                   extra_params={} if extra_params is None else extra_params)

_rfdashboard_apview_interferers_24ghz = _make_endpoint("/data/rfdashboard/apview_interferers_24ghz.html")
_rfdashboard_apview_interferers_5ghz = _make_endpoint("/data/rfdashboard/apview_interferers_5ghz.html")
_rfdashboard_apview_neighbors_clients_24ghz = _make_endpoint("/data/rfdashboard/apview_neighbors_clients_24ghz.html")
_rfdashboard_apview_neighbors_clients_5ghz = _make_endpoint("/data/rfdashboard/apview_neighbors_clients_5ghz.html")
_rfdashboard_apview_neighbors_rogues_24ghz = _make_endpoint("/data/rfdashboard/apview_neighbors_rogues_24ghz.html")
_rfdashboard_apview_neighbors_rogues_5ghz = _make_endpoint("/data/rfdashboard/apview_neighbors_rogues_5ghz.html")
_rfdashboard_apview_performance_list = _make_endpoint("/data/rfdashboard/apview_performance_list.html")
_rfdashboard_client_deauth_data = _make_endpoint("/data/rfdashboard/client_deauth_data.html")
_rfdashboard_client_debug_start = _make_endpoint("/data/rfdashboard/client_debug_start.html")
_rfdashboard_client_debug_status = _make_endpoint("/data/rfdashboard/client_debug_status.html")
_rfdashboard_client_debug_stop = _make_endpoint("/data/rfdashboard/client_debug_stop.html")
_rfdashboard_clientperformance_clienttable = _make_endpoint("/data/rfdashboard/clientperformance_clienttable.html")
_rfdashboard_clientview_802_11_connectivity = _make_endpoint("/data/rfdashboard/clientview_802_11_connectivity.html")
_rfdashboard_clientview_details = _make_endpoint("/data/rfdashboard/clientview_details.html")
_rfdashboard_clientview_details_network = _make_endpoint("/data/rfdashboard/clientview_details_network.html")
_rfdashboard_clientview_details_qos = _make_endpoint("/data/rfdashboard/clientview_details_qos.html")
_rfdashboard_clientview_details_security = _make_endpoint("/data/rfdashboard/clientview_details_security.html")
_rfdashboard_clientview_mobility_data = _make_endpoint("/data/rfdashboard/clientview_mobility_data.html")
_rfdashboard_clientview_pingtest = _make_endpoint("/data/rfdashboard/clientview_pingtest.html")
_rfdashboard_clientview_topapps = _make_endpoint("/data/rfdashboard/clientview_topapps.html")
_rfdashboard_eventlog = _make_endpoint("/data/rfdashboard/eventlog.html")
_rfdashboard_packetcapture_details = _make_endpoint("/data/rfdashboard/packetcapture_details.html")
_rfdashboard_signalstrength_clientdetails = _make_endpoint("/data/rfdashboard/signalstrength_clientdetails.html")
_rfdashboard_signalstrength_data = _make_endpoint("/data/rfdashboard/signalstrength_data.html")
_rfo = _make_endpoint("/screens/webui/resource/rfo.json")
_rfprofileadd = _make_endpoint("/screens/webui/resource/rfprofileadd.json")
_rfprofiledelete = _make_endpoint("/screens/webui/resource/rfprofiledelete.json")
_rfprofilesapdata = _make_endpoint("/screens/webui/resource/rfprofilesapdata.json")
_rfprofilesfetch = _make_endpoint("/screens/webui/resource/rfprofilesfetch.json")
_rfprofileupdate = _make_endpoint("/screens/webui/resource/rfprofileupdate.json")
_RogApDetect = _make_endpoint("/screens/webui/resource/RogApDetect.json")
_RogApList = _make_endpoint("/screens/webui/resource/RogApList.json")
_RogClApDetect = _make_endpoint("/screens/webui/resource/RogClApDetect.json")
_RogClAsso = _make_endpoint("/screens/webui/resource/RogClAsso.json")
_RogClList = _make_endpoint("/screens/webui/resource/RogClList.json")
_rogue_ap_table_settings = _make_endpoint("/data/rogue_ap_table_settings.html")
_rogueAp = _make_endpoint("/screens/webui/resource/rogueAp.json")
_rogueApConf = _make_endpoint("/screens/webui/resource/rogueApConf.json")
_rogueCl = _make_endpoint("/screens/webui/resource/rogueCl.json")
_rrm = _make_endpoint("/screens/webui/resource/rrm.json")
_saveconfig = _make_endpoint("/screens/webui/resource/saveconfig.json")
_searchresults = _make_endpoint("/data/searchresults.html")
_serverpriority = _make_endpoint("/screens/webui/resource/serverpriority.json")
_set_mdns_global_settings = _make_endpoint("/screens/webui/resource/set_mdns_global_settings.json")
_set_wlan_preauth_acl_details = _make_endpoint("/screens/webui/resource/set_wlan_preauth_acl_details.json")
_setConfigParams = _make_endpoint("/screens/webui/resource/setConfigParams.json")
_setSoftwareUpdate = _make_endpoint("/screens/webui/resource/setSoftwareUpdate.json")
_snmpdata = _make_endpoint("/screens/webui/resource/snmpdata.json")
_snmpdelete = _make_endpoint("/screens/webui/resource/snmpdelete.json")
_spartan_apImageStatus = _make_endpoint("/screens/webui/resource/spartan/apImageStatus.json")

def _spartan_apTool(me, method: str, extra_params=None, json=None, data=None) -> dict:
    """AP tool endpoint; also accepts an explicit JSON payload and form data."""
    return request(me, method, "/screens/webui/resource/spartan/apTool.json",
                   extra_params={} if extra_params is None else extra_params,
                   json={} if json is None else json,
                   data={} if data is None else data)

_startServer = _make_endpoint("/screens/webui/resource/startServer.json")
_supported_country_code = _make_endpoint("/data/supported_country_code.html")
_swabortdata = _make_endpoint("/screens/webui/resource/swabortdata.json")
_swccochecknow = _make_endpoint("/screens/webui/resource/swccochecknow.json")
_swccochecknowget = _make_endpoint("/screens/webui/resource/swccochecknowget.json")
_swccodata = _make_endpoint("/screens/webui/resource/swccodata.json")
_swccopolldata = _make_endpoint("/screens/webui/resource/swccopolldata.json")
_swccosave = _make_endpoint("/screens/webui/resource/swccosave.json")
_swcreddata = _make_endpoint("/screens/webui/resource/swcreddata.json")
_swgetrestartdata = _make_endpoint("/screens/webui/resource/swgetrestartdata.json")
_swrel_laturl = _make_endpoint("/screens/webui/resource/swrel_laturl.json")
_swrel_recurl = _make_endpoint("/screens/webui/resource/swrel_recurl.json")
_swrestartdata = _make_endpoint("/screens/webui/resource/swrestartdata.json")
_swsavedata = _make_endpoint("/screens/webui/resource/swsavedata.json")
_swtroubldata = _make_endpoint("/screens/webui/resource/swtroubldata.json")
_swupdatedata = _make_endpoint("/screens/webui/resource/swupdatedata.json")
_swupdatestatusdata = _make_endpoint("/screens/webui/resource/swupdatestatusdata.json")
_syslogdata = _make_endpoint("/screens/webui/resource/syslogdata.json")
_system1 = _make_endpoint("/data/system1.html")
_system_information = _make_endpoint("/data/system_information.html")
_timedata = _make_endpoint("/screens/webui/resource/timedata.json")
_tlstunneldata = _make_endpoint("/screens/webui/resource/tlstunneldata.json")
_tlstunnelpostdata = _make_endpoint("/screens/webui/resource/tlstunnelpostdata.json")
_top10_ap_clients = _make_endpoint("/data/top10_ap_clients.html")
_top10_ap_slot0_chanutil_overhead = _make_endpoint("/data/top10_ap_slot0_chanutil_overhead.html")
_top10_ap_slot0_chd = _make_endpoint("/data/top10_ap_slot0_chd.html")
_top10_ap_slot0_interference = _make_endpoint("/data/top10_ap_slot0_interference.html")
_top10_ap_slot1_chanutil_overhead = _make_endpoint("/data/top10_ap_slot1_chanutil_overhead.html")
_top10_ap_slot1_chd = _make_endpoint("/data/top10_ap_slot1_chd.html")
_top10_ap_slot1_interference = _make_endpoint("/data/top10_ap_slot1_interference.html")
_trapdata = _make_endpoint("/screens/webui/resource/trapdata.json")
_trapdeletedata = _make_endpoint("/screens/webui/resource/trapdeletedata.json")
_trapupdatedata = _make_endpoint("/screens/webui/resource/trapupdatedata.json")
_umbrellawlandata = _make_endpoint("/screens/webui/resource/umbrellawlandata.json")
_updateMeshEthrInterfaces = _make_endpoint("/screens/webui/resource/updateMeshEthrInterfaces.json")
_updateRadiusAccounting = _make_endpoint("/screens/webui/resource/updateRadiusAccounting.json")
_updateRadiusAuthentication = _make_endpoint("/screens/webui/resource/updateRadiusAuthentication.json")
_updatestatusdata = _make_endpoint("/screens/webui/resource/updatestatusdata.json")
_updateTacAccounting = _make_endpoint("/screens/webui/resource/updateTacAccounting.json")
_updateTacAuthentication = _make_endpoint("/screens/webui/resource/updateTacAuthentication.json")
_updateTacAuthurization = _make_endpoint("/screens/webui/resource/updateTacAuthurization.json")
_updateVlanMapData = _make_endpoint("/screens/webui/resource/updateVlanMapData.json")
_uploadfileget = _make_endpoint("/screens/webui/resource/uploadfileget.json")
_uploadfilePost = _make_endpoint("/screens/webui/resource/uploadfilePost.json")
_uploadStatus = _make_endpoint("/screens/webui/resource/uploadStatus.json")
_v3deletedata = _make_endpoint("/screens/webui/resource/v3deletedata.json")
_v3userdata = _make_endpoint("/screens/webui/resource/v3userdata.json")
_validationdata = _make_endpoint("/screens/webui/resource/validationdata.json")
_venuegriddata = _make_endpoint("/screens/webui/resource/venuegriddata.json")
_venuegriddelete = _make_endpoint("/screens/webui/resource/venuegriddelete.json")
_vlantagdata = _make_endpoint("/screens/webui/resource/vlantagdata.json")
_webhookServer = _make_endpoint("/screens/webui/resource/webhookServer.json")
_wlancreate = _make_endpoint("/screens/webui/resource/wlancreate.jsp")
_wlandata = _make_endpoint("/screens/webui/resource/wlandata.json")
_wlandelete = _make_endpoint("/screens/webui/resource/wlandelete.jsp")
_wlanprofiledata = _make_endpoint("/screens/webui/resource/wlanprofiledata.json")
_wlans = _make_endpoint("/data/wlans.html")
_wlanuserscreate = _make_endpoint("/screens/webui/resource/wlanuserscreate.jsp")
_wlanusersdata = _make_endpoint("/screens/webui/resource/wlanusersdata.json")
_wlanusersdelete = _make_endpoint("/screens/webui/resource/wlanusersdelete.jsp")
_wlanusersupdate = _make_endpoint("/screens/webui/resource/wlanusersupdate.jsp")
_wsaCounters = _make_endpoint("/screens/webui/resource/wsaCounters.json")
_wsaSensor = _make_endpoint("/screens/webui/resource/wsaSensor.json")
_wsaServer = _make_endpoint("/screens/webui/resource/wsaServer.json")
| 43.213663
| 126
| 0.759241
| 7,522
| 59,462
| 5.759904
| 0.050651
| 0.258967
| 0.086322
| 0.164797
| 0.808798
| 0.80379
| 0.641947
| 0.618866
| 0.588977
| 0.54847
| 0
| 0.002384
| 0.104066
| 59,462
| 1,375
| 127
| 43.245091
| 0.810883
| 0.001312
| 0
| 0
| 1
| 0
| 0.24541
| 0.243137
| 0
| 0
| 0
| 0
| 0
| 1
| 0.497076
| false
| 0
| 0.004386
| 0.49269
| 0.998538
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
465fdcc61e7306ada67872efd1e057a04ad1f9ff
| 110,781
|
py
|
Python
|
huaweicloud-sdk-vpc/huaweicloudsdkvpc/v2/vpc_async_client.py
|
Adek06/huaweicloud-sdk-python-v3
|
3d13b27d089e04a1ae567cd649b3c5509e0391d2
|
[
"Apache-2.0"
] | null | null | null |
huaweicloud-sdk-vpc/huaweicloudsdkvpc/v2/vpc_async_client.py
|
Adek06/huaweicloud-sdk-python-v3
|
3d13b27d089e04a1ae567cd649b3c5509e0391d2
|
[
"Apache-2.0"
] | null | null | null |
huaweicloud-sdk-vpc/huaweicloudsdkvpc/v2/vpc_async_client.py
|
Adek06/huaweicloud-sdk-python-v3
|
3d13b27d089e04a1ae567cd649b3c5509e0391d2
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
from __future__ import absolute_import
import datetime
import re
import importlib
import six
from huaweicloudsdkcore.client import Client, ClientBuilder
from huaweicloudsdkcore.exceptions import exceptions
from huaweicloudsdkcore.utils import http_utils
from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest
class VpcAsyncClient(Client):
    """Asynchronous client for the Huawei Cloud VPC v2 API.

    :param configuration: .Configuration object for this client
    :param pool_threads: The number of threads to use for async requests
        to the API. More threads means more concurrent API requests.
    """

    # Types that are deserialized from API responses as-is (no model lookup).
    PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
    # OpenAPI type-name -> Python type mapping used by the deserializer.
    # NOTE: 'long' only exists on Python 2; the six.PY3 guard keeps the
    # expression from evaluating the bare name there.
    NATIVE_TYPES_MAPPING = {
        'int': int,
        'long': int if six.PY3 else long,
        'float': float,
        'str': str,
        'bool': bool,
        'date': datetime.date,
        'datetime': datetime.datetime,
        'object': object,
    }
def __init__(self):
    """Initialise the async VPC client.

    Loads the v2 model package (used to resolve response types by name)
    and presets the SDK User-Agent header on every request.
    """
    super(VpcAsyncClient, self).__init__()
    self.model_package = importlib.import_module("huaweicloudsdkvpc.v2.model")
    self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'}
@staticmethod
def new_builder(clazz):
    """Return a ClientBuilder configured to build *clazz*."""
    builder = ClientBuilder(clazz)
    return builder
def accept_vpc_peering_async(self, request):
    """Accept a VPC peering request.

    When tenant A requests a peering connection to tenant B's VPC, the
    connection waits until tenant B accepts it; this API accepts a peering
    request initiated by another tenant.

    :param AcceptVpcPeeringRequest request
    :return: AcceptVpcPeeringResponse
    """
    response = self.accept_vpc_peering_with_http_info(request)
    return response
def accept_vpc_peering_with_http_info(self, request):
    """Accept a VPC peering request (low-level call).

    Builds the HTTP call PUT /v2.0/vpc/peerings/{peering_id}/accept from the
    attributes of *request* and dispatches it through ``call_api``.
    (The unused ``all_params`` list from the generated original was removed.)

    :param AcceptVpcPeeringRequest request
    :return: AcceptVpcPeeringResponse
    """
    # Copy every declared attribute of the request object into a plain dict.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'peering_id' in local_var_params:
        path_params['peering_id'] = local_var_params['peering_id']

    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    # Stream requests supply the body as a file-like stream instead.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/vpc/peerings/{peering_id}/accept',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='AcceptVpcPeeringResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_port_async(self, request):
    """Create a port.

    :param CreatePortRequest request
    :return: CreatePortResponse
    """
    response = self.create_port_with_http_info(request)
    return response
def create_port_with_http_info(self, request):
    """Create a port (low-level call).

    Builds the HTTP call POST /v1/{project_id}/ports from the attributes of
    *request* and dispatches it through ``call_api``.
    (The unused ``all_params`` list from the generated original was removed.)

    :param CreatePortRequest request
    :return: CreatePortResponse
    """
    # Copy every declared attribute of the request object into a plain dict.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # Stream requests override the body with a file-like stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    # Request body is JSON.
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])

    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/ports',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreatePortResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_security_group_async(self, request):
    """Create a security group.

    :param CreateSecurityGroupRequest request
    :return: CreateSecurityGroupResponse
    """
    response = self.create_security_group_with_http_info(request)
    return response
def create_security_group_with_http_info(self, request):
    """Create a security group (low-level call).

    Builds the HTTP call POST /v1/{project_id}/security-groups from the
    attributes of *request* and dispatches it through ``call_api``.
    (The unused ``all_params`` list from the generated original was removed.)

    :param CreateSecurityGroupRequest request
    :return: CreateSecurityGroupResponse
    """
    # Copy every declared attribute of the request object into a plain dict.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # Stream requests override the body with a file-like stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    # Request body is JSON.
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])

    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/security-groups',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateSecurityGroupResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_security_group_rule_async(self, request):
    """Create a security group rule.

    :param CreateSecurityGroupRuleRequest request
    :return: CreateSecurityGroupRuleResponse
    """
    response = self.create_security_group_rule_with_http_info(request)
    return response
def create_security_group_rule_with_http_info(self, request):
    """Create a security group rule (low-level call).

    Builds the HTTP call POST /v1/{project_id}/security-group-rules from the
    attributes of *request* and dispatches it through ``call_api``.
    (The unused ``all_params`` list from the generated original was removed.)

    :param CreateSecurityGroupRuleRequest request
    :return: CreateSecurityGroupRuleResponse
    """
    # Copy every declared attribute of the request object into a plain dict.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # Stream requests override the body with a file-like stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    # Request body is JSON.
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])

    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/security-group-rules',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateSecurityGroupRuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_subnet_async(self, request):
    """Create a subnet.

    Delegates directly to :meth:`create_subnet_with_http_info`.

    :param CreateSubnetRequest request
    :return: CreateSubnetResponse
    """
    return self.create_subnet_with_http_info(request)
def create_subnet_with_http_info(self, request):
    """Create a subnet.

    :param CreateSubnetRequest request
    :return: CreateSubnetResponse
    """
    all_params = ['subnet']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request overrides any 'body' attribute with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()
    else:
        body_params = local_var_params.get('body')

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/subnets',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateSubnetResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_vpc_peering_async(self, request):
    """Create a VPC peering connection.

    Delegates directly to :meth:`create_vpc_peering_with_http_info`.

    :param CreateVpcPeeringRequest request
    :return: CreateVpcPeeringResponse
    """
    return self.create_vpc_peering_with_http_info(request)
def create_vpc_peering_with_http_info(self, request):
    """Create a VPC peering connection.

    :param CreateVpcPeeringRequest request
    :return: CreateVpcPeeringResponse
    """
    all_params = ['peering']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request overrides any 'body' attribute with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()
    else:
        body_params = local_var_params.get('body')

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/vpc/peerings',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateVpcPeeringResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_port_async(self, request):
    """Delete a port.

    Delegates directly to :meth:`delete_port_with_http_info`.

    :param DeletePortRequest request
    :return: DeletePortResponse
    """
    return self.delete_port_with_http_info(request)
def delete_port_with_http_info(self, request):
    """Delete a port.

    :param DeletePortRequest request
    :return: DeletePortResponse
    """
    all_params = ['port_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('port_id',)
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/ports/{port_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeletePortResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_security_group_async(self, request):
    """Delete a security group.

    Delegates directly to :meth:`delete_security_group_with_http_info`.

    :param DeleteSecurityGroupRequest request
    :return: DeleteSecurityGroupResponse
    """
    return self.delete_security_group_with_http_info(request)
def delete_security_group_with_http_info(self, request):
    """Delete a security group.

    :param DeleteSecurityGroupRequest request
    :return: DeleteSecurityGroupResponse
    """
    all_params = ['security_group_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('security_group_id',)
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/security-groups/{security_group_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteSecurityGroupResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_security_group_rule_async(self, request):
    """Delete a security group rule.

    Delegates directly to :meth:`delete_security_group_rule_with_http_info`.

    :param DeleteSecurityGroupRuleRequest request
    :return: DeleteSecurityGroupRuleResponse
    """
    return self.delete_security_group_rule_with_http_info(request)
def delete_security_group_rule_with_http_info(self, request):
    """Delete a security group rule.

    :param DeleteSecurityGroupRuleRequest request
    :return: DeleteSecurityGroupRuleResponse
    """
    all_params = ['security_group_rule_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('security_group_rule_id',)
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/security-group-rules/{security_group_rule_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteSecurityGroupRuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_subnet_async(self, request):
    """Delete a subnet.

    Delegates directly to :meth:`delete_subnet_with_http_info`.

    :param DeleteSubnetRequest request
    :return: DeleteSubnetResponse
    """
    return self.delete_subnet_with_http_info(request)
def delete_subnet_with_http_info(self, request):
    """Delete a subnet.

    :param DeleteSubnetRequest request
    :return: DeleteSubnetResponse
    """
    all_params = ['vpc_id', 'subnet_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('vpc_id', 'subnet_id')
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/vpcs/{vpc_id}/subnets/{subnet_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteSubnetResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_vpc_peering_async(self, request):
    """Delete a VPC peering connection.

    The peering may be deleted from either the local or the peer side.
    Delegates directly to :meth:`delete_vpc_peering_with_http_info`.

    :param DeleteVpcPeeringRequest request
    :return: DeleteVpcPeeringResponse
    """
    return self.delete_vpc_peering_with_http_info(request)
def delete_vpc_peering_with_http_info(self, request):
    """Delete a VPC peering connection.

    The peering may be deleted from either the local or the peer side.

    :param DeleteVpcPeeringRequest request
    :return: DeleteVpcPeeringResponse
    """
    all_params = ['peering_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('peering_id',)
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/vpc/peerings/{peering_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteVpcPeeringResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_ports_async(self, request):
    """List ports.

    Queries all ports of the requesting tenant; a single query returns at
    most 2000 records. Delegates directly to :meth:`list_ports_with_http_info`.

    :param ListPortsRequest request
    :return: ListPortsResponse
    """
    return self.list_ports_with_http_info(request)
def list_ports_with_http_info(self, request):
    """List ports.

    Queries all ports of the requesting tenant; a single query returns at
    most 2000 records.

    :param ListPortsRequest request
    :return: ListPortsResponse
    """
    all_params = ['name', 'id', 'limit', 'admin_state_up', 'network_id', 'mac_address', 'device_id', 'device_owner', 'status', 'marker', 'fixed_ips', 'enterprise_project_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    # Optional query-string filters, emitted only when present on the request.
    query_params = [(key, local_var_params[key])
                    for key in ('name', 'id', 'limit', 'admin_state_up',
                                'network_id', 'mac_address', 'device_id',
                                'device_owner', 'status', 'marker',
                                'fixed_ips', 'enterprise_project_id')
                    if key in local_var_params]

    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/ports',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListPortsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_security_group_rules_async(self, request):
    """List security group rules.

    Delegates directly to :meth:`list_security_group_rules_with_http_info`.

    :param ListSecurityGroupRulesRequest request
    :return: ListSecurityGroupRulesResponse
    """
    return self.list_security_group_rules_with_http_info(request)
def list_security_group_rules_with_http_info(self, request):
    """List security group rules.

    :param ListSecurityGroupRulesRequest request
    :return: ListSecurityGroupRulesResponse
    """
    all_params = ['marker', 'limit', 'security_group_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    # Optional query-string filters, emitted only when present on the request.
    query_params = [(key, local_var_params[key])
                    for key in ('marker', 'limit', 'security_group_id')
                    if key in local_var_params]

    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/security-group-rules',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListSecurityGroupRulesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_security_groups_async(self, request):
    """List security groups.

    Delegates directly to :meth:`list_security_groups_with_http_info`.

    :param ListSecurityGroupsRequest request
    :return: ListSecurityGroupsResponse
    """
    return self.list_security_groups_with_http_info(request)
def list_security_groups_with_http_info(self, request):
    """List security groups.

    :param ListSecurityGroupsRequest request
    :return: ListSecurityGroupsResponse
    """
    all_params = ['limit', 'marker', 'vpc_id', 'enterprise_project_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    # Optional query-string filters, emitted only when present on the request.
    query_params = [(key, local_var_params[key])
                    for key in ('limit', 'marker', 'vpc_id',
                                'enterprise_project_id')
                    if key in local_var_params]

    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/security-groups',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListSecurityGroupsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_subnets_async(self, request):
    """List subnets.

    Delegates directly to :meth:`list_subnets_with_http_info`.

    :param ListSubnetsRequest request
    :return: ListSubnetsResponse
    """
    return self.list_subnets_with_http_info(request)
def list_subnets_with_http_info(self, request):
    """List subnets.

    :param ListSubnetsRequest request
    :return: ListSubnetsResponse
    """
    all_params = ['limit', 'marker', 'vpc_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    # Optional query-string filters, emitted only when present on the request.
    query_params = [(key, local_var_params[key])
                    for key in ('limit', 'marker', 'vpc_id')
                    if key in local_var_params]

    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/subnets',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListSubnetsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_vpc_peerings_async(self, request):
    """List VPC peering connections.

    Queries all peerings of the requesting tenant, filtered by the given
    criteria. Delegates directly to :meth:`list_vpc_peerings_with_http_info`.

    :param ListVpcPeeringsRequest request
    :return: ListVpcPeeringsResponse
    """
    return self.list_vpc_peerings_with_http_info(request)
def list_vpc_peerings_with_http_info(self, request):
    """List VPC peering connections.

    Queries all peerings of the requesting tenant, filtered by the given
    criteria.

    :param ListVpcPeeringsRequest request
    :return: ListVpcPeeringsResponse
    """
    all_params = ['limit', 'marker', 'id', 'name', 'status', 'tenant_id', 'vpc_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    # Optional query-string filters, emitted only when present on the request.
    query_params = [(key, local_var_params[key])
                    for key in ('limit', 'marker', 'id', 'name',
                                'status', 'tenant_id', 'vpc_id')
                    if key in local_var_params]

    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/vpc/peerings',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListVpcPeeringsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def reject_vpc_peering_async(self, request):
    """Reject a VPC peering request.

    Used by a tenant to reject a peering request initiated by another
    tenant. Delegates directly to :meth:`reject_vpc_peering_with_http_info`.

    :param RejectVpcPeeringRequest request
    :return: RejectVpcPeeringResponse
    """
    return self.reject_vpc_peering_with_http_info(request)
def reject_vpc_peering_with_http_info(self, request):
    """Reject a VPC peering request.

    Used by a tenant to reject a peering request initiated by another
    tenant.

    :param RejectVpcPeeringRequest request
    :return: RejectVpcPeeringResponse
    """
    all_params = ['peering_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('peering_id',)
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/vpc/peerings/{peering_id}/reject',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='RejectVpcPeeringResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_port_async(self, request):
    """Show a port.

    Queries the details of a single port. Delegates directly to
    :meth:`show_port_with_http_info`.

    :param ShowPortRequest request
    :return: ShowPortResponse
    """
    return self.show_port_with_http_info(request)
def show_port_with_http_info(self, request):
    """Show a port.

    Queries the details of a single port.

    :param ShowPortRequest request
    :return: ShowPortResponse
    """
    all_params = ['port_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('port_id',)
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/ports/{port_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowPortResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_quota_async(self, request):
    """Show quotas.

    Queries the tenant's VPC-service network resource quotas (VPC, subnet,
    security group, security group rule, EIP, VPN, etc.). Delegates directly
    to :meth:`show_quota_with_http_info`.

    :param ShowQuotaRequest request
    :return: ShowQuotaResponse
    """
    return self.show_quota_with_http_info(request)
def show_quota_with_http_info(self, request):
    """Show quotas.

    Queries the tenant's VPC-service network resource quotas (VPC, subnet,
    security group, security group rule, EIP, VPN, etc.).

    :param ShowQuotaRequest request
    :return: ShowQuotaResponse
    """
    all_params = ['type']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}

    # Optional query-string filter, emitted only when present on the request.
    query_params = [(key, local_var_params[key])
                    for key in ('type',)
                    if key in local_var_params]

    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/quotas',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowQuotaResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_security_group_async(self, request):
    """Show a security group.

    Queries the details of a single security group. Delegates directly to
    :meth:`show_security_group_with_http_info`.

    :param ShowSecurityGroupRequest request
    :return: ShowSecurityGroupResponse
    """
    return self.show_security_group_with_http_info(request)
def show_security_group_with_http_info(self, request):
    """Show a security group.

    Queries the details of a single security group.

    :param ShowSecurityGroupRequest request
    :return: ShowSecurityGroupResponse
    """
    all_params = ['security_group_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('security_group_id',)
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/security-groups/{security_group_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowSecurityGroupResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_security_group_rule_async(self, request):
    """Show a security group rule.

    Queries the details of a single security group rule. Delegates directly
    to :meth:`show_security_group_rule_with_http_info`.

    :param ShowSecurityGroupRuleRequest request
    :return: ShowSecurityGroupRuleResponse
    """
    return self.show_security_group_rule_with_http_info(request)
def show_security_group_rule_with_http_info(self, request):
    """Show a security group rule.

    Queries the details of a single security group rule.

    :param ShowSecurityGroupRuleRequest request
    :return: ShowSecurityGroupRuleResponse
    """
    all_params = ['security_group_rule_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('security_group_rule_id',)
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/security-group-rules/{security_group_rule_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowSecurityGroupRuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_subnet_async(self, request):
    """Show a subnet.

    Queries subnet details. Delegates directly to
    :meth:`show_subnet_with_http_info`.

    :param ShowSubnetRequest request
    :return: ShowSubnetResponse
    """
    return self.show_subnet_with_http_info(request)
def show_subnet_with_http_info(self, request):
    """Show a subnet.

    Queries subnet details.

    :param ShowSubnetRequest request
    :return: ShowSubnetResponse
    """
    all_params = ['subnet_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('subnet_id',)
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/subnets/{subnet_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowSubnetResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_vpc_peering_async(self, request):
    """Show a VPC peering connection.

    Queries peering details. Delegates directly to
    :meth:`show_vpc_peering_with_http_info`.

    :param ShowVpcPeeringRequest request
    :return: ShowVpcPeeringResponse
    """
    return self.show_vpc_peering_with_http_info(request)
def show_vpc_peering_with_http_info(self, request):
    """Show a VPC peering connection.

    Queries peering details.

    :param ShowVpcPeeringRequest request
    :return: ShowVpcPeeringResponse
    """
    all_params = ['peering_id']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('peering_id',)
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # Streaming requests supply their payload as a file stream.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/vpc/peerings/{peering_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowVpcPeeringResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_port_async(self, request):
    """Update a port.

    Delegates directly to :meth:`update_port_with_http_info`.

    :param UpdatePortRequest request
    :return: UpdatePortResponse
    """
    return self.update_port_with_http_info(request)
def update_port_with_http_info(self, request):
    """Update a port.

    :param UpdatePortRequest request
    :return: UpdatePortResponse
    """
    all_params = ['port_id', 'port']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('port_id',)
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request overrides any 'body' attribute with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()
    else:
        body_params = local_var_params.get('body')

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/ports/{port_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdatePortResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_subnet_async(self, request):
    """Update a subnet.

    Delegates directly to :meth:`update_subnet_with_http_info`.

    :param UpdateSubnetRequest request
    :return: UpdateSubnetResponse
    """
    return self.update_subnet_with_http_info(request)
def update_subnet_with_http_info(self, request):
    """Update a subnet.

    :param UpdateSubnetRequest request
    :return: UpdateSubnetResponse
    """
    all_params = ['vpc_id', 'subnet_id', 'subnet']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('vpc_id', 'subnet_id')
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request overrides any 'body' attribute with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()
    else:
        body_params = local_var_params.get('body')

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/vpcs/{vpc_id}/subnets/{subnet_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdateSubnetResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_vpc_peering_async(self, request):
    """Update a VPC peering connection.

    Delegates directly to :meth:`update_vpc_peering_with_http_info`.

    :param UpdateVpcPeeringRequest request
    :return: UpdateVpcPeeringResponse
    """
    return self.update_vpc_peering_with_http_info(request)
def update_vpc_peering_with_http_info(self, request):
    """Update a VPC peering connection.

    :param UpdateVpcPeeringRequest request
    :return: UpdateVpcPeeringResponse
    """
    all_params = ['peering_id', 'peering']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}

    path_params = {key: local_var_params[key]
                   for key in ('peering_id',)
                   if key in local_var_params}

    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request overrides any 'body' attribute with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()
    else:
        body_params = local_var_params.get('body')

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/vpc/peerings/{peering_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdateVpcPeeringResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_privateip_async(self, request):
    """Assign a private IP.

    Delegates directly to :meth:`create_privateip_with_http_info`.

    :param CreatePrivateipRequest request
    :return: CreatePrivateipResponse
    """
    return self.create_privateip_with_http_info(request)
def create_privateip_with_http_info(self, request):
    """Assign a private IP.

    :param CreatePrivateipRequest request
    :return: CreatePrivateipResponse
    """
    all_params = ['privateips']
    # Pick up only the attributes that were actually set on the request.
    local_var_params = {attr: getattr(request, attr)
                        for attr in request.attribute_map
                        if hasattr(request, attr)}

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    # A streaming request overrides any 'body' attribute with its file stream.
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()
    else:
        body_params = local_var_params.get('body')

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/privateips',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreatePrivateipResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_privateip_async(self, request):
    """Delete a private IP.

    :param DeletePrivateipRequest request
    :return: DeletePrivateipResponse
    """
    return self.delete_privateip_with_http_info(request)
def delete_privateip_with_http_info(self, request):
    """Delete a private IP.

    :param DeletePrivateipRequest request
    :return: DeletePrivateipResponse
    """
    all_params = ['privateip_id']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('privateip_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/privateips/{privateip_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeletePrivateipResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_privateips_async(self, request):
    """List the private IPs under a specified subnet.

    :param ListPrivateipsRequest request
    :return: ListPrivateipsResponse
    """
    return self.list_privateips_with_http_info(request)
def list_privateips_with_http_info(self, request):
    """List the private IPs under a specified subnet.

    :param ListPrivateipsRequest request
    :return: ListPrivateipsResponse
    """
    all_params = ['subnet_id', 'limit', 'marker']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('subnet_id',) if key in local_var_params}
    query_params = []
    for key in ('limit', 'marker'):
        if key in local_var_params:
            query_params.append((key, local_var_params[key]))
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/subnets/{subnet_id}/privateips',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListPrivateipsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_network_ip_availabilities_async(self, request):
    """Show the IPv4 address usage of a specified network.

    Returns the total and used IP counts of the network and of each subnet
    in it. System-reserved addresses (the first and last four addresses of
    a subnet, used for gateway/DHCP etc.) are excluded from the statistics.

    :param ShowNetworkIpAvailabilitiesRequest request
    :return: ShowNetworkIpAvailabilitiesResponse
    """
    return self.show_network_ip_availabilities_with_http_info(request)
def show_network_ip_availabilities_with_http_info(self, request):
    """Show the IPv4 address usage of a specified network.

    Returns the total and used IP counts of the network and of each subnet
    in it; system-reserved addresses are excluded from the statistics.

    :param ShowNetworkIpAvailabilitiesRequest request
    :return: ShowNetworkIpAvailabilitiesResponse
    """
    all_params = ['network_id']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('network_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/network-ip-availabilities/{network_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowNetworkIpAvailabilitiesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_privateip_async(self, request):
    """Query a private IP by ID.

    :param ShowPrivateipRequest request
    :return: ShowPrivateipResponse
    """
    return self.show_privateip_with_http_info(request)
def show_privateip_with_http_info(self, request):
    """Query a private IP by ID.

    :param ShowPrivateipRequest request
    :return: ShowPrivateipResponse
    """
    all_params = ['privateip_id']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('privateip_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/privateips/{privateip_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowPrivateipResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_add_firewall_rule_async(self, request):
    """Insert a network ACL rule into a network ACL policy.

    :param NeutronAddFirewallRuleRequest request
    :return: NeutronAddFirewallRuleResponse
    """
    return self.neutron_add_firewall_rule_with_http_info(request)
def neutron_add_firewall_rule_with_http_info(self, request):
    """Insert a network ACL rule into a network ACL policy.

    :param NeutronAddFirewallRuleRequest request
    :return: NeutronAddFirewallRuleResponse
    """
    all_params = ['firewall_policy_id', 'insert_firewall_rule']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('firewall_policy_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # JSON body from the request; streaming requests send the raw stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_policies/{firewall_policy_id}/insert_rule',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronAddFirewallRuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_create_firewall_group_async(self, request):
    """Create a network ACL group.

    :param NeutronCreateFirewallGroupRequest request
    :return: NeutronCreateFirewallGroupResponse
    """
    return self.neutron_create_firewall_group_with_http_info(request)
def neutron_create_firewall_group_with_http_info(self, request):
    """Create a network ACL group.

    :param NeutronCreateFirewallGroupRequest request
    :return: NeutronCreateFirewallGroupResponse
    """
    all_params = ['firewall_group']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    # JSON body from the request; streaming requests send the raw stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_groups',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronCreateFirewallGroupResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_create_firewall_policy_async(self, request):
    """Create a network ACL policy.

    :param NeutronCreateFirewallPolicyRequest request
    :return: NeutronCreateFirewallPolicyResponse
    """
    return self.neutron_create_firewall_policy_with_http_info(request)
def neutron_create_firewall_policy_with_http_info(self, request):
    """Create a network ACL policy.

    :param NeutronCreateFirewallPolicyRequest request
    :return: NeutronCreateFirewallPolicyResponse
    """
    all_params = ['firewall_policy']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    # JSON body from the request; streaming requests send the raw stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_policies',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronCreateFirewallPolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_create_firewall_rule_async(self, request):
    """Create a network ACL rule.

    :param NeutronCreateFirewallRuleRequest request
    :return: NeutronCreateFirewallRuleResponse
    """
    return self.neutron_create_firewall_rule_with_http_info(request)
def neutron_create_firewall_rule_with_http_info(self, request):
    """Create a network ACL rule.

    :param NeutronCreateFirewallRuleRequest request
    :return: NeutronCreateFirewallRuleResponse
    """
    all_params = ['firewall_rule']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = {}

    # JSON body from the request; streaming requests send the raw stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_rules',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronCreateFirewallRuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_delete_firewall_group_async(self, request):
    """Delete a network ACL group.

    :param NeutronDeleteFirewallGroupRequest request
    :return: NeutronDeleteFirewallGroupResponse
    """
    return self.neutron_delete_firewall_group_with_http_info(request)
def neutron_delete_firewall_group_with_http_info(self, request):
    """Delete a network ACL group.

    :param NeutronDeleteFirewallGroupRequest request
    :return: NeutronDeleteFirewallGroupResponse
    """
    all_params = ['firewall_group_id']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('firewall_group_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_groups/{firewall_group_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronDeleteFirewallGroupResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_delete_firewall_policy_async(self, request):
    """Delete a network ACL policy.

    :param NeutronDeleteFirewallPolicyRequest request
    :return: NeutronDeleteFirewallPolicyResponse
    """
    return self.neutron_delete_firewall_policy_with_http_info(request)
def neutron_delete_firewall_policy_with_http_info(self, request):
    """Delete a network ACL policy.

    :param NeutronDeleteFirewallPolicyRequest request
    :return: NeutronDeleteFirewallPolicyResponse
    """
    all_params = ['firewall_policy_id']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('firewall_policy_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_policies/{firewall_policy_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronDeleteFirewallPolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_delete_firewall_rule_async(self, request):
    """Delete a network ACL rule.

    :param NeutronDeleteFirewallRuleRequest request
    :return: NeutronDeleteFirewallRuleResponse
    """
    return self.neutron_delete_firewall_rule_with_http_info(request)
def neutron_delete_firewall_rule_with_http_info(self, request):
    """Delete a network ACL rule.

    :param NeutronDeleteFirewallRuleRequest request
    :return: NeutronDeleteFirewallRuleResponse
    """
    all_params = ['firewall_rule_id']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('firewall_rule_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_rules/{firewall_rule_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronDeleteFirewallRuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_list_firewall_groups_async(self, request):
    """List all network ACL groups the requesting tenant may operate on.

    A single query returns at most 2000 records; beyond that a pagination
    marker is returned.

    :param NeutronListFirewallGroupsRequest request
    :return: NeutronListFirewallGroupsResponse
    """
    return self.neutron_list_firewall_groups_with_http_info(request)
def neutron_list_firewall_groups_with_http_info(self, request):
    """List all network ACL groups the requesting tenant may operate on.

    A single query returns at most 2000 records; beyond that a pagination
    marker is returned.

    :param NeutronListFirewallGroupsRequest request
    :return: NeutronListFirewallGroupsResponse
    """
    all_params = ['marker', 'limit', 'id', 'name', 'description', 'ingress_firewall_policy_id', 'egress_firewall_policy_id']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {}
    query_params = []
    # These filters accept multiple values ('multi' collection format).
    multi_valued = ('id', 'name', 'description')
    for key in ('marker', 'limit', 'id', 'name', 'description',
                'ingress_firewall_policy_id', 'egress_firewall_policy_id'):
        if key in local_var_params:
            query_params.append((key, local_var_params[key]))
            if key in multi_valued:
                collection_formats[key] = 'multi'
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_groups',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronListFirewallGroupsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_list_firewall_policies_async(self, request):
    """List all network ACL policies the requesting tenant may operate on.

    A single query returns at most 2000 records; beyond that a pagination
    marker is returned.

    :param NeutronListFirewallPoliciesRequest request
    :return: NeutronListFirewallPoliciesResponse
    """
    return self.neutron_list_firewall_policies_with_http_info(request)
def neutron_list_firewall_policies_with_http_info(self, request):
    """List all network ACL policies the requesting tenant may operate on.

    A single query returns at most 2000 records; beyond that a pagination
    marker is returned.

    :param NeutronListFirewallPoliciesRequest request
    :return: NeutronListFirewallPoliciesResponse
    """
    all_params = ['limit', 'marker', 'id', 'name', 'description', 'tenant_id']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {}
    query_params = []
    # These filters accept multiple values ('multi' collection format).
    multi_valued = ('id', 'name', 'description')
    for key in ('limit', 'marker', 'id', 'name', 'description', 'tenant_id'):
        if key in local_var_params:
            query_params.append((key, local_var_params[key]))
            if key in multi_valued:
                collection_formats[key] = 'multi'
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_policies',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronListFirewallPoliciesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_list_firewall_rules_async(self, request):
    """List all network ACL rules the requesting tenant may operate on.

    A single query returns at most 2000 records; beyond that a pagination
    marker is returned.

    :param NeutronListFirewallRulesRequest request
    :return: NeutronListFirewallRulesResponse
    """
    return self.neutron_list_firewall_rules_with_http_info(request)
def neutron_list_firewall_rules_with_http_info(self, request):
    """List all network ACL rules the requesting tenant may operate on.

    A single query returns at most 2000 records; beyond that a pagination
    marker is returned.

    :param NeutronListFirewallRulesRequest request
    :return: NeutronListFirewallRulesResponse
    """
    all_params = ['marker', 'limit', 'id', 'name', 'description', 'action', 'tenant_id']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {}
    query_params = []
    # These filters accept multiple values ('multi' collection format).
    multi_valued = ('id', 'name', 'description')
    for key in ('marker', 'limit', 'id', 'name', 'description',
                'action', 'tenant_id'):
        if key in local_var_params:
            query_params.append((key, local_var_params[key]))
            if key in multi_valued:
                collection_formats[key] = 'multi'
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_rules',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronListFirewallRulesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_remove_firewall_rule_async(self, request):
    """Remove a network ACL rule from a network ACL policy.

    :param NeutronRemoveFirewallRuleRequest request
    :return: NeutronRemoveFirewallRuleResponse
    """
    return self.neutron_remove_firewall_rule_with_http_info(request)
def neutron_remove_firewall_rule_with_http_info(self, request):
    """Remove a network ACL rule from a network ACL policy.

    :param NeutronRemoveFirewallRuleRequest request
    :return: NeutronRemoveFirewallRuleResponse
    """
    all_params = ['firewall_policy_id', 'remove_firewall_rule']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('firewall_policy_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # JSON body from the request; streaming requests send the raw stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_policies/{firewall_policy_id}/remove_rule',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronRemoveFirewallRuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_show_firewall_group_async(self, request):
    """Show details of a specific network ACL group.

    :param NeutronShowFirewallGroupRequest request
    :return: NeutronShowFirewallGroupResponse
    """
    return self.neutron_show_firewall_group_with_http_info(request)
def neutron_show_firewall_group_with_http_info(self, request):
    """Show details of a specific network ACL group.

    :param NeutronShowFirewallGroupRequest request
    :return: NeutronShowFirewallGroupResponse
    """
    all_params = ['firewall_group_id']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('firewall_group_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_groups/{firewall_group_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronShowFirewallGroupResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_show_firewall_policy_async(self, request):
    """Show details of a specific network ACL policy.

    :param NeutronShowFirewallPolicyRequest request
    :return: NeutronShowFirewallPolicyResponse
    """
    return self.neutron_show_firewall_policy_with_http_info(request)
def neutron_show_firewall_policy_with_http_info(self, request):
    """Show details of a specific network ACL policy.

    :param NeutronShowFirewallPolicyRequest request
    :return: NeutronShowFirewallPolicyResponse
    """
    all_params = ['firewall_policy_id']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('firewall_policy_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_policies/{firewall_policy_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronShowFirewallPolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_show_firewall_rule_async(self, request):
    """Show details of a specific network ACL rule.

    :param NeutronShowFirewallRuleRequest request
    :return: NeutronShowFirewallRuleResponse
    """
    return self.neutron_show_firewall_rule_with_http_info(request)
def neutron_show_firewall_rule_with_http_info(self, request):
    """Show details of a specific network ACL rule.

    :param NeutronShowFirewallRuleRequest request
    :return: NeutronShowFirewallRuleResponse
    """
    all_params = ['firewall_rule_id']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('firewall_rule_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # Only streaming requests carry a body for this operation.
    body_params = request.get_file_stream() if isinstance(request, SdkStreamRequest) else None

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_rules/{firewall_rule_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronShowFirewallRuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_update_firewall_group_async(self, request):
    """Update a network ACL group.

    :param NeutronUpdateFirewallGroupRequest request
    :return: NeutronUpdateFirewallGroupResponse
    """
    return self.neutron_update_firewall_group_with_http_info(request)
def neutron_update_firewall_group_with_http_info(self, request):
    """Update a network ACL group.

    :param NeutronUpdateFirewallGroupRequest request
    :return: NeutronUpdateFirewallGroupResponse
    """
    all_params = ['firewall_group_id', 'firewall_group']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('firewall_group_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # JSON body from the request; streaming requests send the raw stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_groups/{firewall_group_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronUpdateFirewallGroupResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_update_firewall_policy_async(self, request):
    """Update a network ACL policy.

    :param NeutronUpdateFirewallPolicyRequest request
    :return: NeutronUpdateFirewallPolicyResponse
    """
    return self.neutron_update_firewall_policy_with_http_info(request)
def neutron_update_firewall_policy_with_http_info(self, request):
    """Update a network ACL policy.

    :param NeutronUpdateFirewallPolicyRequest request
    :return: NeutronUpdateFirewallPolicyResponse
    """
    all_params = ['firewall_policy_id', 'firewall_policy']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('firewall_policy_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # JSON body from the request; streaming requests send the raw stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_policies/{firewall_policy_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronUpdateFirewallPolicyResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def neutron_update_firewall_rule_async(self, request):
    """Update a network ACL rule.

    :param NeutronUpdateFirewallRuleRequest request
    :return: NeutronUpdateFirewallRuleResponse
    """
    return self.neutron_update_firewall_rule_with_http_info(request)
def neutron_update_firewall_rule_with_http_info(self, request):
    """Update a network ACL rule.

    :param NeutronUpdateFirewallRuleRequest request
    :return: NeutronUpdateFirewallRuleResponse
    """
    all_params = ['firewall_rule_id', 'firewall_rule']
    # Pick up every attribute actually present on the request object.
    local_var_params = {name: getattr(request, name)
                        for name in request.attribute_map
                        if hasattr(request, name)}

    collection_formats = {}
    path_params = {key: local_var_params[key]
                   for key in ('firewall_rule_id',) if key in local_var_params}
    query_params = []
    header_params = {}
    form_params = {}

    # JSON body from the request; streaming requests send the raw stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/fwaas/firewall_rules/{firewall_rule_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='NeutronUpdateFirewallRuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_vpc_async(self, request):
    """Create a VPC (virtual private cloud) asynchronously.

    Thin wrapper that delegates to ``create_vpc_with_http_info``.

    :param CreateVpcRequest request: request object
    :return: CreateVpcResponse
    """
    response = self.create_vpc_with_http_info(request)
    return response
def create_vpc_with_http_info(self, request):
    """Create a VPC (virtual private cloud).

    Builds the HTTP parameters from *request* and issues
    POST /v1/{project_id}/vpcs.

    :param CreateVpcRequest request: request object
    :return: CreateVpcResponse
    """
    # Collect every attribute that is actually set on the request object.
    # (The previously assigned but never used `all_params` list was removed.)
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = {}

    # JSON body, or the raw stream for SdkStreamRequest instances.
    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])

    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/vpcs',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateVpcResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_vpc_route_async(self, request):
    """Create a VPC route asynchronously.

    Thin wrapper that delegates to ``create_vpc_route_with_http_info``.

    :param CreateVpcRouteRequest request: request object
    :return: CreateVpcRouteResponse
    """
    response = self.create_vpc_route_with_http_info(request)
    return response
def create_vpc_route_with_http_info(self, request):
    """Create a VPC route.

    Builds the HTTP parameters from *request* and issues
    POST /v2.0/vpc/routes.

    :param CreateVpcRouteRequest request: request object
    :return: CreateVpcRouteResponse
    """
    # Collect every attribute that is actually set on the request object.
    # (The previously assigned but never used `all_params` list was removed.)
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = {}

    # JSON body, or the raw stream for SdkStreamRequest instances.
    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/vpc/routes',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateVpcRouteResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_vpc_async(self, request):
    """Delete a VPC (virtual private cloud) asynchronously.

    Thin wrapper that delegates to ``delete_vpc_with_http_info``.

    :param DeleteVpcRequest request: request object
    :return: DeleteVpcResponse
    """
    response = self.delete_vpc_with_http_info(request)
    return response
def delete_vpc_with_http_info(self, request):
    """Delete a VPC (virtual private cloud).

    Builds the HTTP parameters from *request* and issues
    DELETE /v1/{project_id}/vpcs/{vpc_id}.

    :param DeleteVpcRequest request: request object
    :return: DeleteVpcResponse
    """
    # Collect every attribute that is actually set on the request object.
    # (The previously assigned but never used `all_params` list was removed.)
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'vpc_id' in local_var_params:
        path_params['vpc_id'] = local_var_params['vpc_id']

    query_params = []

    header_params = {}

    form_params = {}

    # DELETE carries no JSON body; only stream requests provide one.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/vpcs/{vpc_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteVpcResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_vpc_route_async(self, request):
    """Delete a VPC route asynchronously.

    Thin wrapper that delegates to ``delete_vpc_route_with_http_info``.

    :param DeleteVpcRouteRequest request: request object
    :return: DeleteVpcRouteResponse
    """
    response = self.delete_vpc_route_with_http_info(request)
    return response
def delete_vpc_route_with_http_info(self, request):
    """Delete a VPC route.

    Builds the HTTP parameters from *request* and issues
    DELETE /v2.0/vpc/routes/{route_id}.

    :param DeleteVpcRouteRequest request: request object
    :return: DeleteVpcRouteResponse
    """
    # Collect every attribute that is actually set on the request object.
    # (The previously assigned but never used `all_params` list was removed.)
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'route_id' in local_var_params:
        path_params['route_id'] = local_var_params['route_id']

    query_params = []

    header_params = {}

    form_params = {}

    # DELETE carries no JSON body; only stream requests provide one.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/vpc/routes/{route_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteVpcRouteResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_vpc_routes_async(self, request):
    """List the tenant's VPC routes asynchronously, applying any filters.

    Thin wrapper that delegates to ``list_vpc_routes_with_http_info``.

    :param ListVpcRoutesRequest request: request object
    :return: ListVpcRoutesResponse
    """
    response = self.list_vpc_routes_with_http_info(request)
    return response
def list_vpc_routes_with_http_info(self, request):
    """List the tenant's VPC routes, filtered by the given criteria.

    Builds the HTTP parameters from *request* and issues
    GET /v2.0/vpc/routes.

    :param ListVpcRoutesRequest request: request object
    :return: ListVpcRoutesResponse
    """
    # Collect every attribute that is actually set on the request object.
    # (The previously assigned but never used `all_params` list was removed.)
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}

    # Optional filters are only appended when present on the request.
    query_params = []
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))
    if 'marker' in local_var_params:
        query_params.append(('marker', local_var_params['marker']))
    if 'id' in local_var_params:
        query_params.append(('id', local_var_params['id']))
    if 'type' in local_var_params:
        query_params.append(('type', local_var_params['type']))
    if 'vpc_id' in local_var_params:
        query_params.append(('vpc_id', local_var_params['vpc_id']))
    if 'destination' in local_var_params:
        query_params.append(('destination', local_var_params['destination']))
    if 'tenant_id' in local_var_params:
        query_params.append(('tenant_id', local_var_params['tenant_id']))

    header_params = {}

    form_params = {}

    # GET carries no JSON body; only stream requests provide one.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/vpc/routes',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListVpcRoutesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_vpcs_async(self, request):
    """List VPCs (virtual private clouds) asynchronously.

    Thin wrapper that delegates to ``list_vpcs_with_http_info``.

    :param ListVpcsRequest request: request object
    :return: ListVpcsResponse
    """
    response = self.list_vpcs_with_http_info(request)
    return response
def list_vpcs_with_http_info(self, request):
    """List VPCs (virtual private clouds).

    Builds the HTTP parameters from *request* and issues
    GET /v1/{project_id}/vpcs.

    :param ListVpcsRequest request: request object
    :return: ListVpcsResponse
    """
    # Collect every attribute that is actually set on the request object.
    # (The previously assigned but never used `all_params` list was removed.)
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}

    # Optional filters are only appended when present on the request.
    query_params = []
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))
    if 'marker' in local_var_params:
        query_params.append(('marker', local_var_params['marker']))
    if 'id' in local_var_params:
        query_params.append(('id', local_var_params['id']))
    if 'enterprise_project_id' in local_var_params:
        query_params.append(('enterprise_project_id', local_var_params['enterprise_project_id']))

    header_params = {}

    form_params = {}

    # GET carries no JSON body; only stream requests provide one.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/vpcs',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListVpcsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_vpc_async(self, request):
    """Query a VPC (virtual private cloud) asynchronously.

    Thin wrapper that delegates to ``show_vpc_with_http_info``.

    :param ShowVpcRequest request: request object
    :return: ShowVpcResponse
    """
    response = self.show_vpc_with_http_info(request)
    return response
def show_vpc_with_http_info(self, request):
    """Query a VPC (virtual private cloud).

    Builds the HTTP parameters from *request* and issues
    GET /v1/{project_id}/vpcs/{vpc_id}.

    :param ShowVpcRequest request: request object
    :return: ShowVpcResponse
    """
    # Collect every attribute that is actually set on the request object.
    # (The previously assigned but never used `all_params` list was removed.)
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'vpc_id' in local_var_params:
        path_params['vpc_id'] = local_var_params['vpc_id']

    query_params = []

    header_params = {}

    form_params = {}

    # GET carries no JSON body; only stream requests provide one.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/vpcs/{vpc_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowVpcResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_vpc_route_async(self, request):
    """Query a VPC route's details asynchronously.

    Thin wrapper that delegates to ``show_vpc_route_with_http_info``.

    :param ShowVpcRouteRequest request: request object
    :return: ShowVpcRouteResponse
    """
    response = self.show_vpc_route_with_http_info(request)
    return response
def show_vpc_route_with_http_info(self, request):
    """Query a VPC route's details.

    Builds the HTTP parameters from *request* and issues
    GET /v2.0/vpc/routes/{route_id}.

    :param ShowVpcRouteRequest request: request object
    :return: ShowVpcRouteResponse
    """
    # Collect every attribute that is actually set on the request object.
    # (The previously assigned but never used `all_params` list was removed.)
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'route_id' in local_var_params:
        path_params['route_id'] = local_var_params['route_id']

    query_params = []

    header_params = {}

    form_params = {}

    # GET carries no JSON body; only stream requests provide one.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    auth_settings = []

    return self.call_api(
        resource_path='/v2.0/vpc/routes/{route_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowVpcRouteResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_vpc_async(self, request):
    """Update a VPC (virtual private cloud) asynchronously.

    Thin wrapper that delegates to ``update_vpc_with_http_info``.

    :param UpdateVpcRequest request: request object
    :return: UpdateVpcResponse
    """
    response = self.update_vpc_with_http_info(request)
    return response
def update_vpc_with_http_info(self, request):
    """Update a VPC (virtual private cloud).

    Builds the HTTP parameters from *request* and issues
    PUT /v1/{project_id}/vpcs/{vpc_id}.

    :param UpdateVpcRequest request: request object
    :return: UpdateVpcResponse
    """
    # Collect every attribute that is actually set on the request object.
    # (The previously assigned but never used `all_params` list was removed.)
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}

    path_params = {}
    if 'vpc_id' in local_var_params:
        path_params['vpc_id'] = local_var_params['vpc_id']

    query_params = []

    header_params = {}

    form_params = {}

    # JSON body, or the raw stream for SdkStreamRequest instances.
    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json;charset=UTF-8'])

    auth_settings = []

    return self.call_api(
        resource_path='/v1/{project_id}/vpcs/{vpc_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdateVpcResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,
             post_params=None, response_type=None, response_headers=None, auth_settings=None,
             collection_formats=None, request_type=None):
    """Make the HTTP request and return the deserialized data.

    :param resource_path: Path to the method endpoint.
    :param method: HTTP method to call.
    :param path_params: Path parameters in the url.
    :param query_params: Query parameters in the url.
    :param header_params: Header parameters to be placed in the request header.
    :param body: Request body.
    :param post_params dict: Request post form parameters, for
        `application/x-www-form-urlencoded`, `multipart/form-data`.
    :param auth_settings list: Auth Settings names for the request.
        NOTE(review): accepted but not forwarded to do_http_request —
        this mirrors the pre-existing behaviour; confirm it is intended.
    :param response_type: Response data type.
    :param response_headers: Headers to be added to response data.
    :param collection_formats: dict of collection formats for path, query,
        header, and post parameters.
    :param request_type: Request data type.
    :return: The response directly.
    """
    request_kwargs = dict(
        method=method,
        resource_path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body,
        post_params=post_params,
        response_type=response_type,
        response_headers=response_headers,
        collection_formats=collection_formats,
        request_type=request_type,
        # Every call issued through this client is asynchronous.
        async_request=True,
    )
    return self.do_http_request(**request_kwargs)
| 29.676132
| 243
| 0.623194
| 10,872
| 110,781
| 5.93819
| 0.038723
| 0.043866
| 0.076766
| 0.02974
| 0.927587
| 0.912035
| 0.894145
| 0.863522
| 0.842224
| 0.708457
| 0
| 0.002181
| 0.29237
| 110,781
| 3,732
| 244
| 29.684084
| 0.821372
| 0.118883
| 0
| 0.841905
| 0
| 0
| 0.09514
| 0.047355
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055714
| false
| 0
| 0.004762
| 0.000476
| 0.117143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4670a6b6aef2d4a201d154a4f60e6d08a2ea3497
| 4,828
|
py
|
Python
|
msgraph-cli-extensions/v1_0/cloudcommunications_v1_0/azext_cloudcommunications_v1_0/generated/_params.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
msgraph-cli-extensions/v1_0/cloudcommunications_v1_0/azext_cloudcommunications_v1_0/generated/_params.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
msgraph-cli-extensions/v1_0/cloudcommunications_v1_0/azext_cloudcommunications_v1_0/generated/_params.py
|
thewahome/msgraph-cli
|
33127d9efa23a0e5f5303c93242fbdbb73348671
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
# pylint: disable=too-many-statements
from msgraph.cli.core.commands.validators import validate_file_or_dict
from azext_cloudcommunications_v1_0.action import (
AddAudioConferencing,
AddChatInfo,
AddJoinInformation
)
def _add_online_meeting_body_arguments(c):
    """Register the onlineMeeting body arguments shared verbatim by the
    create-online-meeting and update-online-meeting contexts."""
    c.argument('id_', options_list=['--id'], type=str, help='Read-only.')
    c.argument('audio_conferencing', action=AddAudioConferencing, nargs='+', help='audioConferencing')
    c.argument('chat_info', action=AddChatInfo, nargs='+', help='chatInfo')
    c.argument('creation_date_time', help='The meeting creation time in UTC. Read-only.')
    c.argument('end_date_time', help='The meeting end time in UTC.')
    c.argument('external_id', type=str, help='')
    c.argument('join_information', action=AddJoinInformation, nargs='+', help='itemBody')
    c.argument('join_web_url', type=str, help='The join URL of the online meeting. Read-only.')
    c.argument('start_date_time', help='The meeting start time in UTC.')
    c.argument('subject', type=str, help='The subject of the online meeting.')
    c.argument('video_teleconference_id', type=str, help='The video teleconferencing ID. Read-only.')
    c.argument('attendees', type=validate_file_or_dict, help=' Expected value: json-string/@json-file.',
               arg_group='Participants')
    c.argument('organizer', type=validate_file_or_dict, help='meetingParticipantInfo Expected value: '
               'json-string/@json-file.', arg_group='Participants')


def load_arguments(self, _):
    """Register CLI arguments for the cloudcommunications onlineMeeting commands.

    The duplicated body-argument registrations for create/update were
    factored into _add_online_meeting_body_arguments; registration order
    within each context is unchanged.
    """
    with self.argument_context('cloudcommunications user create-online-meeting') as c:
        c.argument('user_id', type=str, help='key: id of user')
        _add_online_meeting_body_arguments(c)

    with self.argument_context('cloudcommunications user delete-online-meeting') as c:
        c.argument('user_id', type=str, help='key: id of user')
        c.argument('online_meeting_id', type=str, help='key: id of onlineMeeting')
        c.argument('if_match', type=str, help='ETag')

    with self.argument_context('cloudcommunications user list-online-meeting') as c:
        c.argument('user_id', type=str, help='key: id of user')
        c.argument('orderby', nargs='+', help='Order items by property values')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('cloudcommunications user show-online-meeting') as c:
        c.argument('user_id', type=str, help='key: id of user')
        c.argument('online_meeting_id', type=str, help='key: id of onlineMeeting')
        c.argument('select', nargs='+', help='Select properties to be returned')
        c.argument('expand', nargs='+', help='Expand related entities')

    with self.argument_context('cloudcommunications user update-online-meeting') as c:
        c.argument('user_id', type=str, help='key: id of user')
        c.argument('online_meeting_id', type=str, help='key: id of onlineMeeting')
        _add_online_meeting_body_arguments(c)
| 63.526316
| 109
| 0.656172
| 605
| 4,828
| 5.115702
| 0.221488
| 0.116317
| 0.067528
| 0.058805
| 0.821325
| 0.821325
| 0.776737
| 0.776737
| 0.776737
| 0.776737
| 0
| 0.000504
| 0.178128
| 4,828
| 75
| 110
| 64.373333
| 0.779486
| 0.104805
| 0
| 0.75
| 0
| 0
| 0.418182
| 0.057851
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017857
| false
| 0
| 0.035714
| 0
| 0.053571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
46abbfcc778c76906014c1f8a2a2d4053c094694
| 22,174
|
py
|
Python
|
tests/benchmarks/micro_benchmarks/test_gpcnet_performance.py
|
SiriusKY/superbenchmark
|
4074f12c1c38fb393c16fa9be73e9afc4dc5883a
|
[
"MIT"
] | null | null | null |
tests/benchmarks/micro_benchmarks/test_gpcnet_performance.py
|
SiriusKY/superbenchmark
|
4074f12c1c38fb393c16fa9be73e9afc4dc5883a
|
[
"MIT"
] | null | null | null |
tests/benchmarks/micro_benchmarks/test_gpcnet_performance.py
|
SiriusKY/superbenchmark
|
4074f12c1c38fb393c16fa9be73e9afc4dc5883a
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
"""Tests for GPCNet benchmark."""
import os
import numbers
import unittest
from pathlib import Path
from superbench.benchmarks import BenchmarkRegistry, Platform, BenchmarkType
class GPCNetBenchmarkTest(unittest.TestCase): # noqa: E501
"""Tests for GPCNetBenchmark benchmark."""
def setUp(self):
    """Create fake GPCNet binaries so the benchmark preprocess can find them."""
    # Point SB_MICRO_PATH at a scratch location and make its bin directory.
    os.environ['SB_MICRO_PATH'] = '/tmp/superbench'
    bin_dir = Path(os.getenv('SB_MICRO_PATH'), 'bin')
    bin_dir.mkdir(parents=True, exist_ok=True)
    # Remember the touched files so tearDown can remove them.
    self.__binary_files = []
    for executable_name in ('network_test', 'network_load_test'):
        fake_binary = Path(bin_dir, executable_name)
        fake_binary.touch(mode=0o755, exist_ok=True)
        self.__binary_files.append(fake_binary)
def tearDown(self):
    """Remove every fake binary created in setUp."""
    for created_file in self.__binary_files:
        created_file.unlink()
def test_gpcnet_network_test(self):
    """Test gpcnet-network-test benchmark."""
    # Fake stdout of a successful 2-node network_test run; all measurements
    # are placeholder values (10000.0) — only the table structure matters.
    raw_output = """# noqa: E501
Network Tests v1.3
Test with 2 MPI ranks (2 nodes)
Legend
RR = random ring communication pattern
Nat = natural ring communication pattern
Lat = latency
BW = bandwidth
BW+Sync = bandwidth with barrier
+------------------------------------------------------------------------------+
| Isolated Network Tests |
+---------------------------------+--------------+--------------+--------------+
| Name | Avg | 99% | Units |
+---------------------------------+--------------+--------------+--------------+
| RR Two-sided Lat (8 B) | 10000.0 | 10000.0 | usec |
+---------------------------------+--------------+--------------+--------------+
| RR Get Lat (8 B) | 10000.0 | 10000.0 | usec |
+---------------------------------+--------------+--------------+--------------+
| RR Two-sided BW (131072 B) | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+
| RR Put BW (131072 B) | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+
| RR Two-sided BW+Sync (131072 B) | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+
| Nat Two-sided BW (131072 B) | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+
| Multiple Allreduce (8 B) | 10000.0 | 10000.0 | usec |
+---------------------------------+--------------+--------------+--------------+
| Multiple Alltoall (4096 B) | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+
"""
    # Check registry.
    benchmark_name = 'gpcnet-network-test'
    (benchmark_class,
     predefine_params) = BenchmarkRegistry._BenchmarkRegistry__select_benchmark(benchmark_name, Platform.CPU)
    assert (benchmark_class)

    # Check preprocess
    benchmark = benchmark_class(benchmark_name)
    ret = benchmark._preprocess()
    assert (ret)

    # The generated command should invoke the binary with no extra arguments.
    expect_command = 'network_test'
    command = benchmark._bin_name + benchmark._commands[0].split(benchmark._bin_name)[1]
    assert (command == expect_command)

    # Output of a run that aborted before producing any results table.
    raw_output_no_execution = """
ERROR: this application must be run on at least 2 nodes
--------------------------------------------------------------------------
Primary job terminated normally, but 1 process returned
a non-zero exit code. Per user-direction, the job has been aborted.
--------------------------------------------------------------------------
--------------------------------------------------------------------------
mpirun detected that one or more processes exited with non-zero status, thus causing
the job to be terminated. The first process to do so was:
Process name: [[63697,1],0]
Exit code: 1
--------------------------------------------------------------------------
"""
    # An aborted run is accepted but must yield no metrics.
    assert (benchmark._process_raw_result(0, raw_output_no_execution))
    assert (len(benchmark.result) == 0)

    # Check function process_raw_data.
    # Positive case - valid raw output.
    assert (benchmark._process_raw_result(0, raw_output))
    # Metric names derived from the table rows above (whitespace removed).
    test_name = 'IsolatedNetworkTests'
    metric_list = [
        'RRTwo-sidedLat(8B)', 'RRGetLat(8B)', 'RRTwo-sidedBW(131072B)', 'RRPutBW(131072B)',
        'RRTwo-sidedBW+Sync(131072B)', 'NatTwo-sidedBW(131072B)', 'MultipleAllreduce(8B)', 'MultipleAlltoall(4096B)'
    ]
    # Each metric appears once per statistic column (Avg and 99%).
    for metric_medium in metric_list:
        for suffix in ['Avg', '99%']:
            metric = test_name + '_' + metric_medium + '_' + suffix
            assert (metric in benchmark.result)
            assert (len(benchmark.result[metric]) == 1)
            assert (isinstance(benchmark.result[metric][0], numbers.Number))

    # Negative case - Add invalid raw output.
    assert (benchmark._process_raw_result(0, 'ERROR') is False)

    # Check basic information.
    assert (benchmark.name == 'gpcnet-network-test')
    assert (benchmark.type == BenchmarkType.MICRO)
    assert (benchmark._bin_name == 'network_test')
def test_gpcnet_network_load(self):  # noqa: C901
    """Test gpcnet-network-load-test benchmark.

    Covers registry lookup, command preprocessing, parsing of the
    fixed-width GPCNET result tables, and the error paths (aborted run,
    garbage output).
    """
    def assert_metrics(test_name, metric_mediums, suffixes):
        # Every (medium, suffix) combination must appear in the parsed
        # result with exactly one numeric sample.  Replaces six formerly
        # duplicated nested loops.
        for metric_medium in metric_mediums:
            for suffix in suffixes:
                metric = test_name + '_' + metric_medium + '_' + suffix
                assert (metric in benchmark.result)
                assert (len(benchmark.result[metric]) == 1)
                assert (isinstance(benchmark.result[metric][0], numbers.Number))

    # Verbatim stdout of GPCNET's network_load_test.  The parser consumes the
    # fixed-width tables below, so their layout must not be reformatted.
    raw_output = """# noqa: E501
NetworkLoad Tests v1.3
Test with 10 MPI ranks (10 nodes)
2 nodes running Network Tests
8 nodes running Congestion Tests (min 100 nodes per congestor)
Legend
RR = random ring communication pattern
Lat = latency
BW = bandwidth
BW+Sync = bandwidth with barrier
+------------------------------------------------------------------------------------------------------------------------------------------+
| Isolated Network Tests |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Name | Min | Max | Avg | Avg(Worst) | 99% | 99.9% | Units |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| RR Two-sided Lat (8 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | usec |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| RR Two-sided BW+Sync (131072 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Multiple Allreduce (8 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | usec |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
+------------------------------------------------------------------------------------------------------------------------------------------+
| Isolated Congestion Tests |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Name | Min | Max | Avg | Avg(Worst) | 99% | 99.9% | Units |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Alltoall (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Two-sided Incast (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Put Incast (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Get Bcast (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
+------------------------------------------------------------------------------------------------------------------------------------------+
| Network Tests running with Congestion Tests ( RR Two-sided Lat Network Test) |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Name | Min | Max | Avg | Avg(Worst) | 99% | 99.9% | Units |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| RR Two-sided Lat (8 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | usec |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Alltoall (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Two-sided Incast (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Put Incast (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Get Bcast (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
+------------------------------------------------------------------------------------------------------------------------------------------+
| Network Tests running with Congestion Tests (RR Two-sided BW+Sync Network Test) |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Name | Min | Max | Avg | Avg(Worst) | 99% | 99.9% | Units |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| RR Two-sided BW+Sync (131072 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Alltoall (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Two-sided Incast (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Put Incast (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Get Bcast (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
+------------------------------------------------------------------------------------------------------------------------------------------+
| Network Tests running with Congestion Tests ( Multiple Allreduce Network Test) |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Name | Min | Max | Avg | Avg(Worst) | 99% | 99.9% | Units |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Multiple Allreduce (8 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | usec |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Alltoall (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Two-sided Incast (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Put Incast (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
| Get Bcast (4096 B) | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | 10000.0 | MiB/s/rank |
+---------------------------------+--------------+--------------+--------------+--------------+--------------+--------------+--------------+
+------------------------------------------------------------------------------+
| Network Tests running with Congestion Tests - Key Results |
+---------------------------------+--------------------------------------------+
| Name | Congestion Impact Factor |
+---------------------------------+----------------------+---------------------+
| | Avg | 99% |
+---------------------------------+----------------------+---------------------+
| RR Two-sided Lat (8 B) | 0.0X | 0.0X |
+---------------------------------+----------------------+---------------------+
| RR Two-sided BW+Sync (131072 B) | 0.0X | 0.0X |
+---------------------------------+----------------------+---------------------+
| Multiple Allreduce (8 B) | 0.0X | 0.0X |
+---------------------------------+----------------------+---------------------+
"""
    # Check registry.  Name-mangled access to the registry's private selector.
    benchmark_name = 'gpcnet-network-load-test'
    (benchmark_class,
     predefine_params) = BenchmarkRegistry._BenchmarkRegistry__select_benchmark(benchmark_name, Platform.CPU)
    assert (benchmark_class)
    # Check preprocess: the generated command must invoke the expected binary.
    benchmark = benchmark_class(benchmark_name)
    ret = benchmark._preprocess()
    assert (ret)
    expect_command = 'network_load_test'
    command = benchmark._bin_name + benchmark._commands[0].split(benchmark._bin_name)[1]
    assert (command == expect_command)
    # Check function process_raw_data.
    # A run aborted by mpirun (too few nodes) is accepted but yields no metrics.
    raw_output_no_execution = """
ERROR: this application must be run on at least 10 nodes
--------------------------------------------------------------------------
Primary job terminated normally, but 1 process returned
a non-zero exit code. Per user-direction, the job has been aborted.
--------------------------------------------------------------------------
--------------------------------------------------------------------------
mpirun detected that one or more processes exited with non-zero status, thus causing
the job to be terminated. The first process to do so was:
Process name: [[63697,1],0]
Exit code: 1
--------------------------------------------------------------------------
"""
    assert (benchmark._process_raw_result(0, raw_output_no_execution))
    assert (len(benchmark.result) == 0)
    # Positive case - valid raw output: one table section per test_name below.
    assert (benchmark._process_raw_result(0, raw_output))
    assert_metrics(
        'IsolatedNetworkTests',
        ['RRTwo-sidedLat(8B)', 'RRTwo-sidedBW+Sync(131072B)', 'MultipleAllreduce(8B)'],
        ['Max', 'Min', 'Avg', '99.9%']
    )
    assert_metrics(
        'IsolatedCongestionTests',
        ['GetBcast(4096B)', 'PutIncast(4096B)', 'Two-sidedIncast(4096B)', 'Alltoall(4096B)'],
        ['Max', 'Min', 'Avg', '99.9%']
    )
    assert_metrics(
        'NetworkTestsrunningwithCongestionTests(RRTwo-sidedLatNetworkTest)',
        ['GetBcast(4096B)', 'PutIncast(4096B)', 'Two-sidedIncast(4096B)', 'Alltoall(4096B)', 'RRTwo-sidedLat(8B)'],
        ['Max', 'Min', 'Avg', '99.9%']
    )
    assert_metrics(
        'NetworkTestsrunningwithCongestionTests(RRTwo-sidedBW+SyncNetworkTest)',
        ['GetBcast(4096B)', 'PutIncast(4096B)', 'Two-sidedIncast(4096B)', 'Alltoall(4096B)',
         'RRTwo-sidedBW+Sync(131072B)'],
        ['Max', 'Min', 'Avg', '99.9%']
    )
    assert_metrics(
        'NetworkTestsrunningwithCongestionTests(MultipleAllreduceNetworkTest)',
        ['GetBcast(4096B)', 'PutIncast(4096B)', 'Two-sidedIncast(4096B)', 'Alltoall(4096B)', 'MultipleAllreduce(8B)'],
        ['Max', 'Min', 'Avg', '99.9%']
    )
    # The key-results table only reports Avg and 99% impact factors.
    assert_metrics(
        'NetworkTestsrunningwithCongestionTests-KeyResults',
        ['RRTwo-sidedLat(8B)', 'RRTwo-sidedBW+Sync(131072B)', 'MultipleAllreduce(8B)'],
        ['Avg', '99%']
    )
    # Negative case - Add invalid raw output.
    assert (benchmark._process_raw_result(0, 'ERROR') is False)
    # Check basic information.
    assert (benchmark.name == 'gpcnet-network-load-test')
    assert (benchmark.type == BenchmarkType.MICRO)
    assert (benchmark._bin_name == 'network_load_test')
| 69.510972
| 140
| 0.361414
| 1,682
| 22,174
| 4.660523
| 0.139715
| 0.11328
| 0.165582
| 0.180635
| 0.833652
| 0.813624
| 0.784666
| 0.779691
| 0.777012
| 0.772292
| 0
| 0.074926
| 0.239199
| 22,174
| 318
| 141
| 69.72956
| 0.389745
| 0.032696
| 0
| 0.732342
| 0
| 0.126394
| 0.730352
| 0.368283
| 0
| 0
| 0
| 0
| 0.152416
| 1
| 0.01487
| false
| 0
| 0.018587
| 0
| 0.037175
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3117e5abd03786688a0271ff36e547e76a1835a0
| 8,093
|
py
|
Python
|
tests/test_SO3tensor.py
|
ehthiede/python-pygelib
|
8140ce7ab05a47dad202b2b7f1e8aee5309723e2
|
[
"MIT"
] | null | null | null |
tests/test_SO3tensor.py
|
ehthiede/python-pygelib
|
8140ce7ab05a47dad202b2b7f1e8aee5309723e2
|
[
"MIT"
] | null | null | null |
tests/test_SO3tensor.py
|
ehthiede/python-pygelib
|
8140ce7ab05a47dad202b2b7f1e8aee5309723e2
|
[
"MIT"
] | null | null | null |
import torch
import pytest
import operator
from pygelib.SO3TensorArray import SO3TensorArray
class TestSO3TensorArrayMult(object):
    """Element-wise (complex) multiplication tests for SO3TensorArray."""

    def test_array_mult(self):
        # Full-shape operands on both sides.
        lhs = [torch.randn(2, 4, 5) for _ in range(3)]
        rhs = [torch.randn(2, 4, 5) for _ in range(3)]
        self._base_test_mult(lhs, rhs)

    def test_single_array_mult(self):
        # Right-hand operand carries only a (real, imag) pair per part.
        lhs = [torch.randn(2, 4, 5) for _ in range(3)]
        rhs = [torch.randn(2) for _ in range(3)]
        self._base_test_mult(lhs, rhs)

    def _base_test_mult(self, a_parts, b_parts):
        # The product must follow the complex multiplication rule, with
        # channel 0 holding the real part and channel 1 the imaginary part.
        product = SO3TensorArray(a_parts) * SO3TensorArray(b_parts)
        for x, y, out in zip(a_parts, b_parts, product._data):
            expected_re = x[0] * y[0] - x[1] * y[1]
            expected_im = x[0] * y[1] + x[1] * y[0]
            assert torch.allclose(out[0], expected_re)
            assert torch.allclose(out[1], expected_im)

    @pytest.mark.parametrize('b', [-1, 2.1+0.4j,
                                   torch.complex(torch.randn(4, 5),
                                                 torch.randn(4, 5))
                                   ])
    def test_scalar_mult(self, b):
        # A python number or complex tensor broadcasts over every part.
        parts = [torch.randn(2, 4, 5) for _ in range(3)]
        scaled = SO3TensorArray(parts) * b
        for src, out in zip(parts, scaled._data):
            assert torch.allclose(out[0], src[0] * b.real - src[1] * b.imag)
            assert torch.allclose(out[1], src[0] * b.imag + src[1] * b.real)

    def test_scalar_mult_real(self):
        # Multiplying by a purely real tensor.  NOTE(review): the imaginary
        # channel is asserted to come back as exact zeros -- confirm this
        # matches SO3TensorArray's intended real-operand semantics.
        parts = [torch.randn(2, 4, 5) for _ in range(3)]
        factor = torch.randn(4, 5)
        scaled = SO3TensorArray(parts) * factor
        for src, out in zip(parts, scaled._data):
            assert torch.allclose(out[0], src[0] * factor)
            assert torch.allclose(out[1], torch.zeros_like(src[0]))
class TestSO3TensorArrayDiv(object):
    """Element-wise (complex) division tests for SO3TensorArray."""

    def test_array_mult(self):
        lhs = [torch.randn(2, 4, 5) for _ in range(3)]
        rhs = [torch.randn(2, 4, 5) for _ in range(3)]
        self._base_test_mult(lhs, rhs)

    def test_single_array_mult(self):
        # Divisor carries only a (real, imag) pair per part.
        lhs = [torch.randn(2, 4, 5) for _ in range(3)]
        rhs = [torch.randn(2) for _ in range(3)]
        self._base_test_mult(lhs, rhs)

    def _base_test_mult(self, a_parts, b_parts):
        # Quotient must follow the complex division rule:
        # a / b = a * conj(b) / |b|^2, channel 0 real, channel 1 imaginary.
        quotient = SO3TensorArray(a_parts) / SO3TensorArray(b_parts)
        for x, y, out in zip(a_parts, b_parts, quotient._data):
            denom = y[0] * y[0] + y[1] * y[1]
            expected_re = (x[0] * y[0] + x[1] * y[1]) / denom
            expected_im = (x[1] * y[0] - x[0] * y[1]) / denom
            assert torch.allclose(out[0], expected_re)
            assert torch.allclose(out[1], expected_im)

    @pytest.mark.parametrize('b', [-1, 2.1+0.4j,
                                   torch.complex(torch.randn(4, 5),
                                                 torch.randn(4, 5))
                                   ])
    def test_scalar_mult(self, b):
        # NOTE(review): this test exercises `*`, not `/`, despite living in
        # the Div test class -- likely copy-pasted from
        # TestSO3TensorArrayMult; confirm whether scalar division coverage
        # was intended here.
        parts = [torch.randn(2, 4, 5) for _ in range(3)]
        result = SO3TensorArray(parts) * b
        for src, out in zip(parts, result._data):
            assert torch.allclose(out[0], src[0] * b.real - src[1] * b.imag)
            assert torch.allclose(out[1], src[0] * b.imag + src[1] * b.real)

    def test_scalar_mult_real(self):
        # Same note as above: uses `*` with a purely real tensor and expects
        # a zero imaginary channel.
        parts = [torch.randn(2, 4, 5) for _ in range(3)]
        factor = torch.randn(4, 5)
        result = SO3TensorArray(parts) * factor
        for src, out in zip(parts, result._data):
            assert torch.allclose(out[0], src[0] * factor)
            assert torch.allclose(out[1], torch.zeros_like(src[0]))
class TestSO3TensorArrayMatMul(object):
    """Matrix multiplication (`@`) tests for SO3TensorArray."""

    def test_array_matmul(self):
        lhs_parts = [torch.randn(2, 4, 5) for _ in range(3)]
        rhs_parts = [torch.randn(2, 5, 6) for _ in range(3)]
        product = SO3TensorArray(lhs_parts) @ SO3TensorArray(rhs_parts)
        for x, y, out in zip(lhs_parts, rhs_parts, product._data):
            # Complex matmul: real/imag channels combine just like scalars do.
            assert torch.allclose(out[0], x[0] @ y[0] - x[1] @ y[1])
            assert torch.allclose(out[1], x[0] @ y[1] + x[1] @ y[0])

    def test_scalar_matmul(self):
        # Single complex matrix applied to every part.
        parts = [torch.randn(2, 4, 5) for _ in range(3)]
        matrix = torch.complex(torch.randn(5, 3), torch.randn(5, 3))
        product = SO3TensorArray(parts) @ matrix
        for src, out in zip(parts, product._data):
            assert torch.allclose(out[0], src[0] @ matrix.real - src[1] @ matrix.imag)
            assert torch.allclose(out[1], src[0] @ matrix.imag + src[1] @ matrix.real)

    def test_scalar_matmul_real(self):
        # Purely real matrix: the imaginary channel is expected to be zeros.
        parts = [torch.randn(2, 4, 5) for _ in range(3)]
        matrix = torch.randn(5, 3)
        product = SO3TensorArray(parts) @ matrix
        for src, out in zip(parts, product._data):
            assert torch.allclose(out[0], src[0] @ matrix)
            assert torch.allclose(out[1], torch.zeros_like(src[0] @ matrix))
class TestSO3TensorArrayAddSubtract(object):
    """Addition and subtraction tests for SO3TensorArray."""

    @pytest.mark.parametrize('op', [operator.add, operator.sub])
    def test_array_add(self, op):
        lhs = [torch.randn(2, 4, 5) for _ in range(3)]
        rhs = [torch.randn(2, 4, 5) for _ in range(3)]
        self._base_test_add(lhs, rhs, op)

    @pytest.mark.parametrize('op', [operator.add, operator.sub])
    def test_single_array_add(self, op):
        # Right-hand operand carries only a (real, imag) pair per part.
        lhs = [torch.randn(2, 4, 5) for _ in range(3)]
        rhs = [torch.randn(2) for _ in range(3)]
        self._base_test_add(lhs, rhs, op)

    def _base_test_add(self, a_parts, b_parts, op):
        # Addition/subtraction act channel-wise on real and imaginary parts.
        result = op(SO3TensorArray(a_parts), SO3TensorArray(b_parts))
        for x, y, out in zip(a_parts, b_parts, result._data):
            assert torch.allclose(out[0], op(x[0], y[0]))
            assert torch.allclose(out[1], op(x[1], y[1]))

    @pytest.mark.parametrize('op', [operator.add, operator.sub])
    def test_tensor_add(self, op):
        parts = [torch.randn(2, 4, 5) for _ in range(3)]
        other = torch.complex(torch.randn(4, 5), torch.randn(4, 5))
        result = op(SO3TensorArray(parts), other)
        for src, out in zip(parts, result._data):
            assert torch.allclose(out[0], op(src[0], other.real))
            assert torch.allclose(out[1], op(src[1], other.imag))

    @pytest.mark.parametrize('b', [-1, 2.1+0.4j,
                                   torch.complex(torch.randn(4, 5),
                                                 torch.randn(4, 5))
                                   ])
    @pytest.mark.parametrize('op', [operator.add, operator.sub])
    def test_scalar_add(self, b, op):
        # Python numbers and complex tensors broadcast over every part.
        parts = [torch.randn(2, 4, 5) for _ in range(3)]
        result = op(SO3TensorArray(parts), b)
        for src, out in zip(parts, result._data):
            assert torch.allclose(out[0], op(src[0], b.real))
            assert torch.allclose(out[1], op(src[1], b.imag))

    @pytest.mark.parametrize('op', [operator.add, operator.sub])
    def test_scalar_add_real(self, op):
        parts = [torch.randn(2, 4, 5) for _ in range(3)]
        offset = torch.randn(4, 5)
        result = op(SO3TensorArray(parts), offset)
        for src, out in zip(parts, result._data):
            assert torch.allclose(out[0], op(src[0], offset))
            # A real operand leaves the imaginary channel untouched.
            assert torch.allclose(out[1], src[1])
class TestSO3TensorArrayInterface(object):
    """Accessor tests for the .real / .imag views of SO3TensorArray."""

    def test_get_real_imag(self):
        parts = [torch.randn(2, 4, 5) for _ in range(3)]
        reals = SO3TensorArray(parts).real
        imags = SO3TensorArray(parts).imag
        # Channel 0 of each part is the real view, channel 1 the imaginary one.
        for src, view in zip(parts, reals):
            assert torch.allclose(src[0], view)
        for src, view in zip(parts, imags):
            assert torch.allclose(src[1], view)
| 40.873737
| 82
| 0.56518
| 1,359
| 8,093
| 3.145695
| 0.047829
| 0.078596
| 0.124444
| 0.121637
| 0.892632
| 0.873216
| 0.85614
| 0.847485
| 0.846316
| 0.826433
| 0
| 0.042852
| 0.28778
| 8,093
| 197
| 83
| 41.081218
| 0.69882
| 0.014333
| 0
| 0.701863
| 0
| 0
| 0.001631
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 1
| 0.124224
| false
| 0
| 0.024845
| 0
| 0.180124
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3128bfd7b547b457101f07bc581712fcf50d8511
| 2,324
|
py
|
Python
|
modulo_2/pega_id_pizzas_esfihas.py
|
kallif003/Sistema-Delivery
|
b9c68e3a02ff0d60c7513dd09c08d722e76b138b
|
[
"MIT"
] | null | null | null |
modulo_2/pega_id_pizzas_esfihas.py
|
kallif003/Sistema-Delivery
|
b9c68e3a02ff0d60c7513dd09c08d722e76b138b
|
[
"MIT"
] | null | null | null |
modulo_2/pega_id_pizzas_esfihas.py
|
kallif003/Sistema-Delivery
|
b9c68e3a02ff0d60c7513dd09c08d722e76b138b
|
[
"MIT"
] | null | null | null |
def pega_id(*args):
    """Fill the product-update screen with the record matching the typed id.

    Reads the product id from the update screen, queries the table that
    corresponds to the checked size/category radio button and loads the
    product name, ingredients and price into the form fields.  On failure
    the error dialog is shown instead.

    Positional args (signature kept for existing callers):
        args[0]: the update-products screen widget (telaAtualizarProdutos).
        args[1]: the error dialog widget (telaErro).
        args[2]: an open database cursor.
    """
    tela_atualizar = args[0]
    tela_erro = args[1]
    cursor = args[2]
    try:
        produto_id = tela_atualizar.cod_atualizar.text()
        # Each category radio button maps to its own table; this replaces
        # five duplicated if-blocks.  The checks stay independent (not
        # elif), preserving the original behavior that the last checked
        # category wins when several are checked at once.
        categorias = (
            (tela_atualizar.broto_atualizar, 'broto'),
            (tela_atualizar.seis_atualizar, 'seisPedacos'),
            (tela_atualizar.oito_atualizar, 'oitoPedacos'),
            (tela_atualizar.dez_atualizar, 'dezPedacos'),
            (tela_atualizar.esfiha_atualizar, 'esfihas'),
        )
        for botao, tabela in categorias:
            if botao.isChecked():
                # Parameterized query instead of string interpolation: the id
                # comes from a user-editable text field, so interpolating it
                # into the SQL was an injection vector.  The table name comes
                # from the fixed tuple above, never from user input.
                # NOTE(review): assumes a DB-API driver with 'format'
                # paramstyle (e.g. MySQLdb/pymysql) -- verify against the
                # project's connector.
                cursor.execute(
                    'select * from {} where id = %s'.format(tabela),
                    (produto_id,)
                )
                pizza = cursor.fetchall()
                # Column usage mirrors the original: [1] -> product name,
                # [4] -> ingredients, [3] -> price.
                tela_atualizar.produto_atualizar.setText(str(pizza[0][1]))
                tela_atualizar.ingredientes_atualizar.setText(str(pizza[0][4]))
                tela_atualizar.valor_atualizar.setText(str(pizza[0][3]))
    except Exception:
        # Narrowed from a bare `except:`; still best-effort: any failure
        # (unknown id, empty result, database error) surfaces as the
        # generic error dialog.
        tela_erro.show()
        tela_erro.label.setText(' Erro, tente novamente!')
| 50.521739
| 82
| 0.66222
| 233
| 2,324
| 6.51073
| 0.193133
| 0.158207
| 0.187871
| 0.23731
| 0.835201
| 0.835201
| 0.700066
| 0.700066
| 0.700066
| 0.700066
| 0
| 0.018232
| 0.22117
| 2,324
| 45
| 83
| 51.644444
| 0.81989
| 0
| 0
| 0.512821
| 0
| 0
| 0.089501
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025641
| false
| 0
| 0
| 0
| 0.025641
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
31c0a10470bb26c4296759610e87bfb4c3594b9d
| 26,877
|
py
|
Python
|
contrib/runners/orquesta_runner/tests/unit/test_with_items.py
|
saucetray/st2
|
8f507d6c8d9483c8371e386fe2b7998596856fd7
|
[
"Apache-2.0"
] | 2
|
2021-08-04T01:04:06.000Z
|
2021-08-04T01:04:08.000Z
|
contrib/runners/orquesta_runner/tests/unit/test_with_items.py
|
saucetray/st2
|
8f507d6c8d9483c8371e386fe2b7998596856fd7
|
[
"Apache-2.0"
] | null | null | null |
contrib/runners/orquesta_runner/tests/unit/test_with_items.py
|
saucetray/st2
|
8f507d6c8d9483c8371e386fe2b7998596856fd7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import mock
from orquesta import statuses as wf_statuses
import st2tests
from oslo_config import cfg
# XXX: actionsensor import depends on config being setup.
import st2tests.config as tests_config
tests_config.parse_args()
from tests.unit import base
from st2actions.workflows import workflows
from st2common.bootstrap import actionsregistrar
from st2common.bootstrap import runnersregistrar
from st2common.constants import action as action_constants
from st2common.models.db import liveaction as lv_db_models
from st2common.persistence import execution as ex_db_access
from st2common.persistence import liveaction as lv_db_access
from st2common.persistence import workflow as wf_db_access
from st2common.runners import base as runners
from st2common.services import action as action_service
from st2common.services import executions as execution_service
from st2common.transport import liveaction as lv_ac_xport
from st2common.transport import workflow as wf_ex_xport
from st2common.transport import publishers
from st2common.util import action_db as action_utils
from st2tests.mocks import liveaction as mock_lv_ac_xport
from st2tests.mocks import workflow as mock_wf_ex_xport
# Fixture pack that ships the orquesta test workflows.
TEST_PACK = 'orquesta_tests'
# Absolute path to that pack inside st2tests' fixtures directory.
TEST_PACK_PATH = st2tests.fixturesloader.get_fixtures_packs_base_path() + '/' + TEST_PACK
# Packs registered for these tests: the orquesta fixtures plus the core pack.
PACKS = [
    TEST_PACK_PATH,
    st2tests.fixturesloader.get_fixtures_packs_base_path() + '/core'
]
@mock.patch.object(
publishers.CUDPublisher,
'publish_update',
mock.MagicMock(return_value=None))
@mock.patch.object(
lv_ac_xport.LiveActionPublisher,
'publish_create',
mock.MagicMock(side_effect=mock_lv_ac_xport.MockLiveActionPublisher.publish_create))
@mock.patch.object(
lv_ac_xport.LiveActionPublisher,
'publish_state',
mock.MagicMock(side_effect=mock_lv_ac_xport.MockLiveActionPublisher.publish_state))
@mock.patch.object(
wf_ex_xport.WorkflowExecutionPublisher,
'publish_create',
mock.MagicMock(side_effect=mock_wf_ex_xport.MockWorkflowExecutionPublisher.publish_create))
@mock.patch.object(
wf_ex_xport.WorkflowExecutionPublisher,
'publish_state',
mock.MagicMock(side_effect=mock_wf_ex_xport.MockWorkflowExecutionPublisher.publish_state))
class OrquestaWithItemsTest(st2tests.ExecutionDbTestCase):
@classmethod
def setUpClass(cls):
    """Register runners and the fixture packs once for the whole suite."""
    super(OrquestaWithItemsTest, cls).setUpClass()
    # Runners first, then every pack listed in PACKS.
    runnersregistrar.register_runners()
    registrar = actionsregistrar.ActionsRegistrar(
        use_pack_cache=False,
        fail_on_failure=True
    )
    for pack_path in PACKS:
        registrar.register_from_pack(pack_path)
@classmethod
def get_runner_class(cls, runner_name):
    """Resolve a runner name to the class implementing it."""
    return type(runners.get_runner(runner_name, runner_name))
def set_execution_status(self, lv_ac_db_id, status):
    """Force a liveaction into *status* without publishing, then sync its
    execution record.  Returns the updated (liveaction, execution) pair."""
    lv_ac_db = action_utils.update_liveaction_status(
        status=status, liveaction_id=lv_ac_db_id, publish=False)
    ac_ex_db = execution_service.update_execution(lv_ac_db, publish=False)
    return lv_ac_db, ac_ex_db
def test_with_items(self):
    """A with-items task fans out one execution per item and succeeds."""
    num_items = 3
    wf_meta = base.get_wf_fixture_meta_data(TEST_PACK_PATH, 'with-items.yaml')
    lv_ac_db, ac_ex_db = action_service.request(
        lv_db_models.LiveActionDB(action=wf_meta['name']))
    # Both the liveaction and the workflow execution start out running.
    lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
    self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
    wf_ex_db = wf_db_access.WorkflowExecution.query(action_execution=str(ac_ex_db.id))[0]
    self.assertEqual(wf_ex_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
    # The with-items task spawned one child action execution per item, and
    # all of them have already succeeded.
    query_filters = {'workflow_execution': str(wf_ex_db.id), 'task_id': 'task1'}
    t1_ex_db = wf_db_access.TaskExecution.query(**query_filters)[0]
    t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
    self.assertEqual(len(t1_ac_ex_dbs), num_items)
    self.assertTrue(all(
        child.status == action_constants.LIVEACTION_STATUS_SUCCEEDED
        for child in t1_ac_ex_dbs
    ))
    # Feed every child result back through the workflow engine.
    for child in t1_ac_ex_dbs:
        workflows.get_engine().process(child)
    t1_ex_db = wf_db_access.TaskExecution.get_by_id(t1_ex_db.id)
    self.assertEqual(t1_ex_db.status, wf_statuses.SUCCEEDED)
    # The workflow and its liveaction both wrap up as succeeded.
    wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id)
    self.assertEqual(wf_ex_db.status, wf_statuses.SUCCEEDED)
    lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
    self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED)
def test_with_items_failure(self):
    """Failing items must fail the task and, in turn, the whole workflow."""
    num_items = 10
    wf_meta = base.get_wf_fixture_meta_data(TEST_PACK_PATH, 'with-items-failure.yaml')
    lv_ac_db, ac_ex_db = action_service.request(
        lv_db_models.LiveActionDB(action=wf_meta['name']))
    # Both the liveaction and the workflow execution start out running.
    lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
    self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
    wf_ex_db = wf_db_access.WorkflowExecution.query(action_execution=str(ac_ex_db.id))[0]
    self.assertEqual(wf_ex_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
    # One child action execution per item.
    query_filters = {'workflow_execution': str(wf_ex_db.id), 'task_id': 'task1'}
    t1_ex_db = wf_db_access.TaskExecution.query(**query_filters)[0]
    t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
    self.assertEqual(len(t1_ac_ex_dbs), num_items)
    # Expected alternation: even-indexed items succeed, odd-indexed fail.
    for index, child in enumerate(t1_ac_ex_dbs):
        expected = (action_constants.LIVEACTION_STATUS_SUCCEEDED
                    if index % 2 == 0
                    else action_constants.LIVEACTION_STATUS_FAILED)
        self.assertEqual(child.status, expected)
    # Feed every child result back through the workflow engine.
    for child in t1_ac_ex_dbs:
        workflows.get_engine().process(child)
    t1_ex_db = wf_db_access.TaskExecution.get_by_id(t1_ex_db.id)
    self.assertEqual(t1_ex_db.status, wf_statuses.FAILED)
    # The failed task fails the workflow and its liveaction.
    wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id)
    self.assertEqual(wf_ex_db.status, wf_statuses.FAILED)
    lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
    self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_FAILED)
def test_with_items_empty_list(self):
items = []
num_items = len(items)
wf_input = {'members': items}
wf_meta = base.get_wf_fixture_meta_data(TEST_PACK_PATH, 'with-items.yaml')
lv_ac_db = lv_db_models.LiveActionDB(action=wf_meta['name'], parameters=wf_input)
lv_ac_db, ac_ex_db = action_service.request(lv_ac_db)
# Wait for the liveaction to complete.
lv_ac_db = self._wait_on_status(lv_ac_db, action_constants.LIVEACTION_STATUS_SUCCEEDED)
# Retrieve records from database.
wf_ex_db = wf_db_access.WorkflowExecution.query(action_execution=str(ac_ex_db.id))[0]
query_filters = {'workflow_execution': str(wf_ex_db.id), 'task_id': 'task1'}
t1_ex_db = wf_db_access.TaskExecution.query(**query_filters)[0]
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
# Ensure there is no action executions for the task and the task is already completed.
self.assertEqual(len(t1_ac_ex_dbs), num_items)
self.assertEqual(t1_ex_db.status, wf_statuses.SUCCEEDED)
self.assertDictEqual(t1_ex_db.result, {'items': []})
# Assert the main workflow is completed.
wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id)
self.assertEqual(wf_ex_db.status, wf_statuses.SUCCEEDED)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED)
self.assertDictEqual(lv_ac_db.result, {'output': {'items': []}})
def test_with_items_concurrency(self):
num_items = 3
concurrency = 2
wf_input = {'concurrency': concurrency}
wf_meta = base.get_wf_fixture_meta_data(TEST_PACK_PATH, 'with-items-concurrency.yaml')
lv_ac_db = lv_db_models.LiveActionDB(action=wf_meta['name'], parameters=wf_input)
lv_ac_db, ac_ex_db = action_service.request(lv_ac_db)
# Assert action execution is running.
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
wf_ex_db = wf_db_access.WorkflowExecution.query(action_execution=str(ac_ex_db.id))[0]
self.assertEqual(wf_ex_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
# Process the first set of action executions from with items concurrency.
query_filters = {'workflow_execution': str(wf_ex_db.id), 'task_id': 'task1'}
t1_ex_db = wf_db_access.TaskExecution.query(**query_filters)[0]
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
self.assertEqual(len(t1_ac_ex_dbs), concurrency)
status = [
ac_ex.status == action_constants.LIVEACTION_STATUS_SUCCEEDED
for ac_ex in t1_ac_ex_dbs
]
self.assertTrue(all(status))
for t1_ac_ex_db in t1_ac_ex_dbs:
workflows.get_engine().process(t1_ac_ex_db)
t1_ex_db = wf_db_access.TaskExecution.get_by_id(t1_ex_db.id)
self.assertEqual(t1_ex_db.status, wf_statuses.RUNNING)
wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id)
self.assertEqual(wf_ex_db.status, wf_statuses.RUNNING)
# Process the second set of action executions from with items concurrency.
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
self.assertEqual(len(t1_ac_ex_dbs), num_items)
status = [
ac_ex.status == action_constants.LIVEACTION_STATUS_SUCCEEDED
for ac_ex in t1_ac_ex_dbs
]
self.assertTrue(all(status))
for t1_ac_ex_db in t1_ac_ex_dbs[concurrency:]:
workflows.get_engine().process(t1_ac_ex_db)
t1_ex_db = wf_db_access.TaskExecution.get_by_id(t1_ex_db.id)
self.assertEqual(t1_ex_db.status, wf_statuses.SUCCEEDED)
# Assert the main workflow is completed.
wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id)
self.assertEqual(wf_ex_db.status, wf_statuses.SUCCEEDED)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED)
def test_with_items_cancellation(self):
num_items = 3
wf_meta = base.get_wf_fixture_meta_data(TEST_PACK_PATH, 'with-items-concurrency.yaml')
lv_ac_db = lv_db_models.LiveActionDB(action=wf_meta['name'])
lv_ac_db, ac_ex_db = action_service.request(lv_ac_db)
# Assert the workflow execution is running.
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
wf_ex_db = wf_db_access.WorkflowExecution.query(action_execution=str(ac_ex_db.id))[0]
self.assertEqual(wf_ex_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
query_filters = {'workflow_execution': str(wf_ex_db.id), 'task_id': 'task1'}
t1_ex_db = wf_db_access.TaskExecution.query(**query_filters)[0]
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
self.assertEqual(t1_ex_db.status, wf_statuses.RUNNING)
self.assertEqual(len(t1_ac_ex_dbs), num_items)
# Reset the action executions to running status.
for ac_ex in t1_ac_ex_dbs:
self.set_execution_status(
ac_ex.liveaction['id'],
action_constants.LIVEACTION_STATUS_RUNNING
)
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
status = [
ac_ex.status == action_constants.LIVEACTION_STATUS_RUNNING
for ac_ex in t1_ac_ex_dbs
]
self.assertTrue(all(status))
# Cancels the workflow execution.
requester = cfg.CONF.system_user.user
lv_ac_db, ac_ex_db = action_service.request_cancellation(lv_ac_db, requester)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_CANCELING)
# Manually succeed the action executions and process completion.
for ac_ex in t1_ac_ex_dbs:
self.set_execution_status(
ac_ex.liveaction['id'],
action_constants.LIVEACTION_STATUS_SUCCEEDED
)
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
status = [
ac_ex.status == action_constants.LIVEACTION_STATUS_SUCCEEDED
for ac_ex in t1_ac_ex_dbs
]
self.assertTrue(all(status))
for t1_ac_ex_db in t1_ac_ex_dbs:
workflows.get_engine().process(t1_ac_ex_db)
# Check that the workflow execution is canceled.
wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id)
self.assertEqual(wf_ex_db.status, wf_statuses.CANCELED)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_CANCELED)
def test_with_items_concurrency_cancellation(self):
concurrency = 2
wf_input = {'concurrency': concurrency}
wf_meta = base.get_wf_fixture_meta_data(TEST_PACK_PATH, 'with-items-concurrency.yaml')
lv_ac_db = lv_db_models.LiveActionDB(action=wf_meta['name'], parameters=wf_input)
lv_ac_db, ac_ex_db = action_service.request(lv_ac_db)
# Assert the workflow execution is running.
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
wf_ex_db = wf_db_access.WorkflowExecution.query(action_execution=str(ac_ex_db.id))[0]
self.assertEqual(wf_ex_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
query_filters = {'workflow_execution': str(wf_ex_db.id), 'task_id': 'task1'}
t1_ex_db = wf_db_access.TaskExecution.query(**query_filters)[0]
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
self.assertEqual(t1_ex_db.status, wf_statuses.RUNNING)
self.assertEqual(len(t1_ac_ex_dbs), concurrency)
# Reset the action executions to running status.
for ac_ex in t1_ac_ex_dbs:
self.set_execution_status(
ac_ex.liveaction['id'],
action_constants.LIVEACTION_STATUS_RUNNING
)
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
status = [
ac_ex.status == action_constants.LIVEACTION_STATUS_RUNNING
for ac_ex in t1_ac_ex_dbs
]
self.assertTrue(all(status))
# Cancel the workflow execution.
requester = cfg.CONF.system_user.user
lv_ac_db, ac_ex_db = action_service.request_cancellation(lv_ac_db, requester)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_CANCELING)
# Manually succeed the action executions and process completion.
for ac_ex in t1_ac_ex_dbs:
self.set_execution_status(
ac_ex.liveaction['id'],
action_constants.LIVEACTION_STATUS_SUCCEEDED
)
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
status = [
ac_ex.status == action_constants.LIVEACTION_STATUS_SUCCEEDED
for ac_ex in t1_ac_ex_dbs
]
self.assertTrue(all(status))
for t1_ac_ex_db in t1_ac_ex_dbs:
workflows.get_engine().process(t1_ac_ex_db)
# Check that the workflow execution is canceled.
wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id)
self.assertEqual(wf_ex_db.status, wf_statuses.CANCELED)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_CANCELED)
def test_with_items_pause_and_resume(self):
num_items = 3
wf_meta = base.get_wf_fixture_meta_data(TEST_PACK_PATH, 'with-items-concurrency.yaml')
lv_ac_db = lv_db_models.LiveActionDB(action=wf_meta['name'])
lv_ac_db, ac_ex_db = action_service.request(lv_ac_db)
# Assert the workflow execution is running.
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
wf_ex_db = wf_db_access.WorkflowExecution.query(action_execution=str(ac_ex_db.id))[0]
self.assertEqual(wf_ex_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
query_filters = {'workflow_execution': str(wf_ex_db.id), 'task_id': 'task1'}
t1_ex_db = wf_db_access.TaskExecution.query(**query_filters)[0]
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
self.assertEqual(t1_ex_db.status, wf_statuses.RUNNING)
self.assertEqual(len(t1_ac_ex_dbs), num_items)
# Reset the action executions to running status.
for ac_ex in t1_ac_ex_dbs:
self.set_execution_status(
ac_ex.liveaction['id'],
action_constants.LIVEACTION_STATUS_RUNNING
)
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
status = [
ac_ex.status == action_constants.LIVEACTION_STATUS_RUNNING
for ac_ex in t1_ac_ex_dbs
]
self.assertTrue(all(status))
# Pause the workflow execution.
requester = cfg.CONF.system_user.user
lv_ac_db, ac_ex_db = action_service.request_pause(lv_ac_db, requester)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_PAUSING)
# Manually succeed the action executions and process completion.
for ac_ex in t1_ac_ex_dbs:
self.set_execution_status(
ac_ex.liveaction['id'],
action_constants.LIVEACTION_STATUS_SUCCEEDED
)
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
status = [
ac_ex.status == action_constants.LIVEACTION_STATUS_SUCCEEDED
for ac_ex in t1_ac_ex_dbs
]
self.assertTrue(all(status))
for t1_ac_ex_db in t1_ac_ex_dbs:
workflows.get_engine().process(t1_ac_ex_db)
# Check that the workflow execution is paused.
wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id)
self.assertEqual(wf_ex_db.status, wf_statuses.PAUSED)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_PAUSED)
# Resume the workflow execution.
requester = cfg.CONF.system_user.user
lv_ac_db, ac_ex_db = action_service.request_resume(lv_ac_db, requester)
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_RESUMING)
# Check that the workflow execution is completed.
wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id)
self.assertEqual(wf_ex_db.status, wf_statuses.SUCCEEDED)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED)
def test_with_items_concurrency_pause_and_resume(self):
num_items = 3
concurrency = 2
wf_input = {'concurrency': concurrency}
wf_meta = base.get_wf_fixture_meta_data(TEST_PACK_PATH, 'with-items-concurrency.yaml')
lv_ac_db = lv_db_models.LiveActionDB(action=wf_meta['name'], parameters=wf_input)
lv_ac_db, ac_ex_db = action_service.request(lv_ac_db)
# Assert the workflow execution is running.
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
wf_ex_db = wf_db_access.WorkflowExecution.query(action_execution=str(ac_ex_db.id))[0]
self.assertEqual(wf_ex_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
query_filters = {'workflow_execution': str(wf_ex_db.id), 'task_id': 'task1'}
t1_ex_db = wf_db_access.TaskExecution.query(**query_filters)[0]
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
self.assertEqual(t1_ex_db.status, wf_statuses.RUNNING)
self.assertEqual(len(t1_ac_ex_dbs), concurrency)
# Reset the action executions to running status.
for ac_ex in t1_ac_ex_dbs:
self.set_execution_status(
ac_ex.liveaction['id'],
action_constants.LIVEACTION_STATUS_RUNNING
)
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
status = [
ac_ex.status == action_constants.LIVEACTION_STATUS_RUNNING
for ac_ex in t1_ac_ex_dbs
]
self.assertTrue(all(status))
# Pause the workflow execution.
requester = cfg.CONF.system_user.user
lv_ac_db, ac_ex_db = action_service.request_pause(lv_ac_db, requester)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_PAUSING)
# Manually succeed the action executions and process completion.
for ac_ex in t1_ac_ex_dbs:
self.set_execution_status(
ac_ex.liveaction['id'],
action_constants.LIVEACTION_STATUS_SUCCEEDED
)
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
status = [
ac_ex.status == action_constants.LIVEACTION_STATUS_SUCCEEDED
for ac_ex in t1_ac_ex_dbs
]
self.assertTrue(all(status))
for t1_ac_ex_db in t1_ac_ex_dbs:
workflows.get_engine().process(t1_ac_ex_db)
# Check that the workflow execution is paused.
wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id)
self.assertEqual(wf_ex_db.status, wf_statuses.PAUSED)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_PAUSED)
# Resume the workflow execution.
requester = cfg.CONF.system_user.user
lv_ac_db, ac_ex_db = action_service.request_resume(lv_ac_db, requester)
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_RESUMING)
# Check that the workflow execution is running.
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_RUNNING)
# Check new set of action execution is scheduled.
t1_ac_ex_dbs = ex_db_access.ActionExecution.query(task_execution=str(t1_ex_db.id))
self.assertEqual(len(t1_ac_ex_dbs), num_items)
# Manually process the last action execution.
workflows.get_engine().process(t1_ac_ex_dbs[2])
# Check that the workflow execution is completed.
wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id)
self.assertEqual(wf_ex_db.status, wf_statuses.SUCCEEDED)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED)
def test_subworkflow_with_items_empty_list(self):
wf_input = {'members': []}
wf_meta = base.get_wf_fixture_meta_data(TEST_PACK_PATH, 'with-items-empty-parent.yaml')
lv_ac_db = lv_db_models.LiveActionDB(action=wf_meta['name'], parameters=wf_input)
lv_ac_db, ac_ex_db = action_service.request(lv_ac_db)
# Identify the records for the main workflow.
wf_ex_db = wf_db_access.WorkflowExecution.query(action_execution=str(ac_ex_db.id))[0]
tk_ex_dbs = wf_db_access.TaskExecution.query(workflow_execution=str(wf_ex_db.id))
self.assertEqual(len(tk_ex_dbs), 1)
# Identify the records for the tasks.
t1_ac_ex_db = ex_db_access.ActionExecution.query(task_execution=str(tk_ex_dbs[0].id))[0]
t1_wf_ex_db = wf_db_access.WorkflowExecution.query(action_execution=str(t1_ac_ex_db.id))[0]
self.assertEqual(t1_ac_ex_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED)
self.assertEqual(t1_wf_ex_db.status, wf_statuses.SUCCEEDED)
# Manually processing completion of the subworkflow in task1.
workflows.get_engine().process(t1_ac_ex_db)
t1_ex_db = wf_db_access.TaskExecution.get_by_id(tk_ex_dbs[0].id)
self.assertEqual(t1_ex_db.status, wf_statuses.SUCCEEDED)
# Check that the workflow execution is completed.
wf_ex_db = wf_db_access.WorkflowExecution.get_by_id(wf_ex_db.id)
self.assertEqual(wf_ex_db.status, wf_statuses.SUCCEEDED)
lv_ac_db = lv_db_access.LiveAction.get_by_id(str(lv_ac_db.id))
self.assertEqual(lv_ac_db.status, action_constants.LIVEACTION_STATUS_SUCCEEDED)
| 43.845024
| 99
| 0.718272
| 3,922
| 26,877
| 4.516573
| 0.063488
| 0.039517
| 0.039968
| 0.02896
| 0.851248
| 0.828271
| 0.814949
| 0.805578
| 0.782037
| 0.777464
| 0
| 0.009252
| 0.199725
| 26,877
| 612
| 100
| 43.916667
| 0.81431
| 0.099565
| 0
| 0.720482
| 0
| 0
| 0.0273
| 0.007705
| 0
| 0
| 0
| 0
| 0.195181
| 1
| 0.028916
| false
| 0
| 0.057831
| 0.00241
| 0.093976
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
31c15490908bbd2e8263af7a2c7db5e1fdfc27e0
| 2,741
|
py
|
Python
|
tests/command_builder/test_command_builder_is_installed.py
|
EddLabs/eddington-static
|
cdd1d9514c4eea1bd06c24894b3922e6cc3fb1f5
|
[
"Apache-2.0"
] | null | null | null |
tests/command_builder/test_command_builder_is_installed.py
|
EddLabs/eddington-static
|
cdd1d9514c4eea1bd06c24894b3922e6cc3fb1f5
|
[
"Apache-2.0"
] | null | null | null |
tests/command_builder/test_command_builder_is_installed.py
|
EddLabs/eddington-static
|
cdd1d9514c4eea1bd06c24894b3922e6cc3fb1f5
|
[
"Apache-2.0"
] | null | null | null |
from statue.command_builder import CommandBuilder
from tests.constants import COMMAND1, COMMAND_HELP_STRING1
from tests.util import dummy_version, dummy_versions
def test_command_builder_not_installed(mock_get_package):
mock_get_package.return_value = None
command_builder = CommandBuilder(name=COMMAND1, help=COMMAND_HELP_STRING1)
assert command_builder.version is None
assert command_builder.installed_version is None
assert not command_builder.installed()
assert command_builder.installed_version_match()
assert not command_builder.installed_correctly()
def test_command_builder_is_installed_without_specified_version(mock_get_package):
version = dummy_version()
mock_get_package.return_value.version = version
command_builder = CommandBuilder(name=COMMAND1, help=COMMAND_HELP_STRING1)
assert command_builder.version is None
assert command_builder.installed_version == version
assert command_builder.installed()
assert command_builder.installed_version_match()
assert command_builder.installed_correctly()
def test_command_builder_is_installed_correctly(mock_get_package):
version = dummy_version()
mock_get_package.return_value.version = version
command_builder = CommandBuilder(
name=COMMAND1, help=COMMAND_HELP_STRING1, version=version
)
assert command_builder.version == version
assert command_builder.installed_version == version
assert command_builder.installed()
assert command_builder.installed_version_match()
assert command_builder.installed_correctly()
def test_command_builder_is_installed_incorrectly(mock_get_package):
version, installed_version = dummy_versions(2)
mock_get_package.return_value.version = installed_version
command_builder = CommandBuilder(
name=COMMAND1, help=COMMAND_HELP_STRING1, version=version
)
assert command_builder.version == version
assert command_builder.installed_version == installed_version
assert command_builder.installed()
assert not command_builder.installed_version_match()
assert not command_builder.installed_correctly()
def test_command_builder_set_version_as_installed(mock_get_package):
version, installed_version = dummy_versions(2)
mock_get_package.return_value.version = installed_version
command_builder = CommandBuilder(
name=COMMAND1, help=COMMAND_HELP_STRING1, version=version
)
command_builder.set_version_as_installed()
assert command_builder.version == installed_version
assert command_builder.installed_version == installed_version
assert command_builder.installed()
assert command_builder.installed_version_match()
assert command_builder.installed_correctly()
| 39.157143
| 82
| 0.808829
| 325
| 2,741
| 6.415385
| 0.110769
| 0.248441
| 0.201439
| 0.222542
| 0.876259
| 0.858513
| 0.831655
| 0.823981
| 0.823981
| 0.823981
| 0
| 0.0059
| 0.134258
| 2,741
| 69
| 83
| 39.724638
| 0.872735
| 0
| 0
| 0.685185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.462963
| 1
| 0.092593
| false
| 0
| 0.055556
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
31c77f30e5bd8d77c70a58032b467c9ceb34b26e
| 266
|
py
|
Python
|
onnxmltools/convert/xgboost/shape_calculators/Regressor.py
|
xhochy/onnxmltools
|
cb2782b155ff67dc1e586f36a27c5d032070c801
|
[
"Apache-2.0"
] | 623
|
2018-02-16T20:43:01.000Z
|
2022-03-31T05:00:17.000Z
|
onnxmltools/convert/xgboost/shape_calculators/Regressor.py
|
xhochy/onnxmltools
|
cb2782b155ff67dc1e586f36a27c5d032070c801
|
[
"Apache-2.0"
] | 339
|
2018-02-26T21:27:04.000Z
|
2022-03-31T03:16:50.000Z
|
onnxmltools/convert/xgboost/shape_calculators/Regressor.py
|
xhochy/onnxmltools
|
cb2782b155ff67dc1e586f36a27c5d032070c801
|
[
"Apache-2.0"
] | 152
|
2018-02-24T01:20:22.000Z
|
2022-03-31T07:41:35.000Z
|
# SPDX-License-Identifier: Apache-2.0
from ...common._registration import register_shape_calculator
from ...common.shape_calculator import calculate_linear_regressor_output_shapes
register_shape_calculator('XGBRegressor', calculate_linear_regressor_output_shapes)
| 38
| 83
| 0.87218
| 32
| 266
| 6.8125
| 0.59375
| 0.206422
| 0.211009
| 0.275229
| 0.330275
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007968
| 0.056391
| 266
| 6
| 84
| 44.333333
| 0.860558
| 0.131579
| 0
| 0
| 0
| 0
| 0.052402
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
31dd398630b870fb7df7fd44c69b241c1a9cb158
| 122
|
py
|
Python
|
pythonExamples/FirstPythonLect/pythonLect4.py
|
davidruffner/computation-physics-nyu-2009
|
754d48f820773cd6b1b3cd3a7444363b78e1ea00
|
[
"MIT"
] | null | null | null |
pythonExamples/FirstPythonLect/pythonLect4.py
|
davidruffner/computation-physics-nyu-2009
|
754d48f820773cd6b1b3cd3a7444363b78e1ea00
|
[
"MIT"
] | null | null | null |
pythonExamples/FirstPythonLect/pythonLect4.py
|
davidruffner/computation-physics-nyu-2009
|
754d48f820773cd6b1b3cd3a7444363b78e1ea00
|
[
"MIT"
] | null | null | null |
print range(10)
print range(1,10)
print range(1,10+1)
print range(50,60+1)
print range(100,0,-1)
print range(100,0-3,-5)
| 15.25
| 23
| 0.696721
| 28
| 122
| 3.035714
| 0.357143
| 0.705882
| 0.388235
| 0.305882
| 0.682353
| 0
| 0
| 0
| 0
| 0
| 0
| 0.229358
| 0.106557
| 122
| 7
| 24
| 17.428571
| 0.550459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
73232755e222e8bc141f44fcf748be0fd469cd06
| 8,304
|
py
|
Python
|
docs/timEndpointPython.py
|
danrasband/jpo-ode
|
afb8536ee99ace019fa55c6572a10e68f7074806
|
[
"Apache-2.0"
] | 75
|
2016-11-24T12:59:43.000Z
|
2021-11-30T09:09:57.000Z
|
docs/timEndpointPython.py
|
danrasband/jpo-ode
|
afb8536ee99ace019fa55c6572a10e68f7074806
|
[
"Apache-2.0"
] | 179
|
2016-11-22T21:18:45.000Z
|
2022-03-16T20:02:13.000Z
|
docs/timEndpointPython.py
|
danrasband/jpo-ode
|
afb8536ee99ace019fa55c6572a10e68f7074806
|
[
"Apache-2.0"
] | 43
|
2016-11-28T11:37:07.000Z
|
2022-01-17T05:14:30.000Z
|
import requests
url = "http://localhost:8080/tim"
payload = "{\r\n\t\"timContent\": {\r\n\t\t\"msgcnt\": \"1\",\r\n\t\t\"timestamp\": \"op\",\r\n\t\t\"packetID\": \"op\",\r\n\t\t\"urlB\": \"op\",\r\n\t\t\"travelerDataFrame\": [{\r\n\t\t\t\"header\": {\r\n\t\t\t\t\"sspindex\": \"1\",\r\n\t\t\t\t\"travelerInfoType\": \"1\",\r\n\t\t\t\t\"msgId\": {\r\n\t\t\t\t\t\"FurtherInfoID\": \"1\",\r\n\t\t\t\t\t\"RoadSignID\": {\r\n\t\t\t\t\t\t\"position3D\": {\r\n\t\t\t\t\t\t\t\"latitude\": \"1\",\r\n\t\t\t\t\t\t\t\"longitude\": \"1\",\r\n\t\t\t\t\t\t\t\"elevation\": \"1\",\r\n\t\t\t\t\t\t\t\"regional\": \"op\"\r\n\t\t\t\t\t\t},\r\n\t\t\t\t\t\t\"HeadingSlice\": \"1010011010010100\",\r\n\t\t\t\t\t\t\"MUTCDCode\": \"op\",\r\n\t\t\t\t\t\t\"MsgCRC\": \"op\"\r\n\t\t\t\t\t}\r\n\t\t\t\t},\r\n\t\t\t\t\"DYear\": \"op\",\r\n\t\t\t\t\"MinuteOfTheYear\": \"0\",\r\n\t\t\t\t\"MinutesDuration\": \"0\",\r\n\t\t\t\t\"SignPriority\": \"0\"\r\n\t\t\t},\r\n\t\t\t\"region\": {\r\n\t\t\t\t\"sspindex\": \"0\",\r\n\t\t\t\t\"GeographicalPath\": [{\r\n\t\t\t\t\t\"name\": \"op\",\r\n\t\t\t\t\t\"id\": \"op\",\r\n\t\t\t\t\t\"anchor\": \"op\",\r\n\t\t\t\t\t\"laneWidth\": \"op\",\r\n\t\t\t\t\t\"diretionality\": \"op\",\r\n\t\t\t\t\t\"closedPath\": \"op\",\r\n\t\t\t\t\t\"direction\": \"op\",\r\n\t\t\t\t\t\"description\": [{\r\n\t\t\t\t\t\t\"path\": [{\r\n\t\t\t\t\t\t\t\"scale\": \"op\",\r\n\t\t\t\t\t\t\t\"offset\": [{\r\n\t\t\t\t\t\t\t\t\"xy\": [{\r\n\t\t\t\t\t\t\t\t\t\"nodes\": [{\r\n\t\t\t\t\t\t\t\t\t\t\"delta\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL1\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL2\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL3\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL4\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": 
\"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL5\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL6\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\"attributes\": \"op\"\r\n\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\"computed\": [{\r\n\t\t\t\t\t\t\t\t\t\t\"referenceLaneID\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\t\"offsetXaxis\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\"small\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\t\t\"large\": \"1\"\r\n\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\"offsetYaxis\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\"small\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\t\t\"large\": \"1\"\r\n\t\t\t\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\"ll\": [{\r\n\t\t\t\t\t\t\t\t\t\"nodes\": [{\r\n\t\t\t\t\t\t\t\t\t\t\"delta\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL1\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL2\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL3\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL4\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL5\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL6\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": 
\"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\"attributes\": \"op\"\r\n\t\t\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\"geometry\": [{\r\n\t\t\t\t\t\t\t\"direction\": \"1001101100100100\",\r\n\t\t\t\t\t\t\t\"circle\": [{\r\n\t\t\t\t\t\t\t\t\"center\": [{\r\n\t\t\t\t\t\t\t\t\t\"latitude\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\"longitude\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\"elevation\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\"regional\": \"op\"\r\n\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\"radius\": \"3\",\r\n\t\t\t\t\t\t\t\t\"units\": \"4\"\r\n\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\"oldRegion\": [{\r\n\t\t\t\t\t\t\t\"direction\": \"1001101100100100\",\r\n\t\t\t\t\t\t\t\"area\": [{\r\n\t\t\t\t\t\t\t\t\"shapePointSet\": [{\r\n\t\t\t\t\t\t\t\t\t\"nodes\": [{\r\n\t\t\t\t\t\t\t\t\t\t\"delta\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL1\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL2\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL3\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL4\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL5\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\t\"node-LL6\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lon\": \"20\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\"lat\": \"21\"\r\n\t\t\t\t\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\"attributes\": \"op\"\r\n\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\"computed\": 
[{\r\n\t\t\t\t\t\t\t\t\t\t\"referenceLaneID\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\t\"offsetXaxis\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\"small\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\t\t\"large\": \"1\"\r\n\t\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\t\"offsetYaxis\": [{\r\n\t\t\t\t\t\t\t\t\t\t\t\"small\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\t\t\"large\": \"1\"\r\n\t\t\t\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\"circle\": [{\r\n\t\t\t\t\t\t\t\t\t\"center\": [{\r\n\t\t\t\t\t\t\t\t\t\t\"latitude\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\t\"longitude\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\t\"elevation\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\t\"regional\": \"op\"\r\n\t\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\t\"radius\": \"3\",\r\n\t\t\t\t\t\t\t\t\t\"units\": \"4\"\r\n\t\t\t\t\t\t\t\t}],\r\n\t\t\t\t\t\t\t\t\"regionPointSet\": [{\r\n\t\t\t\t\t\t\t\t\t\"nodeList\": [{\r\n\t\t\t\t\t\t\t\t\t\t\"xOffset\": \"1\",\r\n\t\t\t\t\t\t\t\t\t\t\"yOffset\": \"1\"\r\n\t\t\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t\t}]\r\n\t\t\t\t\t\t}]\r\n\t\t\t\t\t}]\r\n\t\t\t\t}]\r\n\t\t\t},\r\n\t\t\t\"content\": {\r\n\t\t\t\t\"sspMsgRights1\": \"1\",\r\n\t\t\t\t\"sspMsgRights2\": \"2\",\r\n\t\t\t\t\"contentType\": {\r\n\t\t\t\t\t\"advisory\": [{\r\n\t\t\t\t\t\t\"ITISCodes\": \"268\",\r\n\t\t\t\t\t\t\"ITIStext\": \"Speed Limit\"\r\n\t\t\t\t\t} ],\r\n\t\t\t\t\t\"workZone\": [],\r\n\t\t\t\t\t\"genericSign\": [],\r\n\t\t\t\t\t\"speedLimit\": [],\r\n\t\t\t\t\t\"exitService\": []\r\n\t\t\t\t},\r\n\t\t\t\t\"URL-Short\": \"op\",\r\n\t\t\t\t\"regional\": \"op\"\r\n\t\t\t}\r\n\t\t}],\r\n\t\t\"regional\": \"op\"\r\n\t},\r\n\t\"RSUs\": [{\r\n\t\t\"target\": \"127.0.0.1\",\r\n\t\t\"username\": \"v3user\",\r\n\t\t\"password\": \"password\",\r\n\t\t\"retries\": \"1\",\r\n\t\t\"timeout\": \"2000\"\r\n\t}], \r\n\t\"snmp\": {\r\n\t\t\"rsuid\": \"8300\",\r\n\t\t\"msgid\": \"31\",\r\n\t\t\"mode\": \"1\",\r\n\t\t\"channel\": \"178\",\r\n\t\t\"interval\": \"1\",\r\n\t\t\"deliverystart\": 
\"010114111530\",\r\n\t\t\"deliverystop\": \"010114130000\",\r\n\t\t\"enable\": \"1\",\r\n\t\t\"status\": \"4\"\r\n\t}\r\n}"
# HTTP headers for the TIM POST; the postman-token is a leftover from
# exporting this snippet out of Postman and is presumably not required by
# the endpoint itself — TODO confirm before removing.
headers = {
    'content-type': "application/json",
    'cache-control': "no-cache",
    'postman-token': "e3aeeabb-569d-e3e9-0816-2b271aace18d"
    }

# POST the TIM JSON payload to the local ODE endpoint and echo whatever
# body the server returns (no status-code checking is done here).
response = requests.request("POST", url, data=payload, headers=headers)
print(response.text)
| 553.6
| 8,005
| 0.483984
| 2,666
| 8,304
| 1.507502
| 0.055514
| 0.805175
| 1.038318
| 1.179398
| 0.770341
| 0.755909
| 0.747698
| 0.713113
| 0.684996
| 0.678776
| 0
| 0.030689
| 0.026855
| 8,304
| 14
| 8,006
| 593.142857
| 0.46665
| 0
| 0
| 0
| 0
| 11
| 0.691835
| 0.512163
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.1
| 0.1
| 0
| 0.1
| 0.1
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 15
|
7325b0352411a882c5951bf66d6f639d33acbff6
| 13,665
|
py
|
Python
|
snow/plugins/fix1_configuresnow.py
|
vPeteWalker/gts21
|
90d827028f5c8f21457de81c0b718f7adbe9a262
|
[
"MIT"
] | null | null | null |
snow/plugins/fix1_configuresnow.py
|
vPeteWalker/gts21
|
90d827028f5c8f21457de81c0b718f7adbe9a262
|
[
"MIT"
] | null | null | null |
snow/plugins/fix1_configuresnow.py
|
vPeteWalker/gts21
|
90d827028f5c8f21457de81c0b718f7adbe9a262
|
[
"MIT"
] | 2
|
2021-04-06T16:59:32.000Z
|
2021-05-05T18:50:34.000Z
|
# Write a Selenium driver script to /tmp/ConfigureSNOW.py, fetch the Calm
# update-set XML files it needs, then run it.  The Python script logs in to
# a ServiceNow instance headlessly, imports/commits the Nutanix Calm plugin
# update sets (v1.0+1.1 through v1.4.3 plus a user-approval workflow),
# flips the glide.sc.reset_cascade property, and creates a "Calm Users"
# group with ten operator accounts.
# NOTE(review): the @@{...}@@ tokens are Calm macros substituted before this
# script runs; the hard-coded sleeps/waits are timing-dependent by design.
cat <<EOF > /tmp/ConfigureSNOW.py
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
import os, sys, time
# Headless Firefox session shared by every step below.
options = Options()
options.headless = True
profile = webdriver.FirefoxProfile()
browser = webdriver.Firefox(options=options,firefox_profile=profile)
# ServiceNow instance id needs to be changed in the URL
snow_url = "https://@@{SNOWInstanceURL}@@/nav_to.do?"
snow_username = "admin"
snow_password = "@@{SNOWAdminPassword}@@"
snow_users = "https://@@{SNOWInstanceURL}@@/sys_user_list.do?sysparm_userpref_module%3Dc5aa0fff0a0a0aa7009a39da035ea396%26sysparm_clear_stack%3Dtrue"
snow_groups = "https://@@{SNOWInstanceURL}@@/sys_user_group_list.do?sysparm_userpref_module%3Dc5aa68730a0a0aa70036ced8b58ca05c%26sysparm_clear_stack%3Dtrue"
user_criteria_plugin_url = "https://@@{SNOWInstanceURL}@@/\$allappsmgmt.do?sysparm_redirect%3Dtrue&sysparm_search=User%20Criteria%20Scoped%20API"
glide_validate_property_url = "https://@@{SNOWInstanceURL}@@/sys_properties_list.do?sysparm_query=nameSTARTSWITHglide.sc.guide.tab.validate&sysparm_first_row=1&sysparm_view=&sysparm_choice_query_raw=&sysparm_list_header_search=true"
glide_reset_cascade_url = "https://@@{SNOWInstanceURL}@@/sys_properties_list.do?sysparm_query=nameSTARTSWITHglide.sc.reset_cascade&sysparm_first_row=1&sysparm_view=&sysparm_choice_query_raw=&sysparm_list_header_search=true"
tables_url = "https://@@{SNOWInstanceURL}@@/sys_db_object_list.do?sysparm_query=labelSTARTSWITHCatalog%20Client%20Scripts&sysparm_first_row=1&sysparm_view=&sysparm_choice_query_raw=&sysparm_list_header_search=true"
sys_property = "sys_properties.list"
property_name = "glide.sc.guide.tab.validate"
###Login to the ServiceNow Instance
browser.get(snow_url)
time.sleep(30)
#frame = browser.find_element_by_xpath
browser.switch_to.frame('gsft_main')
time.sleep(5)
browser.find_element_by_id("user_name").send_keys(snow_username)
browser.find_element_by_id("user_password").send_keys(snow_password)
browser.find_element_by_id("sysverb_login").click()
print('Logged in to SNOW')
browser.switch_to.default_content()
##Upload & Install build (v1.0+1.1)
browser.get(snow_url)
time.sleep(30)
browser.find_element_by_id("filter").send_keys("sys_remote_update_set_list.do")
browser.find_element_by_id("filter").send_keys(Keys.ENTER)
time.sleep(10)
browser.switch_to.frame('gsft_main')
time.sleep(10)
browser.find_element_by_link_text("Nutanix Calm").click()
browser.implicitly_wait(10)
# Preview may already have been run on a re-run of this script; tolerate it.
try:
    browser.find_element_by_id("preview_update_set").click()
    browser.implicitly_wait(180)
    browser.find_element_by_xpath("//*[@id='sysparm_button_close']").click()
    browser.implicitly_wait(10)
except:
    print('Update set preview already run')
# Up to five "Skip remote update" prompts can appear; dismiss as many as exist.
try:
    browser.find_element_by_link_text("Skip remote update").click()
    browser.implicitly_wait(10)
    browser.find_element_by_link_text("Skip remote update").click()
    browser.implicitly_wait(10)
    browser.find_element_by_link_text("Skip remote update").click()
    browser.implicitly_wait(10)
    browser.find_element_by_link_text("Skip remote update").click()
    browser.implicitly_wait(10)
    browser.find_element_by_link_text("Skip remote update").click()
    browser.implicitly_wait(10)
except:
    print('Skip remote update not there, committing update set')
browser.find_element_by_xpath("//*[@id='c38b2cab0a0a0b5000470398d9e60c36']").click()
browser.implicitly_wait(600)
browser.find_element_by_xpath("//*[@id='sysparm_button_close']").click()
browser.implicitly_wait(10)
print('Calm v1.1 plugin installed')
browser.switch_to.default_content()
######Upload & Install build (v1.2_5)
browser.get(snow_url)
time.sleep(30)
browser.find_element_by_id("filter").send_keys("sys_remote_update_set_list.do")
browser.find_element_by_id("filter").send_keys(Keys.ENTER)
time.sleep(5)
browser.switch_to.frame('gsft_main')
time.sleep(5)
browser.find_element_by_link_text("Import Update Set from XML").click()
time.sleep(5)
browser.find_element_by_xpath("//*[@id='attachFile']").send_keys("/tmp/Nutanix Calm V1.2_5.xml")
browser.implicitly_wait(10)
browser.find_element_by_xpath("/html/body/div[2]/form/div[3]/div[2]/input").click()
browser.implicitly_wait(300)
browser.find_element_by_link_text("Nutanix Calm V1.2_5").click()
browser.implicitly_wait(5)
browser.find_element_by_id("preview_update_set").click()
browser.implicitly_wait(90)
browser.find_element_by_xpath("//*[@id='sysparm_button_close']").click()
browser.implicitly_wait(10)
try:
    browser.find_element_by_link_text("Skip remote update").click()
    browser.implicitly_wait(10)
except:
    print('Skip remote update not there')
browser.find_element_by_xpath("//*[@id='c38b2cab0a0a0b5000470398d9e60c36']").click()
browser.implicitly_wait(600)
browser.find_element_by_xpath("//*[@id='sysparm_button_close']").click()
print('Calm v1.2.5 plugin installed')
browser.switch_to.default_content()
######Upload & Install build (v1.3_4)
browser.get(snow_url)
time.sleep(30)
browser.find_element_by_id("filter").send_keys("sys_remote_update_set_list.do")
browser.find_element_by_id("filter").send_keys(Keys.ENTER)
time.sleep(5)
browser.switch_to.frame('gsft_main')
time.sleep(5)
browser.find_element_by_link_text("Import Update Set from XML").click()
time.sleep(5)
browser.find_element_by_xpath("//*[@id='attachFile']").send_keys("/tmp/Nutanix Calm V1.3_4.xml")
browser.implicitly_wait(10)
browser.find_element_by_xpath("/html/body/div[2]/form/div[3]/div[2]/input").click()
browser.implicitly_wait(300)
browser.find_element_by_link_text("Nutanix Calm V1.3_4").click()
browser.implicitly_wait(5)
browser.find_element_by_id("preview_update_set").click()
browser.implicitly_wait(90)
browser.find_element_by_xpath("//*[@id='sysparm_button_close']").click()
browser.implicitly_wait(10)
browser.find_element_by_xpath("//*[@id='c38b2cab0a0a0b5000470398d9e60c36']").click()
browser.implicitly_wait(600)
browser.find_element_by_xpath("//*[@id='sysparm_button_close']").click()
print('Calm v1.3.4 plugin installed')
browser.switch_to.default_content()
###glide.sc.reset_cascade Property Change
browser.get(glide_reset_cascade_url)
time.sleep(30)
browser.find_element_by_link_text("glide.sc.reset_cascade").click()
time.sleep(10)
browser.find_element_by_id("sys_properties.value").clear()
browser.find_element_by_id("sys_properties.value").send_keys("true")
browser.find_element_by_id("sysverb_update").click()
time.sleep(15)
print('glide.sc.reset_cascade updated')
###Table permission update
#browser.get(tables_url)
#time.sleep(30)
#browser.find_element_by_link_text("Catalog Client Scripts").click()
#time.sleep(5)
#browser.find_element_by_xpath("//*[@id='tabs2_section']/span[3]/span[1]/span[2]").click()
#time.sleep(5)
#browser.find_element_by_xpath("//*[@id='label.ni.sys_db_object.update_access']").click()
#time.sleep(5)
#browser.find_element_by_id("sysverb_update").click()
#time.sleep(5)
#print('Catalog Client Scripts permissions updated')
######Upload & Install build (v1.4.2)
browser.get(snow_url)
time.sleep(30)
browser.find_element_by_id("filter").send_keys("sys_remote_update_set_list.do")
browser.find_element_by_id("filter").send_keys(Keys.ENTER)
time.sleep(5)
browser.switch_to.frame('gsft_main')
time.sleep(5)
browser.find_element_by_link_text("Import Update Set from XML").click()
time.sleep(5)
browser.find_element_by_xpath("//*[@id='attachFile']").send_keys("/tmp/Nutanix Calm V1.4.2.xml")
browser.implicitly_wait(10)
browser.find_element_by_xpath("/html/body/div[2]/form/div[3]/div[2]/input").click()
browser.implicitly_wait(300)
browser.find_element_by_link_text("Nutanix Calm V1.4.2").click()
browser.implicitly_wait(5)
browser.find_element_by_id("preview_update_set").click()
browser.implicitly_wait(90)
browser.find_element_by_xpath("//*[@id='sysparm_button_close']").click()
browser.implicitly_wait(10)
#browser.find_element_by_link_text("Skip remote update").click()
#browser.implicitly_wait(10)
browser.find_element_by_xpath("//*[@id='c38b2cab0a0a0b5000470398d9e60c36']").click()
browser.implicitly_wait(10)
browser.find_element_by_xpath("//*[@id='ok_button']").click()
browser.implicitly_wait(300)
browser.find_element_by_xpath("//*[@id='sysparm_button_close']").click()
print('Calm v1.4.2 plugin installed')
browser.switch_to.default_content()
######Upload & Install build (v1.4.3)
browser.get(snow_url)
time.sleep(30)
browser.find_element_by_id("filter").send_keys("sys_remote_update_set_list.do")
browser.find_element_by_id("filter").send_keys(Keys.ENTER)
time.sleep(5)
browser.switch_to.frame('gsft_main')
time.sleep(5)
browser.find_element_by_link_text("Import Update Set from XML").click()
time.sleep(5)
browser.find_element_by_xpath("//*[@id='attachFile']").send_keys("/tmp/Nutanix Calm V1.4.3.xml")
browser.implicitly_wait(10)
browser.find_element_by_xpath("/html/body/div[2]/form/div[3]/div[2]/input").click()
browser.implicitly_wait(300)
browser.find_element_by_link_text("Nutanix Calm V1.4.3").click()
browser.implicitly_wait(5)
browser.find_element_by_id("preview_update_set").click()
browser.implicitly_wait(90)
browser.find_element_by_xpath("//*[@id='sysparm_button_close']").click()
browser.implicitly_wait(10)
browser.find_element_by_xpath("//*[@id='c38b2cab0a0a0b5000470398d9e60c36']").click()
browser.implicitly_wait(300)
browser.find_element_by_xpath("//*[@id='sysparm_button_close']").click()
print('Calm v1.4.3 plugin installed')
browser.switch_to.default_content()
######Upload & Install "Nutanix Calm - User Approval" Workflow
browser.get(snow_url)
time.sleep(30)
browser.find_element_by_id("filter").send_keys("sys_remote_update_set_list.do")
browser.find_element_by_id("filter").send_keys(Keys.ENTER)
time.sleep(5)
browser.switch_to.frame('gsft_main')
time.sleep(5)
browser.find_element_by_link_text("Import Update Set from XML").click()
time.sleep(5)
browser.find_element_by_xpath("//*[@id='attachFile']").send_keys("/tmp/Nutanix Calm User Approval Workflow 1.0.xml")
browser.implicitly_wait(10)
browser.find_element_by_xpath("/html/body/div[2]/form/div[3]/div[2]/input").click()
browser.implicitly_wait(300)
browser.find_element_by_link_text("User Approval Workflow").click()
browser.implicitly_wait(5)
browser.find_element_by_id("preview_update_set").click()
browser.implicitly_wait(90)
browser.find_element_by_xpath("//*[@id='sysparm_button_close']").click()
browser.implicitly_wait(10)
browser.find_element_by_xpath("//*[@id='c38b2cab0a0a0b5000470398d9e60c36']").click()
browser.implicitly_wait(300)
browser.find_element_by_xpath("//*[@id='sysparm_button_close']").click()
print('User Approval Workflow installed')
browser.switch_to.default_content()
#Create Calm Users group
browser.get(snow_groups)
time.sleep(30)
browser.find_element_by_id("sysverb_new").click()
time.sleep(5)
browser.find_element_by_xpath("//*[@id='sys_user_group.name']").send_keys("Calm Users")
browser.find_element_by_xpath("//*[@id='sysverb_insert']").click()
time.sleep(5)
browser.find_element_by_link_text("Calm Users").click()
time.sleep(5)
browser.find_element_by_id("sysverb_edit_m2m").click()
time.sleep(5)
browser.find_element_by_id("_sys_user_role").send_keys("x_nuta2_nutanix_ca.user")
time.sleep(5)
browser.find_element_by_xpath("//*[text()='x_nuta2_nutanix_ca.user']").click()
time.sleep(5)
browser.find_element_by_id("add_to_collection_button").click()
time.sleep(5)
browser.find_element_by_id("select_0_sysverb_save").click()
time.sleep(5)
browser.find_element_by_id("sysverb_update").click()
time.sleep(5)
print('Calm Users group created')
#Create Nutanix Calm users
browser.get(snow_users)
time.sleep(30)
# Ten accounts, operator01 .. operator10, each added to the Calm Users group.
for x in range(1, 11):
    name = "operator{}".format('%02d' % x)
    browser.find_element_by_id("sysverb_new").click()
    time.sleep(5)
    browser.find_element_by_xpath("//*[@id='sys_user.user_name']").send_keys(name)
    browser.find_element_by_xpath("//*[@id='sys_user.user_password']").send_keys("nutanix/4u")
    browser.find_element_by_xpath("//*[@id='sysverb_insert']").click()
    time.sleep(5)
    browser.find_element_by_link_text(name).click()
    time.sleep(5)
    browser.find_element_by_xpath("//*[@id='tabs2_list']/span[3]/span/span[2]").click()
    time.sleep(3)
    browser.find_element_by_xpath("/html/body/div[2]/div[2]/div/div[3]/span/div[2]/div[1]/div/div[1]/button[3]").click()
    time.sleep(5)
    browser.find_element_by_id("_sys_user_group").send_keys("Calm Users")
    time.sleep(5)
    browser.find_element_by_xpath("//*[text()='Calm Users']").click()
    time.sleep(5)
    browser.find_element_by_id("add_to_collection_button").click()
    time.sleep(5)
    browser.find_element_by_id("select_0_sysverb_save").click()
    time.sleep(5)
    browser.find_element_by_id("sysverb_update").click()
    time.sleep(10)
    print('{} user account created'.format(name))
######End
browser.quit()
EOF
# Fetch the update-set XML files referenced by the script above.
wget -P /tmp/ https://raw.githubusercontent.com/nutanix/Calm-Servicenow-Plugin/master/v1.1/Nutanix%20Full%20Certified%20Build\(v1.0%2Bv1.1\).xml
wget -P /tmp/ https://raw.githubusercontent.com/nutanix/Calm-Servicenow-Plugin/master/v1.2/Nutanix%20Calm%20V1.2_5.xml
wget -P /tmp/ https://raw.githubusercontent.com/nutanixworkshops/gts21/master/snow/plugins/Nutanix%20Calm%20V1.3_4.xml
wget -P /tmp/ https://raw.githubusercontent.com/nutanixworkshops/gts21/master/snow/plugins/Nutanix%20Calm%20V1.4.2.xml
wget -P /tmp/ https://raw.githubusercontent.com/nutanixworkshops/gts21/master/snow/plugins/Nutanix%20Calm%20V1.4.3.xml
wget -P /tmp/ https://raw.githubusercontent.com/nutanixworkshops/gts21/master/snow/plugins/Nutanix%20Calm%20User%20Approval%20Workflow%201.0.xml
python3 /tmp/ConfigureSNOW.py
| 45.09901
| 232
| 0.786901
| 2,076
| 13,665
| 4.874759
| 0.105491
| 0.107609
| 0.176087
| 0.195652
| 0.802668
| 0.780336
| 0.770751
| 0.761265
| 0.744269
| 0.718676
| 0
| 0.039142
| 0.052104
| 13,665
| 302
| 233
| 45.248344
| 0.742145
| 0.071277
| 0
| 0.687259
| 0
| 0.019305
| 0.3214
| 0.128465
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.011583
| 0.03861
| null | null | 0.050193
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
733fe621092d3b1cd99899f8ccd1cfd8f184a8ac
| 95,767
|
py
|
Python
|
training/getResNetProtoTxt.py
|
Pandinosaurus/openpose_train
|
5c6bb1ef82ccc8d4321697ea87eb1a703ae03c57
|
[
"MIT-CMU"
] | 496
|
2019-10-01T02:51:22.000Z
|
2022-03-10T14:38:25.000Z
|
training/getResNetProtoTxt.py
|
GenerousMan/openpose_train
|
8ace22b37cbdf1991feb21a2fd685d641a44cdab
|
[
"MIT-CMU"
] | 55
|
2019-10-02T15:15:21.000Z
|
2022-03-17T08:52:29.000Z
|
training/getResNetProtoTxt.py
|
GenerousMan/openpose_train
|
8ace22b37cbdf1991feb21a2fd685d641a44cdab
|
[
"MIT-CMU"
] | 174
|
2019-10-01T13:21:47.000Z
|
2022-03-30T09:03:43.000Z
|
def getResNet50Init():
string = '\
layer {\n\
bottom: "image"\n\
top: "conv1"\n\
name: "conv1"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 64\n\
kernel_size: 7\n\
pad: 3\n\
stride: 2\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "conv1"\n\
top: "conv1"\n\
name: "bn_conv1"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "conv1"\n\
top: "conv1"\n\
name: "scale_conv1"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "conv1"\n\
top: "conv1"\n\
name: "conv1_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "conv1"\n\
top: "pool1"\n\
name: "pool1"\n\
type: "Pooling"\n\
pooling_param {\n\
kernel_size: 3\n\
stride: 2\n\
pool: MAX\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "pool1"\n\
top: "res2a_branch1"\n\
name: "res2a_branch1"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 256\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch1"\n\
top: "res2a_branch1"\n\
name: "bn2a_branch1"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch1"\n\
top: "res2a_branch1"\n\
name: "scale2a_branch1"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "pool1"\n\
top: "res2a_branch2a"\n\
name: "res2a_branch2a"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 64\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch2a"\n\
top: "res2a_branch2a"\n\
name: "bn2a_branch2a"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch2a"\n\
top: "res2a_branch2a"\n\
name: "scale2a_branch2a"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch2a"\n\
top: "res2a_branch2a"\n\
name: "res2a_branch2a_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch2a"\n\
top: "res2a_branch2b"\n\
name: "res2a_branch2b"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 64\n\
kernel_size: 3\n\
pad: 1\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch2b"\n\
top: "res2a_branch2b"\n\
name: "bn2a_branch2b"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch2b"\n\
top: "res2a_branch2b"\n\
name: "scale2a_branch2b"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch2b"\n\
top: "res2a_branch2b"\n\
name: "res2a_branch2b_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch2b"\n\
top: "res2a_branch2c"\n\
name: "res2a_branch2c"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 256\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch2c"\n\
top: "res2a_branch2c"\n\
name: "bn2a_branch2c"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch2c"\n\
top: "res2a_branch2c"\n\
name: "scale2a_branch2c"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2a_branch1"\n\
bottom: "res2a_branch2c"\n\
top: "res2a"\n\
name: "res2a"\n\
type: "Eltwise"\n\
}\n\
\n\
layer {\n\
bottom: "res2a"\n\
top: "res2a"\n\
name: "res2a_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res2a"\n\
top: "res2b_branch2a"\n\
name: "res2b_branch2a"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 64\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2b_branch2a"\n\
top: "res2b_branch2a"\n\
name: "bn2b_branch2a"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2b_branch2a"\n\
top: "res2b_branch2a"\n\
name: "scale2b_branch2a"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2b_branch2a"\n\
top: "res2b_branch2a"\n\
name: "res2b_branch2a_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res2b_branch2a"\n\
top: "res2b_branch2b"\n\
name: "res2b_branch2b"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 64\n\
kernel_size: 3\n\
pad: 1\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2b_branch2b"\n\
top: "res2b_branch2b"\n\
name: "bn2b_branch2b"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2b_branch2b"\n\
top: "res2b_branch2b"\n\
name: "scale2b_branch2b"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2b_branch2b"\n\
top: "res2b_branch2b"\n\
name: "res2b_branch2b_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res2b_branch2b"\n\
top: "res2b_branch2c"\n\
name: "res2b_branch2c"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 256\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2b_branch2c"\n\
top: "res2b_branch2c"\n\
name: "bn2b_branch2c"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2b_branch2c"\n\
top: "res2b_branch2c"\n\
name: "scale2b_branch2c"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2a"\n\
bottom: "res2b_branch2c"\n\
top: "res2b"\n\
name: "res2b"\n\
type: "Eltwise"\n\
}\n\
\n\
layer {\n\
bottom: "res2b"\n\
top: "res2b"\n\
name: "res2b_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res2b"\n\
top: "res2c_branch2a"\n\
name: "res2c_branch2a"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 64\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2c_branch2a"\n\
top: "res2c_branch2a"\n\
name: "bn2c_branch2a"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2c_branch2a"\n\
top: "res2c_branch2a"\n\
name: "scale2c_branch2a"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2c_branch2a"\n\
top: "res2c_branch2a"\n\
name: "res2c_branch2a_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res2c_branch2a"\n\
top: "res2c_branch2b"\n\
name: "res2c_branch2b"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 64\n\
kernel_size: 3\n\
pad: 1\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2c_branch2b"\n\
top: "res2c_branch2b"\n\
name: "bn2c_branch2b"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2c_branch2b"\n\
top: "res2c_branch2b"\n\
name: "scale2c_branch2b"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2c_branch2b"\n\
top: "res2c_branch2b"\n\
name: "res2c_branch2b_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res2c_branch2b"\n\
top: "res2c_branch2c"\n\
name: "res2c_branch2c"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 256\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2c_branch2c"\n\
top: "res2c_branch2c"\n\
name: "bn2c_branch2c"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2c_branch2c"\n\
top: "res2c_branch2c"\n\
name: "scale2c_branch2c"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2b"\n\
bottom: "res2c_branch2c"\n\
top: "res2c"\n\
name: "res2c"\n\
type: "Eltwise"\n\
}\n\
\n\
layer {\n\
bottom: "res2c"\n\
top: "res2c"\n\
name: "res2c_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res2c"\n\
top: "res3a_branch1"\n\
name: "res3a_branch1"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 512\n\
kernel_size: 1\n\
pad: 0\n\
stride: 2\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch1"\n\
top: "res3a_branch1"\n\
name: "bn3a_branch1"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch1"\n\
top: "res3a_branch1"\n\
name: "scale3a_branch1"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res2c"\n\
top: "res3a_branch2a"\n\
name: "res3a_branch2a"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 128\n\
kernel_size: 1\n\
pad: 0\n\
stride: 2\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch2a"\n\
top: "res3a_branch2a"\n\
name: "bn3a_branch2a"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch2a"\n\
top: "res3a_branch2a"\n\
name: "scale3a_branch2a"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch2a"\n\
top: "res3a_branch2a"\n\
name: "res3a_branch2a_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch2a"\n\
top: "res3a_branch2b"\n\
name: "res3a_branch2b"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 128\n\
kernel_size: 3\n\
pad: 1\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch2b"\n\
top: "res3a_branch2b"\n\
name: "bn3a_branch2b"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch2b"\n\
top: "res3a_branch2b"\n\
name: "scale3a_branch2b"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch2b"\n\
top: "res3a_branch2b"\n\
name: "res3a_branch2b_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch2b"\n\
top: "res3a_branch2c"\n\
name: "res3a_branch2c"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 512\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch2c"\n\
top: "res3a_branch2c"\n\
name: "bn3a_branch2c"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch2c"\n\
top: "res3a_branch2c"\n\
name: "scale3a_branch2c"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3a_branch1"\n\
bottom: "res3a_branch2c"\n\
top: "res3a"\n\
name: "res3a"\n\
type: "Eltwise"\n\
}\n\
\n\
layer {\n\
bottom: "res3a"\n\
top: "res3a"\n\
name: "res3a_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res3a"\n\
top: "res3b_branch2a"\n\
name: "res3b_branch2a"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 128\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3b_branch2a"\n\
top: "res3b_branch2a"\n\
name: "bn3b_branch2a"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3b_branch2a"\n\
top: "res3b_branch2a"\n\
name: "scale3b_branch2a"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3b_branch2a"\n\
top: "res3b_branch2a"\n\
name: "res3b_branch2a_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res3b_branch2a"\n\
top: "res3b_branch2b"\n\
name: "res3b_branch2b"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 128\n\
kernel_size: 3\n\
pad: 1\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3b_branch2b"\n\
top: "res3b_branch2b"\n\
name: "bn3b_branch2b"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3b_branch2b"\n\
top: "res3b_branch2b"\n\
name: "scale3b_branch2b"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3b_branch2b"\n\
top: "res3b_branch2b"\n\
name: "res3b_branch2b_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res3b_branch2b"\n\
top: "res3b_branch2c"\n\
name: "res3b_branch2c"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 512\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3b_branch2c"\n\
top: "res3b_branch2c"\n\
name: "bn3b_branch2c"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3b_branch2c"\n\
top: "res3b_branch2c"\n\
name: "scale3b_branch2c"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3a"\n\
bottom: "res3b_branch2c"\n\
top: "res3b"\n\
name: "res3b"\n\
type: "Eltwise"\n\
}\n\
\n\
layer {\n\
bottom: "res3b"\n\
top: "res3b"\n\
name: "res3b_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res3b"\n\
top: "res3c_branch2a"\n\
name: "res3c_branch2a"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 128\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3c_branch2a"\n\
top: "res3c_branch2a"\n\
name: "bn3c_branch2a"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3c_branch2a"\n\
top: "res3c_branch2a"\n\
name: "scale3c_branch2a"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3c_branch2a"\n\
top: "res3c_branch2a"\n\
name: "res3c_branch2a_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res3c_branch2a"\n\
top: "res3c_branch2b"\n\
name: "res3c_branch2b"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 128\n\
kernel_size: 3\n\
pad: 1\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3c_branch2b"\n\
top: "res3c_branch2b"\n\
name: "bn3c_branch2b"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3c_branch2b"\n\
top: "res3c_branch2b"\n\
name: "scale3c_branch2b"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3c_branch2b"\n\
top: "res3c_branch2b"\n\
name: "res3c_branch2b_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res3c_branch2b"\n\
top: "res3c_branch2c"\n\
name: "res3c_branch2c"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 512\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3c_branch2c"\n\
top: "res3c_branch2c"\n\
name: "bn3c_branch2c"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3c_branch2c"\n\
top: "res3c_branch2c"\n\
name: "scale3c_branch2c"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3b"\n\
bottom: "res3c_branch2c"\n\
top: "res3c"\n\
name: "res3c"\n\
type: "Eltwise"\n\
}\n\
\n\
layer {\n\
bottom: "res3c"\n\
top: "res3c"\n\
name: "res3c_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res3c"\n\
top: "res3d_branch2a"\n\
name: "res3d_branch2a"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 128\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3d_branch2a"\n\
top: "res3d_branch2a"\n\
name: "bn3d_branch2a"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3d_branch2a"\n\
top: "res3d_branch2a"\n\
name: "scale3d_branch2a"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3d_branch2a"\n\
top: "res3d_branch2a"\n\
name: "res3d_branch2a_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res3d_branch2a"\n\
top: "res3d_branch2b"\n\
name: "res3d_branch2b"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 128\n\
kernel_size: 3\n\
pad: 1\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3d_branch2b"\n\
top: "res3d_branch2b"\n\
name: "bn3d_branch2b"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3d_branch2b"\n\
top: "res3d_branch2b"\n\
name: "scale3d_branch2b"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3d_branch2b"\n\
top: "res3d_branch2b"\n\
name: "res3d_branch2b_relu"\n\
type: "ReLU"\n\
}\n\
\n\
layer {\n\
bottom: "res3d_branch2b"\n\
top: "res3d_branch2c"\n\
name: "res3d_branch2c"\n\
type: "Convolution"\n\
convolution_param {\n\
num_output: 512\n\
kernel_size: 1\n\
pad: 0\n\
stride: 1\n\
bias_term: false\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3d_branch2c"\n\
top: "res3d_branch2c"\n\
name: "bn3d_branch2c"\n\
type: "BatchNorm"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3d_branch2c"\n\
top: "res3d_branch2c"\n\
name: "scale3d_branch2c"\n\
type: "Scale"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
\n\
layer {\n\
bottom: "res3c"\n\
bottom: "res3d_branch2c"\n\
top: "res3d"\n\
name: "res3d"\n\
type: "Eltwise"\n\
}\n\
\n\
layer {\n\
bottom: "res3d"\n\
top: "res3d"\n\
name: "res3d_relu"\n\
type: "ReLU"\n\
}\n'
return string
def getResNet152Init():
    """Return the Caffe prototxt (one string) for the initial part of
    ResNet-152: the conv1/pool1 stem, stage 2 (res2a-res2c) and stage 3
    (res3a, res3b1-res3b7).

    Every layer follows the standard ResNet pattern (Convolution ->
    BatchNorm -> Scale -> optional ReLU), so the text is generated from a
    few small templates instead of one enormous hand-written literal.
    The produced string is byte-identical to the previous hard-coded
    version, including its field-ordering quirks: ReLU layers inside a
    residual branch list "top" before "bottom", while ReLU layers after
    an Eltwise merge list "bottom" first.
    """

    def _conv(bottom, top, num_output, kernel_size, pad, stride):
        # Convolution layer; the output blob name doubles as the layer name.
        return ('layer {\n'
                'bottom: "%s"\n'
                'top: "%s"\n'
                'name: "%s"\n'
                'type: "Convolution"\n'
                'convolution_param {\n'
                'num_output: %d\n'
                'kernel_size: %d\n'
                'pad: %d\n'
                'stride: %d\n'
                'bias_term: false\n'
                '}\n'
                '}\n' % (bottom, top, top, num_output, kernel_size, pad, stride))

    def _bn_scale(blob, suffix):
        # In-place BatchNorm (inference mode: use_global_stats) followed by
        # the learned affine Scale layer, named "bn<suffix>"/"scale<suffix>".
        return ('layer {\n'
                'bottom: "%s"\n'
                'top: "%s"\n'
                'name: "bn%s"\n'
                'type: "BatchNorm"\n'
                'batch_norm_param {\n'
                'use_global_stats: true\n'
                '}\n'
                '}\n'
                'layer {\n'
                'bottom: "%s"\n'
                'top: "%s"\n'
                'name: "scale%s"\n'
                'type: "Scale"\n'
                'scale_param {\n'
                'bias_term: true\n'
                '}\n'
                '}\n' % (blob, blob, suffix, blob, blob, suffix))

    def _relu(blob, top_first):
        # In-place ReLU.  `top_first` reproduces the original literal's
        # field order: "top" before "bottom" inside residual branches,
        # the reverse after Eltwise merges.
        if top_first:
            fields = ('top: "%s"\n'
                      'bottom: "%s"\n')
        else:
            fields = ('bottom: "%s"\n'
                      'top: "%s"\n')
        return ('layer {\n' + fields +
                'name: "%s_relu"\n'
                'type: "ReLU"\n'
                '}\n') % (blob, blob, blob)

    def _eltwise(shortcut, branch, top):
        # Residual merge: element-wise sum of the shortcut and branch2c.
        return ('layer {\n'
                'bottom: "%s"\n'
                'bottom: "%s"\n'
                'top: "%s"\n'
                'name: "%s"\n'
                'type: "Eltwise"\n'
                '}\n' % (shortcut, branch, top, top))

    def _bottleneck(prev, res, mid, out, stride, projection):
        # One 1x1 -> 3x3 -> 1x1 bottleneck unit producing blob `res`.
        # `projection` adds the (possibly strided) 1x1 shortcut branch used
        # by the first unit of a stage; identity units reuse `prev` directly.
        tag = res[3:]  # e.g. "res3b1" -> "3b1" for bn/scale layer names
        s = ''
        if projection:
            shortcut = res + '_branch1'
            s += _conv(prev, shortcut, out, 1, 0, stride)
            s += _bn_scale(shortcut, tag + '_branch1')
        else:
            shortcut = prev
        a = res + '_branch2a'
        b = res + '_branch2b'
        c = res + '_branch2c'
        s += _conv(prev, a, mid, 1, 0, stride) + _bn_scale(a, tag + '_branch2a') + _relu(a, True)
        s += _conv(a, b, mid, 3, 1, 1) + _bn_scale(b, tag + '_branch2b') + _relu(b, True)
        s += _conv(b, c, out, 1, 0, 1) + _bn_scale(c, tag + '_branch2c')
        s += _eltwise(shortcut, c, res) + _relu(res, False)
        return s

    # Stem: 7x7/2 convolution + BN/Scale/ReLU + 3x3/2 max pooling.
    string = _conv('image', 'conv1', 64, 7, 3, 2)
    string += _bn_scale('conv1', '_conv1')
    string += _relu('conv1', True)
    string += ('layer {\n'
               'bottom: "conv1"\n'
               'top: "pool1"\n'
               'name: "pool1"\n'
               'type: "Pooling"\n'
               'pooling_param {\n'
               'kernel_size: 3\n'
               'stride: 2\n'
               'pool: MAX\n'
               '}\n'
               '}\n')
    # Stage 2 (64/64/256): projection unit, then two identity units.
    string += _bottleneck('pool1', 'res2a', 64, 256, 1, True)
    string += _bottleneck('res2a', 'res2b', 64, 256, 1, False)
    string += _bottleneck('res2b', 'res2c', 64, 256, 1, False)
    # Stage 3 (128/128/512): strided projection unit, then res3b1..res3b7.
    string += _bottleneck('res2c', 'res3a', 128, 512, 2, True)
    prev = 'res3a'
    for i in range(1, 8):
        res = 'res3b%d' % i
        string += _bottleneck(prev, res, 128, 512, 1, False)
        prev = res
    return string
def getResNet101v2Init():
    """Return the Caffe prototxt text for the stem of a ResNet-101-v2 net.

    The returned string defines (in order): the 7x7/stride-2 "conv1" stem
    with BatchNorm/Scale/ReLU, a 3x3/stride-2 max "pool1", and then
    bottleneck residual blocks res1 .. res7 plus the first 1x1 conv of
    res8. Blocks use the pre-activation (v2) ordering: BatchNorm -> Scale
    -> ReLU precede the convolutions, and each block ends in an Eltwise
    SUM with its shortcut. res1 and res4 use projection ("match_conv")
    shortcuts; res4 downsamples via stride-2 convs.

    NOTE(review): the text ends after "res8_conv1_relu" with no closing
    layers -- presumably a caller concatenates this with further block
    text (cf. the sibling getResNet*Init builders); confirm at call site.
    NOTE: the "eletwise" spelling in layer/blob names is intentional here;
    later layers reference those exact names, so it must not be "fixed".
    """
    string = '\
layer {\n\
name: "conv1"\n\
type: "Convolution"\n\
bottom: "image"\n\
top: "conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 3\n\
kernel_size: 7\n\
stride: 2\n\
}\n\
}\n\
layer {\n\
name: "conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "conv1"\n\
top: "conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "conv1_scale"\n\
type: "Scale"\n\
bottom: "conv1"\n\
top: "conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "conv1_relu"\n\
type: "ReLU"\n\
bottom: "conv1"\n\
top: "conv1"\n\
}\n\
layer {\n\
name: "pool1"\n\
type: "Pooling"\n\
bottom: "conv1"\n\
top: "pool1"\n\
pooling_param {\n\
pool: MAX\n\
kernel_size: 3\n\
stride: 2\n\
}\n\
}\n\
layer {\n\
name: "res1_conv1"\n\
type: "Convolution"\n\
bottom: "pool1"\n\
top: "res1_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res1_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res1_conv1"\n\
top: "res1_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res1_conv1_scale"\n\
type: "Scale"\n\
bottom: "res1_conv1"\n\
top: "res1_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res1_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res1_conv1"\n\
top: "res1_conv1"\n\
}\n\
layer {\n\
name: "res1_conv2"\n\
type: "Convolution"\n\
bottom: "res1_conv1"\n\
top: "res1_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res1_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res1_conv2"\n\
top: "res1_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res1_conv2_scale"\n\
type: "Scale"\n\
bottom: "res1_conv2"\n\
top: "res1_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res1_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res1_conv2"\n\
top: "res1_conv2"\n\
}\n\
layer {\n\
name: "res1_conv3"\n\
type: "Convolution"\n\
bottom: "res1_conv2"\n\
top: "res1_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 256\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res1_match_conv"\n\
type: "Convolution"\n\
bottom: "pool1"\n\
top: "res1_match_conv"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 256\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res1_eletwise"\n\
type: "Eltwise"\n\
bottom: "res1_match_conv"\n\
bottom: "res1_conv3"\n\
top: "res1_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res2_bn"\n\
type: "BatchNorm"\n\
bottom: "res1_eletwise"\n\
top: "res2_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res2_scale"\n\
type: "Scale"\n\
bottom: "res2_bn"\n\
top: "res2_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res2_relu"\n\
type: "ReLU"\n\
bottom: "res2_bn"\n\
top: "res2_bn"\n\
}\n\
layer {\n\
name: "res2_conv1"\n\
type: "Convolution"\n\
bottom: "res2_bn"\n\
top: "res2_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res2_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res2_conv1"\n\
top: "res2_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res2_conv1_scale"\n\
type: "Scale"\n\
bottom: "res2_conv1"\n\
top: "res2_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res2_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res2_conv1"\n\
top: "res2_conv1"\n\
}\n\
layer {\n\
name: "res2_conv2"\n\
type: "Convolution"\n\
bottom: "res2_conv1"\n\
top: "res2_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res2_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res2_conv2"\n\
top: "res2_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res2_conv2_scale"\n\
type: "Scale"\n\
bottom: "res2_conv2"\n\
top: "res2_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res2_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res2_conv2"\n\
top: "res2_conv2"\n\
}\n\
layer {\n\
name: "res2_conv3"\n\
type: "Convolution"\n\
bottom: "res2_conv2"\n\
top: "res2_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 256\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res2_eletwise"\n\
type: "Eltwise"\n\
bottom: "res1_eletwise"\n\
bottom: "res2_conv3"\n\
top: "res2_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res3_bn"\n\
type: "BatchNorm"\n\
bottom: "res2_eletwise"\n\
top: "res3_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res3_scale"\n\
type: "Scale"\n\
bottom: "res3_bn"\n\
top: "res3_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res3_relu"\n\
type: "ReLU"\n\
bottom: "res3_bn"\n\
top: "res3_bn"\n\
}\n\
layer {\n\
name: "res3_conv1"\n\
type: "Convolution"\n\
bottom: "res3_bn"\n\
top: "res3_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res3_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res3_conv1"\n\
top: "res3_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res3_conv1_scale"\n\
type: "Scale"\n\
bottom: "res3_conv1"\n\
top: "res3_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res3_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res3_conv1"\n\
top: "res3_conv1"\n\
}\n\
layer {\n\
name: "res3_conv2"\n\
type: "Convolution"\n\
bottom: "res3_conv1"\n\
top: "res3_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res3_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res3_conv2"\n\
top: "res3_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res3_conv2_scale"\n\
type: "Scale"\n\
bottom: "res3_conv2"\n\
top: "res3_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res3_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res3_conv2"\n\
top: "res3_conv2"\n\
}\n\
layer {\n\
name: "res3_conv3"\n\
type: "Convolution"\n\
bottom: "res3_conv2"\n\
top: "res3_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 256\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res3_eletwise"\n\
type: "Eltwise"\n\
bottom: "res2_eletwise"\n\
bottom: "res3_conv3"\n\
top: "res3_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res4_bn"\n\
type: "BatchNorm"\n\
bottom: "res3_eletwise"\n\
top: "res4_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res4_scale"\n\
type: "Scale"\n\
bottom: "res4_bn"\n\
top: "res4_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res4_relu"\n\
type: "ReLU"\n\
bottom: "res4_bn"\n\
top: "res4_bn"\n\
}\n\
layer {\n\
name: "res4_conv1"\n\
type: "Convolution"\n\
bottom: "res4_bn"\n\
top: "res4_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res4_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res4_conv1"\n\
top: "res4_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res4_conv1_scale"\n\
type: "Scale"\n\
bottom: "res4_conv1"\n\
top: "res4_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res4_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res4_conv1"\n\
top: "res4_conv1"\n\
}\n\
layer {\n\
name: "res4_conv2"\n\
type: "Convolution"\n\
bottom: "res4_conv1"\n\
top: "res4_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 1\n\
kernel_size: 3\n\
stride: 2\n\
}\n\
}\n\
layer {\n\
name: "res4_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res4_conv2"\n\
top: "res4_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res4_conv2_scale"\n\
type: "Scale"\n\
bottom: "res4_conv2"\n\
top: "res4_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res4_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res4_conv2"\n\
top: "res4_conv2"\n\
}\n\
layer {\n\
name: "res4_conv3"\n\
type: "Convolution"\n\
bottom: "res4_conv2"\n\
top: "res4_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res4_match_conv"\n\
type: "Convolution"\n\
bottom: "res4_bn"\n\
top: "res4_match_conv"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 2\n\
}\n\
}\n\
layer {\n\
name: "res4_eletwise"\n\
type: "Eltwise"\n\
bottom: "res4_match_conv"\n\
bottom: "res4_conv3"\n\
top: "res4_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res5_bn"\n\
type: "BatchNorm"\n\
bottom: "res4_eletwise"\n\
top: "res5_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res5_scale"\n\
type: "Scale"\n\
bottom: "res5_bn"\n\
top: "res5_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res5_relu"\n\
type: "ReLU"\n\
bottom: "res5_bn"\n\
top: "res5_bn"\n\
}\n\
layer {\n\
name: "res5_conv1"\n\
type: "Convolution"\n\
bottom: "res5_bn"\n\
top: "res5_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res5_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res5_conv1"\n\
top: "res5_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res5_conv1_scale"\n\
type: "Scale"\n\
bottom: "res5_conv1"\n\
top: "res5_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res5_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res5_conv1"\n\
top: "res5_conv1"\n\
}\n\
layer {\n\
name: "res5_conv2"\n\
type: "Convolution"\n\
bottom: "res5_conv1"\n\
top: "res5_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res5_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res5_conv2"\n\
top: "res5_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res5_conv2_scale"\n\
type: "Scale"\n\
bottom: "res5_conv2"\n\
top: "res5_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res5_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res5_conv2"\n\
top: "res5_conv2"\n\
}\n\
layer {\n\
name: "res5_conv3"\n\
type: "Convolution"\n\
bottom: "res5_conv2"\n\
top: "res5_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res5_eletwise"\n\
type: "Eltwise"\n\
bottom: "res4_eletwise"\n\
bottom: "res5_conv3"\n\
top: "res5_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res6_bn"\n\
type: "BatchNorm"\n\
bottom: "res5_eletwise"\n\
top: "res6_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res6_scale"\n\
type: "Scale"\n\
bottom: "res6_bn"\n\
top: "res6_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res6_relu"\n\
type: "ReLU"\n\
bottom: "res6_bn"\n\
top: "res6_bn"\n\
}\n\
layer {\n\
name: "res6_conv1"\n\
type: "Convolution"\n\
bottom: "res6_bn"\n\
top: "res6_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res6_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res6_conv1"\n\
top: "res6_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res6_conv1_scale"\n\
type: "Scale"\n\
bottom: "res6_conv1"\n\
top: "res6_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res6_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res6_conv1"\n\
top: "res6_conv1"\n\
}\n\
layer {\n\
name: "res6_conv2"\n\
type: "Convolution"\n\
bottom: "res6_conv1"\n\
top: "res6_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res6_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res6_conv2"\n\
top: "res6_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res6_conv2_scale"\n\
type: "Scale"\n\
bottom: "res6_conv2"\n\
top: "res6_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res6_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res6_conv2"\n\
top: "res6_conv2"\n\
}\n\
layer {\n\
name: "res6_conv3"\n\
type: "Convolution"\n\
bottom: "res6_conv2"\n\
top: "res6_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res6_eletwise"\n\
type: "Eltwise"\n\
bottom: "res5_eletwise"\n\
bottom: "res6_conv3"\n\
top: "res6_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res7_bn"\n\
type: "BatchNorm"\n\
bottom: "res6_eletwise"\n\
top: "res7_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res7_scale"\n\
type: "Scale"\n\
bottom: "res7_bn"\n\
top: "res7_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res7_relu"\n\
type: "ReLU"\n\
bottom: "res7_bn"\n\
top: "res7_bn"\n\
}\n\
layer {\n\
name: "res7_conv1"\n\
type: "Convolution"\n\
bottom: "res7_bn"\n\
top: "res7_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res7_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res7_conv1"\n\
top: "res7_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res7_conv1_scale"\n\
type: "Scale"\n\
bottom: "res7_conv1"\n\
top: "res7_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res7_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res7_conv1"\n\
top: "res7_conv1"\n\
}\n\
layer {\n\
name: "res7_conv2"\n\
type: "Convolution"\n\
bottom: "res7_conv1"\n\
top: "res7_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res7_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res7_conv2"\n\
top: "res7_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res7_conv2_scale"\n\
type: "Scale"\n\
bottom: "res7_conv2"\n\
top: "res7_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res7_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res7_conv2"\n\
top: "res7_conv2"\n\
}\n\
layer {\n\
name: "res7_conv3"\n\
type: "Convolution"\n\
bottom: "res7_conv2"\n\
top: "res7_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res7_eletwise"\n\
type: "Eltwise"\n\
bottom: "res6_eletwise"\n\
bottom: "res7_conv3"\n\
top: "res7_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res8_bn"\n\
type: "BatchNorm"\n\
bottom: "res7_eletwise"\n\
top: "res8_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res8_scale"\n\
type: "Scale"\n\
bottom: "res8_bn"\n\
top: "res8_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res8_relu"\n\
type: "ReLU"\n\
bottom: "res8_bn"\n\
top: "res8_bn"\n\
}\n\
layer {\n\
name: "res8_conv1"\n\
type: "Convolution"\n\
bottom: "res8_bn"\n\
top: "res8_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 256\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res8_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res8_conv1"\n\
top: "res8_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res8_conv1_scale"\n\
type: "Scale"\n\
bottom: "res8_conv1"\n\
top: "res8_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res8_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res8_conv1"\n\
top: "res8_conv1"\n\
}\n'
    return string
def getResNet152v2Init():
string = '\
layer {\n\
name: "conv1"\n\
type: "Convolution"\n\
bottom: "image"\n\
top: "conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 3\n\
kernel_size: 7\n\
stride: 2\n\
}\n\
}\n\
layer {\n\
name: "conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "conv1"\n\
top: "conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "conv1_scale"\n\
type: "Scale"\n\
bottom: "conv1"\n\
top: "conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "conv1_relu"\n\
type: "ReLU"\n\
bottom: "conv1"\n\
top: "conv1"\n\
}\n\
layer {\n\
name: "pool1"\n\
type: "Pooling"\n\
bottom: "conv1"\n\
top: "pool1"\n\
pooling_param {\n\
pool: MAX\n\
kernel_size: 3\n\
stride: 2\n\
}\n\
}\n\
layer {\n\
name: "res1_conv1"\n\
type: "Convolution"\n\
bottom: "pool1"\n\
top: "res1_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res1_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res1_conv1"\n\
top: "res1_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res1_conv1_scale"\n\
type: "Scale"\n\
bottom: "res1_conv1"\n\
top: "res1_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res1_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res1_conv1"\n\
top: "res1_conv1"\n\
}\n\
layer {\n\
name: "res1_conv2"\n\
type: "Convolution"\n\
bottom: "res1_conv1"\n\
top: "res1_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res1_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res1_conv2"\n\
top: "res1_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res1_conv2_scale"\n\
type: "Scale"\n\
bottom: "res1_conv2"\n\
top: "res1_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res1_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res1_conv2"\n\
top: "res1_conv2"\n\
}\n\
layer {\n\
name: "res1_conv3"\n\
type: "Convolution"\n\
bottom: "res1_conv2"\n\
top: "res1_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 256\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res1_match_conv"\n\
type: "Convolution"\n\
bottom: "pool1"\n\
top: "res1_match_conv"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 256\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
bias_filler {\n\
type: "constant"\n\
value: 0.2\n\
}\n\
}\n\
}\n\
layer {\n\
name: "res1_eletwise"\n\
type: "Eltwise"\n\
bottom: "res1_match_conv"\n\
bottom: "res1_conv3"\n\
top: "res1_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res2_bn"\n\
type: "BatchNorm"\n\
bottom: "res1_eletwise"\n\
top: "res2_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res2_scale"\n\
type: "Scale"\n\
bottom: "res2_bn"\n\
top: "res2_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res2_relu"\n\
type: "ReLU"\n\
bottom: "res2_bn"\n\
top: "res2_bn"\n\
}\n\
layer {\n\
name: "res2_conv1"\n\
type: "Convolution"\n\
bottom: "res2_bn"\n\
top: "res2_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res2_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res2_conv1"\n\
top: "res2_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res2_conv1_scale"\n\
type: "Scale"\n\
bottom: "res2_conv1"\n\
top: "res2_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res2_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res2_conv1"\n\
top: "res2_conv1"\n\
}\n\
layer {\n\
name: "res2_conv2"\n\
type: "Convolution"\n\
bottom: "res2_conv1"\n\
top: "res2_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res2_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res2_conv2"\n\
top: "res2_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res2_conv2_scale"\n\
type: "Scale"\n\
bottom: "res2_conv2"\n\
top: "res2_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res2_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res2_conv2"\n\
top: "res2_conv2"\n\
}\n\
layer {\n\
name: "res2_conv3"\n\
type: "Convolution"\n\
bottom: "res2_conv2"\n\
top: "res2_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 256\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res2_eletwise"\n\
type: "Eltwise"\n\
bottom: "res1_eletwise"\n\
bottom: "res2_conv3"\n\
top: "res2_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res3_bn"\n\
type: "BatchNorm"\n\
bottom: "res2_eletwise"\n\
top: "res3_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res3_scale"\n\
type: "Scale"\n\
bottom: "res3_bn"\n\
top: "res3_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res3_relu"\n\
type: "ReLU"\n\
bottom: "res3_bn"\n\
top: "res3_bn"\n\
}\n\
layer {\n\
name: "res3_conv1"\n\
type: "Convolution"\n\
bottom: "res3_bn"\n\
top: "res3_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res3_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res3_conv1"\n\
top: "res3_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res3_conv1_scale"\n\
type: "Scale"\n\
bottom: "res3_conv1"\n\
top: "res3_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res3_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res3_conv1"\n\
top: "res3_conv1"\n\
}\n\
layer {\n\
name: "res3_conv2"\n\
type: "Convolution"\n\
bottom: "res3_conv1"\n\
top: "res3_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 64\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res3_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res3_conv2"\n\
top: "res3_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res3_conv2_scale"\n\
type: "Scale"\n\
bottom: "res3_conv2"\n\
top: "res3_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res3_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res3_conv2"\n\
top: "res3_conv2"\n\
}\n\
layer {\n\
name: "res3_conv3"\n\
type: "Convolution"\n\
bottom: "res3_conv2"\n\
top: "res3_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 256\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res3_eletwise"\n\
type: "Eltwise"\n\
bottom: "res2_eletwise"\n\
bottom: "res3_conv3"\n\
top: "res3_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res4_bn"\n\
type: "BatchNorm"\n\
bottom: "res3_eletwise"\n\
top: "res4_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res4_scale"\n\
type: "Scale"\n\
bottom: "res4_bn"\n\
top: "res4_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res4_relu"\n\
type: "ReLU"\n\
bottom: "res4_bn"\n\
top: "res4_bn"\n\
}\n\
layer {\n\
name: "res4_conv1"\n\
type: "Convolution"\n\
bottom: "res4_bn"\n\
top: "res4_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res4_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res4_conv1"\n\
top: "res4_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res4_conv1_scale"\n\
type: "Scale"\n\
bottom: "res4_conv1"\n\
top: "res4_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res4_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res4_conv1"\n\
top: "res4_conv1"\n\
}\n\
layer {\n\
name: "res4_conv2"\n\
type: "Convolution"\n\
bottom: "res4_conv1"\n\
top: "res4_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 1\n\
kernel_size: 3\n\
stride: 2\n\
}\n\
}\n\
layer {\n\
name: "res4_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res4_conv2"\n\
top: "res4_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res4_conv2_scale"\n\
type: "Scale"\n\
bottom: "res4_conv2"\n\
top: "res4_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res4_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res4_conv2"\n\
top: "res4_conv2"\n\
}\n\
layer {\n\
name: "res4_conv3"\n\
type: "Convolution"\n\
bottom: "res4_conv2"\n\
top: "res4_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res4_match_conv"\n\
type: "Convolution"\n\
bottom: "res4_bn"\n\
top: "res4_match_conv"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 2\n\
bias_filler {\n\
type: "constant"\n\
value: 0.2\n\
}\n\
}\n\
}\n\
layer {\n\
name: "res4_eletwise"\n\
type: "Eltwise"\n\
bottom: "res4_match_conv"\n\
bottom: "res4_conv3"\n\
top: "res4_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res5_bn"\n\
type: "BatchNorm"\n\
bottom: "res4_eletwise"\n\
top: "res5_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res5_scale"\n\
type: "Scale"\n\
bottom: "res5_bn"\n\
top: "res5_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res5_relu"\n\
type: "ReLU"\n\
bottom: "res5_bn"\n\
top: "res5_bn"\n\
}\n\
layer {\n\
name: "res5_conv1"\n\
type: "Convolution"\n\
bottom: "res5_bn"\n\
top: "res5_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res5_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res5_conv1"\n\
top: "res5_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res5_conv1_scale"\n\
type: "Scale"\n\
bottom: "res5_conv1"\n\
top: "res5_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res5_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res5_conv1"\n\
top: "res5_conv1"\n\
}\n\
layer {\n\
name: "res5_conv2"\n\
type: "Convolution"\n\
bottom: "res5_conv1"\n\
top: "res5_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res5_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res5_conv2"\n\
top: "res5_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res5_conv2_scale"\n\
type: "Scale"\n\
bottom: "res5_conv2"\n\
top: "res5_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res5_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res5_conv2"\n\
top: "res5_conv2"\n\
}\n\
layer {\n\
name: "res5_conv3"\n\
type: "Convolution"\n\
bottom: "res5_conv2"\n\
top: "res5_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res5_eletwise"\n\
type: "Eltwise"\n\
bottom: "res4_eletwise"\n\
bottom: "res5_conv3"\n\
top: "res5_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res6_bn"\n\
type: "BatchNorm"\n\
bottom: "res5_eletwise"\n\
top: "res6_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res6_scale"\n\
type: "Scale"\n\
bottom: "res6_bn"\n\
top: "res6_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res6_relu"\n\
type: "ReLU"\n\
bottom: "res6_bn"\n\
top: "res6_bn"\n\
}\n\
layer {\n\
name: "res6_conv1"\n\
type: "Convolution"\n\
bottom: "res6_bn"\n\
top: "res6_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res6_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res6_conv1"\n\
top: "res6_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res6_conv1_scale"\n\
type: "Scale"\n\
bottom: "res6_conv1"\n\
top: "res6_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res6_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res6_conv1"\n\
top: "res6_conv1"\n\
}\n\
layer {\n\
name: "res6_conv2"\n\
type: "Convolution"\n\
bottom: "res6_conv1"\n\
top: "res6_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res6_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res6_conv2"\n\
top: "res6_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res6_conv2_scale"\n\
type: "Scale"\n\
bottom: "res6_conv2"\n\
top: "res6_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res6_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res6_conv2"\n\
top: "res6_conv2"\n\
}\n\
layer {\n\
name: "res6_conv3"\n\
type: "Convolution"\n\
bottom: "res6_conv2"\n\
top: "res6_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res6_eletwise"\n\
type: "Eltwise"\n\
bottom: "res5_eletwise"\n\
bottom: "res6_conv3"\n\
top: "res6_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res7_bn"\n\
type: "BatchNorm"\n\
bottom: "res6_eletwise"\n\
top: "res7_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res7_scale"\n\
type: "Scale"\n\
bottom: "res7_bn"\n\
top: "res7_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res7_relu"\n\
type: "ReLU"\n\
bottom: "res7_bn"\n\
top: "res7_bn"\n\
}\n\
layer {\n\
name: "res7_conv1"\n\
type: "Convolution"\n\
bottom: "res7_bn"\n\
top: "res7_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res7_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res7_conv1"\n\
top: "res7_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res7_conv1_scale"\n\
type: "Scale"\n\
bottom: "res7_conv1"\n\
top: "res7_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res7_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res7_conv1"\n\
top: "res7_conv1"\n\
}\n\
layer {\n\
name: "res7_conv2"\n\
type: "Convolution"\n\
bottom: "res7_conv1"\n\
top: "res7_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res7_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res7_conv2"\n\
top: "res7_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res7_conv2_scale"\n\
type: "Scale"\n\
bottom: "res7_conv2"\n\
top: "res7_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res7_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res7_conv2"\n\
top: "res7_conv2"\n\
}\n\
layer {\n\
name: "res7_conv3"\n\
type: "Convolution"\n\
bottom: "res7_conv2"\n\
top: "res7_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res7_eletwise"\n\
type: "Eltwise"\n\
bottom: "res6_eletwise"\n\
bottom: "res7_conv3"\n\
top: "res7_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res8_bn"\n\
type: "BatchNorm"\n\
bottom: "res7_eletwise"\n\
top: "res8_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res8_scale"\n\
type: "Scale"\n\
bottom: "res8_bn"\n\
top: "res8_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res8_relu"\n\
type: "ReLU"\n\
bottom: "res8_bn"\n\
top: "res8_bn"\n\
}\n\
layer {\n\
name: "res8_conv1"\n\
type: "Convolution"\n\
bottom: "res8_bn"\n\
top: "res8_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res8_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res8_conv1"\n\
top: "res8_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res8_conv1_scale"\n\
type: "Scale"\n\
bottom: "res8_conv1"\n\
top: "res8_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res8_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res8_conv1"\n\
top: "res8_conv1"\n\
}\n\
layer {\n\
name: "res8_conv2"\n\
type: "Convolution"\n\
bottom: "res8_conv1"\n\
top: "res8_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res8_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res8_conv2"\n\
top: "res8_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res8_conv2_scale"\n\
type: "Scale"\n\
bottom: "res8_conv2"\n\
top: "res8_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res8_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res8_conv2"\n\
top: "res8_conv2"\n\
}\n\
layer {\n\
name: "res8_conv3"\n\
type: "Convolution"\n\
bottom: "res8_conv2"\n\
top: "res8_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res8_eletwise"\n\
type: "Eltwise"\n\
bottom: "res7_eletwise"\n\
bottom: "res8_conv3"\n\
top: "res8_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res9_bn"\n\
type: "BatchNorm"\n\
bottom: "res8_eletwise"\n\
top: "res9_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res9_scale"\n\
type: "Scale"\n\
bottom: "res9_bn"\n\
top: "res9_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res9_relu"\n\
type: "ReLU"\n\
bottom: "res9_bn"\n\
top: "res9_bn"\n\
}\n\
layer {\n\
name: "res9_conv1"\n\
type: "Convolution"\n\
bottom: "res9_bn"\n\
top: "res9_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res9_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res9_conv1"\n\
top: "res9_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res9_conv1_scale"\n\
type: "Scale"\n\
bottom: "res9_conv1"\n\
top: "res9_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res9_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res9_conv1"\n\
top: "res9_conv1"\n\
}\n\
layer {\n\
name: "res9_conv2"\n\
type: "Convolution"\n\
bottom: "res9_conv1"\n\
top: "res9_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res9_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res9_conv2"\n\
top: "res9_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res9_conv2_scale"\n\
type: "Scale"\n\
bottom: "res9_conv2"\n\
top: "res9_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res9_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res9_conv2"\n\
top: "res9_conv2"\n\
}\n\
layer {\n\
name: "res9_conv3"\n\
type: "Convolution"\n\
bottom: "res9_conv2"\n\
top: "res9_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res9_eletwise"\n\
type: "Eltwise"\n\
bottom: "res8_eletwise"\n\
bottom: "res9_conv3"\n\
top: "res9_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res10_bn"\n\
type: "BatchNorm"\n\
bottom: "res9_eletwise"\n\
top: "res10_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res10_scale"\n\
type: "Scale"\n\
bottom: "res10_bn"\n\
top: "res10_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res10_relu"\n\
type: "ReLU"\n\
bottom: "res10_bn"\n\
top: "res10_bn"\n\
}\n\
layer {\n\
name: "res10_conv1"\n\
type: "Convolution"\n\
bottom: "res10_bn"\n\
top: "res10_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res10_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res10_conv1"\n\
top: "res10_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res10_conv1_scale"\n\
type: "Scale"\n\
bottom: "res10_conv1"\n\
top: "res10_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res10_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res10_conv1"\n\
top: "res10_conv1"\n\
}\n\
layer {\n\
name: "res10_conv2"\n\
type: "Convolution"\n\
bottom: "res10_conv1"\n\
top: "res10_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res10_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res10_conv2"\n\
top: "res10_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res10_conv2_scale"\n\
type: "Scale"\n\
bottom: "res10_conv2"\n\
top: "res10_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res10_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res10_conv2"\n\
top: "res10_conv2"\n\
}\n\
layer {\n\
name: "res10_conv3"\n\
type: "Convolution"\n\
bottom: "res10_conv2"\n\
top: "res10_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res10_eletwise"\n\
type: "Eltwise"\n\
bottom: "res9_eletwise"\n\
bottom: "res10_conv3"\n\
top: "res10_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res11_bn"\n\
type: "BatchNorm"\n\
bottom: "res10_eletwise"\n\
top: "res11_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res11_scale"\n\
type: "Scale"\n\
bottom: "res11_bn"\n\
top: "res11_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res11_relu"\n\
type: "ReLU"\n\
bottom: "res11_bn"\n\
top: "res11_bn"\n\
}\n\
layer {\n\
name: "res11_conv1"\n\
type: "Convolution"\n\
bottom: "res11_bn"\n\
top: "res11_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res11_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res11_conv1"\n\
top: "res11_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res11_conv1_scale"\n\
type: "Scale"\n\
bottom: "res11_conv1"\n\
top: "res11_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res11_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res11_conv1"\n\
top: "res11_conv1"\n\
}\n\
layer {\n\
name: "res11_conv2"\n\
type: "Convolution"\n\
bottom: "res11_conv1"\n\
top: "res11_conv2"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 128\n\
pad: 1\n\
kernel_size: 3\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res11_conv2_bn"\n\
type: "BatchNorm"\n\
bottom: "res11_conv2"\n\
top: "res11_conv2"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res11_conv2_scale"\n\
type: "Scale"\n\
bottom: "res11_conv2"\n\
top: "res11_conv2"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res11_conv2_relu"\n\
type: "ReLU"\n\
bottom: "res11_conv2"\n\
top: "res11_conv2"\n\
}\n\
layer {\n\
name: "res11_conv3"\n\
type: "Convolution"\n\
bottom: "res11_conv2"\n\
top: "res11_conv3"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 512\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res11_eletwise"\n\
type: "Eltwise"\n\
bottom: "res10_eletwise"\n\
bottom: "res11_conv3"\n\
top: "res11_eletwise"\n\
eltwise_param {\n\
operation: SUM\n\
}\n\
}\n\
layer {\n\
name: "res12_bn"\n\
type: "BatchNorm"\n\
bottom: "res11_eletwise"\n\
top: "res12_bn"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res12_scale"\n\
type: "Scale"\n\
bottom: "res12_bn"\n\
top: "res12_bn"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res12_relu"\n\
type: "ReLU"\n\
bottom: "res12_bn"\n\
top: "res12_bn"\n\
}\n\
layer {\n\
name: "res12_conv1"\n\
type: "Convolution"\n\
bottom: "res12_bn"\n\
top: "res12_conv1"\n\
convolution_param {\n\
bias_term: false\n\
num_output: 256\n\
pad: 0\n\
kernel_size: 1\n\
stride: 1\n\
}\n\
}\n\
layer {\n\
name: "res12_conv1_bn"\n\
type: "BatchNorm"\n\
bottom: "res12_conv1"\n\
top: "res12_conv1"\n\
batch_norm_param {\n\
use_global_stats: true\n\
}\n\
}\n\
layer {\n\
name: "res12_conv1_scale"\n\
type: "Scale"\n\
bottom: "res12_conv1"\n\
top: "res12_conv1"\n\
scale_param {\n\
bias_term: true\n\
}\n\
}\n\
layer {\n\
name: "res12_conv1_relu"\n\
type: "ReLU"\n\
bottom: "res12_conv1"\n\
top: "res12_conv1"\n\
}\n'
return string
| 19.269014
| 35
| 0.56909
| 14,491
| 95,767
| 3.567939
| 0.008143
| 0.038489
| 0.068777
| 0.078603
| 0.988105
| 0.979015
| 0.950873
| 0.924956
| 0.915169
| 0.893681
| 0
| 0.05211
| 0.236136
| 95,767
| 4,969
| 36
| 19.272892
| 0.654669
| 0
| 0
| 0.958676
| 0
| 0
| 0.231345
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000822
| false
| 0
| 0
| 0
| 0.001645
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b439ca318f67d77f2e405602deea500ae94435a6
| 122
|
py
|
Python
|
src/database/base.py
|
JuanFKurucz/proyecto-seguridad
|
feb805c785afc57de19244e7916f232d3798a768
|
[
"MIT"
] | null | null | null |
src/database/base.py
|
JuanFKurucz/proyecto-seguridad
|
feb805c785afc57de19244e7916f232d3798a768
|
[
"MIT"
] | null | null | null |
src/database/base.py
|
JuanFKurucz/proyecto-seguridad
|
feb805c785afc57de19244e7916f232d3798a768
|
[
"MIT"
] | null | null | null |
from src.database.models.user import User
from src.database.models.file import File
from src.database.session import Base
| 30.5
| 41
| 0.836066
| 20
| 122
| 5.1
| 0.45
| 0.205882
| 0.441176
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098361
| 122
| 3
| 42
| 40.666667
| 0.927273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b453de84607dcd084082cdaa70e0f24244d6027a
| 92,239
|
py
|
Python
|
app/test/unittest/test_executive_device.py
|
michalkoziara/IoT-RESTful-Webservice
|
ecb0f3e09cded3190f3646e5cd6c913056d94981
|
[
"bzip2-1.0.6"
] | 2
|
2021-09-24T02:45:32.000Z
|
2021-11-15T09:44:44.000Z
|
app/test/unittest/test_executive_device.py
|
PKramek/IoT-RESTful-Webservice-1
|
ecb0f3e09cded3190f3646e5cd6c913056d94981
|
[
"bzip2-1.0.6"
] | null | null | null |
app/test/unittest/test_executive_device.py
|
PKramek/IoT-RESTful-Webservice-1
|
ecb0f3e09cded3190f3646e5cd6c913056d94981
|
[
"bzip2-1.0.6"
] | 1
|
2021-09-11T11:47:32.000Z
|
2021-09-11T11:47:32.000Z
|
from unittest.mock import Mock
from unittest.mock import patch
import pytest
from app.main.model import ExecutiveDevice
from app.main.repository.admin_repository import AdminRepository
from app.main.repository.base_repository import BaseRepository
from app.main.repository.deleted_device_repository import DeletedDeviceRepository
from app.main.repository.device_group_repository import DeviceGroupRepository
from app.main.repository.executive_device_repository import ExecutiveDeviceRepository
from app.main.repository.executive_type_repository import ExecutiveTypeRepository
from app.main.repository.formula_repository import FormulaRepository
from app.main.repository.unconfigured_device_repository import UnconfiguredDeviceRepository
from app.main.repository.user_group_repository import UserGroupRepository
from app.main.service.executive_device_service import ExecutiveDeviceService
from app.main.util.constants import Constants
def test_get_executive_device_info_should_return_device_info_when_valid_product_key_device_key_and_user_id(
        create_executive_device,
        create_device_group,
        create_executive_type,
        create_formula,
        create_user_group):
    """Full device info is returned when product key, device key and user id are all valid."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    device_group = create_device_group()
    executive_type = create_executive_type()
    executive_device = create_executive_device()
    formula = create_formula()
    user_group = create_user_group()

    test_user_id = 1

    # Patch every repository lookup the service performs so the test is pure-unit.
    with patch.object(ExecutiveDeviceRepository, 'get_executive_device_by_device_key_and_device_group_id') as get_executive_device_by_device_key_and_device_group_id_mock:
        get_executive_device_by_device_key_and_device_group_id_mock.return_value = executive_device

        with patch.object(ExecutiveTypeRepository, 'get_executive_type_by_id') as get_executive_type_by_id_mock:
            get_executive_type_by_id_mock.return_value = executive_type

            with patch.object(DeviceGroupRepository, 'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
                get_device_group_by_product_key_mock.return_value = device_group

                with patch.object(FormulaRepository, 'get_formula_by_id') as get_formula_by_id_mock:
                    get_formula_by_id_mock.return_value = formula

                    with patch.object(UserGroupRepository, 'get_user_group_by_user_id_and_executive_device_device_key') as get_user_group_by_user_id_and_executive_device_device_key_mock:
                        get_user_group_by_user_id_and_executive_device_device_key_mock.return_value = user_group

                        with patch.object(ExecutiveDeviceService, 'get_executive_device_state_value') as get_executive_device_state_value_mock:
                            get_executive_device_state_value_mock.return_value = "test"

                            result, result_values = executive_device_service_instance.get_executive_device_info(
                                executive_device.device_key,
                                device_group.product_key,
                                test_user_id,
                                False
                            )

    assert result == Constants.RESPONSE_MESSAGE_OK
    assert result_values
    assert result_values['name'] == executive_device.name
    assert result_values['state'] == "test"
    assert result_values['isUpdated'] == executive_device.is_updated
    assert result_values['isActive'] == executive_device.is_active
    assert result_values['isAssigned'] == executive_device.is_assigned
    assert result_values['isFormulaUsed'] == executive_device.is_formula_used
    assert result_values['positiveState'] == "test"
    assert result_values['negativeState'] == "test"
    assert result_values['defaultState'] == "test"
    assert result_values['deviceKey'] == executive_device.device_key
    assert result_values['deviceTypeName'] == executive_type.name
    assert result_values['deviceUserGroup'] == user_group.name
    assert result_values['formulaName'] == formula.name
def test_get_executive_device_info_should_return_device_info_when_valid_user_is_admin(
        create_executive_device,
        create_device_group,
        create_executive_type,
        create_formula,
        create_user_group):
    """Device info is returned when the caller is the admin of the device group (is_admin flag True)."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    device_group = create_device_group()
    executive_type = create_executive_type()
    executive_device = create_executive_device()
    formula = create_formula()
    user_group = create_user_group()

    # Admin path: the user id must match the device group's admin id.
    test_user_id = device_group.admin_id

    with patch.object(ExecutiveDeviceRepository, 'get_executive_device_by_device_key_and_device_group_id') as get_executive_device_by_device_key_and_device_group_id_mock:
        get_executive_device_by_device_key_and_device_group_id_mock.return_value = executive_device

        with patch.object(ExecutiveTypeRepository, 'get_executive_type_by_id') as get_executive_type_by_id_mock:
            get_executive_type_by_id_mock.return_value = executive_type

            with patch.object(DeviceGroupRepository, 'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
                get_device_group_by_product_key_mock.return_value = device_group

                with patch.object(FormulaRepository, 'get_formula_by_id') as get_formula_by_id_mock:
                    get_formula_by_id_mock.return_value = formula

                    # Admin flow resolves the user group by id rather than via the user/device relation.
                    with patch.object(UserGroupRepository, 'get_user_group_by_id') as get_user_group_by_id_mock:
                        get_user_group_by_id_mock.return_value = user_group

                        with patch.object(ExecutiveDeviceService, 'get_executive_device_state_value') as get_executive_device_state_value_mock:
                            get_executive_device_state_value_mock.return_value = "test"

                            result, result_values = executive_device_service_instance.get_executive_device_info(
                                executive_device.device_key,
                                device_group.product_key,
                                test_user_id,
                                True
                            )

    assert result == Constants.RESPONSE_MESSAGE_OK
    assert result_values
    assert result_values['name'] == executive_device.name
    assert result_values['state'] == "test"
    assert result_values['isUpdated'] == executive_device.is_updated
    assert result_values['isActive'] == executive_device.is_active
    assert result_values['isAssigned'] == executive_device.is_assigned
    assert result_values['isFormulaUsed'] == executive_device.is_formula_used
    assert result_values['positiveState'] == "test"
    assert result_values['negativeState'] == "test"
    assert result_values['defaultState'] == "test"
    assert result_values['deviceKey'] == executive_device.device_key
    assert result_values['deviceTypeName'] == executive_type.name
    assert result_values['deviceUserGroup'] == user_group.name
    assert result_values['formulaName'] == formula.name
def test_get_executive_device_info_should_return_device_info_when_user_is_not_in_the_same_user_group_as_device_and_device_is_not_in_any_user_group(
        create_executive_device,
        create_device_group,
        create_executive_type,
        create_formula,
        create_user_group):
    """An unassigned device (no user group) is visible even to a user outside any of its groups."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    device_group = create_device_group()
    executive_type = create_executive_type()
    executive_device = create_executive_device()
    # Detach the device from any user group for this scenario.
    executive_device.user_group_id = None
    formula = create_formula()

    test_user_id = 13

    with patch.object(ExecutiveDeviceRepository, 'get_executive_device_by_device_key_and_device_group_id') as get_executive_device_by_device_key_and_device_group_id_mock:
        get_executive_device_by_device_key_and_device_group_id_mock.return_value = executive_device

        with patch.object(ExecutiveTypeRepository, 'get_executive_type_by_id') as get_executive_type_by_id_mock:
            get_executive_type_by_id_mock.return_value = executive_type

            with patch.object(DeviceGroupRepository, 'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
                get_device_group_by_product_key_mock.return_value = device_group

                with patch.object(FormulaRepository, 'get_formula_by_id') as get_formula_by_id_mock:
                    get_formula_by_id_mock.return_value = formula

                    # The user shares no user group with the device.
                    with patch.object(UserGroupRepository, 'get_user_group_by_user_id_and_executive_device_device_key') as get_user_group_by_user_id_and_executive_device_device_key_mock:
                        get_user_group_by_user_id_and_executive_device_device_key_mock.return_value = None

                        with patch.object(ExecutiveDeviceService, 'get_executive_device_state_value') as get_executive_device_state_value_mock:
                            get_executive_device_state_value_mock.return_value = "test"

                            result, result_values = executive_device_service_instance.get_executive_device_info(
                                executive_device.device_key,
                                device_group.product_key,
                                test_user_id,
                                False
                            )

    assert result == Constants.RESPONSE_MESSAGE_OK
    assert result_values
    assert result_values['name'] == executive_device.name
    assert result_values['state'] == "test"
    assert result_values['isUpdated'] == executive_device.is_updated
    assert result_values['isActive'] == executive_device.is_active
    assert result_values['isAssigned'] == executive_device.is_assigned
    assert result_values['positiveState'] == "test"
    assert result_values['negativeState'] == "test"
    assert result_values['defaultState'] == "test"
    assert result_values['deviceKey'] == executive_device.device_key
    assert result_values['deviceTypeName'] == executive_type.name
    assert result_values['deviceUserGroup'] is None
    assert result_values['formulaName'] == formula.name
def test_get_executive_device_info_should_return_device_info_when_device_is_not_in_any_group_and_user_is_admin_admin(
        create_executive_device,
        create_device_group,
        create_executive_type,
        create_formula):
    """An unassigned device is visible to the device-group admin (is_admin flag True)."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    device_group = create_device_group()
    executive_type = create_executive_type()
    executive_device = create_executive_device()
    # Detach the device from any user group for this scenario.
    executive_device.user_group_id = None
    formula = create_formula()

    test_user_id = device_group.admin_id

    with patch.object(ExecutiveDeviceRepository, 'get_executive_device_by_device_key_and_device_group_id') as get_executive_device_by_device_key_and_device_group_id_mock:
        get_executive_device_by_device_key_and_device_group_id_mock.return_value = executive_device

        with patch.object(ExecutiveTypeRepository, 'get_executive_type_by_id') as get_executive_type_by_id_mock:
            get_executive_type_by_id_mock.return_value = executive_type

            with patch.object(DeviceGroupRepository, 'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
                get_device_group_by_product_key_mock.return_value = device_group

                with patch.object(FormulaRepository, 'get_formula_by_id') as get_formula_by_id_mock:
                    get_formula_by_id_mock.return_value = formula

                    with patch.object(UserGroupRepository, 'get_user_group_by_user_id_and_executive_device_device_key') as get_user_group_by_user_id_and_executive_device_device_key_mock:
                        get_user_group_by_user_id_and_executive_device_device_key_mock.return_value = None

                        with patch.object(ExecutiveDeviceService, 'get_executive_device_state_value') as get_executive_device_state_value_mock:
                            get_executive_device_state_value_mock.return_value = "test"

                            result, result_values = executive_device_service_instance.get_executive_device_info(
                                executive_device.device_key,
                                device_group.product_key,
                                test_user_id,
                                True
                            )

    assert result == Constants.RESPONSE_MESSAGE_OK
    assert result_values
    assert result_values['name'] == executive_device.name
    assert result_values['state'] == "test"
    assert result_values['isUpdated'] == executive_device.is_updated
    assert result_values['isActive'] == executive_device.is_active
    assert result_values['isAssigned'] == executive_device.is_assigned
    assert result_values['positiveState'] == "test"
    assert result_values['negativeState'] == "test"
    assert result_values['defaultState'] == "test"
    assert result_values['deviceKey'] == executive_device.device_key
    assert result_values['deviceTypeName'] == executive_type.name
    assert result_values['deviceUserGroup'] is None
    assert result_values['formulaName'] == formula.name
def test_get_executive_device_info_should_not_return_device_info_when_no_user_id(
        create_executive_device,
        create_device_group):
    """A missing user id yields USER_NOT_DEFINED and no payload."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    executive_device = create_executive_device()
    device_group = create_device_group()

    result, result_values = executive_device_service_instance.get_executive_device_info(
        executive_device.device_key,
        device_group.product_key,
        None,
        False
    )

    assert result == Constants.RESPONSE_MESSAGE_USER_NOT_DEFINED
    assert result_values is None
def test_get_executive_device_info_should_not_return_device_info_when_no_device_key(create_device_group):
    """A missing device key yields DEVICE_KEY_NOT_FOUND and no payload."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    device_group = create_device_group()
    test_user_id = 1

    result, result_values = executive_device_service_instance.get_executive_device_info(
        None,
        device_group.product_key,
        test_user_id,
        False
    )

    assert result == Constants.RESPONSE_MESSAGE_DEVICE_KEY_NOT_FOUND
    assert result_values is None
def test_get_executive_device_info_should_not_return_device_info_when_no_product_key(create_executive_device):
    """A missing product key yields PRODUCT_KEY_NOT_FOUND and no payload."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    executive_device = create_executive_device()
    test_user_id = 1

    result, result_values = executive_device_service_instance.get_executive_device_info(
        executive_device.device_key,
        None,
        test_user_id,
        False
    )

    assert result == Constants.RESPONSE_MESSAGE_PRODUCT_KEY_NOT_FOUND
    assert result_values is None
def test_get_executive_device_info_should_not_return_device_info_when_user_is_not_in_the_same_user_group_as_device_and_device_is_in_another_user_group(
        create_device_group,
        create_executive_device):
    """Access is denied when the device belongs to a user group the caller is not a member of."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    device_group = create_device_group()
    executive_device = create_executive_device()

    test_user_id = 1

    with patch.object(ExecutiveDeviceRepository, 'get_executive_device_by_device_key_and_device_group_id') as get_executive_device_by_device_key_and_device_group_id_mock:
        get_executive_device_by_device_key_and_device_group_id_mock.return_value = executive_device

        with patch.object(DeviceGroupRepository, 'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
            get_device_group_by_product_key_mock.return_value = device_group

            # The caller shares no user group with the device.
            with patch.object(UserGroupRepository, 'get_user_group_by_user_id_and_executive_device_device_key') as get_user_group_by_user_id_and_executive_device_device_key_mock:
                get_user_group_by_user_id_and_executive_device_device_key_mock.return_value = None

                result, result_values = executive_device_service_instance.get_executive_device_info(
                    executive_device.device_key,
                    device_group.product_key,
                    test_user_id,
                    False
                )

    assert result == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
    assert result_values is None
def test_get_executive_device_info_should_not_return_error_message_when_admin_is_not_admin_of_device_group(
        create_device_group):
    """Access is denied when the admin flag is set but the user is not this group's admin."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    device_group = create_device_group()
    # Deliberately different from device_group.admin_id.
    test_user_id = device_group.admin_id + 2
    test_device_key = '1'

    with patch.object(DeviceGroupRepository, 'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group

        result, result_values = executive_device_service_instance.get_executive_device_info(
            test_device_key,
            device_group.product_key,
            test_user_id,
            True
        )

    assert result == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
    assert result_values is None
def test_get_executive_device_info_should_not_return_device_info_when_executive_device_is_not_in_device_group(
        create_device_group,
        create_user_group):
    """An unknown device key inside a valid device group yields DEVICE_KEY_NOT_FOUND."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    device_group = create_device_group()
    user_group = create_user_group()

    test_user_id = 1
    test_device_key = '1'

    # The device lookup misses while the group and user-group lookups succeed.
    with patch.object(ExecutiveDeviceRepository, 'get_executive_device_by_device_key_and_device_group_id') as get_executive_device_by_device_key_and_device_group_id_mock:
        get_executive_device_by_device_key_and_device_group_id_mock.return_value = None

        with patch.object(DeviceGroupRepository, 'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
            get_device_group_by_product_key_mock.return_value = device_group

            with patch.object(UserGroupRepository, 'get_user_group_by_user_id_and_executive_device_device_key') as get_user_group_by_user_id_and_executive_device_device_key_mock:
                get_user_group_by_user_id_and_executive_device_device_key_mock.return_value = user_group

                result, result_values = executive_device_service_instance.get_executive_device_info(
                    test_device_key,
                    device_group.product_key,
                    test_user_id,
                    False
                )

    assert result == Constants.RESPONSE_MESSAGE_DEVICE_KEY_NOT_FOUND
    assert result_values is None
def test_get_executive_device_info_should_not_return_device_info_when_device_group_does_not_exist(
        create_device_group,
        create_executive_device,
        create_user_group):
    """An unknown product key yields PRODUCT_KEY_NOT_FOUND even if the device exists."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    device_group = create_device_group()
    executive_device = create_executive_device()
    user_group = create_user_group()

    test_user_id = 1
    test_device_key = '1'

    with patch.object(ExecutiveDeviceRepository, 'get_executive_device_by_device_key_and_device_group_id') as get_executive_device_by_device_key_and_device_group_id_mock:
        get_executive_device_by_device_key_and_device_group_id_mock.return_value = executive_device

        # The device-group lookup misses.
        with patch.object(DeviceGroupRepository, 'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
            get_device_group_by_product_key_mock.return_value = None

            with patch.object(UserGroupRepository, 'get_user_group_by_user_id_and_executive_device_device_key') as get_user_group_by_user_id_and_executive_device_device_key_mock:
                get_user_group_by_user_id_and_executive_device_device_key_mock.return_value = user_group

                result, result_values = executive_device_service_instance.get_executive_device_info(
                    test_device_key,
                    device_group.product_key,
                    test_user_id,
                    False
                )

    assert result == Constants.RESPONSE_MESSAGE_PRODUCT_KEY_NOT_FOUND
    assert result_values is None
def test_set_device_state_should_set_device_state_when_called_with_right_parameters(
        create_executive_type,
        create_executive_device):
    """A well-formed values dict updates the device's state and persists the change."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    executive_type = create_executive_type()
    executive_device = create_executive_device()
    executive_device.is_active = True

    test_device_group_id = executive_device.device_group_id
    values = {
        'deviceKey': executive_device.device_key,
        'state': 0.5,
        'isActive': True
    }

    with patch.object(ExecutiveDeviceRepository, 'get_executive_device_by_device_key_and_device_group_id') as get_executive_device_by_device_key_and_device_group_id_mock:
        get_executive_device_by_device_key_and_device_group_id_mock.return_value = executive_device

        with patch.object(ExecutiveTypeRepository, 'get_executive_type_by_id') as get_executive_type_by_id_mock:
            get_executive_type_by_id_mock.return_value = executive_type

            # Force the range check to pass so the write path is exercised.
            with patch.object(ExecutiveDeviceService, '_state_in_range') as _state_in_range_mock:
                _state_in_range_mock.return_value = True

                with patch.object(ExecutiveDeviceRepository, 'update_database') as update_database_mock:
                    update_database_mock.return_value = True

                    executive_device_service_instance.set_device_state(test_device_group_id, values)

    assert executive_device.is_active == values['isActive']
    assert executive_device.state == values['state']
    update_database_mock.assert_called_once()
def test_set_device_state_should_not_set_device_state_when_state_not_in_range(
        create_executive_type,
        create_executive_device):
    """set_device_state is falsy when the requested state fails the range check."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    executive_type = create_executive_type()
    executive_device = create_executive_device()
    executive_device.is_active = True

    test_device_group_id = executive_device.device_group_id
    values = {
        'deviceKey': executive_device.device_key,
        'state': 0.5,
        'isActive': True
    }

    with patch.object(ExecutiveDeviceRepository, 'get_executive_device_by_device_key_and_device_group_id') as get_executive_device_by_device_key_and_device_group_id_mock:
        get_executive_device_by_device_key_and_device_group_id_mock.return_value = executive_device

        with patch.object(ExecutiveTypeRepository, 'get_executive_type_by_id') as get_executive_type_by_id_mock:
            get_executive_type_by_id_mock.return_value = executive_type

            # Force the range check to fail.
            with patch.object(ExecutiveDeviceService, '_state_in_range') as _state_in_range_mock:
                _state_in_range_mock.return_value = False

                assert not executive_device_service_instance.set_device_state(test_device_group_id, values)
def test_set_device_state_should_not_set_device_state_when_called_with_wrong_dictionary():
    """set_device_state is falsy when the values dict lacks the expected 'state' key."""
    device_group_id = 1
    # 'test' replaces the required 'state' key on purpose.
    values = {
        'deviceKey': 1,
        'test': 0.5,
        'isActive': False
    }

    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    assert not executive_device_service_instance.set_device_state(device_group_id, values)
def test_set_device_state_should_not_set_device_when_device_not_in_device_group():
    """set_device_state is falsy when the device key is not found in the device group."""
    device_group_id = 1
    values = {
        'deviceKey': 1,
        'state': 0.5,
        'isActive': False
    }

    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    with patch.object(ExecutiveDeviceRepository, 'get_executive_device_by_device_key_and_device_group_id') as get_executive_device_by_device_key_and_device_group_id_mock:
        get_executive_device_by_device_key_and_device_group_id_mock.return_value = None

        assert not executive_device_service_instance.set_device_state(device_group_id, values)
@pytest.mark.parametrize("state_range_min,state_range_max,value", [
    (-1, 2, 0),
    (1.0, 2.0, 2.0),
    (-2.0, -1.0, -2.0),
    (-2.0, -1.0, -1.5)])
def test_is_decimal_state_in_range_should_return_true_when_value_in_range(
        state_range_min, state_range_max, value,
        create_executive_type,
        get_executive_type_default_values):
    """_is_decimal_state_in_range accepts numeric values inside [min, max], bounds inclusive."""
    executive_type_values = get_executive_type_default_values()
    executive_type_values['state_type'] = 'Decimal'
    executive_type_values['state_range_min'] = state_range_min
    executive_type_values['state_range_max'] = state_range_max

    executive_type = create_executive_type(executive_type_values)

    executive_device_service_instance = ExecutiveDeviceService.get_instance()

    assert executive_device_service_instance._is_decimal_state_in_range(value, executive_type)
@pytest.mark.parametrize("state_range_min,state_range_max,value", [
    (-1, 2, 2.1),
    (1.0, 2.0, 20),
    (-2.0, -1.0, -2.5),
    (-2.0, -1.0, True),
    (-2.0, -1.0, "Test"),
    (-2.0, -1.0, 0)])
def test_is_decimal_state_in_range_should_return_false_when_value_not_in_range_or_wrong_type(
        state_range_min, state_range_max, value,
        create_executive_type,
        get_executive_type_default_values):
    """_is_decimal_state_in_range returns False for out-of-range numbers and
    for values of the wrong type (bool and str are rejected even though bool
    is an int subclass).
    """
    executive_type_values = get_executive_type_default_values()
    executive_type_values['state_type'] = 'Decimal'
    executive_type_values['state_range_min'] = state_range_min
    executive_type_values['state_range_max'] = state_range_max
    executive_type = create_executive_type(executive_type_values)
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    assert not executive_device_service_instance._is_decimal_state_in_range(value, executive_type)
def test_get_list_of_executive_devices_should_return_list_of_executive_devices_when_user_is_admin_of_device_group(
        get_executive_device_default_values,
        create_executive_device,
        create_device_group):
    """The device-group admin gets OK and a serialized list of the group's
    executive devices ({name, deviceKey, isActive} per device).
    """
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    second_executive_device_values = get_executive_device_default_values()
    second_executive_device_values['id'] += 1
    second_executive_device_values['name'] = 'second executive device'
    second_executive_device_values['user_group_id'] = 1
    first_executive_device_values = create_executive_device()
    second_executive_device_values = create_executive_device(second_executive_device_values)
    expected_output_values = [
        {
            'name': first_executive_device_values.name,
            'deviceKey': first_executive_device_values.device_key,
            'isActive': first_executive_device_values.is_active
        },
        {
            'name': second_executive_device_values.name,
            'deviceKey': second_executive_device_values.device_key,
            'isActive': second_executive_device_values.is_active
        }
    ]
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                ExecutiveDeviceRepository,
                'get_executive_devices_by_device_group_id'
        ) as get_executive_devices_by_device_group_id_mock:
            # Fixed misleading alias: it previously read get_updated_..._mock
            # although the patched method is get_executive_devices_by_device_group_id.
            get_executive_devices_by_device_group_id_mock.return_value = [
                first_executive_device_values,
                second_executive_device_values]
            result, result_values = executive_device_service_instance.get_list_of_executive_devices(
                device_group.product_key,
                device_group.admin_id,
                True
            )
    assert result == Constants.RESPONSE_MESSAGE_OK
    assert result_values == expected_output_values
def test_get_list_of_executive_devices_should_return_error_message_when_user_is_not_admin(
        get_executive_device_default_values,
        create_executive_device,
        create_device_group):
    """A caller with is_admin=False is rejected with a privileges error even
    for the group's actual admin id.

    NOTE(review): this test patches 'get_updated_executive_devices_by_device_group_id'
    whereas the admin happy-path test patches 'get_executive_devices_by_device_group_id'
    — presumably irrelevant here because the privilege check fails before any
    repository call; confirm the method name against the repository.
    """
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    second_executive_device_values = get_executive_device_default_values()
    second_executive_device_values['id'] += 1
    second_executive_device_values['name'] = 'second executive device'
    second_executive_device_values['user_group_id'] = 1
    first_executive_device_values = create_executive_device()
    second_executive_device_values = create_executive_device(second_executive_device_values)
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                ExecutiveDeviceRepository,
                'get_updated_executive_devices_by_device_group_id'
        ) as get_updated_executive_devices_by_device_group_id_mock:
            get_updated_executive_devices_by_device_group_id_mock.return_value = [
                first_executive_device_values,
                second_executive_device_values]
            result, _ = executive_device_service_instance.get_list_of_executive_devices(
                device_group.product_key,
                device_group.admin_id,
                False
            )
    assert result == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
def test_get_list_of_executive_devices_should_return_error_message_when_user_is_not_admin_of_device_group(
        get_executive_device_default_values,
        create_executive_device,
        create_device_group):
    """An admin caller whose id differs from the group's admin_id (offset by 5
    below) is rejected with a privileges error.
    """
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    second_executive_device_values = get_executive_device_default_values()
    second_executive_device_values['id'] += 1
    second_executive_device_values['name'] = 'second executive device'
    second_executive_device_values['user_group_id'] = 1
    first_executive_device_values = create_executive_device()
    second_executive_device_values = create_executive_device(second_executive_device_values)
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                ExecutiveDeviceRepository,
                'get_updated_executive_devices_by_device_group_id'
        ) as get_updated_executive_devices_by_device_group_id_mock:
            get_updated_executive_devices_by_device_group_id_mock.return_value = [
                first_executive_device_values,
                second_executive_device_values]
            result, _ = executive_device_service_instance.get_list_of_executive_devices(
                device_group.product_key,
                5 + device_group.admin_id,
                True
            )
    assert result == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
def test_get_list_of_executive_devices_should_return_empty_list_when_there_are_no_devices_in_device_group(
        create_device_group):
    """A group without executive devices yields OK and an empty list."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                ExecutiveDeviceRepository,
                'get_executive_devices_by_device_group_id'
        ) as get_executive_devices_by_device_group_id_mock:
            # Fixed misleading alias: it previously read get_updated_..._mock
            # although the patched method is get_executive_devices_by_device_group_id.
            get_executive_devices_by_device_group_id_mock.return_value = []
            result, result_values = executive_device_service_instance.get_list_of_executive_devices(
                device_group.product_key,
                device_group.admin_id,
                True
            )
    assert result == Constants.RESPONSE_MESSAGE_OK
    assert result_values == []
def test_get_list_of_executive_devices_should_return_empty_list_when_device_group_not_found():
    """An unknown product key yields the PRODUCT_KEY_NOT_FOUND error message."""
    service = ExecutiveDeviceService.get_instance()
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as group_lookup_mock:
        # No device group matches the given product key.
        group_lookup_mock.return_value = None
        result, _ = service.get_list_of_executive_devices(
            'device_group.product_key',
            'device_group.admin_id',
            True
        )
    assert result == Constants.RESPONSE_MESSAGE_PRODUCT_KEY_NOT_FOUND
@pytest.mark.parametrize("product_key, user_id, is_admin, expected_result", [
    ('product_key', None, False, Constants.RESPONSE_MESSAGE_USER_NOT_DEFINED),
    ('product_key', 'user_id', None, Constants.RESPONSE_MESSAGE_USER_NOT_DEFINED),
    (None, 'user_id', False, Constants.RESPONSE_MESSAGE_PRODUCT_KEY_NOT_FOUND)
])
def test_get_list_of_executive_devices_should_return_empty_list_when_one_of_parameters_is_none(
        product_key, user_id, is_admin, expected_result
):
    """Each None argument maps to its specific error message; no repository
    patching is needed because validation happens before any lookup."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    result, _ = executive_device_service_instance.get_list_of_executive_devices(
        product_key,
        user_id,
        is_admin
    )
    assert result == expected_result
def test_get_list_of_unassigned_executive_devices_should_return_list_of_unassigned_executive_devices_when_user_is_not_admin_and_right_parameters_are_passed(
        get_executive_device_default_values,
        create_executive_device,
        create_device_group):
    """A regular (non-admin) member of the device group gets OK plus the
    serialized list of devices not assigned to any user group.
    """
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    second_executive_device_values = get_executive_device_default_values()
    second_executive_device_values['id'] += 1
    second_executive_device_values['name'] = 'second executive device'
    first_executive_device_values = create_executive_device()
    second_executive_device_values = create_executive_device(second_executive_device_values)
    expected_output_values = [
        {
            'name': first_executive_device_values.name,
            'deviceKey': first_executive_device_values.device_key,
            'isActive': first_executive_device_values.is_active
        },
        {
            'name': second_executive_device_values.name,
            'deviceKey': second_executive_device_values.device_key,
            'isActive': second_executive_device_values.is_active
        }
    ]
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        # Membership check: the user is found in this device group.
        with patch.object(
                DeviceGroupRepository,
                'get_device_group_by_user_id_and_product_key'
        ) as get_device_group_by_user_id_and_product_key_mock:
            get_device_group_by_user_id_and_product_key_mock.return_value = device_group
            with patch.object(
                    ExecutiveDeviceRepository,
                    'get_executive_devices_by_device_group_id_that_are_not_in_user_group'
            ) as get_executive_devices_by_device_group_id_that_are_not_in_user_group_mock:
                get_executive_devices_by_device_group_id_that_are_not_in_user_group_mock.return_value = [
                    first_executive_device_values,
                    second_executive_device_values]
                result, result_values = executive_device_service_instance.get_list_of_unassigned_executive_devices(
                    device_group.product_key,
                    'test_user_id',
                    False
                )
    assert result == Constants.RESPONSE_MESSAGE_OK
    assert result_values == expected_output_values
def test_get_list_of_unassigned_executive_devices_should_return_list_of_unassigned_executive_devices_when_user_is_admin_and_right_parameters_are_passed(
        get_executive_device_default_values,
        create_executive_device,
        create_device_group):
    """The group's admin (is_admin=True, matching admin_id) also gets OK plus
    the serialized list of unassigned devices.
    """
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    second_executive_device_values = get_executive_device_default_values()
    second_executive_device_values['id'] += 1
    second_executive_device_values['name'] = 'second executive device'
    first_executive_device_values = create_executive_device()
    second_executive_device_values = create_executive_device(second_executive_device_values)
    expected_output_values = [
        {
            'name': first_executive_device_values.name,
            'deviceKey': first_executive_device_values.device_key,
            'isActive': first_executive_device_values.is_active
        },
        {
            'name': second_executive_device_values.name,
            'deviceKey': second_executive_device_values.device_key,
            'isActive': second_executive_device_values.is_active
        }
    ]
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                DeviceGroupRepository,
                'get_device_group_by_user_id_and_product_key'
        ) as get_device_group_by_user_id_and_product_key_mock:
            get_device_group_by_user_id_and_product_key_mock.return_value = device_group
            with patch.object(
                    ExecutiveDeviceRepository,
                    'get_executive_devices_by_device_group_id_that_are_not_in_user_group'
            ) as get_executive_devices_by_device_group_id_that_are_not_in_user_group_mock:
                get_executive_devices_by_device_group_id_that_are_not_in_user_group_mock.return_value = [
                    first_executive_device_values,
                    second_executive_device_values]
                result, result_values = executive_device_service_instance.get_list_of_unassigned_executive_devices(
                    device_group.product_key,
                    device_group.admin_id,
                    True
                )
    assert result == Constants.RESPONSE_MESSAGE_OK
    assert result_values == expected_output_values
def test_get_list_of_unassigned_executive_devices_should_return_empty_list_when_there_are_not_any_unassigned_executive_devices(
        create_device_group):
    """When the repository reports no unassigned devices, the service returns
    OK with an empty list rather than an error."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    expected_output_values = []
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                DeviceGroupRepository,
                'get_device_group_by_user_id_and_product_key'
        ) as get_device_group_by_user_id_and_product_key_mock:
            get_device_group_by_user_id_and_product_key_mock.return_value = device_group
            with patch.object(
                    ExecutiveDeviceRepository,
                    'get_executive_devices_by_device_group_id_that_are_not_in_user_group'
            ) as get_executive_devices_by_device_group_id_that_are_not_in_user_group_mock:
                get_executive_devices_by_device_group_id_that_are_not_in_user_group_mock.return_value = []
                result, result_values = executive_device_service_instance.get_list_of_unassigned_executive_devices(
                    device_group.product_key,
                    'test_user_id',
                    False
                )
    assert result == Constants.RESPONSE_MESSAGE_OK
    assert result_values == expected_output_values
def test_get_list_of_unassigned_executive_devices_should_error_message_when_admin_id_is_different_than_user_id_and_user_is_admin(
        create_device_group):
    """An is_admin caller whose user id ('test_user_id') is not the group's
    admin_id gets a privileges error and no result payload."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                DeviceGroupRepository,
                'get_device_group_by_user_id_and_product_key'
        ) as get_device_group_by_user_id_and_product_key_mock:
            get_device_group_by_user_id_and_product_key_mock.return_value = device_group
            result, result_values = executive_device_service_instance.get_list_of_unassigned_executive_devices(
                device_group.product_key,
                'test_user_id',
                True
            )
    assert result == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
    assert result_values is None
def test_get_list_of_unassigned_executive_devices_should_error_message_when_user_not_in_device_group(
        create_device_group):
    """A non-admin caller who is not a member of the device group (membership
    lookup returns None) gets a privileges error and no result payload."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                DeviceGroupRepository,
                'get_device_group_by_user_id_and_product_key'
        ) as get_device_group_by_user_id_and_product_key_mock:
            # The user is not associated with any group under this product key.
            get_device_group_by_user_id_and_product_key_mock.return_value = None
            result, result_values = executive_device_service_instance.get_list_of_unassigned_executive_devices(
                device_group.product_key,
                'test_user_id',
                False
            )
    assert result == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
    assert result_values is None
def test_get_list_of_unassigned_executive_devices_should_error_message_when_device_group_not_found(
        get_sensor_default_values,
        create_sensor,
        create_device_group):
    """An unknown product key yields PRODUCT_KEY_NOT_FOUND and no payload.

    NOTE(review): the get_sensor_default_values/create_sensor/create_device_group
    fixtures are requested but never used here — consider removing them.
    """
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = None
        result, result_values = executive_device_service_instance.get_list_of_unassigned_executive_devices(
            'device_group.product_key',
            'test_user_id',
            False
        )
    assert result == Constants.RESPONSE_MESSAGE_PRODUCT_KEY_NOT_FOUND
    assert result_values is None
@pytest.mark.parametrize("product_key, user_id, is_admin, expected_result", [
    ('product_key', None, False, Constants.RESPONSE_MESSAGE_USER_NOT_DEFINED),
    ('product_key', 'user_id', None, Constants.RESPONSE_MESSAGE_USER_NOT_DEFINED),
    (None, 'user_id', False, Constants.RESPONSE_MESSAGE_PRODUCT_KEY_NOT_FOUND)
])
def test_get_list_of_unassigned_executive_devices_should_error_message_when_one_of_parameters_is_none(
        product_key, user_id, is_admin, expected_result,
        get_sensor_default_values,
        create_sensor,
        create_device_group):
    """Each None argument maps to its specific error message; the device-group
    lookup is stubbed to None so only parameter validation is exercised.

    NOTE(review): the sensor/device-group fixtures are unused here.
    """
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = None
        result, result_values = executive_device_service_instance.get_list_of_unassigned_executive_devices(
            product_key,
            user_id,
            is_admin
        )
    assert result == expected_result
    assert result_values is None
def test_add_executive_device_to_device_group_should_add_sensor_to_device_group_when_valid_request(
        create_device_group, create_unconfigured_device, create_executive_type, create_admin):
    """Happy path: with a matching password, a known executive type and a free
    device name, the service constructs an ExecutiveDevice with the type's
    default state, saves it, deletes the unconfigured record, commits, and
    returns CREATED.

    NOTE(review): the test name says 'sensor' but this exercises the executive
    device flow — consider renaming for clarity.
    """
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    unconfigured_device = create_unconfigured_device()
    executive_type = create_executive_type()
    admin = create_admin()
    device_key = "test device_key"
    password = unconfigured_device.password
    device_name = 'test_exec_device_name'
    executive_type_name = 'test_executive_type_name'
    # Precondition: the fixture admin owns the fixture device group.
    assert device_group.admin_id == admin.id
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                UnconfiguredDeviceRepository,
                'get_unconfigured_device_by_device_key_and_device_group_id'
        ) as get_unconfigured_device_by_device_key_and_device_group_id_mock:
            get_unconfigured_device_by_device_key_and_device_group_id_mock.return_value = unconfigured_device
            with patch.object(
                    ExecutiveTypeRepository,
                    'get_executive_type_by_device_group_id_and_name'
            ) as get_executive_type_by_device_group_id_and_name_mock:
                get_executive_type_by_device_group_id_and_name_mock.return_value = executive_type
                with patch.object(
                        ExecutiveDeviceRepository,
                        'get_executive_device_by_name_and_user_group_id'
                ) as get_executive_device_by_name_and_user_group_id_mock:
                    # No name collision within the device group.
                    get_executive_device_by_name_and_user_group_id_mock.return_value = None
                    with patch.object(ExecutiveDevice, '__init__') as exec_device_init_mock:
                        exec_device_init_mock.return_value = None
                        with patch.object(
                                BaseRepository,
                                'save_but_do_not_commit') as save_but_do_not_commit_mock:
                            with patch.object(
                                    BaseRepository,
                                    'delete_but_do_not_commit') as delete_but_do_not_commit_mock:
                                with patch.object(
                                        BaseRepository,
                                        'update_database') as update_database_mock:
                                    update_database_mock.return_value = True
                                    result = executive_device_service_instance.add_executive_device_to_device_group(
                                        device_group.product_key,
                                        admin.id,
                                        True,
                                        device_key,
                                        password,
                                        device_name,
                                        executive_type_name
                                    )
    assert result == Constants.RESPONSE_MESSAGE_CREATED
    # The new device starts in the type's default state, unassigned and
    # without a formula.
    exec_device_init_mock.assert_called_with(
        name=device_name,
        state=executive_type.default_state,
        is_updated=False,
        is_active=False,
        is_assigned=False,
        is_formula_used=False,
        positive_state=None,
        negative_state=None,
        device_key=device_key,
        executive_type_id=executive_type.id,
        user_group_id=None,
        device_group_id=device_group.id,
        formula_id=None)
    save_but_do_not_commit_mock.assert_called_once()
    delete_but_do_not_commit_mock.assert_called_once_with(unconfigured_device)
    update_database_mock.assert_called_once()
def test_add_executive_device_to_device_group_should_return_error_message_when_not_successfull_db_update(
        create_device_group, create_unconfigured_device, create_executive_type, create_admin):
    """When the final commit (update_database) fails, the service still
    performs save/delete staging but returns CONFLICTING_DATA.
    """
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    unconfigured_device = create_unconfigured_device()
    executive_type = create_executive_type()
    admin = create_admin()
    device_key = "test device_key"
    password = unconfigured_device.password
    device_name = 'test_exec_device_name'
    executive_type_name = 'test_executive_type_name'
    # Precondition: the fixture admin owns the fixture device group.
    assert device_group.admin_id == admin.id
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                UnconfiguredDeviceRepository,
                'get_unconfigured_device_by_device_key_and_device_group_id'
        ) as get_unconfigured_device_by_device_key_and_device_group_id_mock:
            get_unconfigured_device_by_device_key_and_device_group_id_mock.return_value = unconfigured_device
            with patch.object(
                    ExecutiveTypeRepository,
                    'get_executive_type_by_device_group_id_and_name'
            ) as get_executive_type_by_device_group_id_and_name_mock:
                get_executive_type_by_device_group_id_and_name_mock.return_value = executive_type
                with patch.object(
                        ExecutiveDeviceRepository,
                        'get_executive_device_by_name_and_user_group_id'
                ) as get_executive_device_by_name_and_user_group_id_mock:
                    get_executive_device_by_name_and_user_group_id_mock.return_value = None
                    # Renamed from the misleading 'sensor_init_mock': this
                    # patches ExecutiveDevice.__init__, not a sensor.
                    with patch.object(ExecutiveDevice, '__init__') as exec_device_init_mock:
                        exec_device_init_mock.return_value = None
                        with patch.object(
                                BaseRepository,
                                'save_but_do_not_commit') as save_but_do_not_commit_mock:
                            with patch.object(
                                    BaseRepository,
                                    'delete_but_do_not_commit') as delete_but_do_not_commit_mock:
                                with patch.object(
                                        BaseRepository,
                                        'update_database') as update_database_mock:
                                    # Simulate the commit failing.
                                    update_database_mock.return_value = False
                                    result = executive_device_service_instance.add_executive_device_to_device_group(
                                        device_group.product_key,
                                        admin.id,
                                        True,
                                        device_key,
                                        password,
                                        device_name,
                                        executive_type_name
                                    )
    assert result == Constants.RESPONSE_MESSAGE_CONFLICTING_DATA
    exec_device_init_mock.assert_called_with(
        name=device_name,
        state=executive_type.default_state,
        is_updated=False,
        is_active=False,
        is_assigned=False,
        is_formula_used=False,
        positive_state=None,
        negative_state=None,
        device_key=device_key,
        executive_type_id=executive_type.id,
        user_group_id=None,
        device_group_id=device_group.id,
        formula_id=None)
    save_but_do_not_commit_mock.assert_called_once()
    delete_but_do_not_commit_mock.assert_called_once_with(unconfigured_device)
    update_database_mock.assert_called_once()
def test_add_sensor_to_device_group_should_return_error_message_when_sensor_type_not_found(
        create_device_group, create_unconfigured_device, create_executive_type, create_admin):
    """An unknown executive type name yields EXECUTIVE_TYPE_NAME_NOT_DEFINED.

    NOTE(review): the test name says 'sensor' but this exercises the executive
    device flow.
    """
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    unconfigured_device = create_unconfigured_device()
    admin = create_admin()
    device_key = "test device_key"
    password = unconfigured_device.password
    device_name = 'test_exec_device_name'
    executive_type_name = 'test_executive_type_name'
    assert device_group.admin_id == admin.id
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                UnconfiguredDeviceRepository,
                'get_unconfigured_device_by_device_key_and_device_group_id'
        ) as get_unconfigured_device_by_device_key_and_device_group_id_mock:
            get_unconfigured_device_by_device_key_and_device_group_id_mock.return_value = unconfigured_device
            with patch.object(
                    ExecutiveTypeRepository,
                    'get_executive_type_by_device_group_id_and_name'
            ) as get_executive_type_by_device_group_id_and_name_mock:
                # The executive type lookup fails.
                get_executive_type_by_device_group_id_and_name_mock.return_value = None
                with patch.object(
                        ExecutiveDeviceRepository,
                        'get_executive_device_by_name_and_user_group_id'
                ) as get_executive_device_by_name_and_user_group_id_mock:
                    get_executive_device_by_name_and_user_group_id_mock.return_value = None
                    result = executive_device_service_instance.add_executive_device_to_device_group(
                        device_group.product_key,
                        admin.id,
                        True,
                        device_key,
                        password,
                        device_name,
                        executive_type_name
                    )
    assert result == Constants.RESPONSE_MESSAGE_EXECUTIVE_TYPE_NAME_NOT_DEFINED
def test_add_sensor_to_device_group_should_return_error_message_when_device_name_already_in_device_group(
        create_device_group, create_unconfigured_device, create_executive_type, create_admin):
    """If a device with the requested name already exists (name lookup returns
    a value), EXECUTIVE_DEVICE_NAME_ALREADY_DEFINED is returned."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    unconfigured_device = create_unconfigured_device()
    executive_type = create_executive_type()
    admin = create_admin()
    device_key = "test device_key"
    password = unconfigured_device.password
    device_name = 'test_exec_device_name'
    executive_type_name = 'test_executive_type_name'
    assert device_group.admin_id == admin.id
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                UnconfiguredDeviceRepository,
                'get_unconfigured_device_by_device_key_and_device_group_id'
        ) as get_unconfigured_device_by_device_key_and_device_group_id_mock:
            get_unconfigured_device_by_device_key_and_device_group_id_mock.return_value = unconfigured_device
            with patch.object(
                    ExecutiveTypeRepository,
                    'get_executive_type_by_device_group_id_and_name'
            ) as get_executive_type_by_device_group_id_and_name_mock:
                get_executive_type_by_device_group_id_and_name_mock.return_value = executive_type
                with patch.object(
                        ExecutiveDeviceRepository,
                        'get_executive_device_by_name_and_user_group_id'
                ) as get_executive_device_by_name_and_user_group_id_mock:
                    # Any truthy value means the name is taken.
                    get_executive_device_by_name_and_user_group_id_mock.return_value = Mock()
                    result = executive_device_service_instance.add_executive_device_to_device_group(
                        device_group.product_key,
                        admin.id,
                        True,
                        device_key,
                        password,
                        device_name,
                        executive_type_name
                    )
    assert result == Constants.RESPONSE_MESSAGE_EXECUTIVE_DEVICE_NAME_ALREADY_DEFINED
def test_add_sensor_to_device_group_should_return_error_message_when_unconfigured_device_not_found(
        create_device_group, create_unconfigured_device, create_executive_type, create_admin):
    """If no unconfigured device matches the key in this group, the service
    returns UNCONFIGURED_DEVICE_NOT_FOUND."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    unconfigured_device = create_unconfigured_device()
    admin = create_admin()
    device_key = "test device_key"
    password = unconfigured_device.password
    device_name = 'test_exec_device_name'
    executive_type_name = 'test_executive_type_name'
    assert device_group.admin_id == admin.id
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                UnconfiguredDeviceRepository,
                'get_unconfigured_device_by_device_key_and_device_group_id'
        ) as get_unconfigured_device_by_device_key_and_device_group_id_mock:
            # The unconfigured-device lookup fails.
            get_unconfigured_device_by_device_key_and_device_group_id_mock.return_value = None
            result = executive_device_service_instance.add_executive_device_to_device_group(
                device_group.product_key,
                admin.id,
                True,
                device_key,
                password,
                device_name,
                executive_type_name
            )
    assert result == Constants.RESPONSE_MESSAGE_UNCONFIGURED_DEVICE_NOT_FOUND
def test_add_sensor_to_device_group_should_return_error_message_when_wrong_password_is_passed(
        create_device_group, create_unconfigured_device, create_executive_type, create_admin):
    """A password differing from the unconfigured device's password yields
    WRONG_PASSWORD."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    unconfigured_device = create_unconfigured_device()
    admin = create_admin()
    device_key = "test device_key"
    # Deliberately corrupt the password so the check fails.
    password = unconfigured_device.password + 'test'
    device_name = 'test_exec_device_name'
    executive_type_name = 'test_executive_type_name'
    assert device_group.admin_id == admin.id
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                UnconfiguredDeviceRepository,
                'get_unconfigured_device_by_device_key_and_device_group_id'
        ) as get_unconfigured_device_by_device_key_and_device_group_id_mock:
            get_unconfigured_device_by_device_key_and_device_group_id_mock.return_value = unconfigured_device
            result = executive_device_service_instance.add_executive_device_to_device_group(
                device_group.product_key,
                admin.id,
                True,
                device_key,
                password,
                device_name,
                executive_type_name
            )
    assert result == Constants.RESPONSE_MESSAGE_WRONG_PASSWORD
def test_add_sensor_to_device_group_should_return_error_message_when_when_admin_id_is_different_from_device_group_admin_id(
        create_device_group, create_unconfigured_device, create_executive_type, create_admin):
    """A caller whose admin id differs from the group's admin_id is rejected
    with a privileges error before any device lookup happens."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    device_group = create_device_group()
    unconfigured_device = create_unconfigured_device()
    admin = create_admin()
    device_key = "test device_key"
    password = unconfigured_device.password + 'test'
    device_name = 'test_exec_device_name'
    executive_type_name = 'test_executive_type_name'
    # Shift the id so it no longer matches the group's admin.
    admin.id += 1
    assert device_group.admin_id != admin.id
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        result = executive_device_service_instance.add_executive_device_to_device_group(
            device_group.product_key,
            admin.id,
            True,
            device_key,
            password,
            device_name,
            executive_type_name
        )
    assert result == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
def test_add_sensor_to_device_group_should_return_error_message_when_when_device_group_not_found(
        create_device_group, create_unconfigured_device, create_executive_type, create_admin):
    """Adding an executive device fails with PRODUCT_KEY_NOT_FOUND when the
    product key matches no device group."""
    service = ExecutiveDeviceService.get_instance()
    admin = create_admin()
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key') as group_lookup_mock:
        # No device group exists for this product key.
        group_lookup_mock.return_value = None
        outcome = service.add_executive_device_to_device_group(
            'device_group.product_key',
            admin.id,
            True,
            "test device_key",
            'test',
            'test_exec_device_name',
            'test_executive_type_name'
        )
    assert outcome == Constants.RESPONSE_MESSAGE_PRODUCT_KEY_NOT_FOUND
@pytest.mark.parametrize(
    "product_key, admin_id, is_admin, device_key, password, device_name, executive_type_name, expected_result", [
        (None, 'admin_id', True, "test device_key", 'password', 'test_device_name', 'test_executive_type_name',
         Constants.RESPONSE_MESSAGE_PRODUCT_KEY_NOT_FOUND),
        ('product_key', None, True, "test device_key", 'password', 'test_device_name', 'test_executive_type_name',
         Constants.RESPONSE_MESSAGE_USER_NOT_DEFINED),
        ('product_key', 'admin_id', None, "test device_key", 'password', 'test_device_name', 'test_executive_type_name',
         Constants.RESPONSE_MESSAGE_USER_NOT_DEFINED),
        ('product_key', 'admin_id', True, None, 'password', 'test_device_name', 'test_executive_type_name',
         Constants.RESPONSE_MESSAGE_BAD_REQUEST),
        ('product_key', 'admin_id', True, "test device_key", None, 'test_device_name', 'test_executive_type_name',
         Constants.RESPONSE_MESSAGE_BAD_REQUEST),
        ('product_key', 'admin_id', True, "test device_key", 'password', None, 'test_executive_type_name',
         Constants.RESPONSE_MESSAGE_BAD_REQUEST),
        ('product_key', 'admin_id', True, "test device_key", 'password', 'test_device_name', None,
         Constants.RESPONSE_MESSAGE_BAD_REQUEST),
    ])
def test_add_sensor_to_device_group_should_return_error_message_when_one_of_parameters_is_none(
        product_key, admin_id, is_admin, device_key, password, device_name, executive_type_name, expected_result
):
    """Each None argument maps to its specific error message; validation runs
    before any repository access, so nothing is patched."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    result = executive_device_service_instance.add_executive_device_to_device_group(
        product_key,
        admin_id,
        is_admin,
        device_key,
        password,
        device_name,
        executive_type_name
    )
    assert result == expected_result
def test__change_device_user_group_should_change_devices_user_group_if_user_in_old_and_new_user_groups(
        create_executive_device,
        create_user_group,
        create_user):
    """_change_device_user_group succeeds (True, None) and moves the device to
    the new user group when the user belongs to both the old and new groups."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_device = create_executive_device()
    user = create_user()
    old_user_group = create_user_group()
    # Precondition: the device starts in the old user group.
    assert executive_device.user_group_id == old_user_group.id
    new_user_group = create_user_group()
    # Put the user in both groups so the membership checks pass.
    old_user_group.users = [user]
    new_user_group.users = [user]
    with patch.object(
            UserGroupRepository,
            'get_user_group_by_id'
    ) as get_user_group_by_id_mock:
        get_user_group_by_id_mock.return_value = old_user_group
        status, error_msg = executive_device_service_instance._change_device_user_group(
            executive_device,
            user,
            False,
            new_user_group
        )
    assert status is True
    assert error_msg is None
    assert executive_device.user_group_id == new_user_group.id
def test_change_device_user_group_should_change_devices_user_group_if_user_is_none_and_is_admin(
        create_executive_device,
        create_user_group):
    """An admin caller (user=None, is_admin=True) may move a device to a new
    user group without being a member of either group."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_device = create_executive_device()
    old_user_group = create_user_group()
    # Sanity check: the device starts out in the first created group.
    assert executive_device.user_group_id == old_user_group.id
    new_user_group = create_user_group()
    with patch.object(
            UserGroupRepository,
            'get_user_group_by_id'
    ) as get_user_group_by_id_mock:
        get_user_group_by_id_mock.return_value = old_user_group
        status, error_msg = executive_device_service_instance._change_device_user_group(
            executive_device,
            None,  # no acting user: the caller is an admin
            True,  # is_admin
            new_user_group
        )
        assert status is True
        assert error_msg is None
        assert executive_device.user_group_id == new_user_group.id
@pytest.mark.parametrize('user_in_old_user_group, user_in_new_user_group',
                         [(True, False), (False, True), (False, False)])
def test__change_device_user_group_should_return_error_message_when_user_not_in_old_or_new_user_groups(
        user_in_old_user_group, user_in_new_user_group,
        create_executive_device,
        create_user_group,
        create_user):
    """A non-admin user must belong to BOTH the old and the new user group;
    otherwise the move is refused and the device keeps its old group."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_device = create_executive_device()
    user = create_user()
    old_user_group = create_user_group()
    # Sanity check: the device starts out in the first created group.
    assert executive_device.user_group_id == old_user_group.id
    new_user_group = create_user_group()
    # Build the (incomplete) memberships for this parametrized case.
    if user_in_old_user_group:
        old_user_group.users = [user]
    else:
        old_user_group.users = []
    if user_in_new_user_group:
        new_user_group.users = [user]
    else:
        new_user_group.users = []
    with patch.object(
            UserGroupRepository,
            'get_user_group_by_id'
    ) as get_user_group_by_id_mock:
        get_user_group_by_id_mock.return_value = old_user_group
        status, error_msg = executive_device_service_instance._change_device_user_group(
            executive_device,
            user,
            False,  # is_admin
            new_user_group,
        )
        assert status is False
        assert error_msg == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
        # The device must remain in its original user group.
        assert executive_device.user_group_id == old_user_group.id
def test__change_device_type_should_change_device_type_if_device_type_in_device_group(
        create_executive_device,
        get_executive_type_default_values,
        create_executive_type):
    """When the requested executive type exists in the device group,
    _change_device_type switches the device to it and flags the change."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_type_values = get_executive_type_default_values()
    # Create a second, distinct type so the change is observable.
    executive_type_values['id'] += 1
    executive_type_values['name'] = 'Test'
    executive_type = create_executive_type(executive_type_values)
    executive_device = create_executive_device()
    assert executive_device.executive_type_id != executive_type.id
    with patch.object(
            ExecutiveTypeRepository,
            'get_executive_type_by_device_group_id_and_name'
    ) as get_executive_type_by_device_group_id_and_name_mock:
        get_executive_type_by_device_group_id_and_name_mock.return_value = executive_type
        status, returned_exec_type, error_msg, is_type_changed = executive_device_service_instance._change_device_type(
            executive_device,
            'device_group_id',
            executive_type.name
        )
        assert status is True
        assert returned_exec_type is executive_type
        assert error_msg is None
        assert executive_device.executive_type_id == executive_type.id
        assert is_type_changed is True
def test__change_device_type_should_return_error_message_when_exec_type_not_found(
        create_executive_device):
    """When no executive type matches the given name, _change_device_type must
    report EXECUTIVE_TYPE_NOT_FOUND and leave the device's type untouched."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_device = create_executive_device()
    # Bug fix: capture the device's *type* id. The original code captured
    # executive_device.id, so the "type unchanged" assertion below only
    # passed by coincidence of fixture ids and did not actually verify
    # that the executive type was left alone.
    old_type_id = executive_device.executive_type_id
    with patch.object(
            ExecutiveTypeRepository,
            'get_executive_type_by_device_group_id_and_name'
    ) as get_executive_type_by_device_group_id_and_name_mock:
        # Repository lookup fails for the requested type name.
        get_executive_type_by_device_group_id_and_name_mock.return_value = None
        status, returned_exec_type, error_msg, is_type_changed = executive_device_service_instance._change_device_type(
            executive_device,
            'device_group_id',
            'executive_type_name'
        )
        assert status is False
        assert returned_exec_type is None
        assert error_msg == Constants.RESPONSE_MESSAGE_EXECUTIVE_TYPE_NOT_FOUND
        assert executive_device.executive_type_id == old_type_id
        assert is_type_changed is False
@pytest.mark.parametrize('user_group_is_none', [True, False])
def test__change_device_name_should_change_devices_name_if_name_is_not_in_user_group_or_user_group_is_none(
        user_group_is_none,
        create_executive_device,
        create_user_group,
):
    """Renaming succeeds when the device has no user group, or when the only
    device carrying that name in the group is the device itself."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_device = create_executive_device()
    if user_group_is_none:
        user_group = None
    else:
        user_group = create_user_group()
    changed_name = 'Changed name'
    with patch.object(
            ExecutiveDeviceRepository,
            'get_executive_device_by_name_and_user_group_id'
    ) as get_executive_device_by_name_and_user_group_id_mock:
        if user_group_is_none:
            get_executive_device_by_name_and_user_group_id_mock.return_value = None
        else:
            # The name lookup finds the same device, which is not a conflict.
            get_executive_device_by_name_and_user_group_id_mock.return_value = executive_device
        status, error_msg = executive_device_service_instance._change_device_name(
            executive_device, changed_name, user_group)
        assert status is True
        assert error_msg is None
        assert executive_device.name == changed_name
def test__change_device_name_should_not_change_devices_name_if_name_is_defined_in_user_group(
        create_executive_device,
        create_user_group,
):
    """Renaming is rejected when another device in the user group already
    carries the requested name."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_device = create_executive_device()
    second_executive_device = Mock()
    # NOTE(review): this sets the return value of *calling* .id(), not the
    # .id attribute itself, so second_executive_device.id stays a distinct
    # Mock object and never compares equal to executive_device.id.
    # Presumably the intent was a conflicting device with a different id —
    # confirm against the service's comparison logic.
    second_executive_device.id.return_value = executive_device.id
    with patch.object(
            ExecutiveDeviceRepository,
            'get_executive_device_by_name_and_user_group_id'
    ) as get_executive_device_by_name_and_user_group_id_mock:
        get_executive_device_by_name_and_user_group_id_mock.return_value = second_executive_device
        status, error_msg = executive_device_service_instance._change_device_name(
            executive_device, 'changed_name', Mock())
        assert status is False
        assert error_msg == Constants.RESPONSE_MESSAGE_EXECUTIVE_DEVICE_NAME_ALREADY_DEFINED
def test__change_device_formula_related_fields_should_change_devices_fields_if_all_parameters_are_correct(
        create_executive_device, create_formula
):
    """With a valid formula name, in-range states and is_formula_used=True,
    all formula-related fields on the device are updated."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_device = create_executive_device()
    formula = create_formula()
    formula_name = formula.name
    positive_state = "Positive"
    negative_state = "Negative"
    is_formula_used = True
    with patch.object(
            ExecutiveDeviceService,
            '_state_in_range'
    ) as _state_in_range_mock:
        # Treat any state value as in range for this test.
        _state_in_range_mock.return_value = True
        with patch.object(
                FormulaRepository,
                'get_formula_by_name_and_user_group_id'
        ) as get_formula_by_name_and_user_group_id_mock:
            get_formula_by_name_and_user_group_id_mock.return_value = formula
            status, returned_formula, error_msg = \
                executive_device_service_instance._change_device_formula_related_fields(
                    executive_device,
                    formula_name,
                    positive_state,
                    negative_state, is_formula_used,
                    Mock(),  # stubbed arguments (values not asserted in this test)
                    Mock()
                )
            assert status is True
            assert returned_formula is formula
            assert error_msg is None
            assert executive_device.formula_id == formula.id
            assert executive_device.negative_state == negative_state
            assert executive_device.positive_state == positive_state
            assert executive_device.is_formula_used is is_formula_used
def test__change_device_formula_related_fields_should_change_device_fields_when_positive_state_is_None(
        create_executive_device, create_formula
):
    """A None positive_state is acceptable when is_formula_used is False;
    the remaining formula-related fields are still updated."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_device = create_executive_device()
    formula = create_formula()
    formula_name = formula.name
    positive_state = None
    negative_state = "Negative"
    is_formula_used = False
    with patch.object(
            ExecutiveDeviceService,
            '_state_in_range'
    ) as _state_in_range_mock:
        # Treat any state value as in range for this test.
        _state_in_range_mock.return_value = True
        with patch.object(
                FormulaRepository,
                'get_formula_by_name_and_user_group_id'
        ) as get_formula_by_name_and_user_group_id_mock:
            get_formula_by_name_and_user_group_id_mock.return_value = formula
            status, returned_formula, error_msg = \
                executive_device_service_instance._change_device_formula_related_fields(
                    executive_device,
                    formula_name,
                    positive_state,
                    negative_state, is_formula_used,
                    Mock(),  # stubbed arguments (values not asserted in this test)
                    Mock()
                )
            assert status is True
            assert returned_formula is formula
            assert error_msg is None
            assert executive_device.formula_id == formula.id
            assert executive_device.negative_state == negative_state
            assert executive_device.positive_state == positive_state
            assert executive_device.is_formula_used is is_formula_used
@pytest.mark.parametrize('formula_name, positive_state, negative_state, is_formula_used',
                         [
                             (None, "positive", "negative", False),
                             ("formula_name", None, "negative", False),
                             ("formula_name", "positive", None, False),
                         ])
def test_change_device_formula_related_fields_should_change_device_fields_when_one_parameter_is_none_and_formula_not_used(
        formula_name, positive_state, negative_state, is_formula_used,
        create_executive_device, create_formula
):
    """With is_formula_used False, a single None parameter (formula name or
    one of the states) is still a valid combination: the state fields are
    updated, and a formula is attached only when a formula name was given.

    Fixes from review: the original function reused the misleading name
    '..._when_positive_state_is_None' (copy-pasted from the preceding test,
    contradicting its parametrization) and contained no-op self-assignments
    (formula_name = formula_name, etc.), both removed here.
    """
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_device = create_executive_device()
    # Start with no formula attached so the "no formula name" cases can
    # assert that formula_id stays None.
    executive_device.formula_id = None
    formula = create_formula()
    with patch.object(
            ExecutiveDeviceService,
            '_state_in_range'
    ) as _state_in_range_mock:
        # Treat any state value as in range for this test.
        _state_in_range_mock.return_value = True
        with patch.object(
                FormulaRepository,
                'get_formula_by_name_and_user_group_id'
        ) as get_formula_by_name_and_user_group_id_mock:
            get_formula_by_name_and_user_group_id_mock.return_value = formula
            status, returned_formula, error_msg = \
                executive_device_service_instance._change_device_formula_related_fields(
                    executive_device,
                    formula_name,
                    positive_state,
                    negative_state, is_formula_used,
                    Mock(),  # stubbed arguments (values not asserted in this test)
                    Mock()
                )
            assert status is True
            assert error_msg is None
            assert executive_device.negative_state == negative_state
            assert executive_device.positive_state == positive_state
            assert executive_device.is_formula_used is is_formula_used
            if formula_name is not None:
                assert returned_formula is formula
                assert executive_device.formula_id == formula.id
            else:
                assert executive_device.formula_id is None
                assert returned_formula is None
@pytest.mark.parametrize('formula_name, positive_state, negative_state, is_formula_used',
                         [
                             (None, "positive", "negative", True),
                             ("formula_name", None, "negative", True),
                             ("formula_name", "positive", None, True),
                             ("formula_name", "positive", "negative", None),
                             ("formula_name", None, None, None),
                             ("formula_name", None, None, True),
                             (None, "positive", None, None),
                             (None, None, "negative", None),
                             (None, None, None, True),
                         ])
def test_change_device_formula_related_fields_should_return_error_message_when_wrong_combination_of_parameters(
        formula_name, positive_state, negative_state, is_formula_used
):
    """Inconsistent formula/state/is_formula_used combinations are rejected
    with PARTIALLY_WRONG_DATA_FROM_FRONTEND and no formula is returned."""
    service = ExecutiveDeviceService.get_instance()
    # A spec'd stub is enough here; the combination check should fail before
    # any real device state is needed.
    device_stub = Mock(spec=ExecutiveDevice)
    device_stub.formula_id = 1
    with patch.object(
            FormulaRepository,
            'get_formula_by_name_and_user_group_id'
    ) as formula_lookup_mock:
        formula_lookup_mock.return_value = Mock()
        status, returned_formula, error_msg = service._change_device_formula_related_fields(
            device_stub,
            formula_name,
            positive_state,
            negative_state,
            is_formula_used,
            Mock(),
            Mock()
        )
    assert status is False
    assert returned_formula is None
    assert error_msg == Constants.RESPONSE_MESSAGE_PARTIALLY_WRONG_DATA_FROM_FRONTEND
def test__change_device_formula_related_fields_should_return_error_message_when_formula_not_found(
        create_executive_device
):
    """An unknown formula name is rejected with FORMULA_NOT_FOUND and no
    formula is returned."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_device = create_executive_device()
    formula_name = 'formula.name'
    positive_state = "Positive"
    negative_state = "Negative"
    is_formula_used = True
    with patch.object(
            FormulaRepository,
            'get_formula_by_name_and_user_group_id'
    ) as get_formula_by_name_and_user_group_id_mock:
        # Repository lookup fails for the given formula name.
        get_formula_by_name_and_user_group_id_mock.return_value = None
        status, returned_formula, error_msg = \
            executive_device_service_instance._change_device_formula_related_fields(
                executive_device,
                formula_name,
                positive_state,
                negative_state, is_formula_used,
                Mock(),  # stubbed arguments (values not asserted in this test)
                Mock()
            )
        assert status is False
        assert returned_formula is None
        assert error_msg == Constants.RESPONSE_MESSAGE_FORMULA_NOT_FOUND
def test_delete_executive_device_should_delete_sensor_when_right_parameters_are_passed(
        create_executive_device,
        create_device_group):
    """Happy path: product key resolves, caller is the group's admin, the
    device exists, and saving the deleted-device record succeeds -> OK."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_device = create_executive_device()
    admin_id = 1
    is_admin = True
    admin_mock = Mock()
    admin_mock.id.return_value = admin_id
    device_group = create_device_group()
    # admin_mock.id is itself a Mock; assigning it here makes the service's
    # admin check match by object identity (presumably admin.id is compared
    # against device_group.admin_id — confirm in the service).
    device_group.admin_id = admin_mock.id
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key'
    ) as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                AdminRepository,
                'get_admin_by_id'
        ) as get_admin_by_id_mock:
            get_admin_by_id_mock.return_value = admin_mock
            with patch.object(
                    ExecutiveDeviceRepository,
                    'get_executive_device_by_device_key_and_device_group_id'
            ) as get_executive_device_by_device_key_and_device_group_id:
                get_executive_device_by_device_key_and_device_group_id.return_value = executive_device
                with patch.object(
                        ExecutiveDeviceRepository,
                        'delete_but_do_not_commit'
                ) as delete_but_do_not_commit_mock:
                    with patch.object(
                            DeletedDeviceRepository,
                            'save'
                    ) as save_mock:
                        # Saving the deleted-device record succeeds.
                        save_mock.return_value = True
                        result = executive_device_service_instance.delete_executive_device(
                            executive_device.device_key,
                            'product_key',
                            admin_id,
                            is_admin
                        )
                        assert result == Constants.RESPONSE_MESSAGE_OK
                        delete_but_do_not_commit_mock.assert_called_once_with(executive_device)
def test_delete_executive_device_should_return_error_message_when_unsuccessful_db_deletion(
        create_executive_device,
        create_device_group):
    """Identical setup to the happy path, but the deleted-device record fails
    to save -> a generic ERROR response, with the delete still attempted."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    executive_device = create_executive_device()
    admin_id = 1
    is_admin = True
    admin_mock = Mock()
    admin_mock.id.return_value = admin_id
    device_group = create_device_group()
    # Identity match between admin_mock.id and device_group.admin_id.
    device_group.admin_id = admin_mock.id
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key'
    ) as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                AdminRepository,
                'get_admin_by_id'
        ) as get_admin_by_id_mock:
            get_admin_by_id_mock.return_value = admin_mock
            with patch.object(
                    ExecutiveDeviceRepository,
                    'get_executive_device_by_device_key_and_device_group_id'
            ) as get_executive_device_by_device_key_and_device_group_id:
                get_executive_device_by_device_key_and_device_group_id.return_value = executive_device
                with patch.object(
                        ExecutiveDeviceRepository,
                        'delete_but_do_not_commit'
                ) as delete_but_do_not_commit_mock:
                    with patch.object(
                            DeletedDeviceRepository,
                            'save'
                    ) as save_mock:
                        # Saving the deleted-device record fails.
                        save_mock.return_value = False
                        result = executive_device_service_instance.delete_executive_device(
                            executive_device.device_key,
                            'product_key',
                            admin_id,
                            is_admin
                        )
                        assert result == Constants.RESPONSE_MESSAGE_ERROR
                        delete_but_do_not_commit_mock.assert_called_once_with(executive_device)
def test_delete_executive_should_return_error_message_when_admin_in_not_assigned_to_device_group(
        create_executive_device,
        create_device_group):
    """An admin who does not own the device group may not delete the device."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    admin_id = 1
    is_admin = True
    executive_device = create_executive_device()
    admin_mock = Mock()
    admin_mock.id.return_value = admin_id
    device_group = create_device_group()
    # The group belongs to some other admin.
    device_group.admin_id = admin_id + 1
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key'
    ) as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                AdminRepository,
                'get_admin_by_id'
        ) as get_admin_by_id_mock:
            get_admin_by_id_mock.return_value = admin_mock
            with patch.object(
                    ExecutiveDeviceRepository,
                    'get_executive_device_by_device_key_and_device_group_id'
            ) as get_executive_device_by_device_key_and_device_group_id:
                get_executive_device_by_device_key_and_device_group_id.return_value = executive_device
                with patch.object(
                        ExecutiveDeviceRepository,
                        'delete'
                ) as delete_mock:
                    # Patched so no real deletion could happen even if the
                    # privilege check were (wrongly) passed.
                    delete_mock.return_value = False
                    result = executive_device_service_instance.delete_executive_device(
                        'executive_device.device_key',
                        'product_key', admin_id,
                        is_admin)
                    assert result == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
def test_delete_executive_device_should_return_error_message_when_exec_device_not_found(
        create_device_group):
    """When no executive device matches the device key, deletion is rejected
    with EXECUTIVE_DEVICE_NOT_FOUND.

    Fixes from review: the original name claimed the test 'should delete
    sensor when exec device not found', contradicting the asserted
    behaviour; renamed to describe the actual expectation.
    """
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    admin_id = 1
    is_admin = True
    admin_mock = Mock()
    admin_mock.id.return_value = admin_id
    device_group = create_device_group()
    # Identity match between admin_mock.id and device_group.admin_id so the
    # admin privilege check passes and the device lookup is reached.
    device_group.admin_id = admin_mock.id
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key'
    ) as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                AdminRepository,
                'get_admin_by_id'
        ) as get_admin_by_id_mock:
            get_admin_by_id_mock.return_value = admin_mock
            with patch.object(
                    ExecutiveDeviceRepository,
                    'get_executive_device_by_device_key_and_device_group_id'
            ) as get_executive_device_by_device_key_and_device_group_id:
                # No device matches the given device key.
                get_executive_device_by_device_key_and_device_group_id.return_value = None
                result = executive_device_service_instance.delete_executive_device(
                    'executive_device.device_key',
                    'product_key', admin_id,
                    is_admin)
                assert result == Constants.RESPONSE_MESSAGE_EXECUTIVE_DEVICE_NOT_FOUND
def test_delete_executive_device_should_return_error_message_when_admin_in_not_admin(
        create_device_group):
    """A caller with is_admin=False is refused even when the admin record and
    device group line up."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    admin_id = 1
    is_admin = False
    admin_mock = Mock()
    admin_mock.id.return_value = admin_id
    device_group = create_device_group()
    device_group.admin_id = admin_mock.id
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key'
    ) as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                AdminRepository,
                'get_admin_by_id'
        ) as get_admin_by_id_mock:
            get_admin_by_id_mock.return_value = admin_mock
            result = executive_device_service_instance.delete_executive_device(
                'executive_device.device_key',
                'product_key', admin_id,
                is_admin)
            assert result == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
def test_delete_executive_device_should_return_error_message_when_admin_in_not_found(
        create_device_group):
    """If the admin id cannot be resolved, deletion is refused with a
    privileges error."""
    executive_device_service_instance = ExecutiveDeviceService.get_instance()
    admin_id = 1
    is_admin = False
    device_group = create_device_group()
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key'
    ) as get_device_group_by_product_key_mock:
        get_device_group_by_product_key_mock.return_value = device_group
        with patch.object(
                AdminRepository,
                'get_admin_by_id'
        ) as get_admin_by_id_mock:
            # No admin record exists for this id.
            get_admin_by_id_mock.return_value = None
            result = executive_device_service_instance.delete_executive_device(
                'executive_device.device_key',
                'product_key', admin_id,
                is_admin)
            assert result == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
def test_delete_executive_device_should_return_error_message_when_device_group_not_found(
        create_device_group):
    """An unknown product key yields PRODUCT_KEY_NOT_FOUND."""
    service = ExecutiveDeviceService.get_instance()
    with patch.object(
            DeviceGroupRepository,
            'get_device_group_by_product_key'
    ) as product_key_lookup_mock:
        # No device group matches the product key.
        product_key_lookup_mock.return_value = None
        outcome = service.delete_executive_device(
            'executive_device.device_key',
            'product_key',
            1,      # admin_id
            False)  # is_admin
    assert outcome == Constants.RESPONSE_MESSAGE_PRODUCT_KEY_NOT_FOUND
| 40.849867
| 156
| 0.699552
| 10,681
| 92,239
| 5.430203
| 0.016384
| 0.135776
| 0.034397
| 0.033103
| 0.959069
| 0.946741
| 0.934379
| 0.926276
| 0.922052
| 0.908552
| 0
| 0.001361
| 0.251184
| 92,239
| 2,257
| 157
| 40.867966
| 0.838367
| 0
| 0
| 0.843628
| 0
| 0
| 0.086265
| 0.055475
| 0
| 0
| 0
| 0
| 0.106845
| 1
| 0.032276
| false
| 0.016694
| 0.008347
| 0
| 0.040623
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b4567d0735cf2ca49c778a93979ed6bcdd49e9c2
| 16,862
|
py
|
Python
|
tests/sc/test_alerts.py
|
widnyana/pyTenable
|
be6194bbf11a2283543a2c355deb37b52cd1a93b
|
[
"MIT"
] | null | null | null |
tests/sc/test_alerts.py
|
widnyana/pyTenable
|
be6194bbf11a2283543a2c355deb37b52cd1a93b
|
[
"MIT"
] | null | null | null |
tests/sc/test_alerts.py
|
widnyana/pyTenable
|
be6194bbf11a2283543a2c355deb37b52cd1a93b
|
[
"MIT"
] | null | null | null |
from tenable.errors import *
from ..checker import check
import datetime, sys, pytest, os
@pytest.fixture
def alert(request, vcr, sc):
    """Create a throwaway alert via the SC API (recorded by the 'alert'
    cassette) and delete it on teardown, tolerating an APIError when the
    test already removed it itself."""
    with vcr.use_cassette('alert'):
        alert = sc.alerts.create(
            ('severity', '=', '3,4'),
            name='Example Alert',
            trigger=('sumip', '>=', '100'),
            action=[{
                'type': 'notification',
                'message': 'Example Message',
                'users': [{'id': 1}]
            }])

    def teardown():
        try:
            sc.alerts.delete(int(alert['id']))
        except APIError:
            # Already gone (e.g. the delete test removed it); nothing to do.
            pass

    request.addfinalizer(teardown)
    return alert
def test_alerts_constructor_name_typeerror(sc):
    """A non-string name must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts._constructor, name=1)
def test_alerts_constructor_description_typeerror(sc):
    """A non-string description must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts._constructor, description=1)
def test_alerts_constructor_query_typeerror(sc):
    """A non-dict query must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts._constructor, query=1)
def test_alerts_constructor_always_exec_on_trigger_typeerror(sc):
    """A non-boolean always_exec_on_trigger must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts._constructor, always_exec_on_trigger='nope')
def test_alerts_constructor_trigger_typeerror(sc):
    """A non-tuple trigger must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts._constructor, trigger=1)
def test_alerts_constructor_trigger_name_typeerror(sc):
    """A non-string trigger name must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts._constructor, trigger=(1, '=', 'something'))
def test_alerts_constructor_trigger_operator_typeerror(sc):
    """A non-string trigger operator must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts._constructor, trigger=('name', 1, 'something'))
def test_alerts_constructor_trigger_operator_unexpectedvalueerror(sc):
    """An unknown trigger operator must raise UnexpectedValueError."""
    pytest.raises(UnexpectedValueError, sc.alerts._constructor,
                  trigger=('name', 'eq', 'something'))
def test_alerts_constructor_trigger_value_typeerror(sc):
    """A non-string trigger value must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts._constructor, trigger=('name', '=', 1))
def test_alerts_constructor(sc):
    """_constructor should translate the filter tuple, trigger tuple and
    action list into the document layout the SC API expects."""
    expected = {
        'name': 'Example Alert',
        'action': [{
            'message': 'Example Message',
            'type': 'notification',
            'users': [{'id': 1}]
        }],
        'query': {
            'type': 'vuln',
            'filters': [{
                'filterName': 'severity',
                'operator': '=',
                'value': '3,4'
            }]},
        'triggerName': 'sumip',
        'triggerOperator': '>=',
        'triggerValue': '100'
    }
    built = sc.alerts._constructor(
        ('severity', '=', '3,4'),
        name='Example Alert',
        trigger=('sumip', '>=', '100'),
        action=[{
            'type': 'notification',
            'message': 'Example Message',
            'users': [{'id': 1}]}])
    assert built == expected
def test_alerts_list_fields_typeerror(sc):
    """A non-list fields argument must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts.list, fields=1)
@pytest.mark.vcr()
def test_alerts_list_success(sc, alert):
    """list() returns a dict of alert collections; spot-check the field types
    of every manageable alert."""
    alerts = sc.alerts.list()
    assert isinstance(alerts, dict)
    for a in alerts['manageable']:
        check(a, 'description', str)
        check(a, 'id', str)
        check(a, 'name', str)
        check(a, 'status', str)
def test_alerts_details_id_typeerror(sc):
    """A non-integer alert id must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts.details, 'nope')
def test_alerts_fields_typeerror(sc):
    """A non-list fields argument to details() must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts.details, 1, fields=1)
@pytest.mark.vcr()
def test_alerts_details_success(sc, alert):
    """details() returns the full alert record; validate the per-type action
    definition schemas and the top-level alert attribute types."""
    a = sc.alerts.details(int(alert['id']))
    assert isinstance(a, dict)
    check(a, 'action', list)
    # Each action's 'definition' schema depends on its 'type'.
    for i in a['action']:
        assert isinstance(i, dict)
        check(i, 'type', str)
        check(i, 'definition', dict)
        if i['type'] == 'notification':
            check(i['definition'], 'message', str)
            check(i['definition'], 'users', list)
            for u in i['definition']['users']:
                check(u, 'firstname', str)
                check(u, 'id', str)
                check(u, 'lastname', str)
                check(u, 'username', str)
        if i['type'] == 'email':
            check(i['definition'], 'subject', str)
            check(i['definition'], 'message', str)
            check(i['definition'], 'addresses', str)
            check(i['definition'], 'users', list)
            for u in i['definition']['users']:
                check(u, 'firstname', str)
                check(u, 'id', str)
                check(u, 'lastname', str)
                check(u, 'username', str)
            check(i['definition'], 'includeResults', str)
        if i['type'] == 'report':
            check(i['definition'], 'report', dict)
            check(i['definition']['report'], 'id', str)
        if i['type'] == 'scan':
            check(i['definition'], 'scan', dict)
            check(i['definition']['scan'], 'id', int)
        if i['type'] == 'syslog':
            check(i['definition'], 'host', str)
            check(i['definition'], 'port', str)
            check(i['definition'], 'message', str)
            check(i['definition'], 'severity', str)
        if i['type'] == 'ticket':
            check(i['definition'], 'assignee', dict)
            check(i['definition']['assignee'], 'id', str)
            check(i['definition'], 'name', str)
            check(i['definition'], 'description', str)
            check(i['definition'], 'notes', str)
        check(i, 'id', str)
        check(i, 'objectID', str, allow_none=True)
        check(i, 'users', list)
        for u in i['users']:
            check(u, 'firstname', str)
            check(u, 'id', str)
            check(u, 'lastname', str)
            check(u, 'username', str)
    # Top-level alert attributes.
    check(a, 'canManage', str)
    check(a, 'canUse', str)
    check(a, 'description', str)
    check(a, 'didTriggerLastEvaluation', str)
    check(a, 'executeOnEveryTrigger', str)
    check(a, 'id', str)
    check(a, 'lastEvaluated', str)
    check(a, 'lastTriggered', str)
    check(a, 'modifiedTime', str)
    check(a, 'name', str)
    check(a, 'owner', dict)
    check(a['owner'], 'firstname', str)
    check(a['owner'], 'id', str)
    check(a['owner'], 'lastname', str)
    check(a['owner'], 'username', str)
    check(a, 'ownerGroup', dict)
    check(a['ownerGroup'], 'description', str)
    check(a['ownerGroup'], 'id', str)
    check(a['ownerGroup'], 'name', str)
    check(a, 'query', dict)
    check(a['query'], 'description', str)
    check(a['query'], 'id', str)
    check(a['query'], 'name', str)
    check(a, 'schedule', dict)
    check(a['schedule'], 'type', str)
    check(a, 'status', str)
    check(a, 'triggerName', str)
    check(a, 'triggerOperator', str)
    check(a, 'triggerValue', str)
@pytest.mark.vcr()
def test_alerts_create_success(sc, alert):
    """The alert fixture's create() response has the same document layout as
    details(): validate action schemas and top-level attribute types."""
    a = alert
    assert isinstance(a, dict)
    check(a, 'action', list)
    # Each action's 'definition' schema depends on its 'type'.
    for i in a['action']:
        assert isinstance(i, dict)
        check(i, 'type', str)
        check(i, 'definition', dict)
        if i['type'] == 'notification':
            check(i['definition'], 'message', str)
            check(i['definition'], 'users', list)
            for u in i['definition']['users']:
                check(u, 'firstname', str)
                check(u, 'id', str)
                check(u, 'lastname', str)
                check(u, 'username', str)
        if i['type'] == 'email':
            check(i['definition'], 'subject', str)
            check(i['definition'], 'message', str)
            check(i['definition'], 'addresses', str)
            check(i['definition'], 'users', list)
            for u in i['definition']['users']:
                check(u, 'firstname', str)
                check(u, 'id', str)
                check(u, 'lastname', str)
                check(u, 'username', str)
            check(i['definition'], 'includeResults', str)
        if i['type'] == 'report':
            check(i['definition'], 'report', dict)
            check(i['definition']['report'], 'id', str)
        if i['type'] == 'scan':
            check(i['definition'], 'scan', dict)
            check(i['definition']['scan'], 'id', int)
        if i['type'] == 'syslog':
            check(i['definition'], 'host', str)
            check(i['definition'], 'port', str)
            check(i['definition'], 'message', str)
            check(i['definition'], 'severity', str)
        if i['type'] == 'ticket':
            check(i['definition'], 'assignee', dict)
            check(i['definition']['assignee'], 'id', str)
            check(i['definition'], 'name', str)
            check(i['definition'], 'description', str)
            check(i['definition'], 'notes', str)
        check(i, 'id', str)
        check(i, 'objectID', str, allow_none=True)
        check(i, 'users', list)
        for u in i['users']:
            check(u, 'firstname', str)
            check(u, 'id', str)
            check(u, 'lastname', str)
            check(u, 'username', str)
    # Top-level alert attributes.
    check(a, 'canManage', str)
    check(a, 'canUse', str)
    check(a, 'description', str)
    check(a, 'didTriggerLastEvaluation', str)
    check(a, 'executeOnEveryTrigger', str)
    check(a, 'id', str)
    check(a, 'lastEvaluated', str)
    check(a, 'lastTriggered', str)
    check(a, 'modifiedTime', str)
    check(a, 'name', str)
    check(a, 'owner', dict)
    check(a['owner'], 'firstname', str)
    check(a['owner'], 'id', str)
    check(a['owner'], 'lastname', str)
    check(a['owner'], 'username', str)
    check(a, 'ownerGroup', dict)
    check(a['ownerGroup'], 'description', str)
    check(a['ownerGroup'], 'id', str)
    check(a['ownerGroup'], 'name', str)
    check(a, 'query', dict)
    check(a['query'], 'description', str)
    check(a['query'], 'id', str)
    check(a['query'], 'name', str)
    check(a, 'schedule', dict)
    check(a['schedule'], 'type', str)
    check(a, 'status', str)
    check(a, 'triggerName', str)
    check(a, 'triggerOperator', str)
    check(a, 'triggerValue', str)
def test_alerts_update_id_typerror(sc):
    """A non-integer alert id must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts.update, 'one')
@pytest.mark.vcr()
def test_alerts_update_success(sc, alert):
    """update() returns the updated alert record in the same document layout
    as details(): validate action schemas and top-level attribute types."""
    a = sc.alerts.update(int(alert['id']), name='new name for example')
    assert isinstance(a, dict)
    check(a, 'action', list)
    # Each action's 'definition' schema depends on its 'type'.
    for i in a['action']:
        assert isinstance(i, dict)
        check(i, 'type', str)
        check(i, 'definition', dict)
        if i['type'] == 'notification':
            check(i['definition'], 'message', str)
            check(i['definition'], 'users', list)
            for u in i['definition']['users']:
                check(u, 'firstname', str)
                check(u, 'id', str)
                check(u, 'lastname', str)
                check(u, 'username', str)
        if i['type'] == 'email':
            check(i['definition'], 'subject', str)
            check(i['definition'], 'message', str)
            check(i['definition'], 'addresses', str)
            check(i['definition'], 'users', list)
            for u in i['definition']['users']:
                check(u, 'firstname', str)
                check(u, 'id', str)
                check(u, 'lastname', str)
                check(u, 'username', str)
            check(i['definition'], 'includeResults', str)
        if i['type'] == 'report':
            check(i['definition'], 'report', dict)
            check(i['definition']['report'], 'id', str)
        if i['type'] == 'scan':
            check(i['definition'], 'scan', dict)
            check(i['definition']['scan'], 'id', int)
        if i['type'] == 'syslog':
            check(i['definition'], 'host', str)
            check(i['definition'], 'port', str)
            check(i['definition'], 'message', str)
            check(i['definition'], 'severity', str)
        if i['type'] == 'ticket':
            check(i['definition'], 'assignee', dict)
            check(i['definition']['assignee'], 'id', str)
            check(i['definition'], 'name', str)
            check(i['definition'], 'description', str)
            check(i['definition'], 'notes', str)
        check(i, 'id', str)
        check(i, 'objectID', str, allow_none=True)
        check(i, 'users', list)
        for u in i['users']:
            check(u, 'firstname', str)
            check(u, 'id', str)
            check(u, 'lastname', str)
            check(u, 'username', str)
    # Top-level alert attributes.
    check(a, 'canManage', str)
    check(a, 'canUse', str)
    check(a, 'description', str)
    check(a, 'didTriggerLastEvaluation', str)
    check(a, 'executeOnEveryTrigger', str)
    check(a, 'id', str)
    check(a, 'lastEvaluated', str)
    check(a, 'lastTriggered', str)
    check(a, 'modifiedTime', str)
    check(a, 'name', str)
    check(a, 'owner', dict)
    check(a['owner'], 'firstname', str)
    check(a['owner'], 'id', str)
    check(a['owner'], 'lastname', str)
    check(a['owner'], 'username', str)
    check(a, 'ownerGroup', dict)
    check(a['ownerGroup'], 'description', str)
    check(a['ownerGroup'], 'id', str)
    check(a['ownerGroup'], 'name', str)
    check(a, 'query', dict)
    check(a['query'], 'description', str)
    check(a['query'], 'id', str)
    check(a['query'], 'name', str)
    check(a, 'schedule', dict)
    check(a['schedule'], 'type', str)
    check(a, 'status', str)
    check(a, 'triggerName', str)
    check(a, 'triggerOperator', str)
    check(a, 'triggerValue', str)
def test_alerts_delete_id_typeerror(sc):
    """A non-integer alert id must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts.delete, 'one')
@pytest.mark.vcr()
def test_alerts_delete_success(sc, alert):
    """delete() accepts an integer id; success means no exception is raised.
    The fixture's teardown tolerates the alert already being gone."""
    sc.alerts.delete(int(alert['id']))
def test_alerts_execute_id_typeerror(sc):
    """A non-integer alert id must be rejected with TypeError."""
    pytest.raises(TypeError, sc.alerts.execute, 'one')
@pytest.mark.vcr()
def test_alerts_execute_successs(sc, alert):
    """Executing an alert returns the fully populated alert document."""

    def verify_user(user):
        # Minimal user sub-document embedded in actions and definitions.
        check(user, 'firstname', str)
        check(user, 'id', str)
        check(user, 'lastname', str)
        check(user, 'username', str)

    def verify_action(action):
        # Each action carries a type-specific "definition" payload.
        assert isinstance(action, dict)
        check(action, 'type', str)
        check(action, 'definition', dict)
        definition = action['definition']
        if action['type'] == 'notification':
            check(definition, 'message', str)
            check(definition, 'users', list)
            for user in definition['users']:
                verify_user(user)
        if action['type'] == 'email':
            check(definition, 'subject', str)
            check(definition, 'message', str)
            check(definition, 'addresses', str)
            check(definition, 'users', list)
            for user in definition['users']:
                verify_user(user)
            check(definition, 'includeResults', str)
        if action['type'] == 'report':
            check(definition, 'report', dict)
            check(definition['report'], 'id', str)
        if action['type'] == 'scan':
            check(definition, 'scan', dict)
            check(definition['scan'], 'id', int)
        if action['type'] == 'syslog':
            check(definition, 'host', str)
            check(definition, 'port', str)
            check(definition, 'message', str)
            check(definition, 'severity', str)
        if action['type'] == 'ticket':
            check(definition, 'assignee', dict)
            check(definition['assignee'], 'id', str)
            check(definition, 'name', str)
            check(definition, 'description', str)
            check(definition, 'notes', str)
        check(action, 'id', str)
        check(action, 'objectID', str, allow_none=True)
        check(action, 'users', list)
        for user in action['users']:
            verify_user(user)

    a = sc.alerts.execute(int(alert['id']))
    assert isinstance(a, dict)
    check(a, 'action', list)
    for action in a['action']:
        verify_action(action)
    # Top-level alert fields (the API reports these as strings).
    for field in ('canManage', 'canUse', 'description',
                  'didTriggerLastEvaluation', 'executeOnEveryTrigger', 'id',
                  'lastEvaluated', 'lastTriggered', 'modifiedTime', 'name'):
        check(a, field, str)
    check(a, 'owner', dict)
    verify_user(a['owner'])
    check(a, 'ownerGroup', dict)
    for field in ('description', 'id', 'name'):
        check(a['ownerGroup'], field, str)
    check(a, 'query', dict)
    for field in ('description', 'id', 'name'):
        check(a['query'], field, str)
    check(a, 'schedule', dict)
    check(a['schedule'], 'type', str)
    for field in ('status', 'triggerName', 'triggerOperator', 'triggerValue'):
        check(a, field, str)
| 36.897155
| 71
| 0.539082
| 1,931
| 16,862
| 4.653547
| 0.062144
| 0.173603
| 0.10316
| 0.101491
| 0.90385
| 0.887937
| 0.857
| 0.839751
| 0.821723
| 0.781438
| 0
| 0.00229
| 0.274819
| 16,862
| 457
| 72
| 36.897155
| 0.732581
| 0
| 0
| 0.824885
| 0
| 0
| 0.236909
| 0.010674
| 0
| 0
| 0
| 0
| 0.023041
| 1
| 0.0553
| false
| 0.002304
| 0.006912
| 0
| 0.064516
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81f4638f3e7069d994835efb2cb64c9143d6d7bb
| 55,280
|
py
|
Python
|
sdk/python/pulumi_gcp/storage/bucket_object.py
|
la3mmchen/pulumi-gcp
|
0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/storage/bucket_object.py
|
la3mmchen/pulumi-gcp
|
0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/storage/bucket_object.py
|
la3mmchen/pulumi-gcp
|
0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['BucketObjectArgs', 'BucketObject']
@pulumi.input_type
class BucketObjectArgs:
    """Input arguments for constructing a ``BucketObject`` resource.

    NOTE: this class is machine-generated by the Pulumi Terraform Bridge
    (see the file header); do not hand-edit the code, only regenerate.
    """
    def __init__(__self__, *,
                 bucket: pulumi.Input[str],
                 cache_control: Optional[pulumi.Input[str]] = None,
                 content: Optional[pulumi.Input[str]] = None,
                 content_disposition: Optional[pulumi.Input[str]] = None,
                 content_encoding: Optional[pulumi.Input[str]] = None,
                 content_language: Optional[pulumi.Input[str]] = None,
                 content_type: Optional[pulumi.Input[str]] = None,
                 customer_encryption: Optional[pulumi.Input['BucketObjectCustomerEncryptionArgs']] = None,
                 detect_md5hash: Optional[pulumi.Input[str]] = None,
                 event_based_hold: Optional[pulumi.Input[bool]] = None,
                 kms_key_name: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 source: Optional[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]] = None,
                 storage_class: Optional[pulumi.Input[str]] = None,
                 temporary_hold: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a BucketObject resource.
        :param pulumi.Input[str] bucket: The name of the containing bucket.
        :param pulumi.Input[str] cache_control: [Cache-Control](https://tools.ietf.org/html/rfc7234#section-5.2)
               directive to specify caching behavior of object data. If omitted and object is accessible to all anonymous users, the default will be public, max-age=3600
        :param pulumi.Input[str] content: Data as `string` to be uploaded. Must be defined if `source` is not. **Note**: The `content` field is marked as sensitive.
        :param pulumi.Input[str] content_disposition: [Content-Disposition](https://tools.ietf.org/html/rfc6266) of the object data.
        :param pulumi.Input[str] content_encoding: [Content-Encoding](https://tools.ietf.org/html/rfc7231#section-3.1.2.2) of the object data.
        :param pulumi.Input[str] content_language: [Content-Language](https://tools.ietf.org/html/rfc7231#section-3.1.3.2) of the object data.
        :param pulumi.Input[str] content_type: [Content-Type](https://tools.ietf.org/html/rfc7231#section-3.1.1.5) of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".
        :param pulumi.Input['BucketObjectCustomerEncryptionArgs'] customer_encryption: Enables object encryption with Customer-Supplied Encryption Key (CSEK). Google [documentation about CSEK.](https://cloud.google.com/storage/docs/encryption/customer-supplied-keys)
               Structure is documented below.
        :param pulumi.Input[bool] event_based_hold: Whether an object is under event-based hold. Event-based hold is a way to retain objects until an event occurs, which is
               signified by the hold's release (i.e. this value is set to false). After being released (set to false), such objects
               will be subject to bucket-level retention (if any).
        :param pulumi.Input[str] kms_key_name: The resource name of the Cloud KMS key that will be used to [encrypt](https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys) the object.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] metadata: User-provided metadata, in key/value pairs.
        :param pulumi.Input[str] name: The name of the object. If you're interpolating the name of this object, see `output_name` instead.
        :param pulumi.Input[Union[pulumi.Asset, pulumi.Archive]] source: A path to the data you want to upload. Must be defined
               if `content` is not.
        :param pulumi.Input[str] storage_class: The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object.
               Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. If not provided, this defaults to the bucket's default
               storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class.
        :param pulumi.Input[bool] temporary_hold: Whether an object is under temporary hold. While this flag is set to true, the object is protected against deletion and
               overwrites.
        """
        pulumi.set(__self__, "bucket", bucket)
        # Only explicitly-provided optional arguments are recorded, so unset
        # fields stay absent rather than being stored as None.
        if cache_control is not None:
            pulumi.set(__self__, "cache_control", cache_control)
        if content is not None:
            pulumi.set(__self__, "content", content)
        if content_disposition is not None:
            pulumi.set(__self__, "content_disposition", content_disposition)
        if content_encoding is not None:
            pulumi.set(__self__, "content_encoding", content_encoding)
        if content_language is not None:
            pulumi.set(__self__, "content_language", content_language)
        if content_type is not None:
            pulumi.set(__self__, "content_type", content_type)
        if customer_encryption is not None:
            pulumi.set(__self__, "customer_encryption", customer_encryption)
        if detect_md5hash is not None:
            pulumi.set(__self__, "detect_md5hash", detect_md5hash)
        if event_based_hold is not None:
            pulumi.set(__self__, "event_based_hold", event_based_hold)
        if kms_key_name is not None:
            pulumi.set(__self__, "kms_key_name", kms_key_name)
        if metadata is not None:
            pulumi.set(__self__, "metadata", metadata)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if source is not None:
            pulumi.set(__self__, "source", source)
        if storage_class is not None:
            pulumi.set(__self__, "storage_class", storage_class)
        if temporary_hold is not None:
            pulumi.set(__self__, "temporary_hold", temporary_hold)
    @property
    @pulumi.getter
    def bucket(self) -> pulumi.Input[str]:
        """
        The name of the containing bucket.
        """
        return pulumi.get(self, "bucket")
    @bucket.setter
    def bucket(self, value: pulumi.Input[str]):
        pulumi.set(self, "bucket", value)
    @property
    @pulumi.getter(name="cacheControl")
    def cache_control(self) -> Optional[pulumi.Input[str]]:
        """
        [Cache-Control](https://tools.ietf.org/html/rfc7234#section-5.2)
        directive to specify caching behavior of object data. If omitted and object is accessible to all anonymous users, the default will be public, max-age=3600
        """
        return pulumi.get(self, "cache_control")
    @cache_control.setter
    def cache_control(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cache_control", value)
    @property
    @pulumi.getter
    def content(self) -> Optional[pulumi.Input[str]]:
        """
        Data as `string` to be uploaded. Must be defined if `source` is not. **Note**: The `content` field is marked as sensitive.
        """
        return pulumi.get(self, "content")
    @content.setter
    def content(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content", value)
    @property
    @pulumi.getter(name="contentDisposition")
    def content_disposition(self) -> Optional[pulumi.Input[str]]:
        """
        [Content-Disposition](https://tools.ietf.org/html/rfc6266) of the object data.
        """
        return pulumi.get(self, "content_disposition")
    @content_disposition.setter
    def content_disposition(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content_disposition", value)
    @property
    @pulumi.getter(name="contentEncoding")
    def content_encoding(self) -> Optional[pulumi.Input[str]]:
        """
        [Content-Encoding](https://tools.ietf.org/html/rfc7231#section-3.1.2.2) of the object data.
        """
        return pulumi.get(self, "content_encoding")
    @content_encoding.setter
    def content_encoding(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content_encoding", value)
    @property
    @pulumi.getter(name="contentLanguage")
    def content_language(self) -> Optional[pulumi.Input[str]]:
        """
        [Content-Language](https://tools.ietf.org/html/rfc7231#section-3.1.3.2) of the object data.
        """
        return pulumi.get(self, "content_language")
    @content_language.setter
    def content_language(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content_language", value)
    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> Optional[pulumi.Input[str]]:
        """
        [Content-Type](https://tools.ietf.org/html/rfc7231#section-3.1.1.5) of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".
        """
        return pulumi.get(self, "content_type")
    @content_type.setter
    def content_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content_type", value)
    @property
    @pulumi.getter(name="customerEncryption")
    def customer_encryption(self) -> Optional[pulumi.Input['BucketObjectCustomerEncryptionArgs']]:
        """
        Enables object encryption with Customer-Supplied Encryption Key (CSEK). Google [documentation about CSEK.](https://cloud.google.com/storage/docs/encryption/customer-supplied-keys)
        Structure is documented below.
        """
        return pulumi.get(self, "customer_encryption")
    @customer_encryption.setter
    def customer_encryption(self, value: Optional[pulumi.Input['BucketObjectCustomerEncryptionArgs']]):
        pulumi.set(self, "customer_encryption", value)
    @property
    @pulumi.getter(name="detectMd5hash")
    def detect_md5hash(self) -> Optional[pulumi.Input[str]]:
        """
        The ``detect_md5hash`` input value.

        NOTE(review): the upstream provider schema ships no description for
        this field; presumably an md5-based change-detection value — confirm
        against the provider documentation before relying on its semantics.
        """
        return pulumi.get(self, "detect_md5hash")
    @detect_md5hash.setter
    def detect_md5hash(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "detect_md5hash", value)
    @property
    @pulumi.getter(name="eventBasedHold")
    def event_based_hold(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether an object is under event-based hold. Event-based hold is a way to retain objects until an event occurs, which is
        signified by the hold's release (i.e. this value is set to false). After being released (set to false), such objects
        will be subject to bucket-level retention (if any).
        """
        return pulumi.get(self, "event_based_hold")
    @event_based_hold.setter
    def event_based_hold(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "event_based_hold", value)
    @property
    @pulumi.getter(name="kmsKeyName")
    def kms_key_name(self) -> Optional[pulumi.Input[str]]:
        """
        The resource name of the Cloud KMS key that will be used to [encrypt](https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys) the object.
        """
        return pulumi.get(self, "kms_key_name")
    @kms_key_name.setter
    def kms_key_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kms_key_name", value)
    @property
    @pulumi.getter
    def metadata(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        User-provided metadata, in key/value pairs.
        """
        return pulumi.get(self, "metadata")
    @metadata.setter
    def metadata(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "metadata", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the object. If you're interpolating the name of this object, see `output_name` instead.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def source(self) -> Optional[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]]:
        """
        A path to the data you want to upload. Must be defined
        if `content` is not.
        """
        return pulumi.get(self, "source")
    @source.setter
    def source(self, value: Optional[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]]):
        pulumi.set(self, "source", value)
    @property
    @pulumi.getter(name="storageClass")
    def storage_class(self) -> Optional[pulumi.Input[str]]:
        """
        The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object.
        Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. If not provided, this defaults to the bucket's default
        storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class.
        """
        return pulumi.get(self, "storage_class")
    @storage_class.setter
    def storage_class(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "storage_class", value)
    @property
    @pulumi.getter(name="temporaryHold")
    def temporary_hold(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether an object is under temporary hold. While this flag is set to true, the object is protected against deletion and
        overwrites.
        """
        return pulumi.get(self, "temporary_hold")
    @temporary_hold.setter
    def temporary_hold(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "temporary_hold", value)
@pulumi.input_type
class _BucketObjectState:
    """State properties for looking up / filtering ``BucketObject`` resources.

    Unlike ``BucketObjectArgs``, every field (including ``bucket``) is
    optional, and server-computed outputs (``crc32c``, ``md5hash``,
    ``media_link``, ``output_name``, ``self_link``) are included.

    NOTE: this class is machine-generated by the Pulumi Terraform Bridge
    (see the file header); do not hand-edit the code, only regenerate.
    """
    def __init__(__self__, *,
                 bucket: Optional[pulumi.Input[str]] = None,
                 cache_control: Optional[pulumi.Input[str]] = None,
                 content: Optional[pulumi.Input[str]] = None,
                 content_disposition: Optional[pulumi.Input[str]] = None,
                 content_encoding: Optional[pulumi.Input[str]] = None,
                 content_language: Optional[pulumi.Input[str]] = None,
                 content_type: Optional[pulumi.Input[str]] = None,
                 crc32c: Optional[pulumi.Input[str]] = None,
                 customer_encryption: Optional[pulumi.Input['BucketObjectCustomerEncryptionArgs']] = None,
                 detect_md5hash: Optional[pulumi.Input[str]] = None,
                 event_based_hold: Optional[pulumi.Input[bool]] = None,
                 kms_key_name: Optional[pulumi.Input[str]] = None,
                 md5hash: Optional[pulumi.Input[str]] = None,
                 media_link: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 output_name: Optional[pulumi.Input[str]] = None,
                 self_link: Optional[pulumi.Input[str]] = None,
                 source: Optional[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]] = None,
                 storage_class: Optional[pulumi.Input[str]] = None,
                 temporary_hold: Optional[pulumi.Input[bool]] = None):
        """
        Input properties used for looking up and filtering BucketObject resources.
        :param pulumi.Input[str] bucket: The name of the containing bucket.
        :param pulumi.Input[str] cache_control: [Cache-Control](https://tools.ietf.org/html/rfc7234#section-5.2)
               directive to specify caching behavior of object data. If omitted and object is accessible to all anonymous users, the default will be public, max-age=3600
        :param pulumi.Input[str] content: Data as `string` to be uploaded. Must be defined if `source` is not. **Note**: The `content` field is marked as sensitive.
        :param pulumi.Input[str] content_disposition: [Content-Disposition](https://tools.ietf.org/html/rfc6266) of the object data.
        :param pulumi.Input[str] content_encoding: [Content-Encoding](https://tools.ietf.org/html/rfc7231#section-3.1.2.2) of the object data.
        :param pulumi.Input[str] content_language: [Content-Language](https://tools.ietf.org/html/rfc7231#section-3.1.3.2) of the object data.
        :param pulumi.Input[str] content_type: [Content-Type](https://tools.ietf.org/html/rfc7231#section-3.1.1.5) of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".
        :param pulumi.Input[str] crc32c: (Computed) Base 64 CRC32 hash of the uploaded data.
        :param pulumi.Input['BucketObjectCustomerEncryptionArgs'] customer_encryption: Enables object encryption with Customer-Supplied Encryption Key (CSEK). Google [documentation about CSEK.](https://cloud.google.com/storage/docs/encryption/customer-supplied-keys)
               Structure is documented below.
        :param pulumi.Input[bool] event_based_hold: Whether an object is under event-based hold. Event-based hold is a way to retain objects until an event occurs, which is
               signified by the hold's release (i.e. this value is set to false). After being released (set to false), such objects
               will be subject to bucket-level retention (if any).
        :param pulumi.Input[str] kms_key_name: The resource name of the Cloud KMS key that will be used to [encrypt](https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys) the object.
        :param pulumi.Input[str] md5hash: (Computed) Base 64 MD5 hash of the uploaded data.
        :param pulumi.Input[str] media_link: (Computed) A url reference to download this object.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] metadata: User-provided metadata, in key/value pairs.
        :param pulumi.Input[str] name: The name of the object. If you're interpolating the name of this object, see `output_name` instead.
        :param pulumi.Input[str] output_name: (Computed) The name of the object. Use this field in interpolations with `storage.ObjectACL` to recreate
               `storage.ObjectACL` resources when your `storage.BucketObject` is recreated.
        :param pulumi.Input[str] self_link: (Computed) A url reference to this object.
        :param pulumi.Input[Union[pulumi.Asset, pulumi.Archive]] source: A path to the data you want to upload. Must be defined
               if `content` is not.
        :param pulumi.Input[str] storage_class: The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object.
               Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. If not provided, this defaults to the bucket's default
               storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class.
        :param pulumi.Input[bool] temporary_hold: Whether an object is under temporary hold. While this flag is set to true, the object is protected against deletion and
               overwrites.
        """
        # Only explicitly-provided values are recorded, so unset state fields
        # stay absent rather than being stored as None.
        if bucket is not None:
            pulumi.set(__self__, "bucket", bucket)
        if cache_control is not None:
            pulumi.set(__self__, "cache_control", cache_control)
        if content is not None:
            pulumi.set(__self__, "content", content)
        if content_disposition is not None:
            pulumi.set(__self__, "content_disposition", content_disposition)
        if content_encoding is not None:
            pulumi.set(__self__, "content_encoding", content_encoding)
        if content_language is not None:
            pulumi.set(__self__, "content_language", content_language)
        if content_type is not None:
            pulumi.set(__self__, "content_type", content_type)
        if crc32c is not None:
            pulumi.set(__self__, "crc32c", crc32c)
        if customer_encryption is not None:
            pulumi.set(__self__, "customer_encryption", customer_encryption)
        if detect_md5hash is not None:
            pulumi.set(__self__, "detect_md5hash", detect_md5hash)
        if event_based_hold is not None:
            pulumi.set(__self__, "event_based_hold", event_based_hold)
        if kms_key_name is not None:
            pulumi.set(__self__, "kms_key_name", kms_key_name)
        if md5hash is not None:
            pulumi.set(__self__, "md5hash", md5hash)
        if media_link is not None:
            pulumi.set(__self__, "media_link", media_link)
        if metadata is not None:
            pulumi.set(__self__, "metadata", metadata)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if output_name is not None:
            pulumi.set(__self__, "output_name", output_name)
        if self_link is not None:
            pulumi.set(__self__, "self_link", self_link)
        if source is not None:
            pulumi.set(__self__, "source", source)
        if storage_class is not None:
            pulumi.set(__self__, "storage_class", storage_class)
        if temporary_hold is not None:
            pulumi.set(__self__, "temporary_hold", temporary_hold)
    @property
    @pulumi.getter
    def bucket(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the containing bucket.
        """
        return pulumi.get(self, "bucket")
    @bucket.setter
    def bucket(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "bucket", value)
    @property
    @pulumi.getter(name="cacheControl")
    def cache_control(self) -> Optional[pulumi.Input[str]]:
        """
        [Cache-Control](https://tools.ietf.org/html/rfc7234#section-5.2)
        directive to specify caching behavior of object data. If omitted and object is accessible to all anonymous users, the default will be public, max-age=3600
        """
        return pulumi.get(self, "cache_control")
    @cache_control.setter
    def cache_control(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "cache_control", value)
    @property
    @pulumi.getter
    def content(self) -> Optional[pulumi.Input[str]]:
        """
        Data as `string` to be uploaded. Must be defined if `source` is not. **Note**: The `content` field is marked as sensitive.
        """
        return pulumi.get(self, "content")
    @content.setter
    def content(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content", value)
    @property
    @pulumi.getter(name="contentDisposition")
    def content_disposition(self) -> Optional[pulumi.Input[str]]:
        """
        [Content-Disposition](https://tools.ietf.org/html/rfc6266) of the object data.
        """
        return pulumi.get(self, "content_disposition")
    @content_disposition.setter
    def content_disposition(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content_disposition", value)
    @property
    @pulumi.getter(name="contentEncoding")
    def content_encoding(self) -> Optional[pulumi.Input[str]]:
        """
        [Content-Encoding](https://tools.ietf.org/html/rfc7231#section-3.1.2.2) of the object data.
        """
        return pulumi.get(self, "content_encoding")
    @content_encoding.setter
    def content_encoding(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content_encoding", value)
    @property
    @pulumi.getter(name="contentLanguage")
    def content_language(self) -> Optional[pulumi.Input[str]]:
        """
        [Content-Language](https://tools.ietf.org/html/rfc7231#section-3.1.3.2) of the object data.
        """
        return pulumi.get(self, "content_language")
    @content_language.setter
    def content_language(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content_language", value)
    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> Optional[pulumi.Input[str]]:
        """
        [Content-Type](https://tools.ietf.org/html/rfc7231#section-3.1.1.5) of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".
        """
        return pulumi.get(self, "content_type")
    @content_type.setter
    def content_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content_type", value)
    @property
    @pulumi.getter
    def crc32c(self) -> Optional[pulumi.Input[str]]:
        """
        (Computed) Base 64 CRC32 hash of the uploaded data.
        """
        return pulumi.get(self, "crc32c")
    @crc32c.setter
    def crc32c(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "crc32c", value)
    @property
    @pulumi.getter(name="customerEncryption")
    def customer_encryption(self) -> Optional[pulumi.Input['BucketObjectCustomerEncryptionArgs']]:
        """
        Enables object encryption with Customer-Supplied Encryption Key (CSEK). Google [documentation about CSEK.](https://cloud.google.com/storage/docs/encryption/customer-supplied-keys)
        Structure is documented below.
        """
        return pulumi.get(self, "customer_encryption")
    @customer_encryption.setter
    def customer_encryption(self, value: Optional[pulumi.Input['BucketObjectCustomerEncryptionArgs']]):
        pulumi.set(self, "customer_encryption", value)
    @property
    @pulumi.getter(name="detectMd5hash")
    def detect_md5hash(self) -> Optional[pulumi.Input[str]]:
        """
        The ``detect_md5hash`` input value.

        NOTE(review): the upstream provider schema ships no description for
        this field; presumably an md5-based change-detection value — confirm
        against the provider documentation before relying on its semantics.
        """
        return pulumi.get(self, "detect_md5hash")
    @detect_md5hash.setter
    def detect_md5hash(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "detect_md5hash", value)
    @property
    @pulumi.getter(name="eventBasedHold")
    def event_based_hold(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether an object is under event-based hold. Event-based hold is a way to retain objects until an event occurs, which is
        signified by the hold's release (i.e. this value is set to false). After being released (set to false), such objects
        will be subject to bucket-level retention (if any).
        """
        return pulumi.get(self, "event_based_hold")
    @event_based_hold.setter
    def event_based_hold(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "event_based_hold", value)
    @property
    @pulumi.getter(name="kmsKeyName")
    def kms_key_name(self) -> Optional[pulumi.Input[str]]:
        """
        The resource name of the Cloud KMS key that will be used to [encrypt](https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys) the object.
        """
        return pulumi.get(self, "kms_key_name")
    @kms_key_name.setter
    def kms_key_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kms_key_name", value)
    @property
    @pulumi.getter
    def md5hash(self) -> Optional[pulumi.Input[str]]:
        """
        (Computed) Base 64 MD5 hash of the uploaded data.
        """
        return pulumi.get(self, "md5hash")
    @md5hash.setter
    def md5hash(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "md5hash", value)
    @property
    @pulumi.getter(name="mediaLink")
    def media_link(self) -> Optional[pulumi.Input[str]]:
        """
        (Computed) A url reference to download this object.
        """
        return pulumi.get(self, "media_link")
    @media_link.setter
    def media_link(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "media_link", value)
    @property
    @pulumi.getter
    def metadata(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        User-provided metadata, in key/value pairs.
        """
        return pulumi.get(self, "metadata")
    @metadata.setter
    def metadata(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "metadata", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the object. If you're interpolating the name of this object, see `output_name` instead.
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="outputName")
    def output_name(self) -> Optional[pulumi.Input[str]]:
        """
        (Computed) The name of the object. Use this field in interpolations with `storage.ObjectACL` to recreate
        `storage.ObjectACL` resources when your `storage.BucketObject` is recreated.
        """
        return pulumi.get(self, "output_name")
    @output_name.setter
    def output_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "output_name", value)
    @property
    @pulumi.getter(name="selfLink")
    def self_link(self) -> Optional[pulumi.Input[str]]:
        """
        (Computed) A url reference to this object.
        """
        return pulumi.get(self, "self_link")
    @self_link.setter
    def self_link(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "self_link", value)
    @property
    @pulumi.getter
    def source(self) -> Optional[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]]:
        """
        A path to the data you want to upload. Must be defined
        if `content` is not.
        """
        return pulumi.get(self, "source")
    @source.setter
    def source(self, value: Optional[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]]):
        pulumi.set(self, "source", value)
    @property
    @pulumi.getter(name="storageClass")
    def storage_class(self) -> Optional[pulumi.Input[str]]:
        """
        The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object.
        Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. If not provided, this defaults to the bucket's default
        storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class.
        """
        return pulumi.get(self, "storage_class")
    @storage_class.setter
    def storage_class(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "storage_class", value)
    @property
    @pulumi.getter(name="temporaryHold")
    def temporary_hold(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether an object is under temporary hold. While this flag is set to true, the object is protected against deletion and
        overwrites.
        """
        return pulumi.get(self, "temporary_hold")
    @temporary_hold.setter
    def temporary_hold(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "temporary_hold", value)
class BucketObject(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
bucket: Optional[pulumi.Input[str]] = None,
cache_control: Optional[pulumi.Input[str]] = None,
content: Optional[pulumi.Input[str]] = None,
content_disposition: Optional[pulumi.Input[str]] = None,
content_encoding: Optional[pulumi.Input[str]] = None,
content_language: Optional[pulumi.Input[str]] = None,
content_type: Optional[pulumi.Input[str]] = None,
customer_encryption: Optional[pulumi.Input[pulumi.InputType['BucketObjectCustomerEncryptionArgs']]] = None,
detect_md5hash: Optional[pulumi.Input[str]] = None,
event_based_hold: Optional[pulumi.Input[bool]] = None,
kms_key_name: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]] = None,
storage_class: Optional[pulumi.Input[str]] = None,
temporary_hold: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
Creates a new object inside an existing bucket in Google cloud storage service (GCS).
[ACLs](https://cloud.google.com/storage/docs/access-control/lists) can be applied using the `storage.ObjectACL` resource.
For more information see
[the official documentation](https://cloud.google.com/storage/docs/key-terms#objects)
and
[API](https://cloud.google.com/storage/docs/json_api/v1/objects).
## Example Usage
Example creating a public object in an existing `image-store` bucket.
```python
import pulumi
import pulumi_gcp as gcp
picture = gcp.storage.BucketObject("picture",
bucket="image-store",
source=pulumi.FileAsset("/images/nature/garden-tiger-moth.jpg"))
```
## Import
This resource does not support import.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] bucket: The name of the containing bucket.
:param pulumi.Input[str] cache_control: [Cache-Control](https://tools.ietf.org/html/rfc7234#section-5.2)
directive to specify caching behavior of object data. If omitted and object is accessible to all anonymous users, the default will be public, max-age=3600
:param pulumi.Input[str] content: Data as `string` to be uploaded. Must be defined if `source` is not. **Note**: The `content` field is marked as sensitive.
:param pulumi.Input[str] content_disposition: [Content-Disposition](https://tools.ietf.org/html/rfc6266) of the object data.
:param pulumi.Input[str] content_encoding: [Content-Encoding](https://tools.ietf.org/html/rfc7231#section-3.1.2.2) of the object data.
:param pulumi.Input[str] content_language: [Content-Language](https://tools.ietf.org/html/rfc7231#section-3.1.3.2) of the object data.
:param pulumi.Input[str] content_type: [Content-Type](https://tools.ietf.org/html/rfc7231#section-3.1.1.5) of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".
:param pulumi.Input[pulumi.InputType['BucketObjectCustomerEncryptionArgs']] customer_encryption: Enables object encryption with Customer-Supplied Encryption Key (CSEK). Google [documentation about CSEK.](https://cloud.google.com/storage/docs/encryption/customer-supplied-keys)
Structure is documented below.
:param pulumi.Input[bool] event_based_hold: Whether an object is under event-based hold. Event-based hold is a way to retain objects until an event occurs, which is
signified by the hold's release (i.e. this value is set to false). After being released (set to false), such objects
will be subject to bucket-level retention (if any).
:param pulumi.Input[str] kms_key_name: The resource name of the Cloud KMS key that will be used to [encrypt](https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys) the object.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] metadata: User-provided metadata, in key/value pairs.
:param pulumi.Input[str] name: The name of the object. If you're interpolating the name of this object, see `output_name` instead.
:param pulumi.Input[Union[pulumi.Asset, pulumi.Archive]] source: A path to the data you want to upload. Must be defined
if `content` is not.
:param pulumi.Input[str] storage_class: The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object.
Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. If not provided, this defaults to the bucket's default
storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class.
:param pulumi.Input[bool] temporary_hold: Whether an object is under temporary hold. While this flag is set to true, the object is protected against deletion and
overwrites.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: BucketObjectArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Creates a new object inside an existing bucket in Google cloud storage service (GCS).
[ACLs](https://cloud.google.com/storage/docs/access-control/lists) can be applied using the `storage.ObjectACL` resource.
For more information see
[the official documentation](https://cloud.google.com/storage/docs/key-terms#objects)
and
[API](https://cloud.google.com/storage/docs/json_api/v1/objects).
## Example Usage
Example creating a public object in an existing `image-store` bucket.
```python
import pulumi
import pulumi_gcp as gcp
picture = gcp.storage.BucketObject("picture",
bucket="image-store",
source=pulumi.FileAsset("/images/nature/garden-tiger-moth.jpg"))
```
## Import
This resource does not support import.
:param str resource_name: The name of the resource.
:param BucketObjectArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(BucketObjectArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
bucket: Optional[pulumi.Input[str]] = None,
cache_control: Optional[pulumi.Input[str]] = None,
content: Optional[pulumi.Input[str]] = None,
content_disposition: Optional[pulumi.Input[str]] = None,
content_encoding: Optional[pulumi.Input[str]] = None,
content_language: Optional[pulumi.Input[str]] = None,
content_type: Optional[pulumi.Input[str]] = None,
customer_encryption: Optional[pulumi.Input[pulumi.InputType['BucketObjectCustomerEncryptionArgs']]] = None,
detect_md5hash: Optional[pulumi.Input[str]] = None,
event_based_hold: Optional[pulumi.Input[bool]] = None,
kms_key_name: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]] = None,
storage_class: Optional[pulumi.Input[str]] = None,
temporary_hold: Optional[pulumi.Input[bool]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = BucketObjectArgs.__new__(BucketObjectArgs)
if bucket is None and not opts.urn:
raise TypeError("Missing required property 'bucket'")
__props__.__dict__["bucket"] = bucket
__props__.__dict__["cache_control"] = cache_control
__props__.__dict__["content"] = content
__props__.__dict__["content_disposition"] = content_disposition
__props__.__dict__["content_encoding"] = content_encoding
__props__.__dict__["content_language"] = content_language
__props__.__dict__["content_type"] = content_type
__props__.__dict__["customer_encryption"] = customer_encryption
__props__.__dict__["detect_md5hash"] = detect_md5hash
__props__.__dict__["event_based_hold"] = event_based_hold
__props__.__dict__["kms_key_name"] = kms_key_name
__props__.__dict__["metadata"] = metadata
__props__.__dict__["name"] = name
__props__.__dict__["source"] = source
__props__.__dict__["storage_class"] = storage_class
__props__.__dict__["temporary_hold"] = temporary_hold
__props__.__dict__["crc32c"] = None
__props__.__dict__["md5hash"] = None
__props__.__dict__["media_link"] = None
__props__.__dict__["output_name"] = None
__props__.__dict__["self_link"] = None
super(BucketObject, __self__).__init__(
'gcp:storage/bucketObject:BucketObject',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
bucket: Optional[pulumi.Input[str]] = None,
cache_control: Optional[pulumi.Input[str]] = None,
content: Optional[pulumi.Input[str]] = None,
content_disposition: Optional[pulumi.Input[str]] = None,
content_encoding: Optional[pulumi.Input[str]] = None,
content_language: Optional[pulumi.Input[str]] = None,
content_type: Optional[pulumi.Input[str]] = None,
crc32c: Optional[pulumi.Input[str]] = None,
customer_encryption: Optional[pulumi.Input[pulumi.InputType['BucketObjectCustomerEncryptionArgs']]] = None,
detect_md5hash: Optional[pulumi.Input[str]] = None,
event_based_hold: Optional[pulumi.Input[bool]] = None,
kms_key_name: Optional[pulumi.Input[str]] = None,
md5hash: Optional[pulumi.Input[str]] = None,
media_link: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
output_name: Optional[pulumi.Input[str]] = None,
self_link: Optional[pulumi.Input[str]] = None,
source: Optional[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]] = None,
storage_class: Optional[pulumi.Input[str]] = None,
temporary_hold: Optional[pulumi.Input[bool]] = None) -> 'BucketObject':
"""
Get an existing BucketObject resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] bucket: The name of the containing bucket.
:param pulumi.Input[str] cache_control: [Cache-Control](https://tools.ietf.org/html/rfc7234#section-5.2)
directive to specify caching behavior of object data. If omitted and object is accessible to all anonymous users, the default will be public, max-age=3600
:param pulumi.Input[str] content: Data as `string` to be uploaded. Must be defined if `source` is not. **Note**: The `content` field is marked as sensitive.
:param pulumi.Input[str] content_disposition: [Content-Disposition](https://tools.ietf.org/html/rfc6266) of the object data.
:param pulumi.Input[str] content_encoding: [Content-Encoding](https://tools.ietf.org/html/rfc7231#section-3.1.2.2) of the object data.
:param pulumi.Input[str] content_language: [Content-Language](https://tools.ietf.org/html/rfc7231#section-3.1.3.2) of the object data.
:param pulumi.Input[str] content_type: [Content-Type](https://tools.ietf.org/html/rfc7231#section-3.1.1.5) of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".
:param pulumi.Input[str] crc32c: (Computed) Base 64 CRC32 hash of the uploaded data.
:param pulumi.Input[pulumi.InputType['BucketObjectCustomerEncryptionArgs']] customer_encryption: Enables object encryption with Customer-Supplied Encryption Key (CSEK). Google [documentation about CSEK.](https://cloud.google.com/storage/docs/encryption/customer-supplied-keys)
Structure is documented below.
:param pulumi.Input[bool] event_based_hold: Whether an object is under event-based hold. Event-based hold is a way to retain objects until an event occurs, which is
signified by the hold's release (i.e. this value is set to false). After being released (set to false), such objects
will be subject to bucket-level retention (if any).
:param pulumi.Input[str] kms_key_name: The resource name of the Cloud KMS key that will be used to [encrypt](https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys) the object.
:param pulumi.Input[str] md5hash: (Computed) Base 64 MD5 hash of the uploaded data.
:param pulumi.Input[str] media_link: (Computed) A url reference to download this object.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] metadata: User-provided metadata, in key/value pairs.
:param pulumi.Input[str] name: The name of the object. If you're interpolating the name of this object, see `output_name` instead.
:param pulumi.Input[str] output_name: (Computed) The name of the object. Use this field in interpolations with `storage.ObjectACL` to recreate
`storage.ObjectACL` resources when your `storage.BucketObject` is recreated.
:param pulumi.Input[str] self_link: (Computed) A url reference to this object.
:param pulumi.Input[Union[pulumi.Asset, pulumi.Archive]] source: A path to the data you want to upload. Must be defined
if `content` is not.
:param pulumi.Input[str] storage_class: The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object.
Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. If not provided, this defaults to the bucket's default
storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class.
:param pulumi.Input[bool] temporary_hold: Whether an object is under temporary hold. While this flag is set to true, the object is protected against deletion and
overwrites.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _BucketObjectState.__new__(_BucketObjectState)
__props__.__dict__["bucket"] = bucket
__props__.__dict__["cache_control"] = cache_control
__props__.__dict__["content"] = content
__props__.__dict__["content_disposition"] = content_disposition
__props__.__dict__["content_encoding"] = content_encoding
__props__.__dict__["content_language"] = content_language
__props__.__dict__["content_type"] = content_type
__props__.__dict__["crc32c"] = crc32c
__props__.__dict__["customer_encryption"] = customer_encryption
__props__.__dict__["detect_md5hash"] = detect_md5hash
__props__.__dict__["event_based_hold"] = event_based_hold
__props__.__dict__["kms_key_name"] = kms_key_name
__props__.__dict__["md5hash"] = md5hash
__props__.__dict__["media_link"] = media_link
__props__.__dict__["metadata"] = metadata
__props__.__dict__["name"] = name
__props__.__dict__["output_name"] = output_name
__props__.__dict__["self_link"] = self_link
__props__.__dict__["source"] = source
__props__.__dict__["storage_class"] = storage_class
__props__.__dict__["temporary_hold"] = temporary_hold
return BucketObject(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def bucket(self) -> pulumi.Output[str]:
"""
The name of the containing bucket.
"""
return pulumi.get(self, "bucket")
@property
@pulumi.getter(name="cacheControl")
def cache_control(self) -> pulumi.Output[Optional[str]]:
"""
[Cache-Control](https://tools.ietf.org/html/rfc7234#section-5.2)
directive to specify caching behavior of object data. If omitted and object is accessible to all anonymous users, the default will be public, max-age=3600
"""
return pulumi.get(self, "cache_control")
@property
@pulumi.getter
def content(self) -> pulumi.Output[Optional[str]]:
"""
Data as `string` to be uploaded. Must be defined if `source` is not. **Note**: The `content` field is marked as sensitive.
"""
return pulumi.get(self, "content")
@property
@pulumi.getter(name="contentDisposition")
def content_disposition(self) -> pulumi.Output[Optional[str]]:
"""
[Content-Disposition](https://tools.ietf.org/html/rfc6266) of the object data.
"""
return pulumi.get(self, "content_disposition")
@property
@pulumi.getter(name="contentEncoding")
def content_encoding(self) -> pulumi.Output[Optional[str]]:
"""
[Content-Encoding](https://tools.ietf.org/html/rfc7231#section-3.1.2.2) of the object data.
"""
return pulumi.get(self, "content_encoding")
@property
@pulumi.getter(name="contentLanguage")
def content_language(self) -> pulumi.Output[Optional[str]]:
"""
[Content-Language](https://tools.ietf.org/html/rfc7231#section-3.1.3.2) of the object data.
"""
return pulumi.get(self, "content_language")
@property
@pulumi.getter(name="contentType")
def content_type(self) -> pulumi.Output[str]:
"""
[Content-Type](https://tools.ietf.org/html/rfc7231#section-3.1.1.5) of the object data. Defaults to "application/octet-stream" or "text/plain; charset=utf-8".
"""
return pulumi.get(self, "content_type")
@property
@pulumi.getter
def crc32c(self) -> pulumi.Output[str]:
"""
(Computed) Base 64 CRC32 hash of the uploaded data.
"""
return pulumi.get(self, "crc32c")
@property
@pulumi.getter(name="customerEncryption")
def customer_encryption(self) -> pulumi.Output[Optional['outputs.BucketObjectCustomerEncryption']]:
"""
Enables object encryption with Customer-Supplied Encryption Key (CSEK). Google [documentation about CSEK.](https://cloud.google.com/storage/docs/encryption/customer-supplied-keys)
Structure is documented below.
"""
return pulumi.get(self, "customer_encryption")
@property
@pulumi.getter(name="detectMd5hash")
def detect_md5hash(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "detect_md5hash")
@property
@pulumi.getter(name="eventBasedHold")
def event_based_hold(self) -> pulumi.Output[Optional[bool]]:
"""
Whether an object is under event-based hold. Event-based hold is a way to retain objects until an event occurs, which is
signified by the hold's release (i.e. this value is set to false). After being released (set to false), such objects
will be subject to bucket-level retention (if any).
"""
return pulumi.get(self, "event_based_hold")
@property
@pulumi.getter(name="kmsKeyName")
def kms_key_name(self) -> pulumi.Output[str]:
"""
The resource name of the Cloud KMS key that will be used to [encrypt](https://cloud.google.com/storage/docs/encryption/using-customer-managed-keys) the object.
"""
return pulumi.get(self, "kms_key_name")
@property
@pulumi.getter
def md5hash(self) -> pulumi.Output[str]:
"""
(Computed) Base 64 MD5 hash of the uploaded data.
"""
return pulumi.get(self, "md5hash")
@property
@pulumi.getter(name="mediaLink")
def media_link(self) -> pulumi.Output[str]:
"""
(Computed) A url reference to download this object.
"""
return pulumi.get(self, "media_link")
@property
@pulumi.getter
def metadata(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
User-provided metadata, in key/value pairs.
"""
return pulumi.get(self, "metadata")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the object. If you're interpolating the name of this object, see `output_name` instead.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="outputName")
def output_name(self) -> pulumi.Output[str]:
"""
(Computed) The name of the object. Use this field in interpolations with `storage.ObjectACL` to recreate
`storage.ObjectACL` resources when your `storage.BucketObject` is recreated.
"""
return pulumi.get(self, "output_name")
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> pulumi.Output[str]:
"""
(Computed) A url reference to this object.
"""
return pulumi.get(self, "self_link")
@property
@pulumi.getter
def source(self) -> pulumi.Output[Optional[Union[pulumi.Asset, pulumi.Archive]]]:
"""
A path to the data you want to upload. Must be defined
if `content` is not.
"""
return pulumi.get(self, "source")
@property
@pulumi.getter(name="storageClass")
def storage_class(self) -> pulumi.Output[str]:
"""
The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object.
Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. If not provided, this defaults to the bucket's default
storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class.
"""
return pulumi.get(self, "storage_class")
@property
@pulumi.getter(name="temporaryHold")
def temporary_hold(self) -> pulumi.Output[Optional[bool]]:
"""
Whether an object is under temporary hold. While this flag is set to true, the object is protected against deletion and
overwrites.
"""
return pulumi.get(self, "temporary_hold")
| 50.855566
| 284
| 0.661234
| 6,852
| 55,280
| 5.174402
| 0.04714
| 0.077873
| 0.072655
| 0.071979
| 0.939219
| 0.929827
| 0.919814
| 0.91186
| 0.908222
| 0.884896
| 0
| 0.008978
| 0.224258
| 55,280
| 1,086
| 285
| 50.902394
| 0.817807
| 0.405933
| 0
| 0.817734
| 1
| 0
| 0.104095
| 0.012663
| 0
| 0
| 0
| 0
| 0
| 1
| 0.167488
| false
| 0.001642
| 0.011494
| 0.004926
| 0.280788
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c300c4fe978ce8b4169c7c4e76540ec192a4a74c
| 221
|
py
|
Python
|
folder1/f1b.py
|
redeye999/pyneta
|
96aebbf5f59a9abdbd9d21d29a0e80a988fcf45a
|
[
"Apache-2.0"
] | null | null | null |
folder1/f1b.py
|
redeye999/pyneta
|
96aebbf5f59a9abdbd9d21d29a0e80a988fcf45a
|
[
"Apache-2.0"
] | null | null | null |
folder1/f1b.py
|
redeye999/pyneta
|
96aebbf5f59a9abdbd9d21d29a0e80a988fcf45a
|
[
"Apache-2.0"
] | null | null | null |
irint("Hello NYC")
print("Hello NYC")
print("Hello NYC")
print("Hello NYC")
print("Ola Mira")
print("Ola Mira")
print("Ola Mira")
print("Ola Mira")
print("Ola Mira")
print("Ola Mira")
print("Ola Mira")
print("Ola Mira")
| 15.785714
| 18
| 0.669683
| 36
| 221
| 4.111111
| 0.166667
| 0.432432
| 0.648649
| 0.804054
| 0.966216
| 0.966216
| 0.966216
| 0.966216
| 0.966216
| 0.648649
| 0
| 0
| 0.113122
| 221
| 13
| 19
| 17
| 0.755102
| 0
| 0
| 0.916667
| 0
| 0
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.916667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 12
|
c30b7e876671719992edc91ba30a572f5a1fccb2
| 45
|
py
|
Python
|
pyjswidgets/pyjamas/Window.safari.py
|
takipsizad/pyjs
|
54db0ba6747aca744f9f3c3e985a17e913dfb951
|
[
"ECL-2.0",
"Apache-2.0"
] | 739
|
2015-01-01T02:05:11.000Z
|
2022-03-30T15:26:16.000Z
|
pyjswidgets/pyjamas/Window.safari.py
|
takipsizad/pyjs
|
54db0ba6747aca744f9f3c3e985a17e913dfb951
|
[
"ECL-2.0",
"Apache-2.0"
] | 33
|
2015-03-25T23:17:04.000Z
|
2021-08-19T08:25:22.000Z
|
pyjswidgets/pyjamas/Window.safari.py
|
takipsizad/pyjs
|
54db0ba6747aca744f9f3c3e985a17e913dfb951
|
[
"ECL-2.0",
"Apache-2.0"
] | 167
|
2015-01-01T22:27:47.000Z
|
2022-03-17T13:29:19.000Z
|
def getDocumentRoot():
return doc().body
| 15
| 22
| 0.688889
| 5
| 45
| 6.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 45
| 2
| 23
| 22.5
| 0.837838
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
5efdd8d85d868421c14678895c11c286175ee50b
| 4,394
|
py
|
Python
|
input_verification/check_households.py
|
atruszkowska/NR-population-revac
|
3f35be85755bc7233cd1330ea1fbc0346c8dc04c
|
[
"MIT"
] | 3
|
2020-11-04T03:59:08.000Z
|
2021-01-24T09:47:24.000Z
|
input_verification/check_households.py
|
atruszkowska/NR-ABM-population
|
87d4fe9dcec4aa0c8dec41547e7589952c371b1f
|
[
"MIT"
] | null | null | null |
input_verification/check_households.py
|
atruszkowska/NR-ABM-population
|
87d4fe9dcec4aa0c8dec41547e7589952c371b1f
|
[
"MIT"
] | 3
|
2020-11-03T17:19:12.000Z
|
2021-08-05T13:35:14.000Z
|
# ------------------------------------------------------------------
#
# Module for testing household structure
#
# ------------------------------------------------------------------
def print_houses_and_age(fname, agents):
''' Outputs house ID | age of every agent that lives there '''
# Dict with house ID vs. a list of agent ages
houses = {}
retirement_homes = {}
for agent in agents:
if agent['RetirementHome'] == True:
if str(agent['houseID']) in retirement_homes:
retirement_homes[str(agent['houseID'])].append(agent['yrs'])
else:
retirement_homes[str(agent['houseID'])] = []
retirement_homes[str(agent['houseID'])].append(agent['yrs'])
continue
if str(agent['houseID']) in houses:
houses[str(agent['houseID'])].append(agent['yrs'])
else:
houses[str(agent['houseID'])] = []
houses[str(agent['houseID'])].append(agent['yrs'])
# Save to file
with open(fname, 'w') as fout:
for key, value in retirement_homes.items():
fout.write(key + ' ' + (' ').join([str(x) for x in value]) + '\n')
for key, value in houses.items():
fout.write(key + ' ' + (' ').join([str(x) for x in value]) + '\n')
def print_houses_and_work_status(fname, fname_fam, agents):
''' Outputs house ID | and work flag of every
agent that lives there; includes the hospitals '''
# fname_fam is for separate file with families
# Dict with house ID vs. a list of agent ages
houses = {}
families = {}
retirement_homes = {}
for agent in agents:
works = agent['works'] or agent['worksHospital']
if agent['RetirementHome'] == True:
if str(agent['houseID']) in retirement_homes:
retirement_homes[str(agent['houseID'])].append(works)
else:
retirement_homes[str(agent['houseID'])] = []
retirement_homes[str(agent['houseID'])].append(works)
continue
if str(agent['houseID']) in houses:
houses[str(agent['houseID'])].append(works)
else:
houses[str(agent['houseID'])] = []
houses[str(agent['houseID'])].append(works)
if agent['isFamily'] == True:
if str(agent['houseID']) in families:
families[str(agent['houseID'])].append(works)
else:
families[str(agent['houseID'])] = []
families[str(agent['houseID'])].append(works)
# Save to file
with open(fname, 'w') as fout:
for key, value in retirement_homes.items():
fout.write(key + ' ' + (' ').join([str(x) for x in value]) + '\n')
for key, value in houses.items():
fout.write(key + ' ' + (' ').join([str(x) for x in value]) + '\n')
with open(fname_fam, 'w') as fout:
for key, value in families.items():
fout.write(key + ' ' + (' ').join([str(x) for x in value]) + '\n')
def print_houses_and_work_ID(fname, agents):
''' Outputs house ID | and work ID of every
agent that lives there; no work is marked as 0;
hospitals are marked by a negative value '''
# Dict with house ID vs. a list of agent ages
houses = {}
for agent in agents:
if agent['works']:
ID = agent['workID']
elif agent['worksHospital']:
ID = -1*agent['hospitalID']
else:
ID = 0
if str(agent['houseID']) in houses:
houses[str(agent['houseID'])].append(ID)
else:
houses[str(agent['houseID'])] = []
houses[str(agent['houseID'])].append(ID)
# Save to file
with open(fname, 'w') as fout:
for key, value in houses.items():
fout.write(key + ' ' + (' ').join([str(x) for x in value]) + '\n')
def print_houses_and_student_status(fname, agents):
''' Outputs house ID | and student flag of every
agent that lives there; includes the hospital '''
# Dict with house ID vs. a list of agent ages
houses = {}
retirement_homes = {}
for agent in agents:
if agent['RetirementHome'] == True:
if str(agent['houseID']) in retirement_homes:
retirement_homes[str(agent['houseID'])].append(agent['student'])
else:
retirement_homes[str(agent['houseID'])] = []
retirement_homes[str(agent['houseID'])].append(agent['student'])
continue
if str(agent['houseID']) in houses:
houses[str(agent['houseID'])].append(agent['student'])
else:
houses[str(agent['houseID'])] = []
houses[str(agent['houseID'])].append(agent['student'])
# Save to file
with open(fname, 'w') as fout:
for key, value in retirement_homes.items():
fout.write(key + ' ' + (' ').join([str(x) for x in value]) + '\n')
for key, value in houses.items():
fout.write(key + ' ' + (' ').join([str(x) for x in value]) + '\n')
| 31.84058
| 69
| 0.620391
| 605
| 4,394
| 4.446281
| 0.130579
| 0.095167
| 0.178439
| 0.124907
| 0.857621
| 0.848327
| 0.736059
| 0.717472
| 0.710037
| 0.679554
| 0
| 0.000829
| 0.176149
| 4,394
| 137
| 70
| 32.072993
| 0.742265
| 0.187301
| 0
| 0.824176
| 0
| 0
| 0.114521
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043956
| false
| 0
| 0
| 0
| 0.043956
| 0.043956
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6f1ad641eb1f936dfcb8584211205b226c8cf5b3
| 21,452
|
py
|
Python
|
tests/test_scrump.py
|
profintegra/stumpy
|
66b3402d91820005b466e1da6fe353b61e6246c5
|
[
"BSD-3-Clause"
] | 2,296
|
2019-05-03T19:26:39.000Z
|
2022-03-31T20:42:08.000Z
|
tests/test_scrump.py
|
vishalbelsare/stumpy
|
5f192a0a41fbb44f144cc4b676d525f19aaeaa98
|
[
"BSD-3-Clause"
] | 436
|
2019-05-06T14:14:01.000Z
|
2022-03-31T20:39:31.000Z
|
tests/test_scrump.py
|
vishalbelsare/stumpy
|
5f192a0a41fbb44f144cc4b676d525f19aaeaa98
|
[
"BSD-3-Clause"
] | 318
|
2019-05-04T01:36:05.000Z
|
2022-03-31T20:31:11.000Z
|
import numpy as np
import numpy.testing as npt
from stumpy import scrump, stump, config
from stumpy.scrump import prescrump
import pytest
import naive
test_data = [
(
np.array([9, 8100, -60, 7], dtype=np.float64),
np.array([584, -11, 23, 79, 1001, 0, -19], dtype=np.float64),
),
(
np.random.uniform(-1000, 1000, [8]).astype(np.float64),
np.random.uniform(-1000, 1000, [64]).astype(np.float64),
),
]
window_size = [8, 16, 32]
substitution_locations = [(slice(0, 0), 0, -1, slice(1, 3), [0, 3])]
substitution_values = [np.nan, np.inf]
percentages = [(0.01, 0.1, 1.0)]
@pytest.mark.parametrize("T_A, T_B", test_data)
def test_prescrump_self_join(T_A, T_B):
m = 3
zone = int(np.ceil(m / 4))
for s in range(1, zone + 1):
seed = np.random.randint(100000)
np.random.seed(seed)
ref_P, ref_I = naive.prescrump(T_B, m, T_B, s=s, exclusion_zone=zone)
np.random.seed(seed)
comp_P, comp_I = prescrump(T_B, m, s=s)
npt.assert_almost_equal(ref_P, comp_P)
npt.assert_almost_equal(ref_I, comp_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
def test_prescrump_A_B_join(T_A, T_B):
m = 3
zone = int(np.ceil(m / 4))
for s in range(1, zone + 1):
seed = np.random.randint(100000)
np.random.seed(seed)
ref_P, ref_I = naive.prescrump(T_A, m, T_B, s=s)
np.random.seed(seed)
comp_P, comp_I = prescrump(T_A, m, T_B=T_B, s=s)
npt.assert_almost_equal(ref_P, comp_P)
npt.assert_almost_equal(ref_I, comp_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
def test_prescrump_A_B_join_swap(T_A, T_B):
m = 3
zone = int(np.ceil(m / 4))
for s in range(1, zone + 1):
seed = np.random.randint(100000)
np.random.seed(seed)
ref_P, ref_I = naive.prescrump(T_B, m, T_A, s=s)
np.random.seed(seed)
comp_P, comp_I = prescrump(T_B, m, T_B=T_A, s=s)
npt.assert_almost_equal(ref_P, comp_P)
npt.assert_almost_equal(ref_I, comp_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
@pytest.mark.parametrize("m", window_size)
def test_prescrump_self_join_larger_window(T_A, T_B, m):
if len(T_B) > m:
zone = int(np.ceil(m / 4))
for s in range(1, zone + 1):
seed = np.random.randint(100000)
np.random.seed(seed)
ref_P, ref_I = naive.prescrump(T_B, m, T_B, s=s, exclusion_zone=zone)
np.random.seed(seed)
comp_P, comp_I = prescrump(T_B, m, s=s)
npt.assert_almost_equal(ref_P, comp_P)
npt.assert_almost_equal(ref_I, comp_I)
def test_scrump_int_input():
with pytest.raises(TypeError):
scrump(np.arange(10), 5, ignore_trivial=True, percentage=1.0, pre_scrump=False)
@pytest.mark.parametrize("T_A, T_B", test_data)
@pytest.mark.parametrize("percentages", percentages)
def test_scrump_self_join(T_A, T_B, percentages):
m = 3
zone = int(np.ceil(m / 4))
for percentage in percentages:
seed = np.random.randint(100000)
np.random.seed(seed)
ref_mp = naive.scrump(T_B, m, T_B, percentage, zone, False, None)
ref_P = ref_mp[:, 0]
ref_I = ref_mp[:, 1]
ref_left_I = ref_mp[:, 2]
ref_right_I = ref_mp[:, 3]
np.random.seed(seed)
approx = scrump(
T_B, m, ignore_trivial=True, percentage=percentage, pre_scrump=False
)
approx.update()
comp_P = approx.P_
comp_I = approx.I_
comp_left_I = approx.left_I_
comp_right_I = approx.right_I_
naive.replace_inf(ref_P)
naive.replace_inf(comp_P)
npt.assert_almost_equal(ref_P, comp_P)
npt.assert_almost_equal(ref_I, comp_I)
npt.assert_almost_equal(ref_left_I, comp_left_I)
npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
@pytest.mark.parametrize("percentages", percentages)
def test_scrump_A_B_join(T_A, T_B, percentages):
m = 3
for percentage in percentages:
seed = np.random.randint(100000)
np.random.seed(seed)
ref_mp = naive.scrump(T_A, m, T_B, percentage, None, False, None)
ref_P = ref_mp[:, 0]
ref_I = ref_mp[:, 1]
ref_left_I = ref_mp[:, 2]
ref_right_I = ref_mp[:, 3]
np.random.seed(seed)
approx = scrump(
T_A, m, T_B, ignore_trivial=False, percentage=percentage, pre_scrump=False
)
approx.update()
comp_P = approx.P_
comp_I = approx.I_
comp_left_I = approx.left_I_
comp_right_I = approx.right_I_
naive.replace_inf(ref_P)
naive.replace_inf(comp_P)
npt.assert_almost_equal(ref_P, comp_P)
npt.assert_almost_equal(ref_I, comp_I)
npt.assert_almost_equal(ref_left_I, comp_left_I)
npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
@pytest.mark.parametrize("percentages", percentages)
def test_scrump_A_B_join_swap(T_A, T_B, percentages):
    """A-B join with the operands swapped (T_B as the query series).

    The matrix profile indices (column 1) are intentionally left out of the
    comparison here (see the commented-out lines); only P, left_I and
    right_I are checked.  Fix: the original repeated the
    ``ref_P``/``comp_P`` assertion twice — the duplicate is removed.
    """
    m = 3
    for percentage in percentages:
        seed = np.random.randint(100000)
        np.random.seed(seed)
        ref_mp = naive.scrump(T_B, m, T_A, percentage, None, False, None)
        ref_P = ref_mp[:, 0]
        # ref_I = ref_mp[:, 1]
        ref_left_I = ref_mp[:, 2]
        ref_right_I = ref_mp[:, 3]
        # Re-seed so scrump sees the same random sequence as naive.scrump.
        np.random.seed(seed)
        approx = scrump(
            T_B, m, T_A, ignore_trivial=False, percentage=percentage, pre_scrump=False
        )
        approx.update()
        comp_P = approx.P_
        # comp_I = approx.I_
        comp_left_I = approx.left_I_
        comp_right_I = approx.right_I_
        # Normalize infs in both distance profiles before comparing.
        naive.replace_inf(ref_P)
        naive.replace_inf(comp_P)
        npt.assert_almost_equal(ref_P, comp_P)
        npt.assert_almost_equal(ref_left_I, comp_left_I)
        npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
@pytest.mark.parametrize("m", window_size)
@pytest.mark.parametrize("percentages", percentages)
def test_scrump_self_join_larger_window(T_A, T_B, m, percentages):
    """Self-join comparison for parametrized (larger) window sizes.

    Skips silently when the series is not longer than the window, since no
    subsequence can be extracted in that case.
    """
    if len(T_B) > m:
        zone = int(np.ceil(m / 4))
        for percentage in percentages:
            # Identical seeding keeps the random diagonal ordering in sync
            # between the naive reference and scrump.
            seed = np.random.randint(100000)
            np.random.seed(seed)
            ref_mp = naive.scrump(T_B, m, T_B, percentage, zone, False, None)
            ref_P = ref_mp[:, 0]
            ref_I = ref_mp[:, 1]
            ref_left_I = ref_mp[:, 2]
            ref_right_I = ref_mp[:, 3]
            np.random.seed(seed)
            approx = scrump(
                T_B, m, ignore_trivial=True, percentage=percentage, pre_scrump=False
            )
            approx.update()
            comp_P = approx.P_
            comp_I = approx.I_
            comp_left_I = approx.left_I_
            comp_right_I = approx.right_I_
            # Normalize infs in both distance profiles before comparing.
            naive.replace_inf(ref_P)
            naive.replace_inf(comp_P)
            npt.assert_almost_equal(ref_P, comp_P)
            npt.assert_almost_equal(ref_I, comp_I)
            npt.assert_almost_equal(ref_left_I, comp_left_I)
            npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
def test_scrump_self_join_full(T_A, T_B):
    """At percentage=1.0, scrump must reproduce the exact matrix profile.

    The same scrump output is checked against two references: the naive
    stamp implementation and the (fast) stump implementation.
    """
    m = 3
    zone = int(np.ceil(m / 4))
    ref_mp = naive.stamp(T_B, m, exclusion_zone=zone)
    ref_P = ref_mp[:, 0]
    ref_I = ref_mp[:, 1]
    ref_left_I = ref_mp[:, 2]
    ref_right_I = ref_mp[:, 3]
    approx = scrump(T_B, m, ignore_trivial=True, percentage=1.0, pre_scrump=False)
    approx.update()
    comp_P = approx.P_
    comp_I = approx.I_
    comp_left_I = approx.left_I_
    comp_right_I = approx.right_I_
    naive.replace_inf(ref_P)
    naive.replace_inf(comp_P)
    npt.assert_almost_equal(ref_P, comp_P)
    npt.assert_almost_equal(ref_I, comp_I)
    npt.assert_almost_equal(ref_left_I, comp_left_I)
    npt.assert_almost_equal(ref_right_I, comp_right_I)
    # Second reference: the same comp_* arrays must also match stump's output.
    ref_mp = stump(T_B, m, ignore_trivial=True)
    ref_P = ref_mp[:, 0]
    ref_I = ref_mp[:, 1]
    ref_left_I = ref_mp[:, 2]
    ref_right_I = ref_mp[:, 3]
    npt.assert_almost_equal(ref_P, comp_P)
    npt.assert_almost_equal(ref_I, comp_I)
    npt.assert_almost_equal(ref_left_I, comp_left_I)
    npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
def test_scrump_A_B_join_full(T_A, T_B):
    """At percentage=1.0, the A-B join must reproduce the exact matrix profile.

    Checked against both naive.stamp and stump references.
    """
    m = 3
    ref_mp = naive.stamp(T_A, m, T_B=T_B)
    ref_P = ref_mp[:, 0]
    ref_I = ref_mp[:, 1]
    ref_left_I = ref_mp[:, 2]
    ref_right_I = ref_mp[:, 3]
    approx = scrump(T_A, m, T_B, ignore_trivial=False, percentage=1.0, pre_scrump=False)
    approx.update()
    comp_P = approx.P_
    comp_I = approx.I_
    comp_left_I = approx.left_I_
    comp_right_I = approx.right_I_
    naive.replace_inf(ref_P)
    naive.replace_inf(comp_P)
    npt.assert_almost_equal(ref_P, comp_P)
    npt.assert_almost_equal(ref_I, comp_I)
    npt.assert_almost_equal(ref_left_I, comp_left_I)
    npt.assert_almost_equal(ref_right_I, comp_right_I)
    # Second reference: the same comp_* arrays must also match stump's output.
    ref_mp = stump(T_A, m, T_B=T_B, ignore_trivial=False)
    ref_P = ref_mp[:, 0]
    ref_I = ref_mp[:, 1]
    ref_left_I = ref_mp[:, 2]
    ref_right_I = ref_mp[:, 3]
    npt.assert_almost_equal(ref_P, comp_P)
    npt.assert_almost_equal(ref_I, comp_I)
    npt.assert_almost_equal(ref_left_I, comp_left_I)
    npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
def test_scrump_A_B_join_full_swap(T_A, T_B):
    """Full (percentage=1.0) A-B join with the operands swapped."""
    m = 3
    ref_mp = naive.stamp(T_B, m, T_B=T_A)
    ref_P = ref_mp[:, 0]
    ref_I = ref_mp[:, 1]
    ref_left_I = ref_mp[:, 2]
    ref_right_I = ref_mp[:, 3]
    approx = scrump(T_B, m, T_A, ignore_trivial=False, percentage=1.0, pre_scrump=False)
    approx.update()
    comp_P = approx.P_
    comp_I = approx.I_
    comp_left_I = approx.left_I_
    comp_right_I = approx.right_I_
    # Normalize infs in both distance profiles before comparing.
    naive.replace_inf(ref_P)
    naive.replace_inf(comp_P)
    npt.assert_almost_equal(ref_P, comp_P)
    npt.assert_almost_equal(ref_I, comp_I)
    npt.assert_almost_equal(ref_left_I, comp_left_I)
    npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
@pytest.mark.parametrize("m", window_size)
def test_scrump_self_join_full_larger_window(T_A, T_B, m):
    """Full (percentage=1.0) self-join for parametrized window sizes.

    Skips silently when the series is not longer than the window.
    """
    if len(T_B) > m:
        zone = int(np.ceil(m / 4))
        ref_mp = naive.stamp(T_B, m, exclusion_zone=zone)
        ref_P = ref_mp[:, 0]
        ref_I = ref_mp[:, 1]
        ref_left_I = ref_mp[:, 2]
        ref_right_I = ref_mp[:, 3]
        approx = scrump(T_B, m, ignore_trivial=True, percentage=1.0, pre_scrump=False)
        approx.update()
        comp_P = approx.P_
        comp_I = approx.I_
        comp_left_I = approx.left_I_
        comp_right_I = approx.right_I_
        # Normalize infs in both distance profiles before comparing.
        naive.replace_inf(ref_P)
        naive.replace_inf(comp_P)
        npt.assert_almost_equal(ref_P, comp_P)
        npt.assert_almost_equal(ref_I, comp_I)
        npt.assert_almost_equal(ref_left_I, comp_left_I)
        npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
@pytest.mark.parametrize("percentages", percentages)
def test_scrump_plus_plus_self_join(T_A, T_B, percentages):
    """SCRIMP++ (pre-scrump enabled) self-join against the naive reference.

    The naive reference merges the prescrump result into the naive scrump
    matrix profile element-wise before comparing.  Left/right indices are
    intentionally not compared (see commented-out lines).

    Fix: the original called ``naive.replace_inf(comp_I)`` — normalizing the
    *index* column instead of the distance profile — so infs in ``comp_P``
    were never replaced before the comparison.  It now normalizes ``comp_P``,
    matching every sibling test.
    """
    m = 3
    zone = int(np.ceil(m / 4))
    for s in range(1, zone + 1):
        for percentage in percentages:
            seed = np.random.randint(100000)
            np.random.seed(seed)
            ref_P, ref_I = naive.prescrump(T_B, m, T_B, s=s, exclusion_zone=zone)
            ref_mp = naive.scrump(T_B, m, T_B, percentage, zone, True, s)
            # Keep the better (smaller) distance from prescrump per position.
            for i in range(ref_mp.shape[0]):
                if ref_P[i] < ref_mp[i, 0]:
                    ref_mp[i, 0] = ref_P[i]
                    ref_mp[i, 1] = ref_I[i]
            ref_P = ref_mp[:, 0]
            ref_I = ref_mp[:, 1]
            # ref_left_I = ref_mp[:, 2]
            # ref_right_I = ref_mp[:, 3]
            np.random.seed(seed)
            approx = scrump(
                T_B, m, ignore_trivial=True, percentage=percentage, pre_scrump=True, s=s
            )
            approx.update()
            comp_P = approx.P_
            comp_I = approx.I_
            # comp_left_I = approx.left_I_
            # comp_right_I = approx.right_I_
            naive.replace_inf(ref_P)
            naive.replace_inf(comp_P)
            npt.assert_almost_equal(ref_P, comp_P)
            npt.assert_almost_equal(ref_I, comp_I)
            # npt.assert_almost_equal(ref_left_I, comp_left_I)
            # npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
@pytest.mark.parametrize("percentages", percentages)
def test_scrump_plus_plus_A_B_join(T_A, T_B, percentages):
    """SCRIMP++ (pre-scrump enabled) A-B join against the naive reference.

    The naive side runs prescrump separately and merges it into a plain
    (non-pre-scrump) naive.scrump result element-wise.
    """
    m = 3
    zone = int(np.ceil(m / 4))
    for s in range(1, zone + 1):
        for percentage in percentages:
            seed = np.random.randint(100000)
            np.random.seed(seed)
            ref_P, ref_I = naive.prescrump(T_A, m, T_B, s=s)
            # NOTE(review): the naive scrump is invoked with pre_scrump
            # disabled (False, None) here, unlike the self-join variant
            # which passes (True, s) — confirm this asymmetry is intended.
            ref_mp = naive.scrump(T_A, m, T_B, percentage, None, False, None)
            # Keep the better (smaller) distance from prescrump per position.
            for i in range(ref_mp.shape[0]):
                if ref_P[i] < ref_mp[i, 0]:
                    ref_mp[i, 0] = ref_P[i]
                    ref_mp[i, 1] = ref_I[i]
            ref_P = ref_mp[:, 0]
            ref_I = ref_mp[:, 1]
            ref_left_I = ref_mp[:, 2]
            ref_right_I = ref_mp[:, 3]
            # NOTE(review): unlike the sibling tests, no np.random.seed(seed)
            # re-seed precedes this call — verify scrump's diagonal ordering
            # does not affect the A-B join result here.
            approx = scrump(
                T_A,
                m,
                T_B,
                ignore_trivial=False,
                percentage=percentage,
                pre_scrump=True,
                s=s,
            )
            approx.update()
            comp_P = approx.P_
            comp_I = approx.I_
            comp_left_I = approx.left_I_
            comp_right_I = approx.right_I_
            # Normalize infs in both distance profiles before comparing.
            naive.replace_inf(ref_P)
            naive.replace_inf(comp_P)
            npt.assert_almost_equal(ref_P, comp_P)
            npt.assert_almost_equal(ref_I, comp_I)
            npt.assert_almost_equal(ref_left_I, comp_left_I)
            npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
def test_scrump_plus_plus_self_join_full(T_A, T_B):
    """Full (percentage=1.0) SCRIMP++ self-join must equal naive.stamp."""
    m = 3
    zone = int(np.ceil(m / 4))
    ref_mp = naive.stamp(T_B, m, exclusion_zone=zone)
    ref_P = ref_mp[:, 0]
    ref_I = ref_mp[:, 1]
    ref_left_I = ref_mp[:, 2]
    ref_right_I = ref_mp[:, 3]
    approx = scrump(
        T_B, m, ignore_trivial=True, percentage=1.0, pre_scrump=True, s=zone
    )
    approx.update()
    comp_P = approx.P_
    comp_I = approx.I_
    comp_left_I = approx.left_I_
    comp_right_I = approx.right_I_
    # Normalize infs in both distance profiles before comparing.
    naive.replace_inf(ref_P)
    naive.replace_inf(comp_P)
    npt.assert_almost_equal(ref_P, comp_P)
    npt.assert_almost_equal(ref_I, comp_I)
    npt.assert_almost_equal(ref_left_I, comp_left_I)
    npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
def test_scrump_plus_plus_A_B_join_full(T_A, T_B):
    """Full (percentage=1.0) SCRIMP++ A-B join must equal naive.stamp."""
    m = 3
    zone = int(np.ceil(m / 4))
    ref_mp = naive.stamp(T_A, m, T_B=T_B)
    ref_P = ref_mp[:, 0]
    ref_I = ref_mp[:, 1]
    ref_left_I = ref_mp[:, 2]
    ref_right_I = ref_mp[:, 3]
    approx = scrump(
        T_A, m, T_B=T_B, ignore_trivial=False, percentage=1.0, pre_scrump=True, s=zone
    )
    approx.update()
    comp_P = approx.P_
    comp_I = approx.I_
    comp_left_I = approx.left_I_
    comp_right_I = approx.right_I_
    # Normalize infs in both distance profiles before comparing.
    naive.replace_inf(ref_P)
    naive.replace_inf(comp_P)
    npt.assert_almost_equal(ref_P, comp_P)
    npt.assert_almost_equal(ref_I, comp_I)
    npt.assert_almost_equal(ref_left_I, comp_left_I)
    npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
def test_scrump_plus_plus_A_B_join_full_swap(T_A, T_B):
    """Full (percentage=1.0) SCRIMP++ A-B join with the operands swapped."""
    m = 3
    zone = int(np.ceil(m / 4))
    ref_mp = naive.stamp(T_B, m, T_B=T_A)
    ref_P = ref_mp[:, 0]
    ref_I = ref_mp[:, 1]
    ref_left_I = ref_mp[:, 2]
    ref_right_I = ref_mp[:, 3]
    approx = scrump(
        T_B, m, T_B=T_A, ignore_trivial=False, percentage=1.0, pre_scrump=True, s=zone
    )
    approx.update()
    comp_P = approx.P_
    comp_I = approx.I_
    comp_left_I = approx.left_I_
    comp_right_I = approx.right_I_
    # Normalize infs in both distance profiles before comparing.
    naive.replace_inf(ref_P)
    naive.replace_inf(comp_P)
    npt.assert_almost_equal(ref_P, comp_P)
    npt.assert_almost_equal(ref_I, comp_I)
    npt.assert_almost_equal(ref_left_I, comp_left_I)
    npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("percentages", percentages)
def test_scrump_constant_subsequence_self_join(percentages):
    """Self-join on a series containing constant (zero-variance) runs.

    Constant subsequences stress the z-normalized distance computation.
    """
    T = np.concatenate((np.zeros(20, dtype=np.float64), np.ones(5, dtype=np.float64)))
    m = 3
    zone = int(np.ceil(m / 4))
    for percentage in percentages:
        # Identical seeding keeps the random diagonal ordering in sync.
        seed = np.random.randint(100000)
        np.random.seed(seed)
        ref_mp = naive.scrump(T, m, T, percentage, zone, False, None)
        ref_P = ref_mp[:, 0]
        ref_I = ref_mp[:, 1]
        ref_left_I = ref_mp[:, 2]
        ref_right_I = ref_mp[:, 3]
        np.random.seed(seed)
        approx = scrump(
            T, m, ignore_trivial=True, percentage=percentage, pre_scrump=False
        )
        approx.update()
        comp_P = approx.P_
        comp_I = approx.I_
        comp_left_I = approx.left_I_
        comp_right_I = approx.right_I_
        # Normalize infs in both distance profiles before comparing.
        naive.replace_inf(ref_P)
        naive.replace_inf(comp_P)
        npt.assert_almost_equal(ref_P, comp_P)
        npt.assert_almost_equal(ref_I, comp_I)
        npt.assert_almost_equal(ref_left_I, comp_left_I)
        npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("percentages", percentages)
def test_scrump_identical_subsequence_self_join(percentages):
    """Self-join where one subsequence appears twice in the series.

    With duplicated subsequences the nearest-neighbor *index* is ambiguous
    (ties), so only the distance profile P is asserted, and with the
    project's reduced floating-point precision.
    """
    identical = np.random.rand(8)
    T = np.random.rand(20)
    T[1 : 1 + identical.shape[0]] = identical
    T[11 : 11 + identical.shape[0]] = identical
    m = 3
    zone = int(np.ceil(m / 4))
    for percentage in percentages:
        # Identical seeding keeps the random diagonal ordering in sync.
        seed = np.random.randint(100000)
        np.random.seed(seed)
        ref_mp = naive.scrump(T, m, T, percentage, zone, False, None)
        ref_P = ref_mp[:, 0]
        # ref_I = ref_mp[:, 1]
        # ref_left_I = ref_mp[:, 2]
        # ref_right_I = ref_mp[:, 3]
        np.random.seed(seed)
        approx = scrump(
            T, m, ignore_trivial=True, percentage=percentage, pre_scrump=False
        )
        approx.update()
        comp_P = approx.P_
        # comp_I = approx.I_
        # comp_left_I = approx.left_I_
        # comp_right_I = approx.right_I_
        naive.replace_inf(ref_P)
        naive.replace_inf(comp_P)
        npt.assert_almost_equal(ref_P, comp_P, decimal=config.STUMPY_TEST_PRECISION)
        # npt.assert_almost_equal(ref_I, comp_I)
        # npt.assert_almost_equal(ref_left_I, comp_left_I)
        # npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("T_A, T_B", test_data)
@pytest.mark.parametrize("substitute", substitution_values)
@pytest.mark.parametrize("substitution_locations", substitution_locations)
@pytest.mark.parametrize("percentages", percentages)
def test_scrump_nan_inf_self_join(
    T_A, T_B, substitute, substitution_locations, percentages
):
    """Self-join on a series with NaN/inf substituted at various positions."""
    m = 3
    T_B_sub = T_B.copy()
    for substitution_location in substitution_locations:
        # Reset to the pristine series before each substitution.
        T_B_sub[:] = T_B[:]
        T_B_sub[substitution_location] = substitute
        zone = int(np.ceil(m / 4))
        for percentage in percentages:
            # Identical seeding keeps the random diagonal ordering in sync.
            seed = np.random.randint(100000)
            np.random.seed(seed)
            ref_mp = naive.scrump(T_B_sub, m, T_B_sub, percentage, zone, False, None)
            ref_P = ref_mp[:, 0]
            ref_I = ref_mp[:, 1]
            ref_left_I = ref_mp[:, 2]
            ref_right_I = ref_mp[:, 3]
            np.random.seed(seed)
            approx = scrump(T_B_sub, m, percentage=percentage, pre_scrump=False)
            approx.update()
            comp_P = approx.P_
            comp_I = approx.I_
            comp_left_I = approx.left_I_
            comp_right_I = approx.right_I_
            # Normalize infs in both distance profiles before comparing.
            naive.replace_inf(ref_P)
            naive.replace_inf(comp_P)
            npt.assert_almost_equal(ref_P, comp_P)
            npt.assert_almost_equal(ref_I, comp_I)
            npt.assert_almost_equal(ref_left_I, comp_left_I)
            npt.assert_almost_equal(ref_right_I, comp_right_I)
@pytest.mark.parametrize("percentages", percentages)
def test_scrump_nan_zero_mean_self_join(percentages):
    """Self-join on a zero-mean series containing a non-finite value.

    NOTE(review): despite the test's name, the injected value is np.inf,
    not NaN — presumably both exercise the same non-finite handling path;
    confirm against the scrump implementation.
    """
    T = np.array([-1, 0, 1, np.inf, 1, 0, -1])
    m = 3
    zone = int(np.ceil(m / 4))
    for percentage in percentages:
        # Identical seeding keeps the random diagonal ordering in sync.
        seed = np.random.randint(100000)
        np.random.seed(seed)
        ref_mp = naive.scrump(T, m, T, percentage, zone, False, None)
        ref_P = ref_mp[:, 0]
        ref_I = ref_mp[:, 1]
        ref_left_I = ref_mp[:, 2]
        ref_right_I = ref_mp[:, 3]
        np.random.seed(seed)
        approx = scrump(T, m, percentage=percentage, pre_scrump=False)
        approx.update()
        comp_P = approx.P_
        comp_I = approx.I_
        comp_left_I = approx.left_I_
        comp_right_I = approx.right_I_
        # Normalize infs in both distance profiles before comparing.
        naive.replace_inf(ref_P)
        naive.replace_inf(comp_P)
        npt.assert_almost_equal(ref_P, comp_P)
        npt.assert_almost_equal(ref_I, comp_I)
        npt.assert_almost_equal(ref_left_I, comp_left_I)
        npt.assert_almost_equal(ref_right_I, comp_right_I)
| 30.955267
| 88
| 0.628892
| 3,465
| 21,452
| 3.525253
| 0.035498
| 0.042161
| 0.103152
| 0.137536
| 0.927712
| 0.921572
| 0.917151
| 0.910929
| 0.910602
| 0.899959
| 0
| 0.020611
| 0.25592
| 21,452
| 692
| 89
| 31
| 0.744644
| 0.025359
| 0
| 0.82197
| 0
| 0
| 0.013834
| 0.001053
| 0
| 0
| 0
| 0
| 0.149621
| 1
| 0.041667
| false
| 0
| 0.011364
| 0
| 0.05303
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
489dbe86d2ea76f46632d9b1bea99a2d0c4981d9
| 16,771
|
py
|
Python
|
gollahalli_cms/editor/models.py
|
akshaybabloo/gollahalli-cms
|
f74fbb6d88bf3a8acc7c4b0439e97d2bcc44a17f
|
[
"MIT"
] | 1
|
2020-08-08T22:46:40.000Z
|
2020-08-08T22:46:40.000Z
|
gollahalli_cms/editor/models.py
|
akshaybabloo/gollahalli-com
|
f74fbb6d88bf3a8acc7c4b0439e97d2bcc44a17f
|
[
"MIT"
] | 41
|
2017-06-23T03:28:17.000Z
|
2018-03-01T08:26:42.000Z
|
gollahalli_cms/editor/models.py
|
akshaybabloo/gollahalli-com
|
f74fbb6d88bf3a8acc7c4b0439e97d2bcc44a17f
|
[
"MIT"
] | null | null | null |
"""
All models for `editor`.
"""
from django.db import models
class ContentModel(models.Model):
    """
    Primary profile model.

    Fields: `ref_id` (PK), `updated`, `website_name`, `cv`, `bio`, `url`,
    `first_name`, `last_name`, `email_id`, `github`, `twitter`, `linkedin`,
    `file` and `image`.  (The original docstring listed a `created` field
    that does not exist on this model.)
    """
    ref_id = models.IntegerField(auto_created=True, default=1, primary_key=True, serialize=False)
    updated = models.DateTimeField(auto_now=True)
    website_name = models.CharField(max_length=300, default="Enter your companies name")
    cv = models.FileField(null=True, blank=True)
    bio = models.CharField(default='Your Bio', max_length=10000)
    url = models.URLField(default='https://www.example.com', max_length=400)
    first_name = models.CharField(default='First Name', max_length=400)
    last_name = models.CharField(default='Last Name', max_length=400)
    email_id = models.EmailField(default='example@example.com', max_length=400)
    github = models.URLField(default='https://www.example.com', max_length=400)
    twitter = models.URLField(default='https://www.example.com', max_length=400)
    linkedin = models.URLField(default='https://www.example.com', max_length=400)
    file = models.FileField(null=True, blank=True)
    image = models.ImageField(null=True, blank=True)
    def removed_on_cv_update(self):
        """
        Delete the previously stored `cv` file when a new one replaces it.
        """
        try:
            obj = ContentModel.objects.get(ref_id=self.ref_id)
        except ContentModel.DoesNotExist:
            # First save: nothing stored yet, so nothing to clean up.
            return
        if obj.cv and self.cv and obj.cv != self.cv:
            obj.cv.delete()
    def removed_on_file_update(self):
        """
        Delete the previously stored `file` when a new one replaces it.
        """
        try:
            obj = ContentModel.objects.get(ref_id=self.ref_id)
        except ContentModel.DoesNotExist:
            return
        if obj.file and self.file and obj.file != self.file:
            obj.file.delete()
    def removed_on_image_update(self):
        """
        Delete the previously stored `image` when a new one replaces it.
        """
        try:
            obj = ContentModel.objects.get(ref_id=self.ref_id)
        except ContentModel.DoesNotExist:
            return
        if obj.image and self.image and obj.image != self.image:
            obj.image.delete()
    def delete(self, using=None, keep_parents=False):
        """
        Remove the stored files from disk, then delete the row.

        Fix: `using` and `keep_parents` are now forwarded to the parent
        implementation instead of being silently dropped.
        """
        self.cv.delete()
        self.file.delete()
        self.image.delete()
        return super(ContentModel, self).delete(using=using, keep_parents=keep_parents)
    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        """
        Clean up any replaced files, then save normally.

        Fix: the keyword arguments are now forwarded to the parent
        implementation instead of being silently dropped.
        """
        self.removed_on_cv_update()
        self.removed_on_file_update()
        self.removed_on_image_update()
        return super(ContentModel, self).save(
            force_insert=force_insert, force_update=force_update,
            using=using, update_fields=update_fields)
    def __str__(self):
        return str(self.ref_id)
class EducationModel(models.Model):
    """
    Education entry linked to a `ContentModel`.

    Fields: `id` (PK), `ref_id` (FK), `title`, `from_date`, `to_date`,
    `where`, `current`, `file`, `image` and `updated`.
    """
    id = models.IntegerField(auto_created=True, default=1, primary_key=True, serialize=False)
    # Fix: on_delete must be a callable; `None` raises TypeError when the
    # related ContentModel row is deleted.
    ref_id = models.ForeignKey(ContentModel, related_name='education', on_delete=models.CASCADE)
    title = models.CharField(default='title', max_length=500)
    from_date = models.DateField()
    to_date = models.DateField()
    where = models.CharField(default='where', max_length=500)
    current = models.BooleanField(default=False)
    file = models.FileField(null=True, blank=True)
    image = models.ImageField(null=True, blank=True)
    updated = models.DateTimeField(auto_now=True)
    def removed_on_file_update(self):
        """
        Delete the previously stored `file` when a new one replaces it.
        """
        try:
            obj = EducationModel.objects.get(id=self.id)
        except EducationModel.DoesNotExist:
            return
        if obj.file and self.file and obj.file != self.file:
            obj.file.delete()
    def removed_on_image_update(self):
        """
        Delete the previously stored `image` when a new one replaces it.
        """
        try:
            obj = EducationModel.objects.get(id=self.id)
        except EducationModel.DoesNotExist:
            return
        if obj.image and self.image and obj.image != self.image:
            obj.image.delete()
    def delete(self, using=None, keep_parents=False):
        """
        Remove the stored files from disk, then delete the row.

        Fix: `using` and `keep_parents` are now forwarded to the parent
        implementation instead of being silently dropped.
        """
        self.file.delete()
        self.image.delete()
        return super(EducationModel, self).delete(using=using, keep_parents=keep_parents)
    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        """
        Clean up any replaced files, then save normally.

        Fix: the keyword arguments are now forwarded to the parent
        implementation instead of being silently dropped.
        """
        self.removed_on_file_update()
        self.removed_on_image_update()
        return super(EducationModel, self).save(
            force_insert=force_insert, force_update=force_update,
            using=using, update_fields=update_fields)
class ProjectsModel(models.Model):
    """
    Project entry linked to a `ContentModel`.

    Fields: `id` (PK), `ref_id` (FK), `link`, `title`, `category`,
    `long_description`, `short_description`, `file`, `image` and `updated`.
    """
    id = models.IntegerField(auto_created=True, default=1, primary_key=True, serialize=False)
    # Fix: on_delete must be a callable; `None` raises TypeError when the
    # related ContentModel row is deleted.
    ref_id = models.ForeignKey(ContentModel, related_name='projects', on_delete=models.CASCADE)
    link = models.URLField(default='https://www.example.com', max_length=500)
    title = models.CharField(default='title', max_length=500)
    category = models.CharField(default='category', max_length=500)
    long_description = models.CharField(default='long description', max_length=10000, help_text="Markdown Enabled")
    short_description = models.CharField(default='short description', max_length=500)
    file = models.FileField(null=True, blank=True)
    image = models.ImageField(null=True, blank=True)
    updated = models.DateTimeField(auto_now=True)
    def removed_on_file_update(self):
        """
        Delete the previously stored `file` when a new one replaces it.
        """
        try:
            obj = ProjectsModel.objects.get(id=self.id)
        except ProjectsModel.DoesNotExist:
            return
        if obj.file and self.file and obj.file != self.file:
            obj.file.delete()
    def removed_on_image_update(self):
        """
        Delete the previously stored `image` when a new one replaces it.
        """
        try:
            obj = ProjectsModel.objects.get(id=self.id)
        except ProjectsModel.DoesNotExist:
            return
        if obj.image and self.image and obj.image != self.image:
            obj.image.delete()
    def delete(self, using=None, keep_parents=False):
        """
        Remove the stored files from disk, then delete the row.

        Fix: `using` and `keep_parents` are now forwarded to the parent
        implementation instead of being silently dropped.
        """
        self.file.delete()
        self.image.delete()
        return super(ProjectsModel, self).delete(using=using, keep_parents=keep_parents)
    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        """
        Clean up any replaced files, then save normally.

        Fix: the keyword arguments are now forwarded to the parent
        implementation instead of being silently dropped.
        """
        self.removed_on_file_update()
        self.removed_on_image_update()
        return super(ProjectsModel, self).save(
            force_insert=force_insert, force_update=force_update,
            using=using, update_fields=update_fields)
class TutorialsModel(models.Model):
    """
    Tutorial entry linked to a `ContentModel`.

    Fields: `id` (PK), `ref_id` (FK), `link`, `title`, `long_description`,
    `file`, `image` and `updated`.  (The original docstring also listed a
    `short_description` field that does not exist on this model.)
    """
    id = models.IntegerField(auto_created=True, default=1, primary_key=True, serialize=False)
    # Fix: on_delete must be a callable; `None` raises TypeError when the
    # related ContentModel row is deleted.
    ref_id = models.ForeignKey(ContentModel, related_name='tutorials', on_delete=models.CASCADE)
    link = models.URLField(default='https://www.example.com', max_length=500)
    title = models.CharField(default='title', max_length=500)
    long_description = models.CharField(default='long description', max_length=10000, help_text="Markdown Enabled")
    file = models.FileField(null=True, blank=True)
    image = models.ImageField(null=True, blank=True)
    updated = models.DateTimeField(auto_now=True)
    def removed_on_file_update(self):
        """
        Delete the previously stored `file` when a new one replaces it.
        """
        try:
            obj = TutorialsModel.objects.get(id=self.id)
        except TutorialsModel.DoesNotExist:
            return
        if obj.file and self.file and obj.file != self.file:
            obj.file.delete()
    def removed_on_image_update(self):
        """
        Delete the previously stored `image` when a new one replaces it.
        """
        try:
            obj = TutorialsModel.objects.get(id=self.id)
        except TutorialsModel.DoesNotExist:
            return
        if obj.image and self.image and obj.image != self.image:
            obj.image.delete()
    def delete(self, using=None, keep_parents=False):
        """
        Remove the stored files from disk, then delete the row.

        Fix: `using` and `keep_parents` are now forwarded to the parent
        implementation instead of being silently dropped.
        """
        self.file.delete()
        self.image.delete()
        return super(TutorialsModel, self).delete(using=using, keep_parents=keep_parents)
    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        """
        Clean up any replaced files, then save normally.

        Fix: the keyword arguments are now forwarded to the parent
        implementation instead of being silently dropped.
        """
        self.removed_on_file_update()
        self.removed_on_image_update()
        return super(TutorialsModel, self).save(
            force_insert=force_insert, force_update=force_update,
            using=using, update_fields=update_fields)
class ExperienceModel(models.Model):
    """
    Work-experience entry linked to a `ContentModel`.

    Fields: `id` (PK), `ref_id` (FK), `from_date`, `to_date`, `title`,
    `where_city`, `where_country`, `company`, `current` and `updated`.
    """
    id = models.IntegerField(auto_created=True, default=1, primary_key=True, serialize=False)
    # Fix: on_delete must be a callable; `None` raises TypeError when the
    # related ContentModel row is deleted.
    ref_id = models.ForeignKey(ContentModel, related_name='experience', on_delete=models.CASCADE)
    from_date = models.DateField()
    to_date = models.DateField()
    title = models.CharField(default='title', max_length=500)
    where_city = models.CharField(default='where city', max_length=100)
    where_country = models.CharField(default='where country', max_length=100)
    company = models.CharField(default='company', max_length=500)
    current = models.BooleanField(default=False)
    updated = models.DateTimeField(auto_now=True)
class SkillsModel(models.Model):
    """
    Skill category linked to a `ContentModel`.

    Fields: `ref_id` (FK) and `type_of_skill` (PK).
    """
    # Fix: on_delete must be a callable; `None` raises TypeError when the
    # related ContentModel row is deleted.
    ref_id = models.ForeignKey(ContentModel, related_name='skills', on_delete=models.CASCADE)
    type_of_skill = models.CharField(default='type', primary_key=True, max_length=500)
    def __str__(self):
        return self.type_of_skill
class SkillsContentModel(models.Model):
    """
    Content item belonging to a `SkillsModel` category.

    Fields: `id` (PK), `type_of_skill` (FK), `content`, `file`, `image`
    and `updated`.
    """
    id = models.IntegerField(auto_created=True, default=1, primary_key=True, serialize=False)
    # Fix: on_delete must be a callable; `None` raises TypeError when the
    # related SkillsModel row is deleted.
    type_of_skill = models.ForeignKey(SkillsModel, related_name='skills_content', on_delete=models.CASCADE)
    content = models.CharField(default='content', help_text='Markdown Enabled', max_length=500)
    file = models.FileField(null=True, blank=True)
    image = models.ImageField(null=True, blank=True)
    updated = models.DateTimeField(auto_now=True)
    def removed_on_file_update(self):
        """
        Delete the previously stored `file` when a new one replaces it.
        """
        try:
            obj = SkillsContentModel.objects.get(id=self.id)
        except SkillsContentModel.DoesNotExist:
            return
        if obj.file and self.file and obj.file != self.file:
            obj.file.delete()
    def removed_on_image_update(self):
        """
        Delete the previously stored `image` when a new one replaces it.
        """
        try:
            obj = SkillsContentModel.objects.get(id=self.id)
        except SkillsContentModel.DoesNotExist:
            return
        if obj.image and self.image and obj.image != self.image:
            obj.image.delete()
    def delete(self, using=None, keep_parents=False):
        """
        Remove the stored files from disk, then delete the row.

        Fix: `using` and `keep_parents` are now forwarded to the parent
        implementation instead of being silently dropped.
        """
        self.file.delete()
        self.image.delete()
        return super(SkillsContentModel, self).delete(using=using, keep_parents=keep_parents)
    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        """
        Clean up any replaced files, then save normally.

        Fix: the keyword arguments are now forwarded to the parent
        implementation instead of being silently dropped.
        """
        self.removed_on_file_update()
        self.removed_on_image_update()
        return super(SkillsContentModel, self).save(
            force_insert=force_insert, force_update=force_update,
            using=using, update_fields=update_fields)
    def __str__(self):
        return self.content
class PublicationsModel(models.Model):
    """
    Publication category linked to a `ContentModel`.

    Fields: `ref_id` (FK), `type_of_publication` (PK) and `updated`.
    """
    # Fix: on_delete must be a callable; `None` raises TypeError when the
    # related ContentModel row is deleted.
    ref_id = models.ForeignKey(ContentModel, related_name='publications', on_delete=models.CASCADE)
    type_of_publication = models.CharField(default='type', primary_key=True, max_length=500)
    updated = models.DateTimeField(auto_now=True)
    def __str__(self):
        return self.type_of_publication
class PublicationsContentModel(models.Model):
    """
    Content item belonging to a `PublicationsModel` category.

    Fields: `id` (PK), `type_of_publication` (FK), `content`, `file`,
    `image` and `updated`.
    """
    id = models.IntegerField(auto_created=True, default=1, primary_key=True, serialize=False)
    # Fix: on_delete must be a callable; `None` raises TypeError when the
    # related PublicationsModel row is deleted.
    type_of_publication = models.ForeignKey(PublicationsModel, related_name='publications_content', on_delete=models.CASCADE)
    content = models.CharField(default='content', help_text='Markdown Enabled', max_length=500)
    file = models.FileField(null=True, blank=True)
    image = models.ImageField(null=True, blank=True)
    updated = models.DateTimeField(auto_now=True)
    def removed_on_file_update(self):
        """
        Delete the previously stored `file` when a new one replaces it.
        """
        try:
            obj = PublicationsContentModel.objects.get(id=self.id)
        except PublicationsContentModel.DoesNotExist:
            return
        if obj.file and self.file and obj.file != self.file:
            obj.file.delete()
    def removed_on_image_update(self):
        """
        Delete the previously stored `image` when a new one replaces it.
        """
        try:
            obj = PublicationsContentModel.objects.get(id=self.id)
        except PublicationsContentModel.DoesNotExist:
            return
        if obj.image and self.image and obj.image != self.image:
            obj.image.delete()
    def delete(self, using=None, keep_parents=False):
        """
        Remove the stored files from disk, then delete the row.

        Fix: `using` and `keep_parents` are now forwarded to the parent
        implementation instead of being silently dropped.
        """
        self.file.delete()
        self.image.delete()
        return super(PublicationsContentModel, self).delete(using=using, keep_parents=keep_parents)
    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        """
        Clean up any replaced files, then save normally.

        Fix: the keyword arguments are now forwarded to the parent
        implementation instead of being silently dropped.
        """
        self.removed_on_file_update()
        self.removed_on_image_update()
        return super(PublicationsContentModel, self).save(
            force_insert=force_insert, force_update=force_update,
            using=using, update_fields=update_fields)
    def __str__(self):
        return self.content
class MetaContentModel(models.Model):
    """
    Site-wide template content: `ref_id` (PK), `header`, `footer`, `meta`
    and `updated`.  (The original docstring named the PK `id`, but the
    field is `ref_id`.)
    """
    ref_id = models.IntegerField(auto_created=True, default=1, primary_key=True, serialize=False)
    header = models.TextField(default="Header content.", help_text="{{header}}")
    footer = models.TextField(default="Footer Content", help_text="{{footer}}")
    meta = models.TextField(default="Meta tags", help_text="{{meta_header}}")
    updated = models.DateTimeField(auto_now=True)
| 31.583804
| 125
| 0.620595
| 1,886
| 16,771
| 5.362142
| 0.075822
| 0.024028
| 0.041333
| 0.021853
| 0.804608
| 0.794423
| 0.779788
| 0.751211
| 0.715614
| 0.707703
| 0
| 0.007615
| 0.256156
| 16,771
| 530
| 126
| 31.643396
| 0.803046
| 0.187109
| 0
| 0.745763
| 0
| 0
| 0.046927
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.127119
| false
| 0
| 0.004237
| 0.021186
| 0.614407
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
48b92c2e54d8f2cd8117822a7b6282949f134621
| 1,038
|
py
|
Python
|
Centering The Text.py
|
MrAnonymous5635/CSCircles
|
010ac82942c88da357e214ea5462ec378f3667b8
|
[
"MIT"
] | 17
|
2018-09-19T09:44:33.000Z
|
2022-01-17T15:17:11.000Z
|
Centering The Text.py
|
MrAnonymous5635/CSCircles
|
010ac82942c88da357e214ea5462ec378f3667b8
|
[
"MIT"
] | 2
|
2020-02-24T15:28:33.000Z
|
2021-11-16T00:04:52.000Z
|
Centering The Text.py
|
MrAnonymous5635/CSCircles
|
010ac82942c88da357e214ea5462ec378f3667b8
|
[
"MIT"
] | 8
|
2020-02-20T00:02:06.000Z
|
2022-01-06T17:25:51.000Z
|
# Center each input line of text within `width` columns, padding with dots,
# until the sentinel "END" is read.
#
# Padding rules (preserved exactly from the original two parity-specific
# loops): the base pad on each side is width//2 - len(text)//2; when width
# is even and the text length is odd, the RIGHT pad loses one dot; when
# width is odd and the text length is even, the LEFT pad gains one dot.
#
# Fixes over the original: the unused `totalDots` computation is gone, the
# two nearly identical while-loops (one per width parity) are merged into a
# single loop, and the opaque `chr(46)` is written as the literal ".".
width = int(input())
while True:
    text = input()
    if text == "END":
        break
    left = width // 2 - len(text) // 2
    right = left
    if width % 2 == 0 and len(text) % 2 != 0:
        right -= 1
    elif width % 2 != 0 and len(text) % 2 == 0:
        left += 1
    print("." * left + text + "." * right)
| 27.315789
| 60
| 0.539499
| 125
| 1,038
| 4.48
| 0.168
| 0.071429
| 0.085714
| 0.171429
| 0.976786
| 0.976786
| 0.942857
| 0.942857
| 0.942857
| 0.796429
| 0
| 0.047288
| 0.307322
| 1,038
| 37
| 61
| 28.054054
| 0.731572
| 0
| 0
| 0.827586
| 0
| 0
| 0.00578
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.137931
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5b17f2a08c3ec56a382263a87375e2ef3352b4ca
| 88,549
|
py
|
Python
|
src/pyrad_proc/pyrad/proc/process_intercomp.py
|
jfigui/pyrad
|
7811d593bb09a7f8a621c0e8ae3f32c2b85a0254
|
[
"BSD-3-Clause"
] | 41
|
2016-12-01T08:46:06.000Z
|
2021-06-24T21:14:33.000Z
|
src/pyrad_proc/pyrad/proc/process_intercomp.py
|
jfigui/pyrad
|
7811d593bb09a7f8a621c0e8ae3f32c2b85a0254
|
[
"BSD-3-Clause"
] | 42
|
2017-02-23T14:52:49.000Z
|
2021-02-01T10:43:52.000Z
|
src/pyrad_proc/pyrad/proc/process_intercomp.py
|
jfigui/pyrad
|
7811d593bb09a7f8a621c0e8ae3f32c2b85a0254
|
[
"BSD-3-Clause"
] | 21
|
2016-08-25T15:02:12.000Z
|
2021-05-27T04:09:40.000Z
|
"""
pyrad.proc.process_intercomp
============================
Functions used in the inter-comparison between radars
.. autosummary::
:toctree: generated/
process_time_stats
process_time_stats2
process_time_avg
process_weighted_time_avg
process_time_avg_flag
process_colocated_gates
process_intercomp
process_intercomp_time_avg
process_fields_diff
process_intercomp_fields
"""
from copy import deepcopy
from warnings import warn
import datetime
import numpy as np
import scipy
from netCDF4 import num2date
import pyart
from ..io.io_aux import get_datatype_fields, get_fieldname_pyart
from ..io.io_aux import get_save_dir, make_filename
from ..io.read_data_other import read_colocated_gates, read_colocated_data
from ..io.read_data_other import read_colocated_data_time_avg
from ..io.read_data_radar import interpol_field
from ..util.radar_utils import time_avg_range, get_range_bins_to_avg
from ..util.radar_utils import find_colocated_indexes
def process_time_stats(procstatus, dscfg, radar_list=None):
    """
    computes the temporal statistics of a field

    Parameters
    ----------
    procstatus : int
        Processing status: 0 initializing, 1 processing volume,
        2 post-processing
    dscfg : dictionary of dictionaries
        data set configuration. Accepted Configuration Keywords::

        datatype : list of string. Dataset keyword
            The input data types
        period : float. Dataset keyword
            the period to average [s]. If -1 the statistics are going to be
            performed over the entire data. Default 3600.
        start_average : float. Dataset keyword
            when to start the average [s from midnight UTC]. Default 0.
        lin_trans: int. Dataset keyword
            If 1 apply linear transformation before averaging
        use_nan : bool. Dataset keyword
            If true non valid data will be used
        nan_value : float. Dataset keyword
            The value of the non valid data. Default 0
        stat: string. Dataset keyword
            Statistic to compute: Can be mean, std, cov, min, max. Default
            mean
    radar_list : list of Radar objects
        Optional. list of radar objects

    Returns
    -------
    new_dataset : dict
        dictionary containing the output
    ind_rad : int
        radar index
    """
    # only the first declared data type is processed
    for datatypedescr in dscfg['datatype']:
        radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr)
        field_name = get_fieldname_pyart(datatype)
        break
    ind_rad = int(radarnr[5:8])-1

    start_average = dscfg.get('start_average', 0.)
    period = dscfg.get('period', 3600.)
    lin_trans = dscfg.get('lin_trans', 0)
    use_nan = dscfg.get('use_nan', 0)
    nan_value = dscfg.get('nan_value', 0.)
    stat = dscfg.get('stat', 'mean')

    if procstatus == 0:
        return None, None

    if procstatus == 1:
        if radar_list[ind_rad] is None:
            warn('No valid radar')
            return None, None
        radar = radar_list[ind_rad]

        if field_name not in radar.fields:
            warn(field_name+' not available.')
            return None, None

        # Prepare auxiliary radar holding the running sums
        field = deepcopy(radar.fields[field_name])
        if stat in ('mean', 'std', 'cov'):
            if lin_trans:
                # dB -> linear units before accumulating
                field['data'] = np.ma.power(10., 0.1*field['data'])
            if use_nan:
                field['data'] = np.ma.asarray(field['data'].filled(nan_value))
            if stat in ('std', 'cov'):
                # keep a running sum of squares for the variance terms
                sum2_dict = pyart.config.get_metadata('sum_squared')
                sum2_dict['data'] = field['data']*field['data']
        else:
            if use_nan:
                field['data'] = np.ma.asarray(field['data'].filled(nan_value))

        # per-gate number of valid samples accumulated so far
        npoints_dict = pyart.config.get_metadata('number_of_samples')
        npoints_dict['data'] = np.ma.asarray(
            np.logical_not(np.ma.getmaskarray(field['data'])), dtype=int)

        radar_aux = deepcopy(radar)
        radar_aux.fields = dict()
        radar_aux.add_field(field_name, field)
        radar_aux.add_field('number_of_samples', npoints_dict)
        if stat in ('std', 'cov'):
            radar_aux.add_field('sum_squared', sum2_dict)

        # first volume: initialize start and end time of averaging
        if dscfg['initialized'] == 0:
            avg_par = dict()
            if period != -1:
                date_00 = dscfg['timeinfo'].replace(
                    hour=0, minute=0, second=0, microsecond=0)
                avg_par.update(
                    {'starttime': date_00+datetime.timedelta(
                        seconds=start_average)})
                avg_par.update(
                    {'endtime': avg_par['starttime']+datetime.timedelta(
                        seconds=period)})
            else:
                # period == -1: accumulate over the entire data set
                avg_par.update({'starttime': dscfg['timeinfo']})
                avg_par.update({'endtime': dscfg['timeinfo']})
            avg_par.update({'timeinfo': dscfg['timeinfo']})
            dscfg['global_data'] = avg_par
            dscfg['initialized'] = 1
        if dscfg['initialized'] == 0:
            return None, None

        dscfg['global_data']['timeinfo'] = dscfg['timeinfo']
        # no radar object in global data: create it
        if 'radar_out' not in dscfg['global_data']:
            if period != -1:
                # get start and stop times of new radar object
                (dscfg['global_data']['starttime'],
                 dscfg['global_data']['endtime']) = (
                     time_avg_range(
                         dscfg['timeinfo'], dscfg['global_data']['starttime'],
                         dscfg['global_data']['endtime'], period))

                # check if volume time older than starttime
                if dscfg['timeinfo'] > dscfg['global_data']['starttime']:
                    dscfg['global_data'].update({'radar_out': radar_aux})
            else:
                dscfg['global_data'].update({'radar_out': radar_aux})
            return None, None

        # still accumulating: add field to global field
        if (period == -1 or
                dscfg['timeinfo'] < dscfg['global_data']['endtime']):
            if period == -1:
                dscfg['global_data']['endtime'] = dscfg['timeinfo']
            field_interp = interpol_field(
                dscfg['global_data']['radar_out'], radar_aux, field_name)
            npoints_interp = interpol_field(
                dscfg['global_data']['radar_out'], radar_aux,
                'number_of_samples')
            if use_nan:
                field_interp['data'] = np.ma.asarray(
                    field_interp['data'].filled(nan_value))
                # with use_nan even non-valid gates count as samples
                dscfg['global_data']['radar_out'].fields[
                    'number_of_samples']['data'] += np.ma.asarray(
                        npoints_interp['data'].filled(fill_value=1),
                        dtype=int)
            else:
                dscfg['global_data']['radar_out'].fields[
                    'number_of_samples']['data'] += np.ma.asarray(
                        npoints_interp['data'].filled(fill_value=0),
                        dtype=int)

            if stat in ('mean', 'std', 'cov'):
                # gates masked in the running sum take the new value;
                # gates valid in both are added
                masked_sum = np.ma.getmaskarray(
                    dscfg['global_data']['radar_out'].fields[
                        field_name]['data'])
                valid_sum = np.logical_and(
                    np.logical_not(masked_sum),
                    np.logical_not(np.ma.getmaskarray(field_interp['data'])))

                dscfg['global_data']['radar_out'].fields[
                    field_name]['data'][masked_sum] = (
                        field_interp['data'][masked_sum])
                dscfg['global_data']['radar_out'].fields[
                    field_name]['data'][valid_sum] += (
                        field_interp['data'][valid_sum])

                if stat in ('cov', 'std'):
                    dscfg['global_data']['radar_out'].fields[
                        'sum_squared']['data'][masked_sum] = (
                            field_interp['data'][masked_sum] *
                            field_interp['data'][masked_sum])
                    dscfg['global_data']['radar_out'].fields[
                        'sum_squared']['data'][valid_sum] += (
                            field_interp['data'][valid_sum] *
                            field_interp['data'][valid_sum])
            elif stat == 'max':
                # running maximum; a huge negative fill keeps masked gates
                # from winning, and is re-masked afterwards
                dscfg['global_data']['radar_out'].fields[
                    field_name]['data'] = np.maximum(
                        dscfg['global_data']['radar_out'].fields[
                            field_name]['data'].filled(fill_value=-1.e300),
                        field_interp['data'].filled(fill_value=-1.e300))
                dscfg['global_data']['radar_out'].fields[
                    field_name]['data'] = np.ma.masked_values(
                        dscfg['global_data']['radar_out'].fields[
                            field_name]['data'], -1.e300)
            elif stat == 'min':
                dscfg['global_data']['radar_out'].fields[
                    field_name]['data'] = np.minimum(
                        dscfg['global_data']['radar_out'].fields[
                            field_name]['data'].filled(fill_value=1.e300),
                        field_interp['data'].filled(fill_value=1.e300))
                dscfg['global_data']['radar_out'].fields[
                    field_name]['data'] = np.ma.masked_values(
                        dscfg['global_data']['radar_out'].fields[
                            field_name]['data'], 1.e300)

            return None, None

        # we have reached the end of the accumulation period: do the averaging
        # and start a new object (only reachable if period != -1)
        if stat in ('mean', 'std', 'cov'):
            field_mean = (
                dscfg['global_data']['radar_out'].fields[field_name]['data'] /
                dscfg['global_data']['radar_out'].fields[
                    'number_of_samples']['data'])
            if stat == 'mean':
                if lin_trans:
                    # back to dB
                    dscfg['global_data']['radar_out'].fields[
                        field_name]['data'] = 10.*np.ma.log10(field_mean)
                else:
                    dscfg['global_data']['radar_out'].fields[
                        field_name]['data'] = field_mean
            elif stat in ('std', 'cov'):
                # std = sqrt(E[x^2] - E[x]^2)
                field_std = np.ma.sqrt(
                    dscfg['global_data']['radar_out'].fields[
                        'sum_squared']['data'] /
                    dscfg['global_data']['radar_out'].fields[
                        'number_of_samples']['data']-field_mean*field_mean)
                if stat == 'std':
                    if lin_trans:
                        dscfg['global_data']['radar_out'].fields[
                            field_name]['data'] = 10.*np.ma.log10(field_std)
                    else:
                        dscfg['global_data']['radar_out'].fields[
                            field_name]['data'] = field_std
                else:
                    if lin_trans:
                        dscfg['global_data']['radar_out'].fields[
                            field_name]['data'] = 10.*np.ma.log10(
                                field_std/field_mean)
                    else:
                        dscfg['global_data']['radar_out'].fields[
                            field_name]['data'] = field_std/field_mean

        new_dataset = {
            'radar_out': deepcopy(dscfg['global_data']['radar_out']),
            'timeinfo': dscfg['global_data']['endtime']}

        dscfg['global_data']['starttime'] += datetime.timedelta(
            seconds=period)
        dscfg['global_data']['endtime'] += datetime.timedelta(seconds=period)

        # remove old radar object from global_data dictionary
        dscfg['global_data'].pop('radar_out', None)

        # get start and stop times of new radar object
        dscfg['global_data']['starttime'], dscfg['global_data']['endtime'] = (
            time_avg_range(
                dscfg['timeinfo'], dscfg['global_data']['starttime'],
                dscfg['global_data']['endtime'], period))

        # check if volume time older than starttime
        if dscfg['timeinfo'] > dscfg['global_data']['starttime']:
            dscfg['global_data'].update({'radar_out': radar_aux})

        return new_dataset, ind_rad

    # no more files to process if there is global data pack it up
    if procstatus == 2:
        if dscfg['initialized'] == 0:
            return None, None
        if 'radar_out' not in dscfg['global_data']:
            return None, None

        if stat in ('mean', 'std', 'cov'):
            field_mean = (
                dscfg['global_data']['radar_out'].fields[field_name]['data'] /
                dscfg['global_data']['radar_out'].fields[
                    'number_of_samples']['data'])
            if stat == 'mean':
                if lin_trans:
                    dscfg['global_data']['radar_out'].fields[
                        field_name]['data'] = 10.*np.ma.log10(field_mean)
                else:
                    dscfg['global_data']['radar_out'].fields[
                        field_name]['data'] = field_mean
            elif stat in ('std', 'cov'):
                field_std = np.ma.sqrt(
                    dscfg['global_data']['radar_out'].fields[
                        'sum_squared']['data'] /
                    dscfg['global_data']['radar_out'].fields[
                        'number_of_samples']['data']-field_mean*field_mean)
                if stat == 'std':
                    if lin_trans:
                        dscfg['global_data']['radar_out'].fields[
                            field_name]['data'] = 10.*np.ma.log10(field_std)
                    else:
                        dscfg['global_data']['radar_out'].fields[
                            field_name]['data'] = field_std
                else:
                    # BUGFIX: the cov statistic previously ignored lin_trans
                    # here, inconsistently with the end-of-period branch of
                    # procstatus == 1 above
                    if lin_trans:
                        dscfg['global_data']['radar_out'].fields[
                            field_name]['data'] = 10.*np.ma.log10(
                                field_std/field_mean)
                    else:
                        dscfg['global_data']['radar_out'].fields[
                            field_name]['data'] = field_std/field_mean

        new_dataset = {
            'radar_out': deepcopy(dscfg['global_data']['radar_out']),
            'timeinfo': dscfg['global_data']['endtime']}

        return new_dataset, ind_rad
def process_time_stats2(procstatus, dscfg, radar_list=None):
    """
    computes the temporal mean of a field

    Parameters
    ----------
    procstatus : int
        Processing status: 0 initializing, 1 processing volume,
        2 post-processing
    dscfg : dictionary of dictionaries
        data set configuration. Accepted Configuration Keywords::

        datatype : list of string. Dataset keyword
            The input data types
        period : float. Dataset keyword
            the period to average [s]. If -1 the statistics are going to be
            performed over the entire data. Default 3600.
        start_average : float. Dataset keyword
            when to start the average [s from midnight UTC]. Default 0.
        stat: string. Dataset keyword
            Statistic to compute: Can be median, mode, percentileXX
        use_nan : bool. Dataset keyword
            If true non valid data will be used
        nan_value : float. Dataset keyword
            The value of the non valid data. Default 0
    radar_list : list of Radar objects
        Optional. list of radar objects

    Returns
    -------
    new_dataset : dict
        dictionary containing the output
    ind_rad : int
        radar index
    """
    # only the first declared data type is processed
    for datatypedescr in dscfg['datatype']:
        radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr)
        field_name = get_fieldname_pyart(datatype)
        break
    ind_rad = int(radarnr[5:8])-1

    start_average = dscfg.get('start_average', 0.)
    period = dscfg.get('period', 3600.)
    use_nan = dscfg.get('use_nan', 0)
    nan_value = dscfg.get('nan_value', 0.)
    stat = dscfg.get('stat', 'median')
    # 'percentileXX' encodes the numeric percentile in the keyword itself,
    # e.g. 'percentile95' -> 95.0
    if 'percentile' in stat:
        percentile = float(stat.replace('percentile', ''))

    if procstatus == 0:
        return None, None

    if procstatus == 1:
        if radar_list[ind_rad] is None:
            warn('No valid radar')
            return None, None
        radar = radar_list[ind_rad]

        if field_name not in radar.fields:
            warn(field_name+' not available.')
            return None, None

        # prepare auxiliary radar
        field = deepcopy(radar.fields[field_name])
        if use_nan:
            field['data'] = np.ma.asarray(field['data'].filled(nan_value))
        # per-gate count of valid samples
        npoints_dict = pyart.config.get_metadata('number_of_samples')
        npoints_dict['data'] = np.ma.asarray(
            np.logical_not(np.ma.getmaskarray(field['data'])), dtype=int)

        radar_aux = deepcopy(radar)
        radar_aux.fields = dict()
        radar_aux.add_field(field_name, field)
        radar_aux.add_field('number_of_samples', npoints_dict)

        # first volume: initialize start and end time of averaging
        if dscfg['initialized'] == 0:
            avg_par = dict()
            if period != -1:
                date_00 = dscfg['timeinfo'].replace(
                    hour=0, minute=0, second=0, microsecond=0)
                avg_par.update(
                    {'starttime': date_00+datetime.timedelta(
                        seconds=start_average)})
                avg_par.update(
                    {'endtime': avg_par['starttime']+datetime.timedelta(
                        seconds=period)})
            else:
                # period == -1: accumulate over the entire data set
                avg_par.update({'starttime': dscfg['timeinfo']})
                avg_par.update({'endtime': dscfg['timeinfo']})
            avg_par.update({'timeinfo': dscfg['timeinfo']})
            dscfg['global_data'] = avg_par
            dscfg['initialized'] = 1
        if dscfg['initialized'] == 0:
            return None, None

        dscfg['global_data']['timeinfo'] = dscfg['timeinfo']
        # no radar object in global data: create it
        if 'radar_out' not in dscfg['global_data']:
            if period != -1:
                # get start and stop times of new radar object
                (dscfg['global_data']['starttime'],
                 dscfg['global_data']['endtime']) = (
                     time_avg_range(
                         dscfg['timeinfo'], dscfg['global_data']['starttime'],
                         dscfg['global_data']['endtime'], period))

                # check if volume time older than starttime
                if dscfg['timeinfo'] > dscfg['global_data']['starttime']:
                    dscfg['global_data'].update({'radar_out': radar_aux})
                    # every volume is stacked along axis 2 so that
                    # rank statistics can be taken at the end
                    dscfg['global_data'].update(
                        {'field_data': np.atleast_3d(
                            radar_aux.fields[field_name]['data'])})
            else:
                dscfg['global_data'].update({'radar_out': radar_aux})
                dscfg['global_data'].update(
                    {'field_data': np.atleast_3d(
                        radar_aux.fields[field_name]['data'])})
            return None, None

        # still accumulating: add field to global field
        if (period == -1 or
                dscfg['timeinfo'] < dscfg['global_data']['endtime']):
            if period == -1:
                dscfg['global_data']['endtime'] = dscfg['timeinfo']
            field_interp = interpol_field(
                dscfg['global_data']['radar_out'], radar_aux, field_name)
            npoints_interp = interpol_field(
                dscfg['global_data']['radar_out'], radar_aux,
                'number_of_samples')
            if use_nan:
                field_interp['data'] = np.ma.asarray(
                    field_interp['data'].filled(nan_value))
                # with use_nan even non-valid gates count as samples
                dscfg['global_data']['radar_out'].fields[
                    'number_of_samples']['data'] += np.ma.asarray(
                        npoints_interp['data'].filled(fill_value=1),
                        dtype=int)
            else:
                dscfg['global_data']['radar_out'].fields[
                    'number_of_samples']['data'] += np.ma.asarray(
                        npoints_interp['data'].filled(fill_value=0),
                        dtype=int)

            # NOTE(review): field_data grows by one (nrays, ngates) slice per
            # volume; memory use is unbounded when period == -1
            dscfg['global_data']['field_data'] = np.ma.append(
                dscfg['global_data']['field_data'],
                np.atleast_3d(field_interp['data']), axis=2)

            return None, None

        # we have reached the end of the accumulation period: do the averaging
        # and start a new object (only reachable if period != -1)
        if stat == 'median':
            dscfg['global_data']['radar_out'].fields[
                field_name]['data'] = np.ma.median(
                    dscfg['global_data']['field_data'], axis=2)
        elif stat == 'mode':
            mode_data, _ = scipy.stats.mode(
                dscfg['global_data']['field_data'].filled(fill_value=np.nan),
                axis=2, nan_policy='omit')
            dscfg['global_data']['radar_out'].fields[field_name]['data'] = (
                np.ma.masked_invalid(np.squeeze(mode_data, axis=2)))
        elif 'percentile' in stat:
            percent_data = np.nanpercentile(
                dscfg['global_data']['field_data'].filled(fill_value=np.nan),
                percentile, axis=2)
            dscfg['global_data']['radar_out'].fields[field_name]['data'] = (
                np.ma.masked_invalid(percent_data))

        new_dataset = {
            'radar_out': deepcopy(dscfg['global_data']['radar_out']),
            'timeinfo': dscfg['global_data']['endtime']}

        dscfg['global_data']['starttime'] += datetime.timedelta(
            seconds=period)
        dscfg['global_data']['endtime'] += datetime.timedelta(seconds=period)

        # remove old radar object from global_data dictionary
        dscfg['global_data'].pop('radar_out', None)

        # get start and stop times of new radar object
        dscfg['global_data']['starttime'], dscfg['global_data']['endtime'] = (
            time_avg_range(
                dscfg['timeinfo'], dscfg['global_data']['starttime'],
                dscfg['global_data']['endtime'], period))

        # check if volume time older than starttime
        if dscfg['timeinfo'] > dscfg['global_data']['starttime']:
            dscfg['global_data'].update({'radar_out': radar_aux})

        return new_dataset, ind_rad

    # no more files to process if there is global data pack it up
    if procstatus == 2:
        if dscfg['initialized'] == 0:
            return None, None
        if 'radar_out' not in dscfg['global_data']:
            return None, None

        if stat == 'median':
            dscfg['global_data']['radar_out'].fields[field_name]['data'] = (
                np.ma.median(dscfg['global_data']['field_data'], axis=2))
        elif stat == 'mode':
            mode_data, _ = scipy.stats.mode(
                dscfg['global_data']['field_data'].filled(fill_value=np.nan),
                axis=2, nan_policy='omit')
            dscfg['global_data']['radar_out'].fields[field_name]['data'] = (
                np.ma.masked_invalid(np.squeeze(mode_data, axis=2)))
        elif 'percentile' in stat:
            percent_data = np.nanpercentile(
                dscfg['global_data']['field_data'].filled(fill_value=np.nan),
                percentile, axis=2)
            dscfg['global_data']['radar_out'].fields[field_name]['data'] = (
                np.ma.masked_invalid(percent_data))

        new_dataset = {
            'radar_out': deepcopy(dscfg['global_data']['radar_out']),
            'timeinfo': dscfg['global_data']['endtime']}

        return new_dataset, ind_rad
def process_time_avg(procstatus, dscfg, radar_list=None):
    """
    computes the temporal mean of a field

    Parameters
    ----------
    procstatus : int
        Processing status: 0 initializing, 1 processing volume,
        2 post-processing
    dscfg : dictionary of dictionaries
        data set configuration. Accepted Configuration Keywords::

        datatype : list of string. Dataset keyword
            The input data types
        period : float. Dataset keyword
            the period to average [s]. Default 3600.
        start_average : float. Dataset keyword
            when to start the average [s from midnight UTC]. Default 0.
        lin_trans: int. Dataset keyword
            If 1 apply linear transformation before averaging
    radar_list : list of Radar objects
        Optional. list of radar objects

    Returns
    -------
    new_dataset : dict
        dictionary containing the output
    ind_rad : int
        radar index
    """
    # only the first declared data type is processed
    for datatypedescr in dscfg['datatype']:
        radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr)
        field_name = get_fieldname_pyart(datatype)
        break
    ind_rad = int(radarnr[5:8])-1

    lin_trans = dscfg.get('lin_trans', 0)

    if procstatus == 0:
        return None, None

    if procstatus == 1:
        if radar_list[ind_rad] is None:
            warn('No valid radar')
            return None, None
        radar = radar_list[ind_rad]

        if field_name not in radar.fields:
            warn(field_name+' not available.')
            return None, None

        period = dscfg.get('period', 3600.)

        field = deepcopy(radar.fields[field_name])
        if lin_trans:
            # dB -> linear units before summing
            field['data'] = np.ma.power(10., 0.1*field['data'])

        # NOTE(review): masked gates are filled with 0 here while every gate
        # counts as one sample below, so gates with missing data pull the
        # average towards 0 -- confirm this is the intended behaviour
        field['data'] = field['data'].filled(fill_value=0.)
        field['data'] = np.ma.asarray(field['data'])

        radar_aux = deepcopy(radar)
        radar_aux.fields = dict()
        radar_aux.add_field(field_name, field)

        npoints_dict = pyart.config.get_metadata('number_of_samples')
        npoints_dict['data'] = np.ma.ones(
            (radar.nrays, radar.ngates), dtype=int)
        radar_aux.add_field('number_of_samples', npoints_dict)

        # first volume: initialize start and end time of averaging
        if dscfg['initialized'] == 0:
            start_average = dscfg.get('start_average', 0.)
            date_00 = dscfg['timeinfo'].replace(
                hour=0, minute=0, second=0, microsecond=0)
            avg_par = dict()
            avg_par.update(
                {'starttime': date_00+datetime.timedelta(
                    seconds=start_average)})
            avg_par.update(
                {'endtime': avg_par['starttime']+datetime.timedelta(
                    seconds=period)})
            avg_par.update({'timeinfo': dscfg['timeinfo']})
            dscfg['global_data'] = avg_par
            dscfg['initialized'] = 1
        if dscfg['initialized'] == 0:
            return None, None

        dscfg['global_data']['timeinfo'] = dscfg['timeinfo']
        # no radar object in global data: create it
        if 'radar_out' not in dscfg['global_data']:
            # get start and stop times of new radar object
            (dscfg['global_data']['starttime'],
             dscfg['global_data']['endtime']) = (
                 time_avg_range(
                     dscfg['timeinfo'], dscfg['global_data']['starttime'],
                     dscfg['global_data']['endtime'], period))

            # check if volume time older than starttime
            if dscfg['timeinfo'] > dscfg['global_data']['starttime']:
                dscfg['global_data'].update({'radar_out': radar_aux})
            return None, None

        # still accumulating: add field to global field
        if dscfg['timeinfo'] < dscfg['global_data']['endtime']:
            field_interp = interpol_field(
                dscfg['global_data']['radar_out'], radar_aux, field_name)
            npoints_interp = interpol_field(
                dscfg['global_data']['radar_out'], radar_aux,
                'number_of_samples')
            dscfg['global_data']['radar_out'].fields[field_name]['data'] += (
                field_interp['data'].filled(fill_value=0))
            dscfg['global_data']['radar_out'].fields[
                'number_of_samples']['data'] += (
                    npoints_interp['data'].filled(fill_value=0)).astype('int')
            return None, None

        # we have reached the end of the accumulation period: do the averaging
        # and start a new object
        dscfg['global_data']['radar_out'].fields[field_name]['data'] /= (
            dscfg['global_data']['radar_out'].fields[
                'number_of_samples']['data'])
        if lin_trans:
            # back to dB
            dscfg['global_data']['radar_out'].fields[field_name]['data'] = (
                10.*np.ma.log10(
                    dscfg['global_data']['radar_out'].fields[
                        field_name]['data']))

        new_dataset = {
            'radar_out': deepcopy(dscfg['global_data']['radar_out']),
            'timeinfo': dscfg['global_data']['endtime']}

        dscfg['global_data']['starttime'] += datetime.timedelta(
            seconds=period)
        dscfg['global_data']['endtime'] += datetime.timedelta(seconds=period)

        # remove old radar object from global_data dictionary
        dscfg['global_data'].pop('radar_out', None)

        # get start and stop times of new radar object
        dscfg['global_data']['starttime'], dscfg['global_data']['endtime'] = (
            time_avg_range(
                dscfg['timeinfo'], dscfg['global_data']['starttime'],
                dscfg['global_data']['endtime'], period))

        # check if volume time older than starttime
        if dscfg['timeinfo'] > dscfg['global_data']['starttime']:
            dscfg['global_data'].update({'radar_out': radar_aux})

        return new_dataset, ind_rad

    # no more files to process if there is global data pack it up
    if procstatus == 2:
        if dscfg['initialized'] == 0:
            return None, None
        if 'radar_out' not in dscfg['global_data']:
            return None, None

        (dscfg['global_data']['radar_out'].fields[field_name][
            'data']) /= (
                dscfg['global_data']['radar_out'].fields[
                    'number_of_samples']['data'])
        if lin_trans:
            dscfg['global_data']['radar_out'].fields[field_name]['data'] = (
                10.*np.ma.log10(
                    dscfg['global_data']['radar_out'].fields[
                        field_name]['data']))

        new_dataset = {
            'radar_out': deepcopy(dscfg['global_data']['radar_out']),
            'timeinfo': dscfg['global_data']['endtime']}

        return new_dataset, ind_rad
def process_weighted_time_avg(procstatus, dscfg, radar_list=None):
    """
    computes the temporal mean of a field weighted by the reflectivity

    Parameters
    ----------
    procstatus : int
        Processing status: 0 initializing, 1 processing volume,
        2 post-processing
    dscfg : dictionary of dictionaries
        data set configuration. Accepted Configuration Keywords::

        datatype : list of string. Dataset keyword
            The input data types. Must contain a reflectivity type
            (dBZ, dBZc, dBuZ, dBZv, dBZvc or dBuZv) and the field to average
        period : float. Dataset keyword
            the period to average [s]. Default 3600.
        start_average : float. Dataset keyword
            when to start the average [s from midnight UTC]. Default 0.
    radar_list : list of Radar objects
        Optional. list of radar objects

    Returns
    -------
    new_dataset : Radar
        radar object
    ind_rad : int
        radar index
    """
    # BUGFIX: initialize to None so that a configuration missing either the
    # reflectivity type or the field to average does not raise a NameError
    # below (None is never a key of radar.fields, so the existing warning
    # path handles it)
    refl_name = None
    field_name = None
    for datatypedescr in dscfg['datatype']:
        radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr)
        if datatype in ('dBZ', 'dBZc', 'dBuZ', 'dBZv', 'dBZvc', 'dBuZv'):
            refl_name = get_fieldname_pyart(datatype)
        else:
            field_name = get_fieldname_pyart(datatype)
    ind_rad = int(radarnr[5:8])-1

    if procstatus == 0:
        return None, None

    if procstatus == 1:
        if radar_list[ind_rad] is None:
            warn('No valid radar')
            return None, None
        radar = radar_list[ind_rad]

        if field_name not in radar.fields or refl_name not in radar.fields:
            warn('Unable to compute weighted average. Missing data')
            return None, None

        period = dscfg.get('period', 3600.)

        field = deepcopy(radar.fields[field_name])
        field['data'] = field['data'].filled(fill_value=0.)
        field['data'] = np.ma.asarray(field['data'])

        # reflectivity in linear units is the weight
        refl_field = deepcopy(radar.fields[refl_name])
        refl_field['data'] = np.ma.power(10., 0.1*refl_field['data'])
        refl_field['data'] = refl_field['data'].filled(fill_value=0.)
        refl_field['data'] = np.ma.asarray(refl_field['data'])

        # accumulate weight*value; the weight sum is accumulated separately
        field['data'] *= refl_field['data']

        radar_aux = deepcopy(radar)
        radar_aux.fields = dict()
        radar_aux.add_field(field_name, field)
        radar_aux.add_field(refl_name, refl_field)

        # first volume: initialize start and end time of averaging
        if dscfg['initialized'] == 0:
            start_average = dscfg.get('start_average', 0.)
            date_00 = dscfg['timeinfo'].replace(
                hour=0, minute=0, second=0, microsecond=0)
            avg_par = dict()
            avg_par.update(
                {'starttime': date_00+datetime.timedelta(
                    seconds=start_average)})
            avg_par.update(
                {'endtime': avg_par['starttime']+datetime.timedelta(
                    seconds=period)})
            avg_par.update({'timeinfo': dscfg['timeinfo']})
            dscfg['global_data'] = avg_par
            dscfg['initialized'] = 1
        if dscfg['initialized'] == 0:
            return None, None

        dscfg['global_data']['timeinfo'] = dscfg['timeinfo']
        # no radar object in global data: create it
        if 'radar_out' not in dscfg['global_data']:
            # get start and stop times of new radar object
            (dscfg['global_data']['starttime'],
             dscfg['global_data']['endtime']) = (
                 time_avg_range(
                     dscfg['timeinfo'], dscfg['global_data']['starttime'],
                     dscfg['global_data']['endtime'], period))

            # check if volume time older than starttime
            if dscfg['timeinfo'] > dscfg['global_data']['starttime']:
                dscfg['global_data'].update({'radar_out': radar_aux})
            return None, None

        # still accumulating: add field to global field
        if dscfg['timeinfo'] < dscfg['global_data']['endtime']:
            field_interp = interpol_field(
                dscfg['global_data']['radar_out'], radar_aux, field_name)
            dscfg['global_data']['radar_out'].fields[field_name]['data'] += (
                field_interp['data'].filled(fill_value=0))

            refl_interp = interpol_field(
                dscfg['global_data']['radar_out'], radar_aux, refl_name)
            dscfg['global_data']['radar_out'].fields[refl_name]['data'] += (
                refl_interp['data'].filled(fill_value=0))

            return None, None

        # we have reached the end of the accumulation period: do the averaging
        # and start a new object
        dscfg['global_data']['radar_out'].fields[field_name]['data'] /= (
            dscfg['global_data']['radar_out'].fields[refl_name]['data'])

        new_dataset = {
            'radar_out': deepcopy(dscfg['global_data']['radar_out']),
            'timeinfo': dscfg['global_data']['endtime']}

        dscfg['global_data']['starttime'] += datetime.timedelta(
            seconds=period)
        dscfg['global_data']['endtime'] += datetime.timedelta(seconds=period)

        # remove old radar object from global_data dictionary
        dscfg['global_data'].pop('radar_out', None)

        # get start and stop times of new radar object
        dscfg['global_data']['starttime'], dscfg['global_data']['endtime'] = (
            time_avg_range(
                dscfg['timeinfo'], dscfg['global_data']['starttime'],
                dscfg['global_data']['endtime'], period))

        # check if volume time older than starttime
        if dscfg['timeinfo'] > dscfg['global_data']['starttime']:
            dscfg['global_data'].update({'radar_out': radar_aux})

        return new_dataset, ind_rad

    # no more files to process if there is global data pack it up
    if procstatus == 2:
        if dscfg['initialized'] == 0:
            return None, None
        if 'radar_out' not in dscfg['global_data']:
            return None, None

        dscfg['global_data']['radar_out'].fields[field_name]['data'] /= (
            dscfg['global_data']['radar_out'].fields[refl_name]['data'])

        new_dataset = {
            'radar_out': deepcopy(dscfg['global_data']['radar_out']),
            'timeinfo': dscfg['global_data']['endtime']}

        return new_dataset, ind_rad
def process_time_avg_flag(procstatus, dscfg, radar_list=None):
    """
    computes a flag field describing the conditions of the data used while
    averaging

    Parameters
    ----------
    procstatus : int
        Processing status: 0 initializing, 1 processing volume,
        2 post-processing
    dscfg : dictionary of dictionaries
        data set configuration. Accepted Configuration Keywords::

        datatype : list of string. Dataset keyword
            The input data types
        period : float. Dataset keyword
            the period to average [s]. Default 3600.
        start_average : float. Dataset keyword
            when to start the average [s from midnight UTC]. Default 0.
        phidpmax: float. Dataset keyword
            maximum PhiDP
        beamwidth : float. Dataset keyword
            the antenna beamwidth [deg]. If None that of the keys
            radar_beam_width_h or radar_beam_width_v in attribute
            instrument_parameters of the radar object will be used. If the key
            or the attribute are not present the beamwidth will be set to None
    radar_list : list of Radar objects
        Optional. list of radar objects

    Returns
    -------
    new_dataset : Radar
        radar object
    ind_rad : int
        radar index
    """
    # flag decimal digits (each volume adds its contribution):
    #     units: excess PhiDP, hundreds: clutter,
    #     ten-thousands: solid/mixed precipitation
    temp_name = None
    hydro_name = None
    iso0_name = None
    echo_name = None
    # BUGFIX: initialize like the other optional names so that a
    # configuration without a PhiDP data type does not raise a NameError
    # below (None is never a key of radar.fields, so the 'Missing PhiDP
    # data' path handles it)
    phidp_name = None
    for datatypedescr in dscfg['datatype']:
        radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr)
        if datatype in ('PhiDP', 'PhiDPc'):
            phidp_name = get_fieldname_pyart(datatype)
        elif datatype == 'echoID':
            echo_name = get_fieldname_pyart(datatype)
        elif datatype == 'hydro':
            hydro_name = get_fieldname_pyart(datatype)
        elif datatype == 'TEMP':
            temp_name = get_fieldname_pyart(datatype)
        elif datatype == 'H_ISO0':
            iso0_name = 'height_over_iso0'
    ind_rad = int(radarnr[5:8])-1

    if procstatus == 0:
        return None, None

    if procstatus == 1:
        if radar_list[ind_rad] is None:
            warn('No valid radar')
            return None, None
        radar = radar_list[ind_rad]

        phidpmax = dscfg.get('phidpmax', 60.)
        period = dscfg.get('period', 3600.)

        time_avg_flag = pyart.config.get_metadata('time_avg_flag')
        time_avg_flag['data'] = np.ma.zeros(
            (radar.nrays, radar.ngates), dtype=int)

        if phidp_name not in radar.fields:
            warn('Missing PhiDP data')
            time_avg_flag['data'] += 1
        else:
            phidp_field = radar.fields[phidp_name]
            time_avg_flag['data'][phidp_field['data'] > phidpmax] += 1

        if echo_name is not None:
            if echo_name not in radar.fields:
                warn('Missing echo ID data')
                time_avg_flag['data'] += 100
            else:
                echo_field = radar.fields[echo_name]
                # echo ID 2: clutter
                time_avg_flag['data'][echo_field['data'] == 2] += 100

        if hydro_name is not None and echo_name is not None:
            if ((hydro_name not in radar.fields) or
                    (echo_name not in radar.fields)):
                warn('Missing hydrometeor classification data')
                time_avg_flag['data'] += 10000
            else:
                hydro_field = radar.fields[hydro_name]
                # check where is no rain
                is_not_rain = np.logical_and(
                    hydro_field['data'] != 4, hydro_field['data'] != 6)
                # where is no rain should be precip
                is_not_rain = np.logical_and(
                    is_not_rain, echo_field['data'] == 3)
                time_avg_flag['data'][is_not_rain] += 10000
        elif temp_name is not None:
            if temp_name not in radar.fields:
                warn('Missing temperature data')
                time_avg_flag['data'] += 10000
            else:
                beamwidth = dscfg.get('beamwidth', None)
                if beamwidth is None:
                    if radar.instrument_parameters is not None:
                        if ('radar_beam_width_h' in
                                radar.instrument_parameters):
                            beamwidth = radar.instrument_parameters[
                                'radar_beam_width_h']['data'][0]
                        elif ('radar_beam_width_v' in
                                radar.instrument_parameters):
                            beamwidth = radar.instrument_parameters[
                                'radar_beam_width_v']['data'][0]
                if beamwidth is None:
                    warn('Antenna beam width unknown.')
                # flag gates at or above the freezing level
                mask_fzl, _ = pyart.correct.get_mask_fzl(
                    radar, fzl=None, doc=None, min_temp=0., max_h_iso0=0.,
                    thickness=700., beamwidth=beamwidth,
                    temp_field=temp_name, iso0_field=iso0_name,
                    temp_ref='temperature')
                time_avg_flag['data'][mask_fzl] += 10000
        elif iso0_name is not None:
            if iso0_name not in radar.fields:
                warn('Missing height relative to iso0 data')
                time_avg_flag['data'] += 10000
            else:
                beamwidth = dscfg.get('beamwidth', None)
                if beamwidth is None:
                    if radar.instrument_parameters is not None:
                        if ('radar_beam_width_h' in
                                radar.instrument_parameters):
                            beamwidth = radar.instrument_parameters[
                                'radar_beam_width_h']['data'][0]
                        elif ('radar_beam_width_v' in
                                radar.instrument_parameters):
                            beamwidth = radar.instrument_parameters[
                                'radar_beam_width_v']['data'][0]
                if beamwidth is None:
                    warn('Antenna beam width unknown.')
                mask_fzl, _ = pyart.correct.get_mask_fzl(
                    radar, fzl=None, doc=None, min_temp=0., max_h_iso0=0.,
                    thickness=700., beamwidth=beamwidth,
                    temp_field=temp_name, iso0_field=iso0_name,
                    temp_ref='height_over_iso0')
                time_avg_flag['data'][mask_fzl] += 10000

        radar_aux = deepcopy(radar)
        radar_aux.fields = dict()
        radar_aux.add_field('time_avg_flag', time_avg_flag)

        # first volume: initialize start and end time of averaging
        if dscfg['initialized'] == 0:
            start_average = dscfg.get('start_average', 0.)
            date_00 = dscfg['timeinfo'].replace(
                hour=0, minute=0, second=0, microsecond=0)
            avg_par = dict()
            avg_par.update(
                {'starttime': date_00+datetime.timedelta(
                    seconds=start_average)})
            avg_par.update(
                {'endtime': avg_par['starttime']+datetime.timedelta(
                    seconds=period)})
            avg_par.update({'timeinfo': dscfg['timeinfo']})
            dscfg['global_data'] = avg_par
            dscfg['initialized'] = 1
        if dscfg['initialized'] == 0:
            return None, None

        dscfg['global_data']['timeinfo'] = dscfg['timeinfo']
        # no radar object in global data: create it
        if 'radar_out' not in dscfg['global_data']:
            # get start and stop times of new radar object
            (dscfg['global_data']['starttime'],
             dscfg['global_data']['endtime']) = (
                 time_avg_range(
                     dscfg['timeinfo'], dscfg['global_data']['starttime'],
                     dscfg['global_data']['endtime'], period))

            # check if volume time older than starttime
            if dscfg['timeinfo'] > dscfg['global_data']['starttime']:
                dscfg['global_data'].update({'radar_out': radar_aux})
            return None, None

        # still accumulating: add field to global field
        if dscfg['timeinfo'] < dscfg['global_data']['endtime']:
            flag_interp = interpol_field(
                dscfg['global_data']['radar_out'], radar_aux, 'time_avg_flag')
            dscfg['global_data']['radar_out'].fields[
                'time_avg_flag']['data'] += (
                    flag_interp['data'].filled(fill_value=0)).astype(int)
            return None, None

        # we have reached the end of the accumulation: start a new object
        new_dataset = {
            'radar_out': deepcopy(dscfg['global_data']['radar_out']),
            'timeinfo': dscfg['global_data']['endtime']}

        dscfg['global_data']['starttime'] += datetime.timedelta(
            seconds=period)
        dscfg['global_data']['endtime'] += datetime.timedelta(seconds=period)

        # remove old radar object from global_data dictionary
        dscfg['global_data'].pop('radar_out', None)

        # get start and stop times of new radar object
        dscfg['global_data']['starttime'], dscfg['global_data']['endtime'] = (
            time_avg_range(
                dscfg['timeinfo'], dscfg['global_data']['starttime'],
                dscfg['global_data']['endtime'], period))

        # check if volume time older than starttime
        if dscfg['timeinfo'] > dscfg['global_data']['starttime']:
            dscfg['global_data'].update({'radar_out': radar_aux})

        return new_dataset, ind_rad

    # no more files to process if there is global data pack it up
    if procstatus == 2:
        if dscfg['initialized'] == 0:
            return None, None
        if 'radar_out' not in dscfg['global_data']:
            return None, None

        new_dataset = {
            'radar_out': deepcopy(dscfg['global_data']['radar_out']),
            'timeinfo': dscfg['global_data']['endtime']}

        return new_dataset, ind_rad
def process_colocated_gates(procstatus, dscfg, radar_list=None):
    """
    Find colocated gates within two radars

    Parameters
    ----------
    procstatus : int
        Processing status: 0 initializing, 1 processing volume,
        2 post-processing
    dscfg : dictionary of dictionaries
        data set configuration. Accepted Configuration Keywords::

        datatype : list of string. Dataset keyword
            The input data types
        h_tol : float. Dataset keyword
            Tolerance in altitude difference between radar gates [m].
            Default 100.
        latlon_tol : float. Dataset keyword
            Tolerance in latitude and longitude position between radar gates
            [deg]. Default 0.0005
        vol_d_tol : float. Dataset keyword
            Tolerance in pulse volume diameter [m]. Default 100.
        vismin : float. Dataset keyword
            Minimum visibility [percent]. Default None.
        hmin : float. Dataset keyword
            Minimum altitude [m MSL]. Default None.
        hmax : float. Dataset keyword
            Maximum altitude [m MSL]. Default None.
        rmin : float. Dataset keyword
            Minimum range [m]. Default None.
        rmax : float. Dataset keyword
            Maximum range [m]. Default None.
        elmin : float. Dataset keyword
            Minimum elevation angle [deg]. Default None.
        elmax : float. Dataset keyword
            Maximum elevation angle [deg]. Default None.
        azrad1min : float. Dataset keyword
            Minimum azimuth angle [deg] for radar 1. Default None.
        azrad1max : float. Dataset keyword
            Maximum azimuth angle [deg] for radar 1. Default None.
        azrad2min : float. Dataset keyword
            Minimum azimuth angle [deg] for radar 2. Default None.
        azrad2max : float. Dataset keyword
            Maximum azimuth angle [deg] for radar 2. Default None.
    radar_list : list of Radar objects
        Optional. list of radar objects

    Returns
    -------
    new_dataset : dict
        dictionary with one entry per radar ('RADAR001', 'RADAR002')
        containing the radar object with the colocated-gates flag field
    ind_rad : list of int
        indices of the two radars used
    """
    # only act while processing volumes
    if procstatus != 1:
        return None, None

    # check how many radars are there
    radarnr_dict = dict()
    ind_radar_list = set()
    for datatypedescr in dscfg['datatype']:
        radarnr = datatypedescr.split(':')[0]
        radarnr_dict.update({radarnr: []})
        # radar number is encoded as e.g. 'RADAR001' -> index 0
        ind_radar_list.add(int(radarnr[5:8])-1)

    ind_radar_list = list(ind_radar_list)

    if (len(radarnr_dict) != 2) or (len(radar_list) < 2):
        warn('Intercomparison requires data from two different radars')
        return None, None

    # create the list of data types for each radar
    for datatypedescr in dscfg['datatype']:
        radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr)
        if radarnr in radarnr_dict:
            radarnr_dict[radarnr].append(get_fieldname_pyart(datatype))

    radar1 = radar_list[ind_radar_list[0]]
    radar2 = radar_list[ind_radar_list[1]]

    if radar1 is None or radar2 is None:
        warn('Unable to inter-compare radars. Missing radar')
        # BUGFIX: without this return the code below dereferences a None
        # radar (radar1.metadata) and raises AttributeError
        return None, None

    if 'instrument_name' in radar1.metadata:
        print('Radar 1: '+radar1.metadata['instrument_name'])
    if 'instrument_name' in radar2.metadata:
        print('Radar 2: '+radar2.metadata['instrument_name'])

    coloc_gates_field = 'colocated_gates'

    h_tol = dscfg.get('h_tol', 100.)
    latlon_tol = dscfg.get('latlon_tol', 0.0005)
    vol_d_tol = dscfg.get('vol_d_tol', 100.)
    vismin = dscfg.get('vismin', None)
    hmin = dscfg.get('hmin', None)
    hmax = dscfg.get('hmax', None)
    rmin = dscfg.get('rmin', None)
    rmax = dscfg.get('rmax', None)
    elmin = dscfg.get('elmin', None)
    elmax = dscfg.get('elmax', None)
    azrad1min = dscfg.get('azrad1min', None)
    azrad1max = dscfg.get('azrad1max', None)
    azrad2min = dscfg.get('azrad2min', None)
    azrad2max = dscfg.get('azrad2max', None)

    # visibility filtering is only possible if the visibility field was
    # requested for the corresponding radar
    visib_field = None
    if 'visibility' in radarnr_dict['RADAR'+'{:03d}'.format(
            ind_radar_list[0]+1)]:
        visib_field = 'visibility'
    if vismin is not None and visib_field is None:
        warn('Unable to filter data according to visibility. ' +
             'Visibility field for RADAR'+'{:03d}'.format(
                 ind_radar_list[0]+1)+' not available')

    gate_coloc_rad1_dict = pyart.util.intersection(
        radar1, radar2,
        h_tol=h_tol, latlon_tol=latlon_tol, vol_d_tol=vol_d_tol,
        vismin=vismin, hmin=hmin, hmax=hmax, rmin=rmin, rmax=rmax,
        elmin=elmin, elmax=elmax, azmin=azrad1min, azmax=azrad1max,
        visib_field=visib_field, intersec_field=coloc_gates_field)

    visib_field = None
    if 'visibility' in radarnr_dict['RADAR'+'{:03d}'.format(
            ind_radar_list[1]+1)]:
        visib_field = 'visibility'
    if vismin is not None and visib_field is None:
        warn('Unable to filter data according to visibility. ' +
             'Visibility field for RADAR'+'{:03d}'.format(
                 ind_radar_list[1]+1)+' not available')

    gate_coloc_rad2_dict = pyart.util.intersection(
        radar2, radar1,
        h_tol=h_tol, latlon_tol=latlon_tol, vol_d_tol=vol_d_tol,
        vismin=vismin, hmin=hmin, hmax=hmax, rmin=rmin, rmax=rmax,
        elmin=elmin, elmax=elmax, azmin=azrad2min, azmax=azrad2max,
        visib_field=visib_field, intersec_field=coloc_gates_field)

    # attach the intersection flags to field-free copies of the radars
    new_rad1 = deepcopy(radar1)
    new_rad1.fields = dict()
    new_rad1.add_field('colocated_gates', gate_coloc_rad1_dict)

    new_rad2 = deepcopy(radar2)
    new_rad2.fields = dict()
    new_rad2.add_field('colocated_gates', gate_coloc_rad2_dict)

    # refine to actually colocated gates (within h/latlon tolerances)
    coloc_rad1_dict, new_rad1.fields['colocated_gates'] = (
        pyart.util.colocated_gates(
            new_rad1, new_rad2, h_tol=h_tol,
            latlon_tol=latlon_tol, coloc_gates_field=coloc_gates_field))

    coloc_rad2_dict, new_rad2.fields['colocated_gates'] = (
        pyart.util.colocated_gates(
            new_rad2, new_rad1, h_tol=h_tol,
            latlon_tol=latlon_tol, coloc_gates_field=coloc_gates_field))

    # prepare output
    rad1_dict = {
        'coloc_dict': coloc_rad1_dict,
        'radar_out': new_rad1}

    rad2_dict = {
        'coloc_dict': coloc_rad2_dict,
        'radar_out': new_rad2}

    new_dataset = {
        'RADAR'+'{:03d}'.format(ind_radar_list[0]+1): rad1_dict,
        'RADAR'+'{:03d}'.format(ind_radar_list[1]+1): rad2_dict}

    return new_dataset, ind_radar_list
def process_intercomp(procstatus, dscfg, radar_list=None):
    """
    intercomparison between two radars

    Parameters
    ----------
    procstatus : int
        Processing status: 0 initializing, 1 processing volume,
        2 post-processing
    dscfg : dictionary of dictionaries
        data set configuration. Accepted Configuration Keywords::

        datatype : list of string. Dataset keyword
            The input data types
        coloc_data_dir : string. Dataset keyword
            name of the directory containing the csv file with colocated data
        coloc_radars_name : string. Dataset keyword
            string identifying the radar names
        azi_tol : float. Dataset keyword
            azimuth tolerance between the two radars. Default 0.5 deg
        ele_tol : float. Dataset keyword
            elevation tolerance between the two radars. Default 0.5 deg
        rng_tol : float. Dataset keyword
            range tolerance between the two radars. Default 50 m
    radar_list : list of Radar objects
        Optional. list of radar objects

    Returns
    -------
    new_dataset : dict
        dictionary containing a dictionary with intercomparison data and the
        key "final" which contains a boolean that is true when all volumes
        have been processed
    ind_rad : int
        radar index
    """
    if procstatus == 0:
        # initialization: load the table of colocated gates from file and
        # store it in global_data for use during volume processing
        savedir = dscfg['colocgatespath']+dscfg['coloc_radars_name']+'/'

        prdtype = 'info'
        if 'prdtype' in dscfg:
            prdtype = dscfg['prdtype']

        fname = make_filename(
            prdtype, 'COLOCATED_GATES', dscfg['coloc_radars_name'], ['csv'],
            timeinfo=None)[0]

        (rad1_ray_ind, rad1_rng_ind, rad1_ele, rad1_azi, rad1_rng,
         rad2_ray_ind, rad2_rng_ind, rad2_ele, rad2_azi, rad2_rng) = (
             read_colocated_gates(savedir+fname))

        if rad1_ele is None:
            raise ValueError('Unable to intercompare radars. ' +
                             'Missing colocated gates file')

        dscfg['global_data'] = {
            'rad1_ray_ind': rad1_ray_ind,
            'rad1_rng_ind': rad1_rng_ind,
            'rad1_ele': rad1_ele,
            'rad1_azi': rad1_azi,
            'rad1_rng': rad1_rng,
            'rad2_ray_ind': rad2_ray_ind,
            'rad2_rng_ind': rad2_rng_ind,
            'rad2_ele': rad2_ele,
            'rad2_azi': rad2_azi,
            'rad2_rng': rad2_rng}

        return None, None

    if procstatus == 1:
        # check how many radars are there
        radarnr_dict = dict()
        ind_radar_list = set()
        for datatypedescr in dscfg['datatype']:
            radarnr = datatypedescr.split(':')[0]
            radarnr_dict.update({radarnr: []})
            ind_radar_list.add(int(radarnr[5:8])-1)

        ind_radar_list = list(ind_radar_list)

        if (len(radarnr_dict) != 2) or (len(radar_list) < 2):
            warn('Intercomparison requires data from two different radars')
            return None, None

        # get the field name from the first data type descriptor
        for datatypedescr in dscfg['datatype']:
            radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr)
            field_name = get_fieldname_pyart(datatype)
            break

        radar1 = radar_list[ind_radar_list[0]]
        radar2 = radar_list[ind_radar_list[1]]

        if radar1 is None or radar2 is None:
            warn('Unable to inter-compare radars. Missing radar')
            return None, None

        if ((field_name not in radar1.fields) or
                (field_name not in radar2.fields)):
            warn('Unable to get values of field '+field_name +
                 ' at colocated range bins. ' +
                 'Field missing in one of the radars')
            return None, None

        if not dscfg['initialized']:
            dscfg['global_data'].update({'timeinfo': dscfg['timeinfo']})
            dscfg['global_data'].update(
                {'rad1_name': dscfg['RadarName'][ind_radar_list[0]]})
            dscfg['global_data'].update(
                {'rad2_name': dscfg['RadarName'][ind_radar_list[1]]})
            dscfg['initialized'] = 1

        rad1_field = radar1.fields[field_name]['data']
        rad2_field = radar2.fields[field_name]['data']

        intercomp_dict = {
            'rad1_time': [],
            'rad1_ray_ind': [],
            'rad1_rng_ind': [],
            'rad1_ele': [],
            'rad1_azi': [],
            'rad1_rng': [],
            'rad1_val': [],
            'rad2_time': [],
            'rad2_ray_ind': [],
            'rad2_rng_ind': [],
            'rad2_ele': [],
            'rad2_azi': [],
            'rad2_rng': [],
            'rad2_val': []}

        # determine if radar data has to be averaged
        avg_rad1, avg_rad2, avg_rad_lim = get_range_bins_to_avg(
            radar1.range['data'], radar2.range['data'])

        # rays are indexed to regular grid
        rays_are_indexed = dscfg.get('rays_are_indexed', False)
        if not rays_are_indexed:
            azi_tol = dscfg.get('azi_tol', 0.5)
            ele_tol = dscfg.get('ele_tol', 0.5)
            rng_tol = dscfg.get('rng_tol', 50.)

            rad1_ray_ind, rad1_rng_ind, rad2_ray_ind, rad2_rng_ind = (
                find_colocated_indexes(
                    radar1, radar2, dscfg['global_data']['rad1_ele'],
                    dscfg['global_data']['rad1_azi'],
                    dscfg['global_data']['rad1_rng'],
                    dscfg['global_data']['rad2_ele'],
                    dscfg['global_data']['rad2_azi'],
                    dscfg['global_data']['rad2_rng'], ele_tol=ele_tol,
                    azi_tol=azi_tol, rng_tol=rng_tol))
        else:
            rad1_ray_ind = deepcopy(dscfg['global_data']['rad1_ray_ind'])
            rad1_rng_ind = deepcopy(dscfg['global_data']['rad1_rng_ind'])
            rad2_ray_ind = deepcopy(dscfg['global_data']['rad2_ray_ind'])
            rad2_rng_ind = deepcopy(dscfg['global_data']['rad2_rng_ind'])

        # keep only indices of valid gates
        val1_vec = rad1_field[rad1_ray_ind, rad1_rng_ind]
        # BUGFIX: radar 2 values must be sampled with radar 2's own indices;
        # the radars have different geometries so using the radar 1 indices
        # here picked the wrong gates (cf. process_intercomp_time_avg)
        val2_vec = rad2_field[rad2_ray_ind, rad2_rng_ind]

        mask_val1 = np.ma.getmaskarray(val1_vec)
        mask_val2 = np.ma.getmaskarray(val2_vec)

        isvalid = np.logical_not(np.logical_or(mask_val1, mask_val2))

        rad1_ray_ind = rad1_ray_ind[isvalid]
        rad1_rng_ind = rad1_rng_ind[isvalid]
        rad2_ray_ind = rad2_ray_ind[isvalid]
        rad2_rng_ind = rad2_rng_ind[isvalid]

        # if averaging required loop over valid gates and average
        if avg_rad1:
            ngates_valid = len(rad1_ray_ind)
            val1_vec = np.ma.masked_all(ngates_valid, dtype=float)
            is_valid_avg = np.zeros(ngates_valid, dtype=bool)
            for i in range(ngates_valid):
                # skip gates whose averaging window falls outside the radar
                if rad1_rng_ind[i]+avg_rad_lim[1] >= radar1.ngates:
                    continue
                if rad1_rng_ind[i]+avg_rad_lim[0] < 0:
                    continue
                ind_rng = list(range(rad1_rng_ind[i]+avg_rad_lim[0],
                                     rad1_rng_ind[i]+avg_rad_lim[1]+1))

                # average only if all gates in the window are valid
                if np.any(np.ma.getmaskarray(
                        rad1_field[rad1_ray_ind[i], ind_rng])):
                    continue

                val1_vec[i] = np.ma.asarray(np.ma.mean(
                    rad1_field[rad1_ray_ind[i], ind_rng]))
                is_valid_avg[i] = True

            rad1_ray_ind = rad1_ray_ind[is_valid_avg]
            rad1_rng_ind = rad1_rng_ind[is_valid_avg]
            rad2_ray_ind = rad2_ray_ind[is_valid_avg]
            rad2_rng_ind = rad2_rng_ind[is_valid_avg]

            val1_vec = val1_vec[is_valid_avg]
            val2_vec = rad2_field[rad2_ray_ind, rad2_rng_ind]
        elif avg_rad2:
            ngates_valid = len(rad2_ray_ind)
            val2_vec = np.ma.masked_all(ngates_valid, dtype=float)
            is_valid_avg = np.zeros(ngates_valid, dtype=bool)
            for i in range(ngates_valid):
                if rad2_rng_ind[i]+avg_rad_lim[1] >= radar2.ngates:
                    continue
                if rad2_rng_ind[i]+avg_rad_lim[0] < 0:
                    continue
                ind_rng = list(range(rad2_rng_ind[i]+avg_rad_lim[0],
                                     rad2_rng_ind[i]+avg_rad_lim[1]+1))

                if np.any(np.ma.getmaskarray(
                        rad2_field[rad2_ray_ind[i], ind_rng])):
                    continue

                val2_vec[i] = np.ma.asarray(np.ma.mean(
                    rad2_field[rad2_ray_ind[i], ind_rng]))
                is_valid_avg[i] = True

            rad1_ray_ind = rad1_ray_ind[is_valid_avg]
            rad1_rng_ind = rad1_rng_ind[is_valid_avg]
            rad2_ray_ind = rad2_ray_ind[is_valid_avg]
            rad2_rng_ind = rad2_rng_ind[is_valid_avg]

            val2_vec = val2_vec[is_valid_avg]
            val1_vec = rad1_field[rad1_ray_ind, rad1_rng_ind]
        else:
            val1_vec = val1_vec[isvalid]
            val2_vec = val2_vec[isvalid]

        intercomp_dict['rad1_time'] = num2date(
            radar1.time['data'][rad1_ray_ind], radar1.time['units'],
            radar1.time['calendar'])
        intercomp_dict['rad1_ray_ind'] = rad1_ray_ind
        intercomp_dict['rad1_rng_ind'] = rad1_rng_ind
        intercomp_dict['rad1_ele'] = radar1.elevation['data'][rad1_ray_ind]
        intercomp_dict['rad1_azi'] = radar1.azimuth['data'][rad1_ray_ind]
        intercomp_dict['rad1_rng'] = radar1.range['data'][rad1_rng_ind]
        intercomp_dict['rad1_val'] = val1_vec

        intercomp_dict['rad2_time'] = num2date(
            radar2.time['data'][rad2_ray_ind], radar2.time['units'],
            radar2.time['calendar'])
        intercomp_dict['rad2_ray_ind'] = rad2_ray_ind
        intercomp_dict['rad2_rng_ind'] = rad2_rng_ind
        intercomp_dict['rad2_ele'] = radar2.elevation['data'][rad2_ray_ind]
        intercomp_dict['rad2_azi'] = radar2.azimuth['data'][rad2_ray_ind]
        intercomp_dict['rad2_rng'] = radar2.range['data'][rad2_rng_ind]
        intercomp_dict['rad2_val'] = val2_vec

        new_dataset = {'intercomp_dict': intercomp_dict,
                       'timeinfo': dscfg['global_data']['timeinfo'],
                       'final': False}

        return new_dataset, None

    if procstatus == 2:
        # post-processing: read back the accumulated colocated data
        savedir = get_save_dir(
            dscfg['basepath'], dscfg['procname'], dscfg['dsname'],
            dscfg['coloc_data_dir'],
            timeinfo=dscfg['global_data']['timeinfo'], create_dir=False)

        # NOTE(review): the file name is hard-coded to 'dBZc' even though
        # any field may be intercompared - confirm against the data writer
        fname = make_filename(
            'colocated_data', dscfg['type'], 'dBZc', ['csv'],
            timeinfo=dscfg['global_data']['timeinfo'], timeformat='%Y%m%d')

        fname = savedir+fname[0]
        coloc_data = read_colocated_data(fname)

        intercomp_dict = {
            'rad1_name': dscfg['global_data']['rad1_name'],
            'rad1_time': coloc_data[0],
            'rad1_ray_ind': coloc_data[1],
            'rad1_rng_ind': coloc_data[2],
            'rad1_ele': coloc_data[3],
            'rad1_azi': coloc_data[4],
            'rad1_rng': coloc_data[5],
            'rad1_val': coloc_data[6],
            'rad2_name': dscfg['global_data']['rad2_name'],
            'rad2_time': coloc_data[7],
            'rad2_ray_ind': coloc_data[8],
            'rad2_rng_ind': coloc_data[9],
            'rad2_ele': coloc_data[10],
            'rad2_azi': coloc_data[11],
            'rad2_rng': coloc_data[12],
            'rad2_val': coloc_data[13]}

        new_dataset = {'intercomp_dict': intercomp_dict,
                       'timeinfo': dscfg['global_data']['timeinfo'],
                       'final': True}

        return new_dataset, None

    # unexpected procstatus: keep the documented 2-tuple contract
    return None, None
def process_intercomp_time_avg(procstatus, dscfg, radar_list=None):
    """
    intercomparison between the average reflectivity of two radars

    Parameters
    ----------
    procstatus : int
        Processing status: 0 initializing, 1 processing volume,
        2 post-processing
    dscfg : dictionary of dictionaries
        data set configuration. Accepted Configuration Keywords::

        datatype : list of string. Dataset keyword
            The input data types
        coloc_data_dir : string. Dataset keyword
            name of the directory containing the csv file with colocated data
        coloc_radars_name : string. Dataset keyword
            string identifying the radar names
        azi_tol : float. Dataset keyword
            azimuth tolerance between the two radars. Default 0.5 deg
        ele_tol : float. Dataset keyword
            elevation tolerance between the two radars. Default 0.5 deg
        rng_tol : float. Dataset keyword
            range tolerance between the two radars. Default 50 m
        clt_max : int. Dataset keyword
            maximum number of samples that can be clutter contaminated.
            Default 100 i.e. all
        phi_excess_max : int. Dataset keyword
            maximum number of samples that can have excess instantaneous
            PhiDP. Default 100 i.e. all
        non_rain_max : int. Dataset keyword
            maximum number of samples that can be no rain. Default 100 i.e. all
        phi_avg_max : float. Dataset keyword
            maximum average PhiDP allowed. Default 600 deg i.e. any
    radar_list : list of Radar objects
        Optional. list of radar objects

    Returns
    -------
    new_dataset : dict
        dictionary containing a dictionary with intercomparison data and the
        key "final" which contains a boolean that is true when all volumes
        have been processed
    ind_rad : int
        radar index
    """
    if procstatus == 0:
        # initialization: load the table of colocated gates from file
        savedir = dscfg['colocgatespath']+dscfg['coloc_radars_name']+'/'

        prdtype = 'info'
        if 'prdtype' in dscfg:
            prdtype = dscfg['prdtype']

        fname = make_filename(
            prdtype, 'COLOCATED_GATES', dscfg['coloc_radars_name'], ['csv'],
            timeinfo=None)[0]

        (rad1_ray_ind, rad1_rng_ind, rad1_ele, rad1_azi, rad1_rng,
         rad2_ray_ind, rad2_rng_ind, rad2_ele, rad2_azi, rad2_rng) = (
             read_colocated_gates(savedir+fname))

        if rad1_ele is None:
            raise ValueError('Unable to intercompare radars. ' +
                             'Missing colocated gates file')

        dscfg['global_data'] = {
            'rad1_ray_ind': rad1_ray_ind,
            'rad1_rng_ind': rad1_rng_ind,
            'rad1_ele': rad1_ele,
            'rad1_azi': rad1_azi,
            'rad1_rng': rad1_rng,
            'rad2_ray_ind': rad2_ray_ind,
            'rad2_rng_ind': rad2_rng_ind,
            'rad2_ele': rad2_ele,
            'rad2_azi': rad2_azi,
            'rad2_rng': rad2_rng}

        return None, None

    if procstatus == 1:
        # check how many radars are there
        ind_radar_list = set()
        for datatypedescr in dscfg['datatype']:
            radarnr = datatypedescr.split(':')[0]
            ind_radar_list.add(int(radarnr[5:8])-1)

        ind_radar_list = list(ind_radar_list)

        if (len(ind_radar_list) != 2) or (len(radar_list) < 2):
            warn('Intercomparison requires data from two different radars')
            return None, None

        radarnr_list = ['RADAR'+'{:03d}'.format(ind_radar_list[0]+1),
                        'RADAR'+'{:03d}'.format(ind_radar_list[1]+1)]

        # get field names (reflectivity, PhiDP and time-average flag
        # are expected for each radar)
        for datatypedescr in dscfg['datatype']:
            radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr)
            if radarnr == radarnr_list[0]:
                if (datatype in (
                        'dBZ', 'dBZc', 'dBuZ', 'dBZv', 'dBZvc', 'dBuZv')):
                    rad1_refl_field = get_fieldname_pyart(datatype)
                elif datatype in ('PhiDP', 'PhiDPc'):
                    rad1_phidp_field = get_fieldname_pyart(datatype)
                elif datatype == 'time_avg_flag':
                    rad1_flag_field = get_fieldname_pyart(datatype)
            elif radarnr == radarnr_list[1]:
                if (datatype in (
                        'dBZ', 'dBZc', 'dBuZ', 'dBZv', 'dBZvc', 'dBuZv')):
                    rad2_refl_field = get_fieldname_pyart(datatype)
                elif datatype in ('PhiDP', 'PhiDPc'):
                    rad2_phidp_field = get_fieldname_pyart(datatype)
                elif datatype == 'time_avg_flag':
                    rad2_flag_field = get_fieldname_pyart(datatype)

        radar1 = radar_list[ind_radar_list[0]]
        radar2 = radar_list[ind_radar_list[1]]

        if radar1 is None or radar2 is None:
            warn('Unable to inter-compare radars. Missing radar')
            return None, None

        if ((rad1_refl_field not in radar1.fields) or
                (rad1_phidp_field not in radar1.fields) or
                (rad1_flag_field not in radar1.fields) or
                (rad2_refl_field not in radar2.fields) or
                (rad2_phidp_field not in radar2.fields) or
                (rad2_flag_field not in radar2.fields)):
            warn('Unable to compare radar time avg fields. ' +
                 'Fields missing')
            return None, None

        if not dscfg['initialized']:
            dscfg['global_data'].update({'timeinfo': dscfg['timeinfo']})
            dscfg['global_data'].update(
                {'rad1_name': dscfg['RadarName'][ind_radar_list[0]]})
            dscfg['global_data'].update(
                {'rad2_name': dscfg['RadarName'][ind_radar_list[1]]})
            dscfg['initialized'] = 1

        refl1 = radar1.fields[rad1_refl_field]['data']
        refl2 = radar2.fields[rad2_refl_field]['data']

        phidp1 = radar1.fields[rad1_phidp_field]['data']
        phidp2 = radar2.fields[rad2_phidp_field]['data']

        flag1 = radar1.fields[rad1_flag_field]['data']
        flag2 = radar2.fields[rad2_flag_field]['data']

        intercomp_dict = {
            'rad1_time': [],
            'rad1_ray_ind': [],
            'rad1_rng_ind': [],
            'rad1_ele': [],
            'rad1_azi': [],
            'rad1_rng': [],
            'rad1_dBZavg': [],
            'rad1_PhiDPavg': [],
            'rad1_Flagavg': [],
            'rad2_time': [],
            'rad2_ray_ind': [],
            'rad2_rng_ind': [],
            'rad2_ele': [],
            'rad2_azi': [],
            'rad2_rng': [],
            'rad2_dBZavg': [],
            'rad2_PhiDPavg': [],
            'rad2_Flagavg': []}

        # determine if radar data has to be averaged
        avg_rad1, avg_rad2, avg_rad_lim = get_range_bins_to_avg(
            radar1.range['data'], radar2.range['data'])

        # rays are indexed to regular grid
        rays_are_indexed = dscfg.get('rays_are_indexed', False)

        # get current radars gates indices
        if not rays_are_indexed:
            azi_tol = dscfg.get('azi_tol', 0.5)
            ele_tol = dscfg.get('ele_tol', 0.5)
            rng_tol = dscfg.get('rng_tol', 50.)

            rad1_ray_ind, rad1_rng_ind, rad2_ray_ind, rad2_rng_ind = (
                find_colocated_indexes(
                    radar1, radar2, dscfg['global_data']['rad1_ele'],
                    dscfg['global_data']['rad1_azi'],
                    dscfg['global_data']['rad1_rng'],
                    dscfg['global_data']['rad2_ele'],
                    dscfg['global_data']['rad2_azi'],
                    dscfg['global_data']['rad2_rng'], ele_tol=ele_tol,
                    azi_tol=azi_tol, rng_tol=rng_tol))
        else:
            rad1_ray_ind = deepcopy(dscfg['global_data']['rad1_ray_ind'])
            rad1_rng_ind = deepcopy(dscfg['global_data']['rad1_rng_ind'])
            rad2_ray_ind = deepcopy(dscfg['global_data']['rad2_ray_ind'])
            rad2_rng_ind = deepcopy(dscfg['global_data']['rad2_rng_ind'])

        # keep only indices and data of valid gates
        refl1_vec = refl1[rad1_ray_ind, rad1_rng_ind]
        phidp1_vec = phidp1[rad1_ray_ind, rad1_rng_ind]
        flag1_vec = flag1[rad1_ray_ind, rad1_rng_ind]
        refl2_vec = refl2[rad2_ray_ind, rad2_rng_ind]
        phidp2_vec = phidp2[rad2_ray_ind, rad2_rng_ind]
        flag2_vec = flag2[rad2_ray_ind, rad2_rng_ind]

        mask_refl1 = np.ma.getmaskarray(refl1_vec)
        mask_phidp1 = np.ma.getmaskarray(phidp1_vec)
        mask_refl2 = np.ma.getmaskarray(refl2_vec)
        mask_phidp2 = np.ma.getmaskarray(phidp2_vec)

        isvalid = np.logical_not(
            np.logical_or(np.logical_or(mask_refl1, mask_refl2),
                          np.logical_or(mask_phidp1, mask_phidp2)))

        rad1_ray_ind = rad1_ray_ind[isvalid]
        rad1_rng_ind = rad1_rng_ind[isvalid]
        rad2_ray_ind = rad2_ray_ind[isvalid]
        rad2_rng_ind = rad2_rng_ind[isvalid]

        # if averaging required loop over valid gates and average
        # only if all gates valid
        if avg_rad1:
            ngates_valid = len(rad1_ray_ind)
            refl1_vec = np.ma.masked_all(ngates_valid, dtype=float)
            phidp1_vec = np.ma.masked_all(ngates_valid, dtype=float)
            flag1_vec = np.ma.masked_all(ngates_valid, dtype=int)
            is_valid_avg = np.zeros(ngates_valid, dtype=bool)
            for i in range(ngates_valid):
                if rad1_rng_ind[i]+avg_rad_lim[1] >= radar1.ngates:
                    continue
                if rad1_rng_ind[i]+avg_rad_lim[0] < 0:
                    continue
                ind_rng = list(range(rad1_rng_ind[i]+avg_rad_lim[0],
                                     rad1_rng_ind[i]+avg_rad_lim[1]+1))

                if np.any(np.ma.getmaskarray(
                        refl1[rad1_ray_ind[i], ind_rng])):
                    continue
                if np.any(np.ma.getmaskarray(
                        phidp1[rad1_ray_ind[i], ind_rng])):
                    continue

                refl1_vec[i] = np.ma.asarray(np.ma.mean(
                    refl1[rad1_ray_ind[i], ind_rng]))
                phidp1_vec[i] = np.ma.asarray(np.ma.mean(
                    phidp1[rad1_ray_ind[i], ind_rng]))

                # the flag is a decimal-packed set of counters:
                # units/tens = excess PhiDP samples, hundreds/thousands =
                # clutter samples, ten-thousands+ = precipitation samples.
                # Keep the worst (max) of each counter over the window.
                rad1_flag = flag1[rad1_ray_ind[i], ind_rng]

                rad1_excess_phi = rad1_flag % 100
                rad1_clt = ((rad1_flag-rad1_excess_phi) % 10000) / 100
                rad1_prec = (
                    ((rad1_flag-rad1_clt*100-rad1_excess_phi) % 1000000) /
                    10000)

                flag1_vec[i] = int(
                    10000*np.max(rad1_prec)+100*np.max(rad1_clt) +
                    np.max(rad1_excess_phi))
                is_valid_avg[i] = True

            rad1_ray_ind = rad1_ray_ind[is_valid_avg]
            rad1_rng_ind = rad1_rng_ind[is_valid_avg]
            rad2_ray_ind = rad2_ray_ind[is_valid_avg]
            rad2_rng_ind = rad2_rng_ind[is_valid_avg]

            refl1_vec = refl1_vec[is_valid_avg]
            phidp1_vec = phidp1_vec[is_valid_avg]
            flag1_vec = flag1_vec[is_valid_avg]

            refl2_vec = refl2[rad2_ray_ind, rad2_rng_ind]
            phidp2_vec = phidp2[rad2_ray_ind, rad2_rng_ind]
            flag2_vec = flag2[rad2_ray_ind, rad2_rng_ind]
        elif avg_rad2:
            ngates_valid = len(rad2_ray_ind)
            refl2_vec = np.ma.masked_all(ngates_valid, dtype=float)
            phidp2_vec = np.ma.masked_all(ngates_valid, dtype=float)
            flag2_vec = np.ma.masked_all(ngates_valid, dtype=int)
            is_valid_avg = np.zeros(ngates_valid, dtype=bool)
            for i in range(ngates_valid):
                if rad2_rng_ind[i]+avg_rad_lim[1] >= radar2.ngates:
                    continue
                if rad2_rng_ind[i]+avg_rad_lim[0] < 0:
                    continue
                ind_rng = list(range(rad2_rng_ind[i]+avg_rad_lim[0],
                                     rad2_rng_ind[i]+avg_rad_lim[1]+1))

                if np.any(np.ma.getmaskarray(
                        refl2[rad2_ray_ind[i], ind_rng])):
                    continue
                if np.any(np.ma.getmaskarray(
                        phidp2[rad2_ray_ind[i], ind_rng])):
                    continue

                refl2_vec[i] = np.ma.asarray(np.ma.mean(
                    refl2[rad2_ray_ind[i], ind_rng]))
                phidp2_vec[i] = np.ma.asarray(np.ma.mean(
                    phidp2[rad2_ray_ind[i], ind_rng]))

                rad2_flag = flag2[rad2_ray_ind[i], ind_rng]

                rad2_excess_phi = rad2_flag % 100
                rad2_clt = ((rad2_flag-rad2_excess_phi) % 10000) / 100
                rad2_prec = (
                    ((rad2_flag-rad2_clt*100-rad2_excess_phi) % 1000000) /
                    10000)

                flag2_vec[i] = int(
                    10000*np.max(rad2_prec)+100*np.max(rad2_clt) +
                    np.max(rad2_excess_phi))
                is_valid_avg[i] = True

            rad1_ray_ind = rad1_ray_ind[is_valid_avg]
            rad1_rng_ind = rad1_rng_ind[is_valid_avg]
            rad2_ray_ind = rad2_ray_ind[is_valid_avg]
            rad2_rng_ind = rad2_rng_ind[is_valid_avg]

            refl2_vec = refl2_vec[is_valid_avg]
            phidp2_vec = phidp2_vec[is_valid_avg]
            flag2_vec = flag2_vec[is_valid_avg]

            refl1_vec = refl1[rad1_ray_ind, rad1_rng_ind]
            phidp1_vec = phidp1[rad1_ray_ind, rad1_rng_ind]
            flag1_vec = flag1[rad1_ray_ind, rad1_rng_ind]
        else:
            refl1_vec = refl1_vec[isvalid]
            phidp1_vec = phidp1_vec[isvalid]
            flag1_vec = flag1_vec[isvalid]
            refl2_vec = refl2_vec[isvalid]
            phidp2_vec = phidp2_vec[isvalid]
            flag2_vec = flag2_vec[isvalid]

        intercomp_dict['rad1_time'] = np.empty(
            len(rad1_ray_ind), dtype=datetime.datetime)
        intercomp_dict['rad1_time'][:] = dscfg['global_data']['timeinfo']
        intercomp_dict['rad1_ray_ind'] = rad1_ray_ind
        intercomp_dict['rad1_rng_ind'] = rad1_rng_ind
        intercomp_dict['rad1_ele'] = radar1.elevation['data'][rad1_ray_ind]
        intercomp_dict['rad1_azi'] = radar1.azimuth['data'][rad1_ray_ind]
        intercomp_dict['rad1_rng'] = radar1.range['data'][rad1_rng_ind]
        intercomp_dict['rad1_dBZavg'] = refl1_vec
        intercomp_dict['rad1_PhiDPavg'] = phidp1_vec
        intercomp_dict['rad1_Flagavg'] = flag1_vec

        intercomp_dict['rad2_time'] = deepcopy(intercomp_dict['rad1_time'])
        intercomp_dict['rad2_ray_ind'] = rad2_ray_ind
        intercomp_dict['rad2_rng_ind'] = rad2_rng_ind
        intercomp_dict['rad2_ele'] = radar2.elevation['data'][rad2_ray_ind]
        intercomp_dict['rad2_azi'] = radar2.azimuth['data'][rad2_ray_ind]
        intercomp_dict['rad2_rng'] = radar2.range['data'][rad2_rng_ind]
        intercomp_dict['rad2_dBZavg'] = refl2_vec
        intercomp_dict['rad2_PhiDPavg'] = phidp2_vec
        intercomp_dict['rad2_Flagavg'] = flag2_vec

        new_dataset = {'intercomp_dict': intercomp_dict,
                       'timeinfo': dscfg['global_data']['timeinfo'],
                       'final': False}

        return new_dataset, None

    if procstatus == 2:
        # get field name
        refl_type = None
        for datatypedescr in dscfg['datatype']:
            radarnr, _, datatype, _, _ = get_datatype_fields(datatypedescr)
            if datatype in ('dBZ', 'dBZc', 'dBuZ', 'dBZv', 'dBZvc', 'dBuZv'):
                refl_type = datatype
                break

        if refl_type is None:
            warn('Unknown reflectivity type')
            return None, None

        savedir = get_save_dir(
            dscfg['basepath'], dscfg['procname'], dscfg['dsname'],
            dscfg['coloc_data_dir'],
            timeinfo=dscfg['global_data']['timeinfo'], create_dir=False)

        fname = make_filename(
            'colocated_data', dscfg['type'], refl_type, ['csv'],
            timeinfo=dscfg['global_data']['timeinfo'], timeformat='%Y%m%d')

        fname = savedir+fname[0]

        (rad1_time, rad1_ray_ind, rad1_rng_ind, rad1_ele, rad1_azi, rad1_rng,
         rad1_dBZ, rad1_phi, rad1_flag, rad2_time, rad2_ray_ind, rad2_rng_ind,
         rad2_ele, rad2_azi, rad2_rng, rad2_dBZ, rad2_phi, rad2_flag) = (
             read_colocated_data_time_avg(fname))

        # unpack the decimal-packed flag counters
        rad1_excess_phi = (rad1_flag % 100).astype(int)
        rad2_excess_phi = (rad2_flag % 100).astype(int)

        rad1_clt = (((rad1_flag-rad1_excess_phi) % 10000) / 100).astype(int)
        rad2_clt = (((rad2_flag-rad2_excess_phi) % 10000) / 100).astype(int)

        rad1_non_rain = (
            ((rad1_flag-rad1_clt*100-rad1_excess_phi) % 1000000) /
            10000).astype(int)
        rad2_non_rain = (
            ((rad2_flag-rad2_clt*100-rad2_excess_phi) % 1000000) /
            10000).astype(int)

        clt_max = dscfg.get('clt_max', 100)
        phi_excess_max = dscfg.get('phi_excess_max', 100)
        non_rain_max = dscfg.get('non_rain_max', 100)
        phi_avg_max = dscfg.get('phi_avg_max', 600.)

        # filter out invalid data
        ind_val = np.where(
            np.logical_and.reduce((
                rad1_clt <= clt_max, rad2_clt <= clt_max,
                rad1_excess_phi <= phi_excess_max,
                rad2_excess_phi <= phi_excess_max,
                rad1_non_rain <= non_rain_max, rad2_non_rain <= non_rain_max,
                rad1_phi <= phi_avg_max, rad2_phi <= phi_avg_max)))[0]

        intercomp_dict = {
            'rad1_name': dscfg['global_data']['rad1_name'],
            'rad1_time': rad1_time[ind_val],
            'rad1_ray_ind': rad1_ray_ind[ind_val],
            'rad1_rng_ind': rad1_rng_ind[ind_val],
            'rad1_ele': rad1_ele[ind_val],
            'rad1_azi': rad1_azi[ind_val],
            'rad1_rng': rad1_rng[ind_val],
            'rad1_val': rad1_dBZ[ind_val],
            'rad2_name': dscfg['global_data']['rad2_name'],
            # BUGFIX: radar 2 indices were filled from the radar 1 arrays
            # (copy-paste error); use the radar 2 indices read from file
            'rad2_ray_ind': rad2_ray_ind[ind_val],
            'rad2_rng_ind': rad2_rng_ind[ind_val],
            'rad2_ele': rad2_ele[ind_val],
            'rad2_azi': rad2_azi[ind_val],
            'rad2_rng': rad2_rng[ind_val],
            'rad2_val': rad2_dBZ[ind_val]}

        new_dataset = {'intercomp_dict': intercomp_dict,
                       'timeinfo': dscfg['global_data']['timeinfo'],
                       'final': True}

        return new_dataset, None

    # unexpected procstatus: keep the documented 2-tuple contract
    return None, None
def process_fields_diff(procstatus, dscfg, radar_list=None):
    """
    Computes the field difference between RADAR001 and radar002,
    i.e. RADAR001-RADAR002. Assumes both radars have the same geometry

    Parameters
    ----------
    procstatus : int
        Processing status: 0 initializing, 1 processing volume,
        2 post-processing
    dscfg : dictionary of dictionaries
        data set configuration. Accepted Configuration Keywords::

        datatype : list of string. Dataset keyword
            The input data types
    radar_list : list of Radar objects
        Optional. list of radar objects

    Returns
    -------
    new_dataset : dict
        dictionary containing a radar object containing the field differences
    ind_rad : int
        radar index
    """
    # only act while processing volumes
    if procstatus != 1:
        return None, None

    # check how many radars are there
    radarnr_dict = dict()
    ind_radar_list = set()
    for datatypedescr in dscfg['datatype']:
        radarnr = datatypedescr.split(':')[0]
        radarnr_dict.update({radarnr: []})
        ind_radar_list.add(int(radarnr[5:8])-1)

    ind_radar_list = list(ind_radar_list)

    if (len(radarnr_dict) != 2) or (len(radar_list) < 2):
        warn('Intercomparison requires data from two different radars')
        return None, None

    # get the field name for each radar (first descriptor -> radar 1,
    # second descriptor -> radar 2)
    radarnr, _, datatype, _, _ = get_datatype_fields(dscfg['datatype'][0])
    field_name_1 = get_fieldname_pyart(datatype)
    radarnr, _, datatype, _, _ = get_datatype_fields(dscfg['datatype'][1])
    field_name_2 = get_fieldname_pyart(datatype)

    radar1 = radar_list[ind_radar_list[0]]
    radar2 = radar_list[ind_radar_list[1]]

    if radar1 is None or radar2 is None:
        warn('Unable to inter-compare radars. Missing radar')
        return None, None

    if ((field_name_1 not in radar1.fields) or
            (field_name_2 not in radar2.fields)):
        # BUGFIX: the message was missing the space before 'and', producing
        # e.g. 'fields reflectivityand ...' (cf. process_intercomp_fields)
        warn('Unable to compare fields '+field_name_1+' and '+field_name_2 +
             '. Field missing in one of the radars')
        return None, None

    # compute the point-wise difference (assumes identical geometry)
    field_diff = pyart.config.get_metadata('fields_difference')
    field_diff['data'] = (
        radar1.fields[field_name_1]['data'] -
        radar2.fields[field_name_2]['data'])
    field_diff['long_name'] = field_name_1+' - '+field_name_2

    rad_diff = deepcopy(radar1)
    rad_diff.fields = dict()
    rad_diff.add_field('fields_difference', field_diff)

    new_dataset = {'radar_out': rad_diff}

    return new_dataset, None
def process_intercomp_fields(procstatus, dscfg, radar_list=None):
    """
    intercomparison between two radars

    Parameters
    ----------
    procstatus : int
        Processing status: 0 initializing, 1 processing volume,
        2 post-processing
    dscfg : dictionary of dictionaries
        data set configuration. Accepted Configuration Keywords::

        datatype : list of string. Dataset keyword
            The input data types
    radar_list : list of Radar objects
        Optional. list of radar objects

    Returns
    -------
    new_dataset : dict
        dictionary containing a dictionary with intercomparison data
    ind_rad : int
        radar index

    """
    if procstatus != 1:
        return None, None

    # identify the radars referenced by the data type descriptors
    # (descriptors are of the form 'RADARnnn:...'; chars [5:8] hold nnn)
    radarnr_dict = dict()
    ind_radar_list = set()
    for datatypedescr in dscfg['datatype']:
        radarnr = datatypedescr.split(':')[0]
        radarnr_dict.update({radarnr: []})
        ind_radar_list.add(int(radarnr[5:8])-1)
    # sort so that the lower radar index is always radar 1; set iteration
    # order is otherwise an implementation detail
    ind_radar_list = sorted(ind_radar_list)

    # guard against radar_list being None (its default value) before len()
    if ((len(radarnr_dict) != 2) or (radar_list is None) or
            (len(radar_list) < 2)):
        warn('Intercomparison requires data from two different radars')
        return None, None

    # get the Py-ART field name corresponding to each radar's data type
    _, _, datatype, _, _ = get_datatype_fields(dscfg['datatype'][0])
    field_name_1 = get_fieldname_pyart(datatype)
    _, _, datatype, _, _ = get_datatype_fields(dscfg['datatype'][1])
    field_name_2 = get_fieldname_pyart(datatype)

    radar1 = radar_list[ind_radar_list[0]]
    radar2 = radar_list[ind_radar_list[1]]
    if radar1 is None or radar2 is None:
        warn('Unable to inter-compare radars. Missing radar')
        return None, None

    if ((field_name_1 not in radar1.fields) or
            (field_name_2 not in radar2.fields)):
        warn('Unable to compare fields '+field_name_1+' and '+field_name_2 +
             '. Field missing in one of the radars')
        return None, None

    # cross-apply the masks so only gates valid in BOTH radars are kept
    # (deepcopy so the radar objects' own fields are not modified)
    data1 = deepcopy(radar1.fields[field_name_1]['data'])
    data2 = deepcopy(radar2.fields[field_name_2]['data'])
    mask1 = np.ma.getmaskarray(data1)
    mask2 = np.ma.getmaskarray(data2)
    data1[mask2] = np.ma.masked
    data2[mask1] = np.ma.masked

    intercomp_dict = {
        'rad1_name': dscfg['RadarName'][ind_radar_list[0]],
        'rad1_val': data1.compressed(),
        'rad2_name': dscfg['RadarName'][ind_radar_list[1]],
        'rad2_val': data2.compressed()}

    new_dataset = {'intercomp_dict': intercomp_dict,
                   'timeinfo': dscfg['timeinfo'],
                   'final': False}

    return new_dataset, None
| 39.37261
| 79
| 0.575422
| 10,512
| 88,549
| 4.586663
| 0.044425
| 0.05351
| 0.075599
| 0.032355
| 0.857472
| 0.840921
| 0.817069
| 0.792285
| 0.772083
| 0.751778
| 0
| 0.02382
| 0.312065
| 88,549
| 2,248
| 80
| 39.390125
| 0.76768
| 0.16584
| 0
| 0.738292
| 1
| 0
| 0.157959
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006887
| false
| 0
| 0.009642
| 0
| 0.068182
| 0.001377
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d29c5de6d157869c7b9e10256ecd4f63f916f180
| 32,439
|
py
|
Python
|
sdk/python/pulumi_azure/appconfiguration/configuration_key.py
|
roderik/pulumi-azure
|
f6d0c058d6f9111a709bc5f1515d1638f9d615f0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/appconfiguration/configuration_key.py
|
roderik/pulumi-azure
|
f6d0c058d6f9111a709bc5f1515d1638f9d615f0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure/appconfiguration/configuration_key.py
|
roderik/pulumi-azure
|
f6d0c058d6f9111a709bc5f1515d1638f9d615f0
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['ConfigurationKeyArgs', 'ConfigurationKey']
@pulumi.input_type
class ConfigurationKeyArgs:
    # NOTE: generated by the Pulumi Terraform Bridge. @pulumi.input_type
    # introspects the property getters/setters below, so the exact layout is
    # part of the SDK contract — do not restructure by hand.
    def __init__(__self__, *,
                 configuration_store_id: pulumi.Input[str],
                 key: pulumi.Input[str],
                 content_type: Optional[pulumi.Input[str]] = None,
                 etag: Optional[pulumi.Input[str]] = None,
                 label: Optional[pulumi.Input[str]] = None,
                 locked: Optional[pulumi.Input[bool]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 value: Optional[pulumi.Input[str]] = None,
                 vault_key_reference: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a ConfigurationKey resource.
        :param pulumi.Input[str] configuration_store_id: Specifies the id of the App Configuration. Changing this forces a new resource to be created.
        :param pulumi.Input[str] key: The name of the App Configuration Key to create. Changing this forces a new resource to be created.
        :param pulumi.Input[str] content_type: The content type of the App Configuration Key. This should only be set when type is set to `kv`.
        :param pulumi.Input[str] etag: The ETag of the key.
        :param pulumi.Input[str] label: The label of the App Configuration Key. Changing this forces a new resource to be created.
        :param pulumi.Input[bool] locked: Should this App Configuration Key be Locked to prevent changes?
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] type: The type of the App Configuration Key. It can either be `kv` (simple [key/value](https://docs.microsoft.com/en-us/azure/azure-app-configuration/concept-key-value)) or `vault` (where the value is a reference to a [Key Vault Secret](https://azure.microsoft.com/en-gb/services/key-vault/).
        :param pulumi.Input[str] value: The value of the App Configuration Key. This should only be set when type is set to `kv`.
        :param pulumi.Input[str] vault_key_reference: The ID of the vault secret this App Configuration Key refers to, when `type` is set to `vault`.
        """
        pulumi.set(__self__, "configuration_store_id", configuration_store_id)
        pulumi.set(__self__, "key", key)
        # Optional arguments are only recorded when explicitly provided, so
        # unset values stay absent from the resource's input map.
        if content_type is not None:
            pulumi.set(__self__, "content_type", content_type)
        if etag is not None:
            pulumi.set(__self__, "etag", etag)
        if label is not None:
            pulumi.set(__self__, "label", label)
        if locked is not None:
            pulumi.set(__self__, "locked", locked)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if type is not None:
            pulumi.set(__self__, "type", type)
        if value is not None:
            pulumi.set(__self__, "value", value)
        if vault_key_reference is not None:
            pulumi.set(__self__, "vault_key_reference", vault_key_reference)
    @property
    @pulumi.getter(name="configurationStoreId")
    def configuration_store_id(self) -> pulumi.Input[str]:
        """
        Specifies the id of the App Configuration. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "configuration_store_id")
    @configuration_store_id.setter
    def configuration_store_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "configuration_store_id", value)
    @property
    @pulumi.getter
    def key(self) -> pulumi.Input[str]:
        """
        The name of the App Configuration Key to create. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: pulumi.Input[str]):
        pulumi.set(self, "key", value)
    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> Optional[pulumi.Input[str]]:
        """
        The content type of the App Configuration Key. This should only be set when type is set to `kv`.
        """
        return pulumi.get(self, "content_type")
    @content_type.setter
    def content_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content_type", value)
    @property
    @pulumi.getter
    def etag(self) -> Optional[pulumi.Input[str]]:
        """
        The ETag of the key.
        """
        return pulumi.get(self, "etag")
    @etag.setter
    def etag(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "etag", value)
    @property
    @pulumi.getter
    def label(self) -> Optional[pulumi.Input[str]]:
        """
        The label of the App Configuration Key. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "label")
    @label.setter
    def label(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "label", value)
    @property
    @pulumi.getter
    def locked(self) -> Optional[pulumi.Input[bool]]:
        """
        Should this App Configuration Key be Locked to prevent changes?
        """
        return pulumi.get(self, "locked")
    @locked.setter
    def locked(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "locked", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of the App Configuration Key. It can either be `kv` (simple [key/value](https://docs.microsoft.com/en-us/azure/azure-app-configuration/concept-key-value)) or `vault` (where the value is a reference to a [Key Vault Secret](https://azure.microsoft.com/en-gb/services/key-vault/).
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter
    def value(self) -> Optional[pulumi.Input[str]]:
        """
        The value of the App Configuration Key. This should only be set when type is set to `kv`.
        """
        return pulumi.get(self, "value")
    @value.setter
    def value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "value", value)
    @property
    @pulumi.getter(name="vaultKeyReference")
    def vault_key_reference(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the vault secret this App Configuration Key refers to, when `type` is set to `vault`.
        """
        return pulumi.get(self, "vault_key_reference")
    @vault_key_reference.setter
    def vault_key_reference(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vault_key_reference", value)
@pulumi.input_type
class _ConfigurationKeyState:
    # NOTE: generated by the Pulumi Terraform Bridge. Unlike
    # ConfigurationKeyArgs, every field here is optional because state lookups
    # may filter on any subset of properties. Layout is introspected by
    # @pulumi.input_type — do not restructure by hand.
    def __init__(__self__, *,
                 configuration_store_id: Optional[pulumi.Input[str]] = None,
                 content_type: Optional[pulumi.Input[str]] = None,
                 etag: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 label: Optional[pulumi.Input[str]] = None,
                 locked: Optional[pulumi.Input[bool]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 value: Optional[pulumi.Input[str]] = None,
                 vault_key_reference: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering ConfigurationKey resources.
        :param pulumi.Input[str] configuration_store_id: Specifies the id of the App Configuration. Changing this forces a new resource to be created.
        :param pulumi.Input[str] content_type: The content type of the App Configuration Key. This should only be set when type is set to `kv`.
        :param pulumi.Input[str] etag: The ETag of the key.
        :param pulumi.Input[str] key: The name of the App Configuration Key to create. Changing this forces a new resource to be created.
        :param pulumi.Input[str] label: The label of the App Configuration Key. Changing this forces a new resource to be created.
        :param pulumi.Input[bool] locked: Should this App Configuration Key be Locked to prevent changes?
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] type: The type of the App Configuration Key. It can either be `kv` (simple [key/value](https://docs.microsoft.com/en-us/azure/azure-app-configuration/concept-key-value)) or `vault` (where the value is a reference to a [Key Vault Secret](https://azure.microsoft.com/en-gb/services/key-vault/).
        :param pulumi.Input[str] value: The value of the App Configuration Key. This should only be set when type is set to `kv`.
        :param pulumi.Input[str] vault_key_reference: The ID of the vault secret this App Configuration Key refers to, when `type` is set to `vault`.
        """
        # Only record the properties that were explicitly supplied.
        if configuration_store_id is not None:
            pulumi.set(__self__, "configuration_store_id", configuration_store_id)
        if content_type is not None:
            pulumi.set(__self__, "content_type", content_type)
        if etag is not None:
            pulumi.set(__self__, "etag", etag)
        if key is not None:
            pulumi.set(__self__, "key", key)
        if label is not None:
            pulumi.set(__self__, "label", label)
        if locked is not None:
            pulumi.set(__self__, "locked", locked)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if type is not None:
            pulumi.set(__self__, "type", type)
        if value is not None:
            pulumi.set(__self__, "value", value)
        if vault_key_reference is not None:
            pulumi.set(__self__, "vault_key_reference", vault_key_reference)
    @property
    @pulumi.getter(name="configurationStoreId")
    def configuration_store_id(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the id of the App Configuration. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "configuration_store_id")
    @configuration_store_id.setter
    def configuration_store_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "configuration_store_id", value)
    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> Optional[pulumi.Input[str]]:
        """
        The content type of the App Configuration Key. This should only be set when type is set to `kv`.
        """
        return pulumi.get(self, "content_type")
    @content_type.setter
    def content_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content_type", value)
    @property
    @pulumi.getter
    def etag(self) -> Optional[pulumi.Input[str]]:
        """
        The ETag of the key.
        """
        return pulumi.get(self, "etag")
    @etag.setter
    def etag(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "etag", value)
    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the App Configuration Key to create. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)
    @property
    @pulumi.getter
    def label(self) -> Optional[pulumi.Input[str]]:
        """
        The label of the App Configuration Key. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "label")
    @label.setter
    def label(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "label", value)
    @property
    @pulumi.getter
    def locked(self) -> Optional[pulumi.Input[bool]]:
        """
        Should this App Configuration Key be Locked to prevent changes?
        """
        return pulumi.get(self, "locked")
    @locked.setter
    def locked(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "locked", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of the App Configuration Key. It can either be `kv` (simple [key/value](https://docs.microsoft.com/en-us/azure/azure-app-configuration/concept-key-value)) or `vault` (where the value is a reference to a [Key Vault Secret](https://azure.microsoft.com/en-gb/services/key-vault/).
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter
    def value(self) -> Optional[pulumi.Input[str]]:
        """
        The value of the App Configuration Key. This should only be set when type is set to `kv`.
        """
        return pulumi.get(self, "value")
    @value.setter
    def value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "value", value)
    @property
    @pulumi.getter(name="vaultKeyReference")
    def vault_key_reference(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the vault secret this App Configuration Key refers to, when `type` is set to `vault`.
        """
        return pulumi.get(self, "vault_key_reference")
    @vault_key_reference.setter
    def vault_key_reference(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "vault_key_reference", value)
class ConfigurationKey(pulumi.CustomResource):
    # NOTE: generated by the Pulumi Terraform Bridge. The two @overload
    # __init__ signatures exist only for type checkers; the real __init__
    # dispatches on the argument shape via _utilities.get_resource_args_opts.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 configuration_store_id: Optional[pulumi.Input[str]] = None,
                 content_type: Optional[pulumi.Input[str]] = None,
                 etag: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 label: Optional[pulumi.Input[str]] = None,
                 locked: Optional[pulumi.Input[bool]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 value: Optional[pulumi.Input[str]] = None,
                 vault_key_reference: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages an Azure App Configuration Key.
        ## Example Usage
        ### `Kv` Type
        ```python
        import pulumi
        import pulumi_azure as azure
        rg = azure.core.ResourceGroup("rg", location="West Europe")
        appconf = azure.appconfiguration.ConfigurationStore("appconf",
            resource_group_name=rg.name,
            location=rg.location)
        test = azure.appconfiguration.ConfigurationKey("test",
            configuration_store_id=appconf.id,
            key="appConfKey1",
            label="somelabel",
            value="a test")
        ```
        ### `Vault` Type
        ```python
        import pulumi
        import pulumi_azure as azure
        rg = azure.core.ResourceGroup("rg", location="West Europe")
        appconf = azure.appconfiguration.ConfigurationStore("appconf",
            resource_group_name=rg.name,
            location=rg.location)
        current = azure.core.get_client_config()
        kv = azure.keyvault.KeyVault("kv",
            location=azurerm_resource_group["test"]["location"],
            resource_group_name=azurerm_resource_group["test"]["name"],
            tenant_id=current.tenant_id,
            sku_name="premium",
            soft_delete_retention_days=7,
            access_policies=[azure.keyvault.KeyVaultAccessPolicyArgs(
                tenant_id=current.tenant_id,
                object_id=current.object_id,
                key_permissions=[
                    "create",
                    "get",
                ],
                secret_permissions=[
                    "set",
                    "get",
                    "delete",
                    "purge",
                    "recover",
                ],
            )])
        kvs = azure.keyvault.Secret("kvs",
            value="szechuan",
            key_vault_id=kv.id)
        test = azure.appconfiguration.ConfigurationKey("test",
            configuration_store_id=azurerm_app_configuration["test"]["id"],
            key="key1",
            type="vault",
            label="label1",
            vault_key_reference=kvs.id)
        ```
        ## Import
        App Configuration Keys can be imported using the `resource id`, e.g.
        ```sh
        $ pulumi import azure:appconfiguration/configurationKey:ConfigurationKey test /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.AppConfiguration/configurationStores/appConf1/AppConfigurationKey/appConfKey1/Label/label1
        ```
        If you wish to import a key with an empty label then substitute the label's name with `%00`, like this
        ```sh
        $ pulumi import azure:appconfiguration/configurationKey:ConfigurationKey test /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.AppConfiguration/configurationStores/appConf1/AppConfigurationKey/appConfKey1/Label/%00
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] configuration_store_id: Specifies the id of the App Configuration. Changing this forces a new resource to be created.
        :param pulumi.Input[str] content_type: The content type of the App Configuration Key. This should only be set when type is set to `kv`.
        :param pulumi.Input[str] etag: The ETag of the key.
        :param pulumi.Input[str] key: The name of the App Configuration Key to create. Changing this forces a new resource to be created.
        :param pulumi.Input[str] label: The label of the App Configuration Key. Changing this forces a new resource to be created.
        :param pulumi.Input[bool] locked: Should this App Configuration Key be Locked to prevent changes?
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] type: The type of the App Configuration Key. It can either be `kv` (simple [key/value](https://docs.microsoft.com/en-us/azure/azure-app-configuration/concept-key-value)) or `vault` (where the value is a reference to a [Key Vault Secret](https://azure.microsoft.com/en-gb/services/key-vault/).
        :param pulumi.Input[str] value: The value of the App Configuration Key. This should only be set when type is set to `kv`.
        :param pulumi.Input[str] vault_key_reference: The ID of the vault secret this App Configuration Key refers to, when `type` is set to `vault`.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ConfigurationKeyArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages an Azure App Configuration Key.
        ## Example Usage
        ### `Kv` Type
        ```python
        import pulumi
        import pulumi_azure as azure
        rg = azure.core.ResourceGroup("rg", location="West Europe")
        appconf = azure.appconfiguration.ConfigurationStore("appconf",
            resource_group_name=rg.name,
            location=rg.location)
        test = azure.appconfiguration.ConfigurationKey("test",
            configuration_store_id=appconf.id,
            key="appConfKey1",
            label="somelabel",
            value="a test")
        ```
        ### `Vault` Type
        ```python
        import pulumi
        import pulumi_azure as azure
        rg = azure.core.ResourceGroup("rg", location="West Europe")
        appconf = azure.appconfiguration.ConfigurationStore("appconf",
            resource_group_name=rg.name,
            location=rg.location)
        current = azure.core.get_client_config()
        kv = azure.keyvault.KeyVault("kv",
            location=azurerm_resource_group["test"]["location"],
            resource_group_name=azurerm_resource_group["test"]["name"],
            tenant_id=current.tenant_id,
            sku_name="premium",
            soft_delete_retention_days=7,
            access_policies=[azure.keyvault.KeyVaultAccessPolicyArgs(
                tenant_id=current.tenant_id,
                object_id=current.object_id,
                key_permissions=[
                    "create",
                    "get",
                ],
                secret_permissions=[
                    "set",
                    "get",
                    "delete",
                    "purge",
                    "recover",
                ],
            )])
        kvs = azure.keyvault.Secret("kvs",
            value="szechuan",
            key_vault_id=kv.id)
        test = azure.appconfiguration.ConfigurationKey("test",
            configuration_store_id=azurerm_app_configuration["test"]["id"],
            key="key1",
            type="vault",
            label="label1",
            vault_key_reference=kvs.id)
        ```
        ## Import
        App Configuration Keys can be imported using the `resource id`, e.g.
        ```sh
        $ pulumi import azure:appconfiguration/configurationKey:ConfigurationKey test /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.AppConfiguration/configurationStores/appConf1/AppConfigurationKey/appConfKey1/Label/label1
        ```
        If you wish to import a key with an empty label then substitute the label's name with `%00`, like this
        ```sh
        $ pulumi import azure:appconfiguration/configurationKey:ConfigurationKey test /subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resourceGroup1/providers/Microsoft.AppConfiguration/configurationStores/appConf1/AppConfigurationKey/appConfKey1/Label/%00
        ```
        :param str resource_name: The name of the resource.
        :param ConfigurationKeyArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher: decide which overload shape was used and
        # forward to _internal_init accordingly.
        resource_args, opts = _utilities.get_resource_args_opts(ConfigurationKeyArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 configuration_store_id: Optional[pulumi.Input[str]] = None,
                 content_type: Optional[pulumi.Input[str]] = None,
                 etag: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 label: Optional[pulumi.Input[str]] = None,
                 locked: Optional[pulumi.Input[bool]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 value: Optional[pulumi.Input[str]] = None,
                 vault_key_reference: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: validate required inputs. When an URN
            # is supplied the engine rehydrates the resource, so required
            # properties need not be passed.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ConfigurationKeyArgs.__new__(ConfigurationKeyArgs)
            if configuration_store_id is None and not opts.urn:
                raise TypeError("Missing required property 'configuration_store_id'")
            __props__.__dict__["configuration_store_id"] = configuration_store_id
            __props__.__dict__["content_type"] = content_type
            __props__.__dict__["etag"] = etag
            if key is None and not opts.urn:
                raise TypeError("Missing required property 'key'")
            __props__.__dict__["key"] = key
            __props__.__dict__["label"] = label
            __props__.__dict__["locked"] = locked
            __props__.__dict__["tags"] = tags
            __props__.__dict__["type"] = type
            __props__.__dict__["value"] = value
            __props__.__dict__["vault_key_reference"] = vault_key_reference
        super(ConfigurationKey, __self__).__init__(
            'azure:appconfiguration/configurationKey:ConfigurationKey',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            configuration_store_id: Optional[pulumi.Input[str]] = None,
            content_type: Optional[pulumi.Input[str]] = None,
            etag: Optional[pulumi.Input[str]] = None,
            key: Optional[pulumi.Input[str]] = None,
            label: Optional[pulumi.Input[str]] = None,
            locked: Optional[pulumi.Input[bool]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            type: Optional[pulumi.Input[str]] = None,
            value: Optional[pulumi.Input[str]] = None,
            vault_key_reference: Optional[pulumi.Input[str]] = None) -> 'ConfigurationKey':
        """
        Get an existing ConfigurationKey resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] configuration_store_id: Specifies the id of the App Configuration. Changing this forces a new resource to be created.
        :param pulumi.Input[str] content_type: The content type of the App Configuration Key. This should only be set when type is set to `kv`.
        :param pulumi.Input[str] etag: The ETag of the key.
        :param pulumi.Input[str] key: The name of the App Configuration Key to create. Changing this forces a new resource to be created.
        :param pulumi.Input[str] label: The label of the App Configuration Key. Changing this forces a new resource to be created.
        :param pulumi.Input[bool] locked: Should this App Configuration Key be Locked to prevent changes?
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] type: The type of the App Configuration Key. It can either be `kv` (simple [key/value](https://docs.microsoft.com/en-us/azure/azure-app-configuration/concept-key-value)) or `vault` (where the value is a reference to a [Key Vault Secret](https://azure.microsoft.com/en-gb/services/key-vault/).
        :param pulumi.Input[str] value: The value of the App Configuration Key. This should only be set when type is set to `kv`.
        :param pulumi.Input[str] vault_key_reference: The ID of the vault secret this App Configuration Key refers to, when `type` is set to `vault`.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ConfigurationKeyState.__new__(_ConfigurationKeyState)
        __props__.__dict__["configuration_store_id"] = configuration_store_id
        __props__.__dict__["content_type"] = content_type
        __props__.__dict__["etag"] = etag
        __props__.__dict__["key"] = key
        __props__.__dict__["label"] = label
        __props__.__dict__["locked"] = locked
        __props__.__dict__["tags"] = tags
        __props__.__dict__["type"] = type
        __props__.__dict__["value"] = value
        __props__.__dict__["vault_key_reference"] = vault_key_reference
        return ConfigurationKey(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="configurationStoreId")
    def configuration_store_id(self) -> pulumi.Output[str]:
        """
        Specifies the id of the App Configuration. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "configuration_store_id")
    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> pulumi.Output[str]:
        """
        The content type of the App Configuration Key. This should only be set when type is set to `kv`.
        """
        return pulumi.get(self, "content_type")
    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[str]:
        """
        The ETag of the key.
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter
    def key(self) -> pulumi.Output[str]:
        """
        The name of the App Configuration Key to create. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "key")
    @property
    @pulumi.getter
    def label(self) -> pulumi.Output[Optional[str]]:
        """
        The label of the App Configuration Key. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "label")
    @property
    @pulumi.getter
    def locked(self) -> pulumi.Output[Optional[bool]]:
        """
        Should this App Configuration Key be Locked to prevent changes?
        """
        return pulumi.get(self, "locked")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[Optional[str]]:
        """
        The type of the App Configuration Key. It can either be `kv` (simple [key/value](https://docs.microsoft.com/en-us/azure/azure-app-configuration/concept-key-value)) or `vault` (where the value is a reference to a [Key Vault Secret](https://azure.microsoft.com/en-gb/services/key-vault/).
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter
    def value(self) -> pulumi.Output[str]:
        """
        The value of the App Configuration Key. This should only be set when type is set to `kv`.
        """
        return pulumi.get(self, "value")
    @property
    @pulumi.getter(name="vaultKeyReference")
    def vault_key_reference(self) -> pulumi.Output[Optional[str]]:
        """
        The ID of the vault secret this App Configuration Key refers to, when `type` is set to `vault`.
        """
        return pulumi.get(self, "vault_key_reference")
| 45.116829
| 325
| 0.634391
| 3,953
| 32,439
| 5.050089
| 0.057931
| 0.081
| 0.083454
| 0.072735
| 0.91359
| 0.899815
| 0.881982
| 0.875369
| 0.872815
| 0.864048
| 0
| 0.006611
| 0.258578
| 32,439
| 718
| 326
| 45.179666
| 0.823417
| 0.439933
| 0
| 0.810888
| 1
| 0
| 0.078153
| 0.017437
| 0
| 0
| 0
| 0
| 0
| 1
| 0.163324
| false
| 0.002865
| 0.014327
| 0
| 0.275072
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
961e5bda3a366270649e3dc685b3d30b09f61606
| 29,094
|
py
|
Python
|
tests/integration/test_groups_it.py
|
corylevine/okta-sdk-python
|
c86b8fdc4525e84199143c27213c0aebc6b2af8f
|
[
"Apache-2.0"
] | 145
|
2017-06-13T21:54:04.000Z
|
2022-02-25T05:44:34.000Z
|
tests/integration/test_groups_it.py
|
corylevine/okta-sdk-python
|
c86b8fdc4525e84199143c27213c0aebc6b2af8f
|
[
"Apache-2.0"
] | 146
|
2017-06-02T17:46:12.000Z
|
2022-03-29T15:52:15.000Z
|
tests/integration/test_groups_it.py
|
corylevine/okta-sdk-python
|
c86b8fdc4525e84199143c27213c0aebc6b2af8f
|
[
"Apache-2.0"
] | 98
|
2017-06-27T03:44:51.000Z
|
2022-03-23T04:58:18.000Z
|
import pytest
from tests.mocks import MockOktaClient
from tests.mocks import mock_pause_function
from http import HTTPStatus
import okta.models as models
class TestGroupsResource:
    """
    Integration Tests for the Groups Resource.

    Each test runs against a MockOktaClient with pytest-vcr cassettes, so
    the exact sequence of API calls matters for cassette replay — do not
    reorder calls. Tests clean up created resources in ``finally`` blocks.
    """

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_create_get_group(self, fs):
        """Create a group, fetch it by ID, delete it, then verify a
        subsequent lookup fails with 404 NOT_FOUND."""
        # Instantiate Mock Client
        client = MockOktaClient(fs)

        # Create Group Object
        GROUP_NAME = "Group-Target-Test"
        group_profile = models.GroupProfile({
            "name": GROUP_NAME
        })
        group_obj = models.Group({
            "profile": group_profile
        })

        try:
            # Create Group
            group, _, err = await client.create_group(group_obj)
            assert err is None
            assert isinstance(group, models.Group)

            # Get group using ID
            found_group, _, err = await client.get_group(group.id)
            assert err is None
            assert found_group.id == group.id

            # Delete created group
            _, err = await client.delete_group(group.id)
            assert err is None

            # Ensure group cannot be found again
            found_group, resp, err = await client.get_group(group.id)
            assert err is not None
            assert resp.get_status() == HTTPStatus.NOT_FOUND
            assert found_group is None
        finally:
            # Delete created group if it wasn't deleted during test
            try:
                _, err = await client.delete_group(group.id)
            except Exception:
                pass

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_list_groups(self, fs):
        """Create a group and verify it appears in the unfiltered
        list_groups result."""
        # Instantiate Mock Client
        client = MockOktaClient(fs)

        # Create Group Object
        GROUP_NAME = "Group-Target-Test"
        group_profile = models.GroupProfile({
            "name": GROUP_NAME
        })
        group_obj = models.Group({
            "profile": group_profile
        })

        try:
            # Create Group
            group, _, err = await client.create_group(group_obj)
            assert err is None
            assert isinstance(group, models.Group)

            groups_list, resp, err = await client.list_groups()
            assert err is None
            assert not resp.has_next()
            assert next((grp for grp in groups_list if grp.id == group.id))
        finally:
            # Delete created group
            _, err = await client.delete_group(group.id)
            assert err is None

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_search_group(self, fs):
        """Create a group and verify it can be found via the ``q``
        search query parameter of list_groups."""
        # Instantiate Mock Client
        client = MockOktaClient(fs)

        # Create Group Object
        GROUP_NAME = "Group-Target-Test"
        group_profile = models.GroupProfile({
            "name": GROUP_NAME
        })
        group_obj = models.Group({
            "profile": group_profile
        })

        try:
            # Create Group
            group, _, err = await client.create_group(group_obj)
            assert err is None
            assert isinstance(group, models.Group)

            query_params_query = {"q": GROUP_NAME}
            groups_list, _, err = await client.list_groups(query_params_query)
            assert err is None
            assert groups_list
            assert len(groups_list) == 1
            assert next((grp for grp in groups_list if grp.id == group.id))
        finally:
            # Delete created group
            _, err = await client.delete_group(group.id)
            assert err is None

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_update_group(self, fs):
        """Create a group, update its profile name, and verify the
        change via a follow-up get_group."""
        # Instantiate Mock Client
        client = MockOktaClient(fs)

        # Create Group Object
        GROUP_NAME = "Group-Target-Test"
        group_profile = models.GroupProfile({
            "name": GROUP_NAME
        })
        group_obj = models.Group({
            "profile": group_profile
        })

        try:
            # Create Group
            group, _, err = await client.create_group(group_obj)
            assert err is None
            assert isinstance(group, models.Group)

            # Create Updated Group Object
            # Create Group Object
            NEW_GROUP_NAME = "Group-Target-Test NEW"
            new_group_profile = models.GroupProfile({
                "name": NEW_GROUP_NAME
            })
            new_group_obj = models.Group({
                "profile": new_group_profile
            })

            _, _, err = await client.update_group(group.id, new_group_obj)
            assert err is None

            # Verify update worked
            found_group, _, err = await client.get_group(group.id)
            assert err is None
            assert found_group.id == group.id
            assert found_group.profile.name == NEW_GROUP_NAME
        finally:
            # Delete created group
            _, err = await client.delete_group(group.id)
            assert err is None

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_remove_group(self, fs):
        """Create a user and a group, add the user to the group, delete
        the group, and verify the group no longer appears in listings."""
        # Instantiate Mock Client
        client = MockOktaClient(fs)

        # Create Password
        password = models.PasswordCredential({
            "value": "Password150kta"
        })

        # Create User Credentials
        user_creds = models.UserCredentials({
            "password": password
        })

        # Create User Profile and CreateUser Request
        user_profile = models.UserProfile()
        user_profile.first_name = "John"
        user_profile.last_name = "Doe-Remove-Group"
        user_profile.email = "John.Doe-Remove-Group@example.com"
        user_profile.login = "John.Doe-Remove-Group@example.com"

        create_user_req = models.CreateUserRequest({
            "credentials": user_creds,
            "profile": user_profile
        })

        try:
            # Create query parameters and Create User
            query_params_create = {"activate": "False"}
            user, _, err = await client.create_user(
                create_user_req, query_params_create)
            assert err is None
            assert isinstance(user, models.User)

            # Create Group Object
            GROUP_NAME = "Group-Target-Test"
            group_profile = models.GroupProfile({
                "name": GROUP_NAME
            })
            group_obj = models.Group({
                "profile": group_profile
            })

            # Create Group
            group, _, err = await client.create_group(group_obj)
            assert err is None
            assert isinstance(group, models.Group)

            # Add user to group
            _, err = await client.add_user_to_group(group.id, user.id)
            assert err is None

            users_in_group, _, err = await client.list_group_users(group.id)
            assert err is None
            assert next((usr for usr in users_in_group if usr.id == user.id))

            # Delete created group
            _, err = await client.delete_group(group.id)
            assert err is None

            # Retrieve group list again to ensure deleted
            groups_list, _, err = await client.list_groups()
            assert err is None
            assert next((grp for grp in groups_list if grp.id ==
                         group.id), None) is None
        finally:
            errors = []

            try:
                _, err = await client.delete_group(group.id)
            except Exception:
                pass

            # Deactivate, then delete created user
            # NOTE: called twice — the first call deactivates, the second
            # deletes (presumably, per the method name; confirm with SDK docs).
            try:
                _, err = await client.deactivate_or_delete_user(user.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            try:
                _, err = await client.deactivate_or_delete_user(user.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            assert len(errors) == 0

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_group_roles_operations(self, fs):
        """Assign USER_ADMIN and APP_ADMIN roles to a group, verify both
        are listed, remove one, and verify only the other remains."""
        # Instantiate Mock Client
        client = MockOktaClient(fs)

        # Create Group Object
        GROUP_NAME = "Group-Target-Test"
        group_profile = models.GroupProfile({
            "name": GROUP_NAME
        })
        group_obj = models.Group({
            "profile": group_profile
        })

        try:
            # Create Group
            group, _, err = await client.create_group(group_obj)
            assert err is None
            assert isinstance(group, models.Group)

            # Create roles
            assign_role_req_ua = models.AssignRoleRequest({
                "type": models.RoleType.USER_ADMIN
            })
            assign_role_req_aa = models.AssignRoleRequest({
                "type": models.RoleType.APP_ADMIN
            })

            ua_role, _, err = await client.assign_role_to_group(
                group.id, assign_role_req_ua)
            assert err is None
            aa_role, _, err = await client.assign_role_to_group(
                group.id, assign_role_req_aa)
            assert err is None

            group_roles, _, err = await client.list_group_assigned_roles(group.id)
            assert err is None
            assert len(group_roles) == 2
            assert next((rle for rle in group_roles if rle.id == ua_role.id))
            assert next((rle for rle in group_roles if rle.id == aa_role.id))

            _, err = await client.remove_role_from_group(group.id, ua_role.id)
            assert err is None

            group_roles, _, err = await client.list_group_assigned_roles(group.id)
            assert err is None
            assert len(group_roles) == 1
            assert next((rle for rle in group_roles if rle.id ==
                         ua_role.id), None) is None
            assert next((rle for rle in group_roles if rle.id == aa_role.id))
        finally:
            # Delete created group
            _, err = await client.delete_group(group.id)
            assert err is None

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_group_users_operations(self, fs):
        """Add a user to a group, verify membership, remove the user,
        and verify the group is empty again."""
        # Instantiate Mock Client
        client = MockOktaClient(fs)

        # Create Password
        password = models.PasswordCredential({
            "value": "Password150kta"
        })

        # Create User Credentials
        user_creds = models.UserCredentials({
            "password": password
        })

        # Create User Profile and CreateUser Request
        user_profile = models.UserProfile()
        user_profile.first_name = "John"
        user_profile.last_name = "Doe-Activate"
        user_profile.email = "John.Doe-Activate@example.com"
        user_profile.login = "John.Doe-Activate@example.com"

        create_user_req = models.CreateUserRequest({
            "credentials": user_creds,
            "profile": user_profile
        })

        try:
            # Create query parameters and Create User
            query_params_create = {"activate": "False"}
            user, _, err = await client.create_user(
                create_user_req, query_params_create)
            assert err is None
            assert isinstance(user, models.User)

            # Create Group Object
            GROUP_NAME = "Group-Target-Test"
            group_profile = models.GroupProfile({
                "name": GROUP_NAME
            })
            group_obj = models.Group({
                "profile": group_profile
            })

            # Create Group
            group, _, err = await client.create_group(group_obj)
            assert err is None
            assert isinstance(group, models.Group)

            # Add user to group
            _, err = await client.add_user_to_group(group.id, user.id)
            assert err is None

            users_in_group, _, err = await client.list_group_users(group.id)
            assert err is None
            assert next((usr for usr in users_in_group if usr.id == user.id))

            # Remove user from group
            _, err = await client.remove_user_from_group(group.id, user.id)
            assert err is None

            users_in_group, _, err = await client.list_group_users(group.id)
            assert err is None
            assert len(users_in_group) == 0
            assert next(
                (usr for usr in users_in_group if usr.id == user.id), None) is None
        finally:
            errors = []

            # Deactivate, then delete created user
            try:
                _, err = await client.deactivate_or_delete_user(user.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            try:
                _, err = await client.deactivate_or_delete_user(user.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            # Delete created group
            try:
                _, err = await client.delete_group(group.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            assert len(errors) == 0

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_group_rule_operations(self, fs):
        """Create, activate, deactivate, and update a group rule that
        auto-assigns users by lastName expression; verify membership
        changes after each rule activation."""
        # Instantiate Mock Client
        client = MockOktaClient(fs)

        # Create Password
        password = models.PasswordCredential({
            "value": "Password150kta"
        })

        # Create User Credentials
        user_creds = models.UserCredentials({
            "password": password
        })

        # Create User Profile and CreateUser Request
        user_profile = models.UserProfile()
        user_profile.first_name = "John"
        user_profile.last_name = "Doe-Group-Rule-Ops"
        user_profile.email = "John.Doe-Group-Rule-Ops@example.com"
        user_profile.login = "John.Doe-Group-Rule-Ops@example.com"

        create_user_req = models.CreateUserRequest({
            "credentials": user_creds,
            "profile": user_profile
        })

        try:
            # Create query parameters and Create User
            query_params_create = {"activate": "False"}
            user, _, err = await client.create_user(
                create_user_req, query_params_create)
            assert err is None
            assert isinstance(user, models.User)

            # Create Group Object
            GROUP_NAME = "Group-Target-Test-Group-Rule-Ops"
            group_profile = models.GroupProfile({
                "name": GROUP_NAME
            })
            group_obj = models.Group({
                "profile": group_profile
            })

            # Create Group
            group, _, err = await client.create_group(group_obj)
            assert err is None
            assert isinstance(group, models.Group)

            # Create Group Rule
            last_name = user.profile.last_name
            GROUP_RULE_NAME = "Test-Group-Rule-Group-Rule-Ops"
            GROUP_RULE_TYPE = "group_rule"
            GROUP_RULE_EXP_TYPE = "urn:okta:expression:1.0"
            GROUP_RULE_EXP_VALUE = f"user.lastName==\"{last_name}\""

            group_rule_exp = models.GroupRuleExpression({
                "type": GROUP_RULE_EXP_TYPE,
                "value": GROUP_RULE_EXP_VALUE
            })
            group_rule_cond = models.GroupRuleConditions({
                "expression": group_rule_exp
            })
            group_rule_group_assignment = models.GroupRuleGroupAssignment({
                "groupIds": [group.id]
            })
            group_rule_action = models.GroupRuleAction({
                "assignUserToGroups": group_rule_group_assignment
            })
            group_rule_object = models.GroupRule({
                "actions": group_rule_action,
                "conditions": group_rule_cond,
                "name": GROUP_RULE_NAME,
                "type": GROUP_RULE_TYPE
            })

            group_rule, _, err = await client.create_group_rule(group_rule_object)
            assert err is None
            assert isinstance(group_rule, models.GroupRule)

            # Activate Group Rule
            _, err = await client.activate_group_rule(group_rule.id)
            assert err is None

            # 15 second sleep for backend to update
            mock_pause_function(15)

            users_in_group, _, err = await client.list_group_users(group.id)
            assert err is None
            assert next((usr for usr in users_in_group if usr.id ==
                         user.id), None) is not None

            # Ensure activated rule is in group rules
            group_rules, _, err = await client.list_group_rules()
            assert err is None
            assert next((rule for rule in group_rules if rule.id ==
                         group_rule.id), None) is not None

            # Deactivate rule (to update)
            _, err = await client.deactivate_group_rule(group_rule.id)
            assert err is None

            # Update rule
            # Create new rule
            NEW_GROUP_RULE_NAME = "Test-Group-Rule Updated"
            NEW_GROUP_RULE_EXP_VALUE = "user.lastName==\"BLAHBLAHBLAH\""

            new_group_rule_exp = models.GroupRuleExpression({
                "type": GROUP_RULE_EXP_TYPE,
                "value": NEW_GROUP_RULE_EXP_VALUE
            })
            new_group_rule_cond = models.GroupRuleConditions({
                "expression": new_group_rule_exp
            })
            new_group_rule_group_assignment = models.GroupRuleGroupAssignment({
                "groupIds": [group.id]
            })
            new_group_rule_action = models.GroupRuleAction({
                "assignUserToGroups": new_group_rule_group_assignment
            })
            new_group_rule_object = models.GroupRule({
                "actions": new_group_rule_action,
                "conditions": new_group_rule_cond,
                "name": NEW_GROUP_RULE_NAME,
                "type": GROUP_RULE_TYPE
            })

            new_group_rule, _, err = await client.update_group_rule(
                group_rule.id,
                new_group_rule_object)
            assert err is None

            # Activate updated rule and verify user isn't in group
            _, err = await client.activate_group_rule(new_group_rule.id)
            assert err is None

            # 15 second sleep for backend to update
            mock_pause_function(15)

            users_in_group, _, err = await client.list_group_users(group.id)
            assert err is None
            assert next(
                (usr for usr in users_in_group if usr.id == user.id), None) is None
        finally:
            errors = []

            # Deactivate rule
            try:
                _, err = await client.deactivate_group_rule(new_group_rule.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            # Deactivate, then delete created user
            try:
                _, err = await client.deactivate_user(user.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            try:
                _, err = await client.deactivate_or_delete_user(user.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            # Delete created group
            try:
                _, err = await client.delete_group(group.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            # Delete group rules
            try:
                _, err = await client.delete_group_rule(group_rule.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            assert len(errors) == 0

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_group_target_add(self, fs):
        """Assign a USER_ADMIN role to one group, add a second group as
        an admin-role target, and verify it is listed as a target."""
        # Instantiate Mock Client
        client = MockOktaClient(fs)

        # Create Group Objects
        GROUP_1_NAME = "Group-Target-Test 1"
        group_1_profile = models.GroupProfile({
            "name": GROUP_1_NAME
        })
        group_1_obj = models.Group({
            "profile": group_1_profile
        })

        GROUP_2_NAME = "Group-Target-Test 2"
        group_2_profile = models.GroupProfile({
            "name": GROUP_2_NAME
        })
        group_2_obj = models.Group({
            "profile": group_2_profile
        })

        try:
            # Create Groups
            group_1, _, err = await client.create_group(group_1_obj)
            assert err is None
            assert isinstance(group_1, models.Group)
            group_2, _, err = await client.create_group(group_2_obj)
            assert err is None
            assert isinstance(group_2, models.Group)

            # Create role and add group targets
            assign_role_req_ua = models.AssignRoleRequest({
                "type": models.RoleType.USER_ADMIN
            })

            ua_role, _, err = await client.assign_role_to_group(
                group_1.id, assign_role_req_ua)
            assert err is None

            _, err = await\
                client.add_group_target_to_group_administrator_role_for_group(
                    group_1.id, ua_role.id, group_2.id)

            # Make sure targets are listed
            groups_list, _, err = await client.list_group_targets_for_group_role(
                group_1.id, ua_role.id)
            assert err is None
            assert next((grp for grp in groups_list if grp.id == group_2.id))
        finally:
            errors = []

            # Delete created groups
            try:
                _, err = await client.delete_group(group_1.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            try:
                _, err = await client.delete_group(group_2.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            assert len(errors) == 0

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_group_target_remove(self, fs):
        """Add two group targets to an admin role, remove one, and
        verify only the remaining target is listed."""
        # Instantiate Mock Client
        client = MockOktaClient(fs)

        # Create Group Objects
        GROUP_1_NAME = "Group-Target-Test 1"
        group_1_profile = models.GroupProfile({
            "name": GROUP_1_NAME
        })
        group_1_obj = models.Group({
            "profile": group_1_profile
        })

        GROUP_2_NAME = "Group-Target-Test 2"
        group_2_profile = models.GroupProfile({
            "name": GROUP_2_NAME
        })
        group_2_obj = models.Group({
            "profile": group_2_profile
        })

        GROUP_3_NAME = "Group-Target-Test 3"
        group_3_profile = models.GroupProfile({
            "name": GROUP_3_NAME
        })
        group_3_obj = models.Group({
            "profile": group_3_profile
        })

        try:
            # Create Groups
            group_1, _, err = await client.create_group(group_1_obj)
            assert err is None
            assert isinstance(group_1, models.Group)
            group_2, _, err = await client.create_group(group_2_obj)
            assert err is None
            assert isinstance(group_2, models.Group)
            group_3, _, err = await client.create_group(group_3_obj)
            assert err is None
            assert isinstance(group_3, models.Group)

            # Create role and add group targets
            assign_role_req_ua = models.AssignRoleRequest({
                "type": models.RoleType.USER_ADMIN
            })

            ua_role, _, err = await client.assign_role_to_group(
                group_1.id, assign_role_req_ua)
            assert err is None

            _, err = await\
                client.add_group_target_to_group_administrator_role_for_group(
                    group_1.id, ua_role.id, group_2.id)
            _, err = await\
                client.add_group_target_to_group_administrator_role_for_group(
                    group_1.id, ua_role.id, group_3.id)

            groups_list, _, err = await client.list_group_targets_for_group_role(
                group_1.id, ua_role.id)
            assert err is None
            assert next((grp for grp in groups_list if grp.id == group_2.id))
            assert next((grp for grp in groups_list if grp.id == group_3.id))

            # Remove from 2 and ensure 2 isn't listed
            _, err = await \
                client.remove_group_target_from_group_admin_role_given_to_group(
                    group_1.id, ua_role.id, group_2.id)

            groups_list, _, err = await client.list_group_targets_for_group_role(
                group_1.id, ua_role.id)
            assert err is None
            assert next((grp for grp in groups_list if grp.id ==
                         group_2.id), None) is None
            assert next((grp for grp in groups_list if grp.id == group_3.id))
        finally:
            errors = []

            # Delete created groups
            try:
                _, err = await client.delete_group(group_1.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            try:
                _, err = await client.delete_group(group_2.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            try:
                _, err = await client.delete_group(group_3.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            assert len(errors) == 0

    @pytest.mark.vcr()
    @pytest.mark.asyncio
    async def test_group_assigned_applications(self, fs):
        """Create a SWA app, assign it to a group, and verify the app
        appears in the group's assigned-applications list."""
        # Instantiate Mock Client
        client = MockOktaClient(fs)

        # Create Group Objects
        GROUP_NAME = "Group-Target-Test"
        group_profile = models.GroupProfile({
            "name": GROUP_NAME
        })
        group_obj = models.Group({
            "profile": group_profile
        })

        try:
            # Create Group
            group, _, err = await client.create_group(group_obj)
            assert err is None
            assert isinstance(group, models.Group)

            # Create Application object and Application in Org
            APP_LABEL = "Test Assigned-Applications"
            BUTTON_FIELD = "btn-login"
            PASSWORD_FIELD = "txt-box-password"
            USERNAME_FIELD = "txt-box-username"
            URL = "https://example.com/login.html"
            LOGIN_URL_REGEX = f"^{URL}$"

            swa_app_settings_app = models.SwaApplicationSettingsApplication({
                "buttonField": BUTTON_FIELD,
                "passwordField": PASSWORD_FIELD,
                "usernameField": USERNAME_FIELD,
                "url": URL,
                "loginUrlRegex": LOGIN_URL_REGEX
            })
            swa_app_settings = models.SwaApplicationSettings({
                "app": swa_app_settings_app
            })
            swa_app_obj = models.SwaApplication({
                "label": APP_LABEL,
                "settings": swa_app_settings,
                "signOnMode": models.ApplicationSignOnMode.BROWSER_PLUGIN
            })

            swa_app, _, err = await client.create_application(swa_app_obj)
            assert err is None
            assert isinstance(swa_app, models.SwaApplication)

            # Assign app and group
            assign_ag_req = models.ApplicationGroupAssignment({
                "priority": 0,
                "applicationId": swa_app.id,
                "groupId": group.id
            })

            assign_app_group, _, err = await \
                client.create_application_group_assignment(
                    swa_app.id, group.id, assign_ag_req)
            assert err is None

            # 3 second sleep for backend to update
            mock_pause_function(3)

            # Check assigned apps and ensure created app is found
            assigned_apps, _, err = await \
                client.list_assigned_applications_for_group(group.id)
            assert err is None
            assert assigned_apps is not None
            assert len(assigned_apps) > 0
            assert next((app for app in assigned_apps if app.id == swa_app.id))
        finally:
            errors = []

            # Cleanup app and group created
            try:
                _, err = await client.deactivate_application(swa_app.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            try:
                _, err = await client.delete_application(swa_app.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            try:
                _, err = await client.delete_group(group.id)
                assert err is None
            except Exception as exc:
                errors.append(exc)

            assert len(errors) == 0
| 34.553444
| 83
| 0.557709
| 3,225
| 29,094
| 4.794729
| 0.064496
| 0.041906
| 0.073336
| 0.071784
| 0.834702
| 0.793507
| 0.755416
| 0.732846
| 0.727349
| 0.705491
| 0
| 0.005601
| 0.367877
| 29,094
| 841
| 84
| 34.59453
| 0.835191
| 0.07847
| 0
| 0.758454
| 0
| 0
| 0.05096
| 0.011233
| 0
| 0
| 0
| 0
| 0.21256
| 1
| 0
| false
| 0.025765
| 0.008052
| 0
| 0.009662
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8297c15b3881e1867201c56cec4b869c20b9419e
| 127
|
py
|
Python
|
{{cookiecutter.project_slug}}/app/ext/base.py
|
jonatasoli/fastapi-template-cookiecutter
|
4a982e9a46dc6b7d1dafda8ca170429ea32b1bf4
|
[
"MIT"
] | 7
|
2021-03-12T18:17:42.000Z
|
2021-09-14T02:13:32.000Z
|
{{cookiecutter.project_slug}}/app/ext/base.py
|
jonatasoli/fastapi-template-cookiecutter
|
4a982e9a46dc6b7d1dafda8ca170429ea32b1bf4
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/app/ext/base.py
|
jonatasoli/fastapi-template-cookiecutter
|
4a982e9a46dc6b7d1dafda8ca170429ea32b1bf4
|
[
"MIT"
] | 3
|
2021-02-12T15:07:48.000Z
|
2021-09-14T02:13:34.000Z
|
from {{cookiecutter.app_slug_snakecase}}.models.models_{{cookiecutter.app_slug_snakecase}} import *
from .database import Base
| 42.333333
| 99
| 0.826772
| 16
| 127
| 6.25
| 0.5625
| 0.3
| 0.38
| 0.56
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.062992
| 127
| 2
| 100
| 63.5
| 0.840336
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
82b518cae3e77bea31fdb232ef31fbf29bbc2269
| 11,839
|
py
|
Python
|
src/MagicClick/KCrawler.py
|
hjiangsse/magic_click
|
dc104cc56e8c0ad78eadfc5c86c906157834ffde
|
[
"MIT"
] | null | null | null |
src/MagicClick/KCrawler.py
|
hjiangsse/magic_click
|
dc104cc56e8c0ad78eadfc5c86c906157834ffde
|
[
"MIT"
] | null | null | null |
src/MagicClick/KCrawler.py
|
hjiangsse/magic_click
|
dc104cc56e8c0ad78eadfc5c86c906157834ffde
|
[
"MIT"
] | null | null | null |
#-------------------------------------------------------------------------------
# Author: hjiang
# Email: heng.jiang@jingle.ai
# Time: Thu Nov 25 14:18:20 2021
# 本模块的功能是利用akshare和baostock提供的接口,获取各种维度的k线行情数据
#-------------------------------------------------------------------------------
import baostock as bs
import akshare as ak
import pandas as pd
from datetime import datetime
import MagicClick.CrawlerUtils as utils
def get_astock_list():
    """Return the full A-share code list via akshare.

    Shanghai codes are prefixed with "sh." and Shenzhen codes with "sz.",
    matching the code format baostock expects.
    """
    sh_frame = ak.stock_info_sh_name_code()
    codes = ["sh." + c for c in sh_frame['公司代码']]
    sz_frame = ak.stock_info_sz_name_code()
    codes += ["sz." + c for c in sz_frame['A股代码']]
    return codes
def get_day_k_data_pre_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's daily K-line data, forward-adjusted (adjust_flag='2').

    start_date defaults to "1990-12-19" (earliest A-share session) and
    end_date defaults to today when not supplied.
    """
    begin = "1990-12-19" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_day_k_data(code, columns, begin, until, adjust_flag='2')
def get_day_k_data_post_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's daily K-line data, backward-adjusted (adjust_flag='1').

    start_date defaults to "1990-12-19" and end_date defaults to today.
    """
    begin = "1990-12-19" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_day_k_data(code, columns, begin, until, adjust_flag='1')
def get_day_k_data_no_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's daily K-line data without adjustment (adjust_flag='3').

    start_date defaults to "1990-12-19" and end_date defaults to today.
    """
    begin = "1990-12-19" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_day_k_data(code, columns, begin, until, adjust_flag='3')
def get_week_k_data_pre_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's weekly K-line data, forward-adjusted (adjust_flag='2').

    start_date defaults to "1990-12-19" and end_date defaults to today.
    """
    begin = "1990-12-19" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_week_or_month_k_data(code, columns, begin, until, week_or_month='w', adjust_flag='2')
def get_week_k_data_post_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's weekly K-line data, backward-adjusted (adjust_flag='1').

    start_date defaults to "1990-12-19" and end_date defaults to today.
    """
    begin = "1990-12-19" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_week_or_month_k_data(code, columns, begin, until, week_or_month='w', adjust_flag='1')
def get_week_k_data_no_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's weekly K-line data without adjustment (adjust_flag='3').

    start_date defaults to "1990-12-19" and end_date defaults to today.
    """
    begin = "1990-12-19" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_week_or_month_k_data(code, columns, begin, until, week_or_month='w', adjust_flag='3')
def get_month_k_data_pre_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's monthly K-line data, forward-adjusted (adjust_flag='2').

    start_date defaults to "1990-12-19" and end_date defaults to today.
    """
    begin = "1990-12-19" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_week_or_month_k_data(code, columns, begin, until, week_or_month='m', adjust_flag='2')
def get_month_k_data_post_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's monthly K-line data, backward-adjusted (adjust_flag='1').

    start_date defaults to "1990-12-19" and end_date defaults to today.
    """
    begin = "1990-12-19" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_week_or_month_k_data(code, columns, begin, until, week_or_month='m', adjust_flag='1')
def get_month_k_data_no_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's monthly K-line data without adjustment (adjust_flag='3').

    start_date defaults to "1990-12-19" and end_date defaults to today.
    """
    begin = "1990-12-19" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_week_or_month_k_data(code, columns, begin, until, week_or_month='m', adjust_flag='3')
def get_5_minutes_k_data_pre_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's 5-minute K-line data, forward-adjusted (adjust_flag='2').

    start_date defaults to "1999-07-26" (earliest intraday data) and
    end_date defaults to today.
    """
    begin = "1999-07-26" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_minutes_k_data(code, columns, begin, until, minute_freq='5', adjust_flag='2')
def get_5_minutes_k_data_post_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's 5-minute K-line data, backward-adjusted (adjust_flag='1').

    start_date defaults to "1999-07-26" and end_date defaults to today.
    """
    begin = "1999-07-26" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_minutes_k_data(code, columns, begin, until, minute_freq='5', adjust_flag='1')
def get_5_minutes_k_data_no_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's 5-minute K-line data without adjustment (adjust_flag='3').

    start_date defaults to "1999-07-26" and end_date defaults to today.
    """
    begin = "1999-07-26" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_minutes_k_data(code, columns, begin, until, minute_freq='5', adjust_flag='3')
def get_15_minutes_k_data_pre_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's 15-minute K-line data, forward-adjusted (adjust_flag='2').

    start_date defaults to "1999-07-26" and end_date defaults to today.
    """
    begin = "1999-07-26" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_minutes_k_data(code, columns, begin, until, minute_freq='15', adjust_flag='2')
def get_15_minutes_k_data_post_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's 15-minute K-line data, backward-adjusted (adjust_flag='1').

    start_date defaults to "1999-07-26" and end_date defaults to today.
    """
    begin = "1999-07-26" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_minutes_k_data(code, columns, begin, until, minute_freq='15', adjust_flag='1')
def get_15_minutes_k_data_no_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's 15-minute K-line data without adjustment (adjust_flag='3').

    start_date defaults to "1999-07-26" and end_date defaults to today.
    """
    begin = "1999-07-26" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_minutes_k_data(code, columns, begin, until, minute_freq='15', adjust_flag='3')
def get_30_minutes_k_data_pre_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's 30-minute K-line data, forward-adjusted (adjust_flag='2').

    start_date defaults to "1999-07-26" and end_date defaults to today.
    """
    begin = "1999-07-26" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_minutes_k_data(code, columns, begin, until, minute_freq='30', adjust_flag='2')
def get_30_minutes_k_data_post_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's 30-minute K-line data, backward-adjusted (adjust_flag='1').

    start_date defaults to "1999-07-26" and end_date defaults to today.
    """
    begin = "1999-07-26" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_minutes_k_data(code, columns, begin, until, minute_freq='30', adjust_flag='1')
def get_30_minutes_k_data_no_adjust(code, columns, start_date=None, end_date=None):
    """Fetch one stock's 30-minute K-line data without adjustment (adjust_flag='3').

    start_date defaults to "1999-07-26" and end_date defaults to today.
    """
    begin = "1999-07-26" if start_date is None else start_date
    until = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_minutes_k_data(code, columns, begin, until, minute_freq='30', adjust_flag='3')
#-------------------------------------------------------------------------------
# Pre-adjusted (前复权) 60-minute K-line data for one stock.
#-------------------------------------------------------------------------------
def get_60_minutes_k_data_pre_adjust(code, columns, start_date=None, end_date=None):
    """Fetch pre-adjusted 60-minute K-line bars for one stock.

    Args:
        code: stock code accepted by utils.get_minutes_k_data.
        columns: columns to request from the data source.
        start_date: inclusive 'YYYY-MM-DD' start; defaults to "1999-07-26".
        end_date: inclusive 'YYYY-MM-DD' end; defaults to today.
    """
    begin = "1999-07-26" if start_date is None else start_date
    finish = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_minutes_k_data(code, columns, begin, finish,
                                    minute_freq='60', adjust_flag='2')
#-------------------------------------------------------------------------------
# Post-adjusted (后复权) 60-minute K-line data for one stock.
#-------------------------------------------------------------------------------
def get_60_minutes_k_data_post_adjust(code, columns, start_date=None, end_date=None):
    """Fetch post-adjusted 60-minute K-line bars for one stock.

    Args:
        code: stock code accepted by utils.get_minutes_k_data.
        columns: columns to request from the data source.
        start_date: inclusive 'YYYY-MM-DD' start; defaults to "1999-07-26".
        end_date: inclusive 'YYYY-MM-DD' end; defaults to today.
    """
    begin = "1999-07-26" if start_date is None else start_date
    finish = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_minutes_k_data(code, columns, begin, finish,
                                    minute_freq='60', adjust_flag='1')
#-------------------------------------------------------------------------------
# Unadjusted (未复权) 60-minute K-line data for one stock.
#-------------------------------------------------------------------------------
def get_60_minutes_k_data_no_adjust(code, columns, start_date=None, end_date=None):
    """Fetch unadjusted 60-minute K-line bars for one stock.

    Args:
        code: stock code accepted by utils.get_minutes_k_data.
        columns: columns to request from the data source.
        start_date: inclusive 'YYYY-MM-DD' start; defaults to "1999-07-26".
        end_date: inclusive 'YYYY-MM-DD' end; defaults to today.
    """
    begin = "1999-07-26" if start_date is None else start_date
    finish = datetime.today().strftime('%Y-%m-%d') if end_date is None else end_date
    return utils.get_minutes_k_data(code, columns, begin, finish,
                                    minute_freq='60', adjust_flag='3')
| 49.743697
| 114
| 0.484331
| 1,317
| 11,839
| 4.023538
| 0.085042
| 0.142668
| 0.126816
| 0.15852
| 0.868843
| 0.843555
| 0.819211
| 0.819211
| 0.819211
| 0.819211
| 0
| 0.025229
| 0.116142
| 11,839
| 237
| 115
| 49.953587
| 0.481174
| 0.346989
| 0
| 0.6
| 0
| 0
| 0.057449
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157143
| false
| 0
| 0.035714
| 0
| 0.35
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7d6f2fc6ba07280159eae6e1a5ce4e0aabd4843c
| 5,220
|
py
|
Python
|
cyclofit/users/forms.py
|
piyushmohan01/CycloFit-SEPM
|
f97a7032e22e29daf48f0796462a22e58b20709c
|
[
"MIT"
] | 4
|
2021-09-10T00:30:15.000Z
|
2022-03-03T09:05:03.000Z
|
cyclofit/users/forms.py
|
piyushmohan01/CycloFit-SEPM
|
f97a7032e22e29daf48f0796462a22e58b20709c
|
[
"MIT"
] | 1
|
2022-03-03T05:41:13.000Z
|
2022-03-03T05:44:41.000Z
|
cyclofit/users/forms.py
|
piyushmohan01/CycloFit-SEPM
|
f97a7032e22e29daf48f0796462a22e58b20709c
|
[
"MIT"
] | 3
|
2021-05-18T18:19:55.000Z
|
2021-10-13T11:29:56.000Z
|
from cyclofit.models import User
from flask_login import current_user
from flask_wtf import FlaskForm
from flask_wtf.file import FileAllowed, FileField
from wtforms import (BooleanField, IntegerField, PasswordField, RadioField,
StringField, SubmitField)
from wtforms.validators import (DataRequired, Email, EqualTo, InputRequired,
Length, NumberRange, ValidationError)
class RegistrationForm(FlaskForm):
    """Sign-up form with database uniqueness checks on email and username."""

    email = StringField(
        'Email',
        validators=[
            InputRequired("Please enter your email address."),
            Email("Enter valid email address"),
        ])
    username = StringField(
        'Username',
        validators=[DataRequired(), Length(min=2, max=20)])
    password = PasswordField('Password', validators=[DataRequired()])
    confirm_password = PasswordField(
        'Confirm Password',
        validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Register')

    # WTForms picks these up automatically via the validate_<fieldname>
    # naming convention; each rejects values already present in the User table.
    def validate_username(self, username):
        existing = User.query.filter_by(username=username.data).first()
        if existing:
            raise ValidationError('Username already taken! Pick another!')

    def validate_email(self, email):
        existing = User.query.filter_by(email=email.data).first()
        if existing:
            raise ValidationError('Email already taken! Pick another!')
class ProfileForm(FlaskForm):
    """Profile-details form (area, contact numbers, age, gender)."""

    area = StringField(
        'Area', validators=[DataRequired(), Length(min=2, max=20)])
    # NOTE(review): NumberRange(min=10) only enforces value >= 10, not a
    # 10-digit number -- confirm whether a digit-length check was intended.
    contactno = IntegerField(
        'Contact Number',
        validators=[NumberRange(min=10),
                    DataRequired('Enter valid number with no symbols')])
    age = StringField('Age', validators=[DataRequired()])
    gender = RadioField(
        'Gender',
        choices=[('Male', 'Male'), ('Female', 'Female')],
        validators=[DataRequired()])
    emergencyno = IntegerField(
        'Emergency Number',
        validators=[NumberRange(min=10),
                    DataRequired('Enter valid number with no symbols')])
    submit = SubmitField('GO')
class LoginForm(FlaskForm):
    """Email/password sign-in form with a remember-me option."""

    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember = BooleanField('Remember Me')
    submit = SubmitField('Sign In')
class UpdateGeneralForm(FlaskForm):
    """Account-settings form; uniqueness is re-checked only for changed values."""

    email = StringField(
        'Email',
        validators=[
            InputRequired("Please enter your email address."),
            Email("Enter valid email address"),
        ])
    username = StringField(
        'Username',
        validators=[DataRequired(), Length(min=2, max=20)])
    picture = FileField(
        'Profile Picture', validators=[FileAllowed(['jpg', 'png'])])
    submit = SubmitField('Update')

    # Custom validators: hit the database only when the submitted value
    # differs from the logged-in user's current one.
    def validate_username(self, username):
        if username.data != current_user.username:
            existing = User.query.filter_by(username=username.data).first()
            if existing:
                raise ValidationError('Username already taken! Pick another!')

    def validate_email(self, email):
        if email.data != current_user.email:
            existing = User.query.filter_by(email=email.data).first()
            if existing:
                raise ValidationError('Email already taken! Pick another!')
class UpdatePersonalForm(FlaskForm):
    """Update variant of the profile-details form (same fields as ProfileForm)."""

    area = StringField(
        'Area', validators=[DataRequired(), Length(min=2, max=20)])
    # NOTE(review): NumberRange(min=10) only enforces value >= 10, not a
    # 10-digit number -- confirm whether a digit-length check was intended.
    contactno = IntegerField(
        'Contact Number',
        validators=[NumberRange(min=10),
                    DataRequired('Enter valid number with no symbols')])
    age = StringField('Age', validators=[DataRequired()])
    gender = RadioField(
        'Gender',
        choices=[('Male', 'Male'), ('Female', 'Female')],
        validators=[DataRequired()])
    emergencyno = IntegerField(
        'Emergency Number',
        validators=[NumberRange(min=10),
                    DataRequired('Enter valid number with no symbols')])
    submit = SubmitField('Update')
class RequestResetForm(FlaskForm):
    """Password-reset request; the email must belong to an existing account."""

    email = StringField(
        'Email',
        validators=[
            InputRequired("Please enter your email address."),
            Email("Enter valid email address"),
        ])
    submit = SubmitField('Submit')

    def validate_email(self, email):
        # Unlike registration, here the address must already be registered.
        if User.query.filter_by(email=email.data).first() is None:
            raise ValidationError('No Account found!')
class ResetPasswordForm(FlaskForm):
    """Set a new password (entered twice) after following a reset link."""

    password = PasswordField('Password', validators=[DataRequired()])
    confirm_password = PasswordField(
        'Confirm Password',
        validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Submit')
| 43.865546
| 84
| 0.64023
| 511
| 5,220
| 6.504892
| 0.199609
| 0.092659
| 0.045126
| 0.02858
| 0.77858
| 0.751203
| 0.751203
| 0.724729
| 0.724729
| 0.724729
| 0
| 0.005097
| 0.248276
| 5,220
| 118
| 85
| 44.237288
| 0.841998
| 0.08295
| 0
| 0.742268
| 0
| 0
| 0.161851
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051546
| false
| 0.092784
| 0.061856
| 0
| 0.494845
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
7ddf0e2708cd69950a0915285131a2a9b10496e6
| 105
|
py
|
Python
|
segmenter/config/hash_config.py
|
brandongk/segmenter
|
dbc042d31dc74f1abdc87ae10a6be78ba38ddb91
|
[
"Unlicense"
] | null | null | null |
segmenter/config/hash_config.py
|
brandongk/segmenter
|
dbc042d31dc74f1abdc87ae10a6be78ba38ddb91
|
[
"Unlicense"
] | null | null | null |
segmenter/config/hash_config.py
|
brandongk/segmenter
|
dbc042d31dc74f1abdc87ae10a6be78ba38ddb91
|
[
"Unlicense"
] | null | null | null |
import hashlib
def hash_config(in_string):
    """Return the hex MD5 digest of str(in_string).

    Used as a short, stable fingerprint of a configuration object; the
    argument is stringified first, so any object with a deterministic
    str() works. Not intended for security purposes.
    """
    digest = hashlib.md5(str(in_string).encode())
    return digest.hexdigest()
| 17.5
| 59
| 0.752381
| 15
| 105
| 5.066667
| 0.8
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010753
| 0.114286
| 105
| 5
| 60
| 21
| 0.806452
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
8186287d405963d81d5a1445933be0c4a32defd1
| 216
|
py
|
Python
|
environments/mujoco/rand_param_envs/gym/envs/parameter_tuning/__init__.py
|
NagisaZj/varibad
|
df7cda81588c62a2a3bee69e4173228701bd7000
|
[
"MIT"
] | null | null | null |
environments/mujoco/rand_param_envs/gym/envs/parameter_tuning/__init__.py
|
NagisaZj/varibad
|
df7cda81588c62a2a3bee69e4173228701bd7000
|
[
"MIT"
] | 2
|
2021-01-13T14:58:50.000Z
|
2021-01-13T14:59:40.000Z
|
environments/mujoco/rand_param_envs/gym/envs/parameter_tuning/__init__.py
|
NagisaZj/varibad
|
df7cda81588c62a2a3bee69e4173228701bd7000
|
[
"MIT"
] | null | null | null |
from environments.mujoco.rand_param_envs.gym.envs.parameter_tuning.convergence import ConvergenceControl
from environments.mujoco.rand_param_envs.gym.envs.parameter_tuning.train_deep_cnn import CNNClassifierTraining
| 72
| 110
| 0.907407
| 28
| 216
| 6.714286
| 0.571429
| 0.170213
| 0.234043
| 0.276596
| 0.606383
| 0.606383
| 0.606383
| 0.606383
| 0.606383
| 0.606383
| 0
| 0
| 0.037037
| 216
| 2
| 111
| 108
| 0.903846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
8191b3fafce7dbd16d435129ed7050d4a34dd7b4
| 303
|
py
|
Python
|
ignite/contrib/metrics/__init__.py
|
DhDeepLIT/ignite
|
b913efd055a7e7cc59d35ff63663d49318453e0f
|
[
"BSD-3-Clause"
] | 1
|
2020-06-13T15:22:08.000Z
|
2020-06-13T15:22:08.000Z
|
ignite/contrib/metrics/__init__.py
|
DhDeepLIT/ignite
|
b913efd055a7e7cc59d35ff63663d49318453e0f
|
[
"BSD-3-Clause"
] | null | null | null |
ignite/contrib/metrics/__init__.py
|
DhDeepLIT/ignite
|
b913efd055a7e7cc59d35ff63663d49318453e0f
|
[
"BSD-3-Clause"
] | 1
|
2020-06-13T15:30:46.000Z
|
2020-06-13T15:30:46.000Z
|
from ignite.contrib.metrics.average_precision import AveragePrecision
from ignite.contrib.metrics.roc_auc import ROC_AUC, RocCurve
from ignite.contrib.metrics.precision_recall_curve import PrecisionRecallCurve
import ignite.contrib.metrics.regression
from ignite.contrib.metrics.gpu_info import GpuInfo
| 50.5
| 78
| 0.884488
| 40
| 303
| 6.55
| 0.45
| 0.248092
| 0.381679
| 0.366412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.062706
| 303
| 5
| 79
| 60.6
| 0.922535
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
819c176fa8e540a631dff18e0f77f99a464c7688
| 2,883
|
py
|
Python
|
neo/SmartContract/tests/test_smart_contract2.py
|
volekerb/neo-python
|
5bdded2c339219355cf1d31ae58653b0f94c6e51
|
[
"MIT"
] | 387
|
2017-07-17T18:25:54.000Z
|
2021-11-18T06:19:47.000Z
|
neo/SmartContract/tests/test_smart_contract2.py
|
volekerb/neo-python
|
5bdded2c339219355cf1d31ae58653b0f94c6e51
|
[
"MIT"
] | 967
|
2017-08-19T15:48:03.000Z
|
2021-06-01T21:42:39.000Z
|
neo/SmartContract/tests/test_smart_contract2.py
|
volekerb/neo-python
|
5bdded2c339219355cf1d31ae58653b0f94c6e51
|
[
"MIT"
] | 286
|
2017-07-17T03:44:36.000Z
|
2021-11-18T06:19:32.000Z
|
import binascii
from neo.IO.Helper import Helper
from neo.Utils.BlockchainFixtureTestCase import BlockchainFixtureTestCase
from neo.Settings import settings
from neo.Core.State.UnspentCoinState import UnspentCoinState
import os
class SmartContractTest2(BlockchainFixtureTestCase):
    """Deserializes a canned raw block and checks it persists successfully
    against the blockchain fixture database."""

    @classmethod
    def leveldb_testpath(cls):
        # Directory holding the leveldb blockchain fixture used by the
        # BlockchainFixtureTestCase harness.
        return os.path.join(settings.DATA_DIR_PATH, 'fixtures/test_chain')

    # Hex-encoded serialized block used as the test fixture. The embedded
    # invocation transaction appears to deploy 'sample2.py' via
    # Neo.Contract.Create (both strings are visible in the hex) -- the exact
    # payload semantics are opaque; do not edit this blob.
    tbblock = b'000000007134e5ee56f841bb73dbff969a9ef793c05f175cd386b2f24874a54c441cc0500e6c4e19da72fd4956a28670f36d26e03fd43c1794a1d3a5ad4f738dd48b53f505c7605b992400006b76abd322b7bd0bbe48d3a3f5d10013ab9ffee489706078714f1ea201c3400df8020bf9c22cd865b43b73060be3302abbab95b5f38941ba288cd77b846c9c1edcef1ab9a108f0a2fb8180e88178d3e85e316243054e48b29ced9dde54766340d9efc4f6d78970aba6712688071b862413bd53d58620e87c951aa3eac5c2611cdfecfcf084c12cfbe6cd356ef7726b9b5e93c10b5ffa7dc6e77ae8dc8c7af09240756caac1dad30a93662f36194fe270bb2afe0a557492122027df5f95dc5b1b9d18b169a6a96795019067ba008e5d42250c23886f0807ec20f3c880b2e740d1048b532102103a7f7dd016558597f7960d27c516a4394fd968b9e65155eb4b013e4040406e2102a7bc55fe8684e0119768d104ba30795bdcc86619e864add26156723ed185cd622102b3622bf4017bdfe317c58aed5f4c753f206b7db896046fa7d774bbc4bf7f8dc22103d90c07df63e690ce77912e10ab51acc944b66860237b608c4f8f8309e71ee69954ae0200006b76abd300000000d101de39202f726f6f742f2e6e656f707974686f6e2f436861696e732f556e6974546573742d534d2f636f6e7472616374732f73616d706c65322e70790474657374047465737404746573740474657374000102030702024c725ec56b6a00527ac46a51527ac46a52527ac46a00c3036164649c640d006a51c36a52c3936c7566616a00c3037375629c640d006a51c36a52c3946c7566616a00c3036d756c9c640d006a51c36a52c3956c7566616a00c3036469769c640d006a51c36a52c3966c7566614f6c7566006c756668134e656f2e436f6e74726163742e437265617465001a7118020000000001347fff9221a8caf429279a82906688eb78264c1a9a2791d95ee47b6e095120aa000001e72d286979ee6cb1b7e65dfddfb2e384100b8d148e7758de42e4168b71792c600080b5fc5c02000023ba2703c53263e8d6e522dc32203339dcd8eee90141405787dc8c47ba7da02668582b822bb50e1b615546a5f01826967cba603a0744a01aed6c098d809f20ec199a84269aa01ea911564effe7c1b4ad65d71f4ca995a12321031a6c6fbbdf02ca351745fa86b9ba5a9452d785ac4f7fc2b7548ca2a46c4fcf4aac'

    def test_b_invocation(self):
        # Round-trip the hex fixture into a Block object and sanity-check
        # that it serializes to JSON.
        hexdata = binascii.unhexlify(self.tbblock)
        block = Helper.AsSerializableWithType(hexdata, 'neo.Core.Block.Block')
        json = block.ToJson()
        self.assertIsNotNone(json)
        # Persist against a fresh snapshot that is pre-seeded with the UTXO
        # the block's transaction spends.
        snapshot = self._blockchain._db.createSnapshot()
        snapshot.PersistingBlock = block
        snapshot.UnspentCoins.Add(b'aa2051096e7be45ed991279a1a4c2678eb886690829a2729f4caa82192ff7f34',
                                  UnspentCoinState.FromTXOutputsConfirmed([0]))
        result = False
        with BlockchainFixtureTestCase.MPPersist():
            result = self._blockchain.Persist(block, snapshot)
        self.assertTrue(result)
| 77.918919
| 1,787
| 0.892473
| 104
| 2,883
| 24.653846
| 0.538462
| 0.01092
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.481008
| 0.077697
| 2,883
| 36
| 1,788
| 80.083333
| 0.483264
| 0
| 0
| 0
| 0
| 0
| 0.64967
| 0.636143
| 0
| 1
| 0
| 0
| 0.083333
| 1
| 0.083333
| false
| 0
| 0.25
| 0.041667
| 0.458333
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
48456b9a592d7bfcd7a70f8415c074df43d0409e
| 2,366
|
py
|
Python
|
segint_research_django/segint_api/migrations/0024_auto_20200727_2119.py
|
VarianAPIs/SegInt-Research
|
3b7aa71ada46cbb35a428a00eb9f2a5c43f15d51
|
[
"MIT"
] | 3
|
2021-04-15T14:24:24.000Z
|
2022-03-23T17:07:06.000Z
|
segint_research_django/segint_api/migrations/0024_auto_20200727_2119.py
|
VarianAPIs/SegInt-Research
|
3b7aa71ada46cbb35a428a00eb9f2a5c43f15d51
|
[
"MIT"
] | null | null | null |
segint_research_django/segint_api/migrations/0024_auto_20200727_2119.py
|
VarianAPIs/SegInt-Research
|
3b7aa71ada46cbb35a428a00eb9f2a5c43f15d51
|
[
"MIT"
] | 1
|
2021-04-21T15:05:09.000Z
|
2021-04-21T15:05:09.000Z
|
# Generated by Django 3.0.7 on 2020-07-27 21:19
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: converts every bounding-box and spacing
    column on ModelChannelDescription to IntegerField(default=0)."""

    dependencies = [
        ('segint_api', '0023_auto_20200727_2118'),
    ]

    # The twelve altered columns are the cross product of
    # {dimensions, spacing} x {max, min} x {x, y, z}; every one becomes an
    # IntegerField with default 0. Order matches the generated original.
    operations = [
        migrations.AlterField(
            model_name='modelchanneldescription',
            name='%s_%s_%s' % (group, bound, axis),
            field=models.IntegerField(default=0),
        )
        for group in ('dimensions', 'spacing')
        for bound in ('max', 'min')
        for axis in ('x', 'y', 'z')
    ]
| 31.972973
| 50
| 0.585799
| 198
| 2,366
| 6.79798
| 0.207071
| 0.178306
| 0.222883
| 0.258544
| 0.887073
| 0.887073
| 0.887073
| 0.863299
| 0.814264
| 0.814264
| 0
| 0.026413
| 0.311919
| 2,366
| 73
| 51
| 32.410959
| 0.800369
| 0.019019
| 0
| 0.716418
| 1
| 0
| 0.208279
| 0.128935
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014925
| 0
| 0.059701
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
6f9b31a85894e4e825fb1e06405c32b3a615d616
| 4,123
|
py
|
Python
|
asnets/experiments/det_n_puzzle.py
|
xf1590281/ASNets
|
5f4b29fb62a5e72004b813228442d06246c9ec33
|
[
"MIT"
] | 21
|
2017-12-05T13:27:36.000Z
|
2021-11-16T20:32:33.000Z
|
asnets/experiments/det_n_puzzle.py
|
xf1590281/ASNets
|
5f4b29fb62a5e72004b813228442d06246c9ec33
|
[
"MIT"
] | 2
|
2018-07-16T12:15:46.000Z
|
2020-10-31T00:02:49.000Z
|
asnets/experiments/det_n_puzzle.py
|
xf1590281/ASNets
|
5f4b29fb62a5e72004b813228442d06246c9ec33
|
[
"MIT"
] | 7
|
2018-03-19T13:45:13.000Z
|
2022-03-24T07:52:20.000Z
|
"""For experiments on n-puzzle from IPC'08 learning track."""

# Root of the IPC'08-learning n-puzzle benchmark tree.
PDDL_DIR = '../problems/ipc08-learn/n-puzzle/'

# Domain file shared by every problem instance.
COMMON_PDDLS = ['learning/n-puzzle-typed.pddl']

# Training set: only the odd-numbered bootstrap problems 09-29 are enabled
# (the even-numbered ones, and 01-08, are deliberately left out).
TRAIN_PDDLS = [
    'learning/bootstrap/typed/bootstrap-typed-%02d.pddl' % num
    for num in range(9, 30, 2)
]

TRAIN_NAMES = None

# Evaluation problems as (pddl-path-list, plan) pairs; no reference plans
# are supplied. Grids: five 4x4, fifteen 5x5, five 6x6 and five 7x7 targets.
TEST_RUNS = [
    (['testing/target/typed/n-puzzle-target-%s-%02d-typed.pddl' % (grid, num)],
     None)
    for grid, count in (('4x4', 5), ('5x5', 15), ('6x6', 5), ('7x7', 5))
    for num in range(1, count + 1)
]
| 58.9
| 71
| 0.703614
| 575
| 4,123
| 5.036522
| 0.106087
| 0.290055
| 0.227901
| 0.321133
| 0.908494
| 0.896064
| 0.495511
| 0.483771
| 0.483771
| 0.483771
| 0
| 0.049198
| 0.092894
| 4,123
| 69
| 72
| 59.753623
| 0.725134
| 0.260005
| 0
| 0
| 0
| 0
| 0.730248
| 0.730248
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6fa8325c4d0a6e62d661bc02d1324a76817c9005
| 36,261
|
py
|
Python
|
dashboard/dashboard/pinpoint/models/job_test.py
|
PLSV/catapult
|
88e5b1f40c89c4b80d3dd56a722936d07f222a55
|
[
"BSD-3-Clause"
] | null | null | null |
dashboard/dashboard/pinpoint/models/job_test.py
|
PLSV/catapult
|
88e5b1f40c89c4b80d3dd56a722936d07f222a55
|
[
"BSD-3-Clause"
] | null | null | null |
dashboard/dashboard/pinpoint/models/job_test.py
|
PLSV/catapult
|
88e5b1f40c89c4b80d3dd56a722936d07f222a55
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import datetime
import mock
import sys
from tracing.value.diagnostics import generic_set
from tracing.value.diagnostics import reserved_infos
from dashboard.common import layered_cache
from dashboard.common import utils
from dashboard.models import histogram
from dashboard.pinpoint.models import change
from dashboard.pinpoint.models import errors
from dashboard.pinpoint.models import job
from dashboard.pinpoint import test
_CHROMIUM_URL = 'https://chromium.googlesource.com/chromium/src'

# Expected issue-tracker comment bodies for each Pinpoint job outcome; the
# tests below compare AddBugComment calls against these exact strings, so the
# string contents (including the \U0001f4cd pin emoji and \u2192 arrows) must
# not be reformatted.
_COMMENT_STARTED = (u"""\U0001f4cd Pinpoint job started.
https://testbed.example.com/job/1""")

_COMMENT_COMPLETED_NO_COMPARISON = (
    u"""<b>\U0001f4cd Job complete. See results below.</b>
https://testbed.example.com/job/1""")

_COMMENT_COMPLETED_NO_DIFFERENCES = (
    u"""<b>\U0001f4cd Couldn't reproduce a difference.</b>
https://testbed.example.com/job/1""")

_COMMENT_COMPLETED_WITH_COMMIT = (
    u"""<b>\U0001f4cd Found a significant difference at 1 commit.</b>
10 revisions compared.
https://testbed.example.com/job/1
<b>Subject.</b> by author@chromium.org
https://example.com/repository/+/git_hash
0 \u2192 1.235 (+1.235) (+\u221e%)
Understanding performance regressions:
http://g.co/ChromePerformanceRegressions
You can view the full results and re-run the Pinpoint job at:
https://testbed.example.com/job/1
If you think Pinpoint blamed the wrong commit, please add the
`Chromeperf-Auto-NeedsAttention` label to the issue so that a sheriff can
help diagnose.""")

_COMMENT_COMPLETED_WITH_COMMIT_AND_DOCS = (
    u"""<b>\U0001f4cd Found a significant difference at 1 commit.</b>
10 revisions compared.
https://testbed.example.com/job/1
<b>Subject.</b> by author@chromium.org
https://example.com/repository/+/git_hash
1.235 \u2192 0 (-1.235) (-100%)
Understanding performance regressions:
http://g.co/ChromePerformanceRegressions
Benchmark doc link:
http://docs
You can view the full results and re-run the Pinpoint job at:
https://testbed.example.com/job/1
If you think Pinpoint blamed the wrong commit, please add the
`Chromeperf-Auto-NeedsAttention` label to the issue so that a sheriff can
help diagnose.""")

_COMMENT_COMPLETED_WITH_AUTOROLL_COMMIT = (
    u"""<b>\U0001f4cd Found a significant difference at 1 commit.</b>
10 revisions compared.
https://testbed.example.com/job/1
<b>Subject.</b> by chromium-autoroll@skia-public.iam.gserviceaccount.com
https://example.com/repository/+/git_hash
20 \u2192 30 (+10) (+50%)
Assigning to sheriff sheriff@bar.com because "Subject." is a roll.
Understanding performance regressions:
http://g.co/ChromePerformanceRegressions
You can view the full results and re-run the Pinpoint job at:
https://testbed.example.com/job/1
If you think Pinpoint blamed the wrong commit, please add the
`Chromeperf-Auto-NeedsAttention` label to the issue so that a sheriff can
help diagnose.""")

_COMMENT_COMPLETED_WITH_PATCH = (
    u"""<b>\U0001f4cd Found a significant difference at 1 commit.</b>
10 revisions compared.
https://testbed.example.com/job/1
<b>Subject.</b> by author@chromium.org
https://codereview.com/c/672011/2f0d5c7
40 \u2192 20 (-20) (-50%)
Understanding performance regressions:
http://g.co/ChromePerformanceRegressions
You can view the full results and re-run the Pinpoint job at:
https://testbed.example.com/job/1
If you think Pinpoint blamed the wrong commit, please add the
`Chromeperf-Auto-NeedsAttention` label to the issue so that a sheriff can
help diagnose.""")

_COMMENT_COMPLETED_THREE_DIFFERENCES = (
    u"""<b>\U0001f4cd Found significant differences at 3 commits.</b>
10 revisions compared.
https://testbed.example.com/job/1
The top 3 are:
<b>1. Subject.</b> by author1@chromium.org
https://example.com/repository/+/git_hash_1
50 \u2192 0 (-50) (-100%)
<b>2. Subject.</b> by author2@chromium.org
https://example.com/repository/+/git_hash_2
0 \u2192 40 (+40) (+\u221e%)
<b>3. Subject.</b> by author3@chromium.org
https://example.com/repository/+/git_hash_3
0 \u2192 No values
Understanding performance regressions:
http://g.co/ChromePerformanceRegressions
You can view the full results and re-run the Pinpoint job at:
https://testbed.example.com/job/1
If you think Pinpoint blamed the wrong commit, please add the
`Chromeperf-Auto-NeedsAttention` label to the issue so that a sheriff can
help diagnose.""")

_COMMENT_COMPLETED_THREE_DIFFERENCES_ABSOLUTE = (
    u"""<b>\U0001f4cd Found significant differences at 3 commits.</b>
10 revisions compared.
https://testbed.example.com/job/1
The top 3 are:
<b>1. Subject.</b> by author3@chromium.org
https://example.com/repository/+/git_hash_3
0 \u2192 -100 (-100) (+\u221e%)
<b>2. Subject.</b> by author2@chromium.org
https://example.com/repository/+/git_hash_2
10 \u2192 0 (-10) (-100%)
<b>3. Subject.</b> by author1@chromium.org
https://example.com/repository/+/git_hash_1
No values \u2192 10
Understanding performance regressions:
http://g.co/ChromePerformanceRegressions
You can view the full results and re-run the Pinpoint job at:
https://testbed.example.com/job/1
If you think Pinpoint blamed the wrong commit, please add the
`Chromeperf-Auto-NeedsAttention` label to the issue so that a sheriff can
help diagnose.""")

_COMMENT_FAILED = (
    u"""\U0001f63f Pinpoint job stopped with an error.
https://testbed.example.com/job/1
Error string""")

_COMMENT_CODE_REVIEW = (
    u"""\U0001f4cd Job complete.
See results at: https://testbed.example.com/job/1""")
@mock.patch.object(
    job.results2, 'GetCachedResults2',
    mock.MagicMock(return_value='http://foo'))
class JobTest(test.TestCase):
  """Tests for Job.AsDict with the OPTION_ESTIMATE flag."""

  @mock.patch.object(
      job.timing_record, 'GetSimilarHistoricalTimings',
      mock.MagicMock(
          return_value=job.timing_record.EstimateResult(
          job.timing_record.Timings(
              datetime.timedelta(seconds=10),
              datetime.timedelta(seconds=5),
              datetime.timedelta(seconds=100)), ['try', 'linux'])))
  @mock.patch.object(
      job.scheduler, 'QueueStats',
      mock.MagicMock(return_value=[]))
  def testAsDictOptions_Estimate(self):
    # With historical timings available, AsDict([OPTION_ESTIMATE]) surfaces
    # the timings (as whole seconds) and tags in an 'estimate' entry.
    j = job.Job.New((), (), bug_id=123456)
    d = j.AsDict([job.OPTION_ESTIMATE])
    self.assertTrue('estimate' in d)
    self.assertEqual(d['estimate']['timings'][0], 10)
    self.assertEqual(d['estimate']['timings'][1], 5)
    self.assertEqual(d['estimate']['timings'][2], 100)
    self.assertEqual(d['estimate']['tags'], ['try', 'linux'])

  @mock.patch.object(
      job.timing_record, 'GetSimilarHistoricalTimings',
      mock.MagicMock(return_value=None))
  @mock.patch.object(
      job.scheduler, 'QueueStats',
      mock.MagicMock(return_value=[]))
  def testAsDictOptions_EstimateFails(self):
    # When no historical timings exist, the 'estimate' key is omitted.
    j = job.Job.New((), (), bug_id=123456)
    d = j.AsDict([job.OPTION_ESTIMATE])
    self.assertFalse('estimate' in d)
class RetryTest(test.TestCase):
  """Tests for the job's retry/backoff behaviour on RecoverableError."""

  def setUp(self):
    super(RetryTest, self).setUp()

  def testStarted_RecoverableError_BacksOff(self):
    # With Explore() always raising RecoverableError, each Run() reschedules
    # with an exponentially growing countdown (2x, 4x, 8x _TASK_INTERVAL);
    # the job only Fail()s once the retry budget is exhausted on the 4th Run().
    j = job.Job.New((), (), comparison_mode='performance')
    j.Start()
    j.state.Explore = mock.MagicMock(
        side_effect=errors.RecoverableError(None))
    j._Schedule = mock.MagicMock()
    j.put = mock.MagicMock()
    j.Fail = mock.MagicMock()
    j.Run()
    j.Run()
    j.Run()
    self.assertEqual(j._Schedule.call_args_list[0],
                     mock.call(countdown=job._TASK_INTERVAL * 2))
    self.assertEqual(j._Schedule.call_args_list[1],
                     mock.call(countdown=job._TASK_INTERVAL * 4))
    self.assertEqual(j._Schedule.call_args_list[2],
                     mock.call(countdown=job._TASK_INTERVAL * 8))
    self.assertFalse(j.Fail.called)
    j.Run()
    self.assertTrue(j.Fail.called)

  def testStarted_RecoverableError_Resets(self):
    # Same backoff sequence, but once Explore() stops raising, a successful
    # Run() resets retry_count back to zero instead of failing the job.
    j = job.Job.New((), (), comparison_mode='performance')
    j.Start()
    j.state.Explore = mock.MagicMock(
        side_effect=errors.RecoverableError(None))
    j._Schedule = mock.MagicMock()
    j.put = mock.MagicMock()
    j.Fail = mock.MagicMock()
    j.Run()
    j.Run()
    j.Run()
    self.assertEqual(j._Schedule.call_args_list[0],
                     mock.call(countdown=job._TASK_INTERVAL * 2))
    self.assertEqual(j._Schedule.call_args_list[1],
                     mock.call(countdown=job._TASK_INTERVAL * 4))
    self.assertEqual(j._Schedule.call_args_list[2],
                     mock.call(countdown=job._TASK_INTERVAL * 8))
    self.assertFalse(j.Fail.called)
    j.state.Explore = mock.MagicMock()
    j.Run()
    self.assertEqual(0, j.retry_count)
@mock.patch('dashboard.pinpoint.models.job_state.JobState.ChangesExamined',
            lambda _: 10)
@mock.patch('dashboard.common.utils.ServiceAccountHttp', mock.MagicMock())
class BugCommentTest(test.TestCase):
    """Tests for the issue-tracker comments Pinpoint posts about jobs.

    Covers comment text, status, owner, CC list, labels and issue-merging
    behaviour for jobs with no differences, single/multiple culprits,
    autoroll culprits, failures, and Gerrit try jobs.
    """

    def setUp(self):
        super(BugCommentTest, self).setUp()
        # Capture issue-tracker calls so each test can assert on them.
        self.add_bug_comment = mock.MagicMock()
        self.get_issue = mock.MagicMock()
        patcher = mock.patch('dashboard.services.issue_tracker_service.'
                             'IssueTrackerService')
        issue_tracker_service = patcher.start()
        issue_tracker_service.return_value = mock.MagicMock(
            AddBugComment=self.add_bug_comment, GetIssue=self.get_issue)
        self.addCleanup(patcher.stop)

    def testNoBug(self):
        """Jobs without a bug_id never comment on the tracker."""
        j = job.Job.New((), ())
        j.Start()
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(self.add_bug_comment.called)

    def testStarted(self):
        """Starting a job posts a 'started' comment and sends e-mail."""
        j = job.Job.New((), (), bug_id=123456)
        j.Start()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.add_bug_comment.assert_called_once_with(
            123456, _COMMENT_STARTED, send_email=True, project='chromium')

    def testCompletedNoComparison(self):
        """Try jobs (no comparison mode) get a plain completion comment."""
        j = job.Job.New((), (), bug_id=123456)
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_COMPLETED_NO_COMPARISON,
            labels=['Pinpoint-Tryjob-Completed'],
            project='chromium',
        )

    def testCompletedNoDifference(self):
        """Bisects that find no differences close the bug as WontFix."""
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_COMPLETED_NO_DIFFERENCES,
            labels=['Pinpoint-No-Repro'],
            status='WontFix',
            project='chromium',
        )

    @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedWithCommit(self, differences, result_values,
                                commit_as_dict):
        """A single culprit commit assigns the bug to the commit author."""
        c = change.Change((change.Commit('chromium', 'git_hash'),))
        differences.return_value = [(None, c)]
        result_values.side_effect = [0], [1.23456]
        commit_as_dict.return_value = {
            'repository': 'chromium',
            'git_hash': 'git_hash',
            'url': 'https://example.com/repository/+/git_hash',
            'author': 'author@chromium.org',
            'subject': 'Subject.',
            'message': 'Subject.\n\nCommit message.',
        }
        self.get_issue.return_value = {'status': 'Untriaged'}
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_COMPLETED_WITH_COMMIT,
            status='Assigned',
            owner='author@chromium.org',
            labels=['Pinpoint-Culprit-Found'],
            cc_list=['author@chromium.org'],
            merge_issue=None,
            project='chromium')

    @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedMergeIntoExisting(
            self, differences, result_values, commit_as_dict):
        """A culprit already blamed on another bug merges into that issue."""
        c = change.Change((change.Commit('chromium', 'git_hash'),))
        differences.return_value = [(None, c)]
        result_values.side_effect = [0], [1.23456]
        commit_as_dict.return_value = {
            'repository': 'chromium',
            'git_hash': 'git_hash',
            'author': 'author@chromium.org',
            'subject': 'Subject.',
            'url': 'https://example.com/repository/+/git_hash',
            'message': 'Subject.\n\nCommit message.',
        }
        self.get_issue.return_value = {
            'status': 'Untriaged',
            'id': '111222',
            'projectId': 'chromium'
        }
        # The commit hash is already associated with issue 111222.
        layered_cache.SetExternal('commit_hash_git_hash', 'chromium:111222')
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_COMPLETED_WITH_COMMIT,
            status='Assigned',
            owner='author@chromium.org',
            cc_list=[],
            labels=['Pinpoint-Culprit-Found'],
            merge_issue='111222',
            project='chromium')

    @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedSkipsMergeWhenDuplicate(
            self, differences, result_values, commit_as_dict):
        """No merge happens when the candidate merge issue is a Duplicate."""
        c = change.Change((change.Commit('chromium', 'git_hash'),))
        differences.return_value = [(None, c)]
        result_values.side_effect = [0], [1.23456]
        commit_as_dict.return_value = {
            'repository': 'chromium',
            'git_hash': 'git_hash',
            'author': 'author@chromium.org',
            'subject': 'Subject.',
            'url': 'https://example.com/repository/+/git_hash',
            'message': 'Subject.\n\nCommit message.',
        }

        def _GetIssue(bug_id, project='chromium'):
            # Issue 111222 (the cached merge target) is a Duplicate; every
            # other issue is Untriaged.
            if bug_id == '111222':
                return {'status': 'Duplicate', 'projectId': project,
                        'id': '111222'}
            else:
                return {'status': 'Untriaged', 'projectId': project,
                        'id': str(bug_id)}

        self.get_issue.side_effect = _GetIssue
        layered_cache.SetExternal('commit_hash_git_hash', 'chromium:111222')
        j = job.Job.New((), (),
                        bug_id=123456,
                        comparison_mode='performance',
                        project='chromium')
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_COMPLETED_WITH_COMMIT,
            status='Assigned',
            owner='author@chromium.org',
            labels=['Pinpoint-Culprit-Found'],
            cc_list=['author@chromium.org'],
            merge_issue=None,
            project='chromium')

    @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedWithInvalidIssue(
            self, differences, result_values, commit_as_dict):
        """No comment is posted when the issue cannot be fetched."""
        c = change.Change((change.Commit('chromium', 'git_hash'),))
        differences.return_value = [(None, c)]
        result_values.side_effect = [0], [1.23456]
        commit_as_dict.return_value = {
            'repository': 'chromium',
            'git_hash': 'git_hash',
            'url': 'https://example.com/repository/+/git_hash',
            'author': 'author@chromium.org',
            'subject': 'Subject.',
            'message': 'Subject.\n\nCommit message.',
        }
        self.get_issue.return_value = None
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.assertFalse(self.add_bug_comment.called)

    @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedWithCommitAndDocs(
            self, differences, result_values, commit_as_dict):
        """Benchmark documentation links are included when available."""
        c = change.Change((change.Commit('chromium', 'git_hash'),))
        differences.return_value = [(None, c)]
        result_values.side_effect = [1.23456], [0]
        commit_as_dict.return_value = {
            'repository': 'chromium',
            'git_hash': 'git_hash',
            'url': 'https://example.com/repository/+/git_hash',
            'author': 'author@chromium.org',
            'subject': 'Subject.',
            'message': 'Subject.\n\nCommit message.',
        }
        self.get_issue.return_value = {'status': 'Untriaged'}
        j = job.Job.New(
            (), (), bug_id=123456, comparison_mode='performance',
            tags={'test_path': 'master/bot/benchmark'})
        # Store a documentation-URL diagnostic for the benchmark's test path.
        diag_dict = generic_set.GenericSet(
            [[u'Benchmark doc link', u'http://docs']])
        diag = histogram.SparseDiagnostic(
            data=diag_dict.AsDict(), start_revision=1,
            end_revision=sys.maxsize,
            name=reserved_infos.DOCUMENTATION_URLS.name,
            test=utils.TestKey('master/bot/benchmark'))
        diag.put()
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_COMPLETED_WITH_COMMIT_AND_DOCS,
            status='Assigned',
            owner='author@chromium.org',
            labels=['Pinpoint-Culprit-Found'],
            cc_list=['author@chromium.org'],
            merge_issue=None,
            project='chromium')

    @mock.patch('dashboard.pinpoint.models.change.patch.GerritPatch.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedWithPatch(self, differences, result_values,
                               patch_as_dict):
        """A culprit Gerrit patch assigns the bug to the patch author."""
        commits = (change.Commit('chromium', 'git_hash'),)
        patch = change.GerritPatch('https://codereview.com', 672011,
                                   '2f0d5c7')
        c = change.Change(commits, patch)
        differences.return_value = [(None, c)]
        result_values.side_effect = [40], [20]
        patch_as_dict.return_value = {
            'url': 'https://codereview.com/c/672011/2f0d5c7',
            'author': 'author@chromium.org',
            'subject': 'Subject.',
            'message': 'Subject.\n\nCommit message.',
        }
        self.get_issue.return_value = {'status': 'Untriaged'}
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_COMPLETED_WITH_PATCH,
            status='Assigned',
            owner='author@chromium.org',
            labels=['Pinpoint-Culprit-Found'],
            cc_list=['author@chromium.org'],
            merge_issue=None,
            project='chromium')

    @mock.patch('dashboard.pinpoint.models.change.patch.GerritPatch.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedDoesNotReassign(
            self, differences, result_values, patch_as_dict):
        """An already-Assigned bug keeps its owner and status."""
        commits = (change.Commit('chromium', 'git_hash'),)
        patch = change.GerritPatch('https://codereview.com', 672011,
                                   '2f0d5c7')
        # NOTE(fix): the original built `c` twice on consecutive lines; the
        # redundant duplicate assignment has been removed.
        c = change.Change(commits, patch)
        differences.return_value = [(None, c)]
        result_values.side_effect = [40], [20]
        patch_as_dict.return_value = {
            'url': 'https://codereview.com/c/672011/2f0d5c7',
            'author': 'author@chromium.org',
            'subject': 'Subject.',
            'message': 'Subject.\n\nCommit message.',
        }
        self.get_issue.return_value = {'status': 'Assigned'}
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_COMPLETED_WITH_PATCH,
            owner=None,
            status=None,
            cc_list=['author@chromium.org'],
            labels=['Pinpoint-Culprit-Found'],
            merge_issue=None,
            project='chromium')

    @mock.patch('dashboard.pinpoint.models.change.patch.GerritPatch.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedDoesNotReopen(
            self, differences, result_values, patch_as_dict):
        """A closed (Fixed) bug is not reopened or reassigned."""
        commits = (change.Commit('chromium', 'git_hash'),)
        patch = change.GerritPatch('https://codereview.com', 672011,
                                   '2f0d5c7')
        c = change.Change(commits, patch)
        differences.return_value = [(None, c)]
        result_values.side_effect = [40], [20]
        patch_as_dict.return_value = {
            'url': 'https://codereview.com/c/672011/2f0d5c7',
            'author': 'author@chromium.org',
            'subject': 'Subject.',
            'message': 'Subject.\n\nCommit message.',
        }
        self.get_issue.return_value = {'status': 'Fixed'}
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_COMPLETED_WITH_PATCH,
            owner=None,
            status=None,
            cc_list=['author@chromium.org'],
            labels=['Pinpoint-Culprit-Found'],
            merge_issue=None,
            project='chromium')

    @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedMultipleDifferences(self, differences, result_values,
                                         commit_as_dict):
        """With several culprits, only the top commit's author is CC'd."""
        c0 = change.Change((change.Commit('chromium', 'git_hash_0'),))
        c1 = change.Change((change.Commit('chromium', 'git_hash_1'),))
        c2 = change.Change((change.Commit('chromium', 'git_hash_2'),))
        # NOTE(fix): the original omitted the trailing comma here, so
        # Change() received a bare Commit instead of a 1-tuple of commits,
        # unlike every other Change construction in this file.
        c2_5 = change.Change((change.Commit('chromium', 'git_hash_2_5'),))
        c3 = change.Change((change.Commit('chromium', 'git_hash_3'),))
        change_map = {c0: [50], c1: [0], c2: [40], c2_5: [0], c3: []}
        differences.return_value = [(c0, c1), (c1, c2), (c2_5, c3)]
        result_values.side_effect = lambda c: change_map.get(c, [])
        commit_as_dict.side_effect = (
            {
                'repository': 'chromium',
                'git_hash': 'git_hash_1',
                'url': 'https://example.com/repository/+/git_hash_1',
                'author': 'author1@chromium.org',
                'subject': 'Subject.',
                'message': 'Subject.\n\nCommit message.',
            },
            {
                'repository': 'chromium',
                'git_hash': 'git_hash_2',
                'url': 'https://example.com/repository/+/git_hash_2',
                'author': 'author2@chromium.org',
                'subject': 'Subject.',
                'message': 'Subject.\n\nCommit message.',
            },
            {
                'repository': 'chromium',
                'git_hash': 'git_hash_3',
                'url': 'https://example.com/repository/+/git_hash_3',
                'author': 'author3@chromium.org',
                'subject': 'Subject.',
                'message': 'Subject.\n\nCommit message.',
            },
        )
        self.get_issue.return_value = {'status': 'Untriaged'}
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        # We now only CC folks from the top commit.
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_COMPLETED_THREE_DIFFERENCES,
            status='Assigned',
            owner='author1@chromium.org',
            cc_list=['author1@chromium.org'],
            labels=['Pinpoint-Multiple-Culprits'],
            merge_issue=None,
            project='chromium')

    @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedMultipleDifferences_BlameAbsoluteLargest(
            self, differences, result_values, commit_as_dict):
        """The culprit with the largest absolute delta gets the bug."""
        c1 = change.Change((change.Commit('chromium', 'git_hash_1'),))
        c2 = change.Change((change.Commit('chromium', 'git_hash_2'),))
        c3 = change.Change((change.Commit('chromium', 'git_hash_3'),))
        change_map = {c1: [10], c2: [0], c3: [-100]}
        differences.return_value = [(None, c1), (c1, c2), (c2, c3)]
        result_values.side_effect = lambda c: change_map.get(c, [])
        commit_as_dict.side_effect = (
            {
                'repository': 'chromium',
                'git_hash': 'git_hash_1',
                'url': 'https://example.com/repository/+/git_hash_1',
                'author': 'author1@chromium.org',
                'subject': 'Subject.',
                'message': 'Subject.\n\nCommit message.',
            },
            {
                'repository': 'chromium',
                'git_hash': 'git_hash_2',
                'url': 'https://example.com/repository/+/git_hash_2',
                'author': 'author2@chromium.org',
                'subject': 'Subject.',
                'message': 'Subject.\n\nCommit message.',
            },
            {
                'repository': 'chromium',
                'git_hash': 'git_hash_3',
                'url': 'https://example.com/repository/+/git_hash_3',
                'author': 'author3@chromium.org',
                'subject': 'Subject.',
                'message': 'Subject.\n\nCommit message.',
            },
        )
        self.get_issue.return_value = {'status': 'Untriaged'}
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        # We now only CC folks from the top commit.
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_COMPLETED_THREE_DIFFERENCES_ABSOLUTE,
            status='Assigned',
            owner='author3@chromium.org',
            cc_list=['author3@chromium.org'],
            labels=['Pinpoint-Multiple-Culprits'],
            merge_issue=None,
            project='chromium')

    @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedMultipleDifferences_TenCulpritsCcTopTwo(
            self, differences, result_values, commit_as_dict):
        self.Parameterized_TestCompletedMultipleDifferences(
            10, 2, differences, result_values, commit_as_dict)

    @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedMultipleDifferences_HundredCulpritsCcTopThree(
            self, differences, result_values, commit_as_dict):
        self.Parameterized_TestCompletedMultipleDifferences(
            100, 3, differences, result_values, commit_as_dict)

    def Parameterized_TestCompletedMultipleDifferences(
            self, number_culprits, expected_num_ccs, differences,
            result_values, commit_as_dict):
        """Shared body: N culprits, expect the top-M authors to be CC'd."""
        changes = [
            change.Change((change.Commit('chromium', 'git_hash_%d' % (i,)),))
            for i in range(1, number_culprits + 1)]
        # Return [(None,c1), (c1,c2), (c2,c3), ...]
        # NOTE(fix): materialize the pairs with list(); a bare zip() is a
        # single-pass iterator in Python 3 and would be empty if the code
        # under test iterated the mock's return value more than once.
        differences.return_value = list(zip([None] + changes, changes))

        # Ensure culprits are ordered by deriving change results values from
        # commit names. E.g.:
        # Change(git_hash_1) -> result_value=[1],
        # Change(git_hash_2) -> result_value=[4],
        # etc.
        def ResultValuesFromFakeGitHash(change_obj):
            if change_obj is None:
                return [0]
            v = int(change_obj.commits[0].git_hash[len('git_hash_'):])
            return [v * v]  # Square the value to ensure increasing deltas.

        result_values.side_effect = ResultValuesFromFakeGitHash
        commit_as_dict.side_effect = [
            {
                'repository': 'chromium',
                'git_hash': 'git_hash_%d' % (i,),
                'url': 'https://example.com/repository/+/git_hash_%d' % (i,),
                'author': 'author%d@chromium.org' % (i,),
                'subject': 'Subject.',
                'message': 'Subject.\n\nCommit message.',
            }
            for i in range(1, number_culprits + 1)]
        self.get_issue.return_value = {'status': 'Untriaged'}
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        expected_ccs = [
            'author%d@chromium.org' % (i,)
            for i in range(number_culprits,
                           number_culprits - expected_num_ccs, -1)
        ]
        # We only CC folks from the top commits.
        self.add_bug_comment.assert_called_once_with(
            123456,
            mock.ANY,
            status='Assigned',
            owner=expected_ccs[0],
            cc_list=sorted(expected_ccs),
            labels=['Pinpoint-Multiple-Culprits'],
            merge_issue=None,
            project='chromium')

    @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedMultipleDifferences_NoDeltas(self, differences,
                                                  result_values,
                                                  commit_as_dict):
        """Regression test for http://crbug.com/1078680.

        Picks people to notify even when none of the differences have deltas
        (they are all transitions to/from "No values").
        """
        # Two differences, neither has deltas (50 -> No Values,
        # No Values -> 50).
        c0 = change.Change((change.Commit('chromium', 'git_hash_0'),))
        c1 = change.Change((change.Commit('chromium', 'git_hash_1'),))
        c2 = change.Change((change.Commit('chromium', 'git_hash_2'),))
        change_map = {c0: [50], c1: [], c2: [50]}
        differences.return_value = [(c0, c1), (c1, c2)]
        result_values.side_effect = lambda c: change_map.get(c, [])
        commit_as_dict.side_effect = (
            {
                'repository': 'chromium',
                'git_hash': 'git_hash_1',
                'url': 'https://example.com/repository/+/git_hash_1',
                'author': 'author1@chromium.org',
                'subject': 'Subject.',
                'message': 'Subject.\n\nCommit message.',
            },
            {
                'repository': 'chromium',
                'git_hash': 'git_hash_2',
                'url': 'https://example.com/repository/+/git_hash_2',
                'author': 'author2@chromium.org',
                'subject': 'Subject.',
                'message': 'Subject.\n\nCommit message.',
            },
        )
        self.get_issue.return_value = {'status': 'Untriaged'}
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        # Notifies the owner of the first change in the list of differences,
        # seeing as they are all equally small.
        self.add_bug_comment.assert_called_once_with(
            123456,
            mock.ANY,
            status='Assigned',
            owner='author1@chromium.org',
            cc_list=['author1@chromium.org'],
            labels=['Pinpoint-Multiple-Culprits'],
            merge_issue=None,
            project='chromium')

    @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedWithAutoroll(
            self, differences, result_values, commit_as_dict):
        """Autoroll culprits are assigned to the TBR'd sheriff instead."""
        c = change.Change((change.Commit('chromium', 'git_hash'),))
        differences.return_value = [(None, c)]
        result_values.side_effect = [20], [30]
        commit_as_dict.return_value = {
            'repository': 'chromium',
            'git_hash': 'git_hash',
            'url': 'https://example.com/repository/+/git_hash',
            'author': 'chromium-autoroll@skia-public.iam.gserviceaccount.com',
            'subject': 'Subject.',
            'message': 'Subject.\n\nCommit message.\n\nTBR=sheriff@bar.com',
        }
        self.get_issue.return_value = {'status': 'Untriaged'}
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.put()
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_COMPLETED_WITH_AUTOROLL_COMMIT,
            status='Assigned',
            owner='sheriff@bar.com',
            cc_list=['chromium-autoroll@skia-public.iam.gserviceaccount.com'],
            labels=['Pinpoint-Culprit-Found'],
            merge_issue=None,
            project='chromium')

    @mock.patch('dashboard.pinpoint.models.change.commit.Commit.AsDict')
    @mock.patch.object(job.job_state.JobState, 'ResultValues')
    @mock.patch.object(job.job_state.JobState, 'Differences')
    def testCompletedWithAutorollCulpritButNotMostRecent(
            self, differences, result_values, commit_as_dict):
        """Regression test for http://crbug.com/1076756.

        When an autoroll has the biggest delta, assigns to its sheriff even
        when it is not the latest change.
        """
        c0 = change.Change((change.Commit('chromium', 'git_hash_0'),))
        c1 = change.Change((change.Commit('chromium', 'git_hash_1'),))
        c2 = change.Change((change.Commit('chromium', 'git_hash_2'),))
        change_map = {c0: [0], c1: [10], c2: [10]}
        differences.return_value = [(c0, c1), (c1, c2)]
        result_values.side_effect = lambda c: change_map.get(c, [])
        commit_as_dict.side_effect = (
            {
                'repository': 'chromium',
                'git_hash': 'git_hash_1',
                'url': 'https://example.com/repository/+/git_hash_1',
                'author':
                    'chromium-autoroll@skia-public.iam.gserviceaccount.com',
                'subject': 'Subject.',
                'message':
                    'Subject.\n\nCommit message.\n\nTBR=sheriff@bar.com',
            },
            {
                'repository': 'chromium',
                'git_hash': 'git_hash_2',
                'url': 'https://example.com/repository/+/git_hash_2',
                'author': 'author2@chromium.org',
                'subject': 'Subject.',
                'message': 'Subject.\n\nCommit message.',
            },
        )
        self.get_issue.return_value = {'status': 'Untriaged'}
        j = job.Job.New((), (), bug_id=123456, comparison_mode='performance')
        j.put()
        j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertFalse(j.failed)
        self.add_bug_comment.assert_called_once_with(
            mock.ANY,
            mock.ANY,
            status='Assigned',
            owner='sheriff@bar.com',
            cc_list=['chromium-autoroll@skia-public.iam.gserviceaccount.com'],
            labels=mock.ANY,
            merge_issue=None,
            project='chromium')

    @mock.patch.object(
        job.job_state.JobState, 'ScheduleWork',
        mock.MagicMock(side_effect=AssertionError('Error string')))
    def testFailed(self):
        """A job whose work raises posts a failure comment with e-mail."""
        j = job.Job.New((), (), bug_id=123456)
        with self.assertRaises(AssertionError):
            j.Run()
        self.ExecuteDeferredTasks('default')
        self.assertTrue(j.failed)
        self.add_bug_comment.assert_called_once_with(
            123456,
            _COMMENT_FAILED,
            send_email=True,
            labels=['Pinpoint-Job-Failed'],
            project='chromium')

    @mock.patch.object(
        job.job_state.JobState, 'ScheduleWork',
        mock.MagicMock(side_effect=AssertionError('Error string')))
    def testFailed_ExceptionDetailsFieldAdded(self):
        """Entities written before exception_details existed get it back."""
        j = job.Job.New((), (), bug_id=123456)
        with self.assertRaises(AssertionError):
            j.Run()
        j.exception = j.exception_details['traceback']
        # Temporarily remove the model property to simulate an entity that
        # was persisted before the exception_details field was introduced.
        exception_details = job.Job.exception_details
        delattr(job.Job, 'exception_details')
        j.put()
        self.assertTrue(j.failed)
        self.assertFalse(hasattr(j, 'exception_details'))
        job.Job.exception_details = exception_details
        j = j.key.get(use_cache=False)
        self.assertTrue(j.failed)
        self.assertTrue(hasattr(j, 'exception_details'))
        self.assertEqual(j.exception, j.exception_details['traceback'])
        self.assertTrue(
            j.exception_details['message'] in j.exception.splitlines()[-1])

    @mock.patch('dashboard.services.gerrit_service.PostChangeComment')
    def testCompletedUpdatesGerrit(self, post_change_comment):
        """Jobs tied to a Gerrit change comment on the code review."""
        j = job.Job.New(
            (), (), gerrit_server='https://review.com',
            gerrit_change_id='123456')
        j.Run()
        self.ExecuteDeferredTasks('default')
        post_change_comment.assert_called_once_with(
            'https://review.com', '123456', _COMMENT_CODE_REVIEW)
| 37.73257
| 81
| 0.663854
| 4,363
| 36,261
| 5.3438
| 0.090763
| 0.027021
| 0.026378
| 0.028565
| 0.815569
| 0.788076
| 0.764486
| 0.741583
| 0.737379
| 0.729487
| 0
| 0.02792
| 0.190039
| 36,261
| 960
| 82
| 37.771875
| 0.765918
| 0.028515
| 0
| 0.706522
| 0
| 0
| 0.236305
| 0.054422
| 0
| 0
| 0
| 0
| 0.086957
| 1
| 0.04212
| false
| 0
| 0.02038
| 0
| 0.072011
| 0.001359
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
82ef9f85e58efaa2f5f3121aa39b4e3489184ef4
| 3,383
|
py
|
Python
|
python/Day9.py
|
Simik31/AOC-2021
|
fc0459cf4f3af8439657969b4e957a35e5d56484
|
[
"WTFPL"
] | null | null | null |
python/Day9.py
|
Simik31/AOC-2021
|
fc0459cf4f3af8439657969b4e957a35e5d56484
|
[
"WTFPL"
] | null | null | null |
python/Day9.py
|
Simik31/AOC-2021
|
fc0459cf4f3af8439657969b4e957a35e5d56484
|
[
"WTFPL"
] | null | null | null |
def part_1() -> None:
    """Solve AoC 2021 day 9 part 1.

    Reads the height map from ../data/day9.txt and prints the sum of the
    risk levels (height + 1) of every low point — a cell strictly lower
    than all of its orthogonal neighbours.
    """
    grid: list[list[int]] = []
    with open("../data/day9.txt") as source:
        for line in source.readlines():
            if (line := line.strip()) != "":
                grid.append([int(digit) for digit in line])

    total: int = 0
    for r, row in enumerate(grid):
        for c, height in enumerate(row):
            # Collect the heights of the up/down/left/right neighbours that
            # actually exist, then test strict dominance over all of them.
            neighbours: list[int] = []
            if r >= 1:
                neighbours.append(grid[r - 1][c])
            if r + 1 < len(grid):
                neighbours.append(grid[r + 1][c])
            if c >= 1:
                neighbours.append(row[c - 1])
            if c + 1 < len(row):
                neighbours.append(row[c + 1])
            if all(height < other for other in neighbours):
                total += height + 1

    print("Day: 9 | Part: 1 | Result:", total)
def backtrack_basin(
    height_map: list[list[int]],
    r: int,
    c: int,
    counter: int,
    visited: list[tuple[int, int]],
) -> int:
    """Recursively flood-fill the basin containing cell (r, c).

    Counts every reachable cell whose height is not 9; height-9 cells act
    as basin walls. Each inspected coordinate is recorded in ``visited``,
    which is shared (mutated in place) across the whole recursion.
    Returns the running cell count accumulated in ``counter``.
    """
    if (r, c) in visited:
        return counter
    visited.append((r, c))
    if height_map[r][c] == 9:
        return counter
    counter += 1
    # Recurse into the four orthogonal neighbours: up, down, left, right.
    for dr, dc in ((-1, 0), (1, 0), (0, -1), (0, 1)):
        nr, nc = r + dr, c + dc
        if 0 <= nr < len(height_map) and 0 <= nc < len(height_map[r]):
            counter = backtrack_basin(height_map, nr, nc, counter, visited)
    return counter
def part_2() -> None:
    """Solve AoC 2021 day 9 part 2.

    Reads the height map from ../data/day9.txt, flood-fills the basin
    around every low point, and prints the product of the three largest
    basin sizes.
    """
    grid: list[list[int]] = []
    with open("../data/day9.txt") as source:
        for line in source.readlines():
            if (line := line.strip()) != "":
                grid.append([int(digit) for digit in line])

    basin_sizes: list[int] = []
    for r, row in enumerate(grid):
        for c, height in enumerate(row):
            # A low point is strictly lower than every existing neighbour.
            neighbours: list[int] = []
            if r >= 1:
                neighbours.append(grid[r - 1][c])
            if r + 1 < len(grid):
                neighbours.append(grid[r + 1][c])
            if c >= 1:
                neighbours.append(row[c - 1])
            if c + 1 < len(row):
                neighbours.append(row[c + 1])
            if all(height < other for other in neighbours):
                basin_sizes.append(backtrack_basin(grid, r, c, 0, []))

    basin_sizes.sort()
    result: int = basin_sizes[-1] * basin_sizes[-2] * basin_sizes[-3]
    print("Day: 9 | Part: 2 | Result:", result)
# Run both puzzle parts when executed as a script.
if __name__ == "__main__":
    part_1()
    part_2()
| 30.754545
| 73
| 0.514632
| 473
| 3,383
| 3.528541
| 0.105708
| 0.237268
| 0.179748
| 0.112043
| 0.809467
| 0.789694
| 0.766327
| 0.754943
| 0.754943
| 0.754943
| 0
| 0.019859
| 0.33018
| 3,383
| 109
| 74
| 31.036697
| 0.716681
| 0
| 0
| 0.616279
| 0
| 0
| 0.027786
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034884
| false
| 0
| 0
| 0
| 0.069767
| 0.023256
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d24a905f05ef285a253e0a2ff4cfcb6ce7518ebc
| 149
|
py
|
Python
|
second/core/__init__.py
|
ldtho/ConeDetectionPointpillars
|
feb730f69c2aed56160e90bbddfbce4713c70baf
|
[
"MIT"
] | null | null | null |
second/core/__init__.py
|
ldtho/ConeDetectionPointpillars
|
feb730f69c2aed56160e90bbddfbce4713c70baf
|
[
"MIT"
] | null | null | null |
second/core/__init__.py
|
ldtho/ConeDetectionPointpillars
|
feb730f69c2aed56160e90bbddfbce4713c70baf
|
[
"MIT"
] | null | null | null |
# from . import box_np_ops, box_tf_ops, geometry, preprocess, non_max_suppression
from . import box_np_ops, geometry, preprocess, non_max_suppression
| 74.5
| 81
| 0.832215
| 23
| 149
| 4.956522
| 0.478261
| 0.175439
| 0.22807
| 0.263158
| 0.95614
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100671
| 149
| 2
| 82
| 74.5
| 0.850746
| 0.530201
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d26966bc6e89d6cd4c312930c30252a92e0c65bd
| 392
|
py
|
Python
|
test.py
|
Nirav-Madhani/Drive-URL-Decoder
|
84536e46c2bce7a60a720e01aa7b1107b55463b5
|
[
"MIT"
] | null | null | null |
test.py
|
Nirav-Madhani/Drive-URL-Decoder
|
84536e46c2bce7a60a720e01aa7b1107b55463b5
|
[
"MIT"
] | null | null | null |
test.py
|
Nirav-Madhani/Drive-URL-Decoder
|
84536e46c2bce7a60a720e01aa7b1107b55463b5
|
[
"MIT"
] | null | null | null |
'''
If module not found error occurs.
import os, sys
sys.path.insert(0, os.path.abspath("."))
sys.path.insert(0, os.path.abspath(".."))
'''
# Smoke test for the Drive URL decoder: prints the direct link for a shared
# Google Drive file, first with the default type and then as 'pdf'.
from main import getLink
print(getLink('https://drive.google.com/file/d/0B9cVpIKZxC6fc3RhcnRlcl9maWxlX2Rhc2hlclYw/view?usp=sharing'))
print(getLink('https://drive.google.com/file/d/0B9cVpIKZxC6fc3RhcnRlcl9maWxlX2Rhc2hlclYw/view?usp=sharing','pdf'))
| 30.153846
| 114
| 0.760204
| 52
| 392
| 5.730769
| 0.538462
| 0.04698
| 0.087248
| 0.09396
| 0.791946
| 0.791946
| 0.791946
| 0.610738
| 0.610738
| 0.610738
| 0
| 0.043478
| 0.061224
| 392
| 12
| 115
| 32.666667
| 0.766304
| 0.334184
| 0
| 0
| 0
| 0
| 0.72619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
967db40c4bd8b4c86c6ef835c6314a602707c0dc
| 4,181
|
py
|
Python
|
tests/eureka_client_replication_test.py
|
estuaryoss/netflixoss-eureka
|
d43626c8de38a7c1a1a2a3c97fc848dd9c60d0e0
|
[
"MIT"
] | null | null | null |
tests/eureka_client_replication_test.py
|
estuaryoss/netflixoss-eureka
|
d43626c8de38a7c1a1a2a3c97fc848dd9c60d0e0
|
[
"MIT"
] | null | null | null |
tests/eureka_client_replication_test.py
|
estuaryoss/netflixoss-eureka
|
d43626c8de38a7c1a1a2a3c97fc848dd9c60d0e0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import time
import unittest
from urllib.error import URLError
from tests.client.eureka_client import EurekaClient
from tests.utils.docker_utils import DockerUtils
class FlaskServerTestCase(unittest.TestCase):
    """Integration tests for Eureka registry replication.

    Each test brings docker-compose services up/down and then queries both
    Eureka replicas' REST endpoints to verify that registrations replicate,
    including across restarts of either replica.
    """

    file = "docker-compose.yml"
    # file = "../docker-compose.yml"  # toggle when running from tests/
    time_to_wait_until_compose_up = 60
    time_to_wait_until_discovery_up = 10

    # Base URLs of the two Eureka replicas exposed by docker-compose.
    EUREKA1_URL = "http://localhost:8080/eureka/v2"
    EUREKA2_URL = "http://localhost:8081/eureka/v2"

    def setUp(self):
        # Start every test from a clean slate: tear down leftover containers.
        DockerUtils.down(file=self.file)

    def _get_apps(self, url):
        """Return the list of apps registered at the given Eureka base URL."""
        return EurekaClient(url).get_apps()

    def _assert_unreachable(self, url):
        """Assert that the Eureka instance at `url` is down.

        Fix: the original wrapped the call in try/except and only checked
        the exception type when one was raised — if the call unexpectedly
        succeeded, the test silently passed. assertRaises makes an
        unexpected success a test failure.
        """
        with self.assertRaises(URLError):
            EurekaClient(url).get_apps()

    def _start_cluster(self, *eureka_services):
        """Bring up the given eureka services, wait, then start discovery."""
        for service in eureka_services:
            DockerUtils.up_service(self.file, service)
        time.sleep(self.time_to_wait_until_compose_up)
        DockerUtils.up_service(self.file, "estuary-discovery")
        time.sleep(self.time_to_wait_until_discovery_up)

    def test_eureka_replication_both_up(self):
        """Both replicas up: the same single app is visible on each."""
        self._start_cluster("eureka-server1", "eureka-server2")
        apps_list1 = self._get_apps(self.EUREKA1_URL)
        apps_list2 = self._get_apps(self.EUREKA2_URL)
        self.assertEqual(len(apps_list1), 1)
        self.assertEqual(apps_list1, apps_list2)

    def test_eureka_replication_second_recovers(self):
        """Replica 2 goes down and comes back; registries re-converge."""
        self._start_cluster("eureka-server1", "eureka-server2")
        DockerUtils.stop_service(self.file, "eureka-server2")
        self.assertEqual(len(self._get_apps(self.EUREKA1_URL)), 1)
        self._assert_unreachable(self.EUREKA2_URL)
        DockerUtils.start_service(self.file, "eureka-server2")
        time.sleep(self.time_to_wait_until_compose_up)
        apps_list1 = self._get_apps(self.EUREKA1_URL)
        apps_list2 = self._get_apps(self.EUREKA2_URL)
        self.assertEqual(len(apps_list1), 1)
        self.assertEqual(apps_list1, apps_list2)

    def test_eureka_replication_first_recovers(self):
        """Replica 1 goes down and comes back; registries re-converge."""
        self._start_cluster("eureka-server1", "eureka-server2")
        DockerUtils.stop_service(self.file, "eureka-server1")
        self.assertEqual(len(self._get_apps(self.EUREKA2_URL)), 1)
        self._assert_unreachable(self.EUREKA1_URL)
        DockerUtils.start_service(self.file, "eureka-server1")
        time.sleep(self.time_to_wait_until_compose_up)
        apps_list1 = self._get_apps(self.EUREKA1_URL)
        apps_list2 = self._get_apps(self.EUREKA2_URL)
        self.assertEqual(len(apps_list2), 1)
        self.assertEqual(apps_list1, apps_list2)

    def test_eureka_replication_just_one_up_and_recovers(self):
        """A single replica restarts and still serves its registry."""
        self._start_cluster("eureka-server1")
        self.assertEqual(len(self._get_apps(self.EUREKA1_URL)), 1)
        self._assert_unreachable(self.EUREKA2_URL)
        DockerUtils.stop_service(self.file, "eureka-server1")
        time.sleep(self.time_to_wait_until_compose_up)
        DockerUtils.up_service(self.file, "eureka-server1")
        time.sleep(self.time_to_wait_until_compose_up)
        self.assertEqual(len(self._get_apps(self.EUREKA1_URL)), 1)
# Allow running this test module directly (outside a pytest/unittest runner).
if __name__ == '__main__':
    unittest.main()
| 45.445652
| 79
| 0.705094
| 535
| 4,181
| 5.226168
| 0.134579
| 0.051502
| 0.091202
| 0.075107
| 0.873391
| 0.869814
| 0.851931
| 0.847997
| 0.837625
| 0.833333
| 0
| 0.032839
| 0.176991
| 4,181
| 91
| 80
| 45.945055
| 0.779715
| 0.012437
| 0
| 0.730769
| 0
| 0
| 0.164526
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.064103
| false
| 0
| 0.064103
| 0
| 0.179487
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
968ef1e7a801bf6e868bc6935a085bbbd7b10c11
| 240
|
py
|
Python
|
src/powergslb/system/__init__.py
|
fyanuar/powergslb
|
7dae75fd89017ef77385d15fde8c931b8436db92
|
[
"MIT"
] | 81
|
2016-01-21T12:02:39.000Z
|
2022-02-11T13:07:07.000Z
|
src/powergslb/system/__init__.py
|
fyanuar/powergslb
|
7dae75fd89017ef77385d15fde8c931b8436db92
|
[
"MIT"
] | 24
|
2017-03-05T00:12:49.000Z
|
2022-01-18T12:34:32.000Z
|
src/powergslb/system/__init__.py
|
fyanuar/powergslb
|
7dae75fd89017ef77385d15fde8c931b8436db92
|
[
"MIT"
] | 28
|
2017-04-28T17:16:29.000Z
|
2022-02-16T04:49:23.000Z
|
# Re-export the public surface of powergslb.system: configuration helpers,
# the system service entry point, and the thread base class.
from powergslb.system.config import get_config, parse_config
from powergslb.system.service import SystemService
from powergslb.system.thread import AbstractThread
__all__ = ['get_config', 'parse_config', 'SystemService', 'AbstractThread']
| 40
| 75
| 0.829167
| 28
| 240
| 6.821429
| 0.428571
| 0.204188
| 0.298429
| 0.209424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 240
| 5
| 76
| 48
| 0.868182
| 0
| 0
| 0
| 0
| 0
| 0.204167
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9699437382cf7a4b89d77455bcd64a9bce94c98d
| 5,971
|
py
|
Python
|
QDgym_extended/QDgym_envs.py
|
adaptive-intelligent-robotics/QDgym_extended
|
99375c07b1bb328fd451a80629863c05813b6eeb
|
[
"MIT"
] | 10
|
2021-03-01T17:36:25.000Z
|
2022-02-21T03:21:55.000Z
|
QDgym_extended/QDgym_envs.py
|
adaptive-intelligent-robotics/QDgym_extended
|
99375c07b1bb328fd451a80629863c05813b6eeb
|
[
"MIT"
] | null | null | null |
QDgym_extended/QDgym_envs.py
|
adaptive-intelligent-robotics/QDgym_extended
|
99375c07b1bb328fd451a80629863c05813b6eeb
|
[
"MIT"
] | 3
|
2021-05-19T10:00:17.000Z
|
2022-02-14T08:02:27.000Z
|
import gym
# Level 40 (ERROR on the stdlib logging scale) — silences gym's import-time
# warning chatter.
gym.logger.set_level(40)
import numpy as np
from pybullet_envs.gym_locomotion_envs import AntBulletEnv, HalfCheetahBulletEnv, Walker2DBulletEnv, HumanoidBulletEnv, HopperBulletEnv
from pybullet_envs.robot_bases import MJCFBasedRobot
class QDAntBulletEnv(AntBulletEnv):
    """AntBulletEnv extended with a Quality-Diversity behavioural descriptor.

    The descriptor is the fraction of elapsed episode steps during which each
    of the four feet was in contact with the ground.
    """

    def __init__(self, render=False):
        super().__init__(render=render)
        self.T = 0                   # steps taken in the current episode
        self.tot_reward = 0.0        # cumulative reward in the current episode
        self.desc = np.zeros(4)      # normalised descriptor (contact fractions)
        self.desc_acc = np.zeros(4)  # raw per-foot contact-step counts
        # Typo fixes vs the original message: "descriptor"/"dimensional".
        print(f"The behavioural descriptor is {len(self.desc)}-dimensional",
              f"and defined as proportion of feet contact time with the ground in the order {self.robot.foot_list}")

    def reset(self):
        """Reset the simulation and all QD bookkeeping; return the first observation."""
        r = super().reset()
        self.T = 0
        self.tot_reward = 0.0
        self.desc = np.zeros(4)
        self.desc_acc = np.zeros(4)
        return r

    def step(self, a):
        """Advance one step, updating the descriptor and exposing it via info["bc"]."""
        state, reward, done, info = super().step(a)
        self.desc_acc += self.robot.feet_contact
        self.tot_reward += reward
        self.T += 1
        # The base class stores its aliveness bonus in ``_alive``; direct
        # attribute access replaces the original ``self.__dict__`` lookup.
        self.alive = self._alive >= 0.0
        self.desc = self.desc_acc / self.T
        info["bc"] = self.desc
        info["x_pos"] = None
        return state, reward, done, info
class QDHalfCheetahBulletEnv(HalfCheetahBulletEnv):
    """HalfCheetahBulletEnv extended with a Quality-Diversity descriptor.

    The descriptor is 2-dimensional: the contact-time fraction of the two
    feet at indices 0 and 3 of the robot's foot list.
    """

    def __init__(self, render=False):
        super().__init__(render=render)
        self.T = 0                   # steps taken in the current episode
        self.tot_reward = 0.0        # cumulative reward in the current episode
        self.desc = np.zeros(2)      # normalised descriptor (contact fractions)
        self.desc_acc = np.zeros(2)  # raw contact-step counts for feet 0 and 3
        # Typo fixes vs the original message: "descriptor"/"dimensional".
        print(f"The behavioural descriptor is {len(self.desc)}-dimensional",
              f"and defined as proportion of feet contact time with the ground in the order {[self.robot.foot_list[0], self.robot.foot_list[3]]}")

    def reset(self):
        """Reset the simulation and all QD bookkeeping; return the first observation."""
        r = super().reset()
        self.T = 0
        self.tot_reward = 0.0
        self.desc = np.zeros(2)
        self.desc_acc = np.zeros(2)
        return r

    def step(self, a):
        """Advance one step, updating the descriptor and exposing it via info["bc"]."""
        state, reward, done, info = super().step(a)
        # Only feet 0 and 3 contribute to the descriptor.
        self.desc_acc[0] += self.robot.feet_contact[0]
        self.desc_acc[1] += self.robot.feet_contact[3]
        self.tot_reward += reward
        self.T += 1
        # The base class stores its aliveness bonus in ``_alive``.
        self.alive = self._alive >= 0.0
        self.desc = self.desc_acc / self.T
        info["bc"] = self.desc
        info["x_pos"] = None
        return state, reward, done, info
class QDWalker2DBulletEnv(Walker2DBulletEnv):
    """Walker2DBulletEnv extended with a Quality-Diversity descriptor.

    The descriptor is the fraction of elapsed episode steps during which each
    of the two feet was in contact with the ground.
    """

    def __init__(self, render=False):
        super().__init__(render=render)
        self.T = 0                   # steps taken in the current episode
        self.tot_reward = 0.0        # cumulative reward in the current episode
        self.desc = np.zeros(2)      # normalised descriptor (contact fractions)
        self.desc_acc = np.zeros(2)  # raw per-foot contact-step counts
        # Typo fixes vs the original message: "descriptor"/"dimensional".
        print(f"The behavioural descriptor is {len(self.desc)}-dimensional",
              f"and defined as proportion of feet contact time with the ground in the order {self.robot.foot_list}")

    def reset(self):
        """Reset the simulation and all QD bookkeeping; return the first observation."""
        r = super().reset()
        self.T = 0
        self.tot_reward = 0.0
        self.desc = np.zeros(2)
        self.desc_acc = np.zeros(2)
        return r

    def step(self, a):
        """Advance one step, updating the descriptor and exposing it via info["bc"]."""
        state, reward, done, info = super().step(a)
        self.desc_acc += self.robot.feet_contact
        self.tot_reward += reward
        self.T += 1
        # The base class stores its aliveness bonus in ``_alive``.
        self.alive = self._alive >= 0.0
        self.desc = self.desc_acc / self.T
        info["bc"] = self.desc
        info["x_pos"] = None
        return state, reward, done, info
class QDHumanoidBulletEnv(HumanoidBulletEnv):
    """HumanoidBulletEnv extended with a Quality-Diversity descriptor.

    The descriptor is the fraction of elapsed episode steps during which each
    of the two feet was in contact with the ground.
    """

    def __init__(self, render=False):
        super().__init__(render=render)
        self.T = 0                   # steps taken in the current episode
        self.tot_reward = 0.0        # cumulative reward in the current episode
        self.desc = np.zeros(2)      # normalised descriptor (contact fractions)
        self.desc_acc = np.zeros(2)  # raw per-foot contact-step counts
        # Typo fixes vs the original message: "descriptor"/"dimensional".
        print(f"The behavioural descriptor is {len(self.desc)}-dimensional",
              f"and defined as proportion of feet contact time with the ground in the order {self.robot.foot_list}")

    def reset(self):
        """Reset the simulation and all QD bookkeeping; return the first observation."""
        r = super().reset()
        self.T = 0
        self.tot_reward = 0.0
        self.desc = np.zeros(2)
        self.desc_acc = np.zeros(2)
        return r

    def step(self, a):
        """Advance one step, updating the descriptor and exposing it via info["bc"]."""
        state, reward, done, info = super().step(a)
        self.desc_acc += self.robot.feet_contact
        self.tot_reward += reward
        self.T += 1
        # The base class stores its aliveness bonus in ``_alive``.
        self.alive = self._alive >= 0.0
        self.desc = self.desc_acc / self.T
        info["bc"] = self.desc
        info["x_pos"] = None
        return state, reward, done, info
class QDHopperBulletEnv(HopperBulletEnv):
    """HopperBulletEnv extended with a Quality-Diversity descriptor.

    The descriptor is 1-dimensional: the fraction of elapsed episode steps
    during which the single foot was in contact with the ground.
    """

    def __init__(self, render=False):
        super().__init__(render=render)
        self.T = 0                   # steps taken in the current episode
        self.tot_reward = 0.0        # cumulative reward in the current episode
        self.desc = np.zeros(1)      # normalised descriptor (contact fraction)
        self.desc_acc = np.zeros(1)  # raw contact-step count
        # Typo fixes vs the original message: "descriptor"/"dimensional".
        print(f"The behavioural descriptor is {len(self.desc)}-dimensional",
              f"and defined as proportion of feet contact time with the ground in the order {self.robot.foot_list}")

    def reset(self):
        """Reset the simulation and all QD bookkeeping; return the first observation."""
        r = super().reset()
        self.T = 0
        self.tot_reward = 0.0
        self.desc = np.zeros(1)
        self.desc_acc = np.zeros(1)
        return r

    def step(self, a):
        """Advance one step, updating the descriptor and exposing it via info["bc"]."""
        state, reward, done, info = super().step(a)
        self.desc_acc += self.robot.feet_contact
        self.tot_reward += reward
        self.T += 1
        # The base class stores its aliveness bonus in ``_alive``.
        self.alive = self._alive >= 0.0
        self.desc = self.desc_acc / self.T
        info["bc"] = self.desc
        info["x_pos"] = None
        return state, reward, done, info
if __name__ == "__main__":
env = QDHalfCheetahBulletEnv()
env.reset()
a = env.action_space.sample()
env.step(a)
print(env.alive)
print(env.desc)
| 31.098958
| 147
| 0.589516
| 863
| 5,971
| 3.901506
| 0.10197
| 0.109296
| 0.068607
| 0.05346
| 0.834571
| 0.834571
| 0.834571
| 0.834571
| 0.834571
| 0.834571
| 0
| 0.026964
| 0.279518
| 5,971
| 191
| 148
| 31.26178
| 0.755695
| 0
| 0
| 0.86014
| 0
| 0.034965
| 0.147212
| 0.046558
| 0
| 0
| 0
| 0
| 0
| 1
| 0.104895
| false
| 0
| 0.027972
| 0
| 0.237762
| 0.048951
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
969a2a6fa02ddab7b8ce5647e86388edcd830f47
| 1,481
|
py
|
Python
|
6_schema/project/tests/test_parsers/test_og.py
|
neel-jaris/Python-Web-Scraping-Projects
|
f77426d1351e5b5ca06aacf415388409d89277a9
|
[
"MIT"
] | 15
|
2019-03-30T02:41:55.000Z
|
2022-03-21T05:15:28.000Z
|
6_schema/project/tests/test_parsers/test_og.py
|
neel-jaris/Python-Web-Scraping-Projects
|
f77426d1351e5b5ca06aacf415388409d89277a9
|
[
"MIT"
] | 1
|
2021-12-02T17:32:56.000Z
|
2021-12-02T17:32:56.000Z
|
6_schema/project/tests/test_parsers/test_og.py
|
neel-jaris/Python-Web-Scraping-Projects
|
f77426d1351e5b5ca06aacf415388409d89277a9
|
[
"MIT"
] | 9
|
2019-12-26T19:35:04.000Z
|
2022-01-24T15:45:27.000Z
|
from schema.parsers.opengraph import format_og
def test_format_og():
    """format_og should flatten a parsed Open Graph structure into a plain
    property-name -> content mapping with the "og:" prefix stripped."""
    og_properties = [
        ("og:description",
         "Free Shipping on orders over $35. Buy Dungeons & Dragons Player's Handbook (Dungeons & Dragons Core Rulebooks) at Walmart.com"),
        ("og:image",
         "https://i5.walmartimages.com/asr/ce1033ea-4934-4098-af07-16d0136689fd_1.5cebe0dbf47d95ddc489e506c8cc28f7.jpeg"),
        ("og:url",
         "/ip/Dungeons-Dragons-Player-s-Handbook-Dungeons-Dragons-Core-Rulebooks-9780786965601/37784457"),
        ("og:title",
         "Dungeons & Dragons Player's Handbook (Dungeons & Dragons Core Rulebooks) - Walmart.com"),
        ("og:site_name", "Walmart.com"),
        ("og:type", "product.item"),
    ]
    data = {
        "namespace": {"og": "http://ogp.me/ns#"},
        "properties": og_properties,
    }
    expected = {
        "title": "Dungeons & Dragons Player's Handbook (Dungeons & Dragons Core Rulebooks) - Walmart.com",
        "description": "Free Shipping on orders over $35. Buy Dungeons & Dragons Player's Handbook (Dungeons & Dragons Core Rulebooks) at Walmart.com",
        "image": "https://i5.walmartimages.com/asr/ce1033ea-4934-4098-af07-16d0136689fd_1.5cebe0dbf47d95ddc489e506c8cc28f7.jpeg",
        "url": "/ip/Dungeons-Dragons-Player-s-Handbook-Dungeons-Dragons-Core-Rulebooks-9780786965601/37784457",
        "site_name": "Walmart.com",
        "type": "product.item",
    }
    assert format_og(data) == expected
| 51.068966
| 151
| 0.638082
| 163
| 1,481
| 5.748466
| 0.361963
| 0.192102
| 0.134472
| 0.140875
| 0.783351
| 0.783351
| 0.783351
| 0.783351
| 0.783351
| 0.783351
| 0
| 0.110631
| 0.218771
| 1,481
| 28
| 152
| 52.892857
| 0.699222
| 0
| 0
| 0
| 0
| 0.230769
| 0.67657
| 0.125591
| 0
| 0
| 0
| 0
| 0.038462
| 1
| 0.038462
| false
| 0
| 0.038462
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
969c61f2dfe90de40b5d080a188f7f9226bd244e
| 30,378
|
py
|
Python
|
utils/bmex_views.py
|
kylegodbey/bmex-web
|
3f8c07696de580dfdeb5613ab36c2c53a22717b0
|
[
"MIT"
] | 1
|
2022-01-26T21:26:39.000Z
|
2022-01-26T21:26:39.000Z
|
utils/bmex_views.py
|
kylegodbey/bmex-web
|
3f8c07696de580dfdeb5613ab36c2c53a22717b0
|
[
"MIT"
] | null | null | null |
utils/bmex_views.py
|
kylegodbey/bmex-web
|
3f8c07696de580dfdeb5613ab36c2c53a22717b0
|
[
"MIT"
] | 1
|
2022-01-27T15:42:20.000Z
|
2022-01-27T15:42:20.000Z
|
from dash import dcc
from dash import html
import dash_bootstrap_components as dbc
import utils.dash_reusable_components as drc
def _mv_number_card(card_id, input_id, label, placeholder):
    """One labelled integer input in [0, 200] (step 1), wrapped in a card."""
    return drc.Card(
        id=card_id,
        children=[
            drc.NamedInput(
                name=label,
                id=input_id,
                type="number",
                min=0,
                max=200,
                step=1,
                placeholder=placeholder,
                style={'width': '100%'},
            ),
        ],
    )


# Quantities selectable on the masses view, as (label, value) pairs.
# NOTE(review): the "Two Proton Shell Gap"/"TwoNSGap" and
# "Two Neutron Shell Gap"/"TwoPSGap" pairs look swapped, but they are
# preserved exactly as in the original — confirm against the callbacks.
_MV_QUANTITIES = [
    ("All", "All"),
    ("Binding Energy", "BE"),
    ("One Neutron Separation Energy", "OneNSE"),
    ("One Proton Separation Energy", "OnePSE"),
    ("Two Neutron Separation Energy", "TwoNSE"),
    ("Two Proton Separation Energy", "TwoPSE"),
    ("Alpha Separation Energy", "AlphaSE"),
    ("Two Proton Shell Gap", "TwoNSGap"),
    ("Two Neutron Shell Gap", "TwoPSGap"),
    ("Double Mass Difference", "DoubleMDiff"),
    ("Neutron 3-Point Odd-Even Binding Energy Difference", "N3PointOED"),
    ("Proton 3-Point Odd-Even Binding Energy Difference", "P3PointOED"),
    ("Single-Neutron Energy Splitting", "SNESplitting"),
    ("Single-Proton Energy Splitting", "SPESplitting"),
    ("Wigner Energy Coefficient", "WignerEC"),
]


def masses_view():
    """Build the nuclear-masses page layout.

    Left column: dropdowns for chain type / quantity / dataset plus six
    integer inputs (Z, N and their ranges).  Right side: the results graph.
    Returns the root ``html.Div`` of the page.
    """
    return html.Div(
        id="body",
        className="container scalable",
        children=[
            html.Div(
                id="app-container",
                children=[
                    html.Div(
                        id="left-column",
                        children=[
                            drc.Card(
                                id="first-card",
                                children=[
                                    drc.NamedDropdown(
                                        name="Compute For",
                                        id="dropdown-iso-chain",
                                        options=[
                                            {"label": "Single Nucleus", "value": "single"},
                                            {"label": "Isotopic Chain", "value": "isotopic"},
                                            {"label": "Isotonic Chain", "value": "isotonic"},
                                        ],
                                        clearable=False,
                                        searchable=False,
                                        value="single",
                                    ),
                                ]
                            ),
                            drc.Card(
                                id="quantity-single",
                                children=[
                                    drc.NamedDropdown(
                                        name="Select Quantity",
                                        id="dropdown-select-quantity",
                                        options=[{"label": lab, "value": val}
                                                 for lab, val in _MV_QUANTITIES],
                                        clearable=False,
                                        searchable=False,
                                        value="All",
                                    ),
                                ]
                            ),
                            drc.Card(
                                id="data-card",
                                children=[
                                    drc.NamedDropdown(
                                        name="Select Dataset",
                                        id="dropdown-select-dataset",
                                        options=[
                                            {"label": "Experiment", "value": "Exp"},
                                            {"label": "SkMs", "value": "SkMs"},
                                        ],
                                        clearable=False,
                                        searchable=False,
                                        value="Exp",
                                    ),
                                ],
                            ),
                            # The six numeric inputs all share one structure.
                            _mv_number_card("protons-card", "protons", "Protons", "Proton #"),
                            _mv_number_card("neutrons-card", "neutrons", "Neutrons", "Neutron #"),
                            _mv_number_card("zmin-card", "zmin", "Minimum Z", "Z Min"),
                            _mv_number_card("zmax-card", "zmax", "Maximum Z", "Z Max"),
                            _mv_number_card("nmin-card", "nmin", "Minimum N", "N Min"),
                            _mv_number_card("nmax-card", "nmax", "Maximum N", "N Max"),
                        ],
                    ),
                    html.Div(
                        id="div-graphs",
                        children=[
                            dcc.Graph(
                                id="graph-sklearn-svm",
                                figure=dict(
                                    layout=dict(
                                        plot_bgcolor="#282b38", paper_bgcolor="#282b38"
                                    )
                                ),
                            ),
                        ],
                    ),
                ],
            )
        ],
    )
def _gpe_number_card(card_id, input_id, label, placeholder):
    """One labelled integer input in [0, 200] (step 1), wrapped in a card."""
    return drc.Card(
        id=card_id,
        children=[
            drc.NamedInput(
                name=label,
                id=input_id,
                type="number",
                min=0,
                max=200,
                step=1,
                placeholder=placeholder,
                style={'width': '100%'},
            ),
        ],
    )


def _gpe_hyper_input(input_id, label, lo, hi, default):
    """A numeric input for one GP hyperparameter (no step grid, prefilled)."""
    return drc.NamedInput(
        name=label,
        id=input_id,
        type="number",
        min=lo,
        max=hi,
        placeholder=label,
        style={'width': '100%'},
        value=default,
    )


def gpe_view():
    """Build the Gaussian-process-emulator page layout.

    Left column: chain/quantity/dataset dropdowns, GP hyperparameters
    (Eta, RhoN, RhoZ) with a Train button, and the six Z/N inputs.
    Right side: the results graph wrapped in a loading spinner.
    Returns the root ``html.Div`` of the page.
    """
    return html.Div(
        id="body",
        className="container scalable",
        children=[
            html.Div(
                id="app-container",
                children=[
                    html.Div(
                        id="left-column",
                        children=[
                            drc.Card(
                                id="first-card",
                                children=[
                                    drc.NamedDropdown(
                                        name="Compute For",
                                        id="dropdown-iso-chain",
                                        options=[
                                            {"label": "Single Nucleus", "value": "single"},
                                            {"label": "Isotopic Chain", "value": "isotopic"},
                                        ],
                                        clearable=False,
                                        searchable=False,
                                        value="single",
                                    ),
                                ]
                            ),
                            drc.Card(
                                id="quantity-single",
                                children=[
                                    drc.NamedDropdown(
                                        name="Select Quantity",
                                        id="dropdown-select-quantity",
                                        # Only S2n is emulated on this view.
                                        options=[
                                            {"label": "Two Neutron Separation Energy", "value": "TwoNSE"},
                                        ],
                                        clearable=False,
                                        searchable=False,
                                        value="TwoNSE",
                                    ),
                                ]
                            ),
                            drc.Card(
                                id="data-card",
                                children=[
                                    drc.NamedDropdown(
                                        name="Select Dataset",
                                        id="dropdown-select-dataset",
                                        options=[
                                            {"label": "FRDM", "value": "FRDM"},
                                        ],
                                        clearable=False,
                                        searchable=False,
                                        value="FRDM",
                                    ),
                                ],
                            ),
                            drc.Card(
                                id="gp-card",
                                children=[
                                    # Bounds preserved exactly: Eta uses float
                                    # bounds, RhoN/RhoZ integer bounds.
                                    _gpe_hyper_input("eta", "Eta", 0.0, 5.0, 0.9),
                                    _gpe_hyper_input("rhon", "RhoN", 0, 5, 1.529),
                                    _gpe_hyper_input("rhoz", "RhoZ", 0, 5, 0.2533),
                                    html.Button('Train!', id='submit-gpe', n_clicks=0,
                                                style={"color": "#e76f51"}),
                                ]
                            ),
                            _gpe_number_card("protons-card", "protons", "Protons", "Proton #"),
                            _gpe_number_card("neutrons-card", "neutrons", "Neutrons", "Neutron #"),
                            _gpe_number_card("zmin-card", "zmin", "Minimum Z", "Z Min"),
                            _gpe_number_card("zmax-card", "zmax", "Maximum Z", "Z Max"),
                            _gpe_number_card("nmin-card", "nmin", "Minimum N", "N Min"),
                            _gpe_number_card("nmax-card", "nmax", "Maximum N", "N Max"),
                        ],
                    ),
                    html.Div(
                        id='div-right',
                        children=dcc.Loading(
                            id="loading-1",
                            children=html.Div(
                                id="div-graphs-loading",
                                style={'width': '100%'},
                                children=[
                                    html.Div(
                                        id="div-graphs-gpe",
                                        children=[
                                            dcc.Graph(
                                                id="graph-sklearn-svm",
                                                figure=dict(
                                                    layout=dict(
                                                        plot_bgcolor="#282b38", paper_bgcolor="#282b38"
                                                    )
                                                ),
                                            ),
                                        ],
                                    ),
                                ],
                            ),
                        ),
                    ),
                ],
            )
        ],
    )
def _pes_number_card(card_id, input_id, label, placeholder, max_value):
    """One labelled integer input in [0, max_value] (step 1), in a card."""
    return drc.Card(
        id=card_id,
        children=[
            drc.NamedInput(
                name=label,
                id=input_id,
                type="number",
                min=0,
                max=max_value,
                step=1,
                placeholder=placeholder,
                style={'width': '100%'},
            ),
        ],
    )


def pesnet_view():
    """Build the potential-energy-surface (PESnet) page layout.

    Left column: chain/quantity/dataset dropdowns plus proton and neutron
    inputs.  Right side: the results graph wrapped in a loading spinner.
    Returns the root ``html.Div`` of the page.
    """
    return html.Div(
        id="body",
        className="container scalable",
        children=[
            html.Div(
                id="app-container",
                children=[
                    html.Div(
                        id="left-column",
                        children=[
                            drc.Card(
                                id="first-card",
                                children=[
                                    drc.NamedDropdown(
                                        name="Compute For",
                                        id="dropdown-iso-chain",
                                        options=[
                                            {"label": "Single Nucleus", "value": "single"},
                                        ],
                                        clearable=False,
                                        searchable=False,
                                        value="single",
                                    ),
                                ]
                            ),
                            drc.Card(
                                id="quantity-single",
                                children=[
                                    drc.NamedDropdown(
                                        name="Select Quantity",
                                        id="dropdown-select-quantity",
                                        # Only the PES is available on this view.
                                        options=[
                                            {"label": "Potential Energy Surface", "value": "PES"},
                                        ],
                                        clearable=False,
                                        searchable=False,
                                        value="PES",
                                    ),
                                ]
                            ),
                            drc.Card(
                                id="data-card",
                                children=[
                                    drc.NamedDropdown(
                                        name="Select Dataset",
                                        id="dropdown-select-dataset",
                                        options=[
                                            {"label": "UNEDF1", "value": "UNEDF1"},
                                        ],
                                        clearable=False,
                                        searchable=False,
                                        value="UNEDF1",
                                    ),
                                ],
                            ),
                            # Neutrons deliberately allow up to 250 (vs 200 for Z).
                            _pes_number_card("protons-card", "protons", "Protons", "Proton #", 200),
                            _pes_number_card("neutrons-card", "neutrons", "Neutrons", "Neutron #", 250),
                        ],
                    ),
                    html.Div(
                        id='div-right',
                        children=dcc.Loading(
                            id="loading-1",
                            children=html.Div(
                                id="div-graphs-loading",
                                style={'width': '100%'},
                                children=[
                                    html.Div(
                                        id="div-graphs-pesnet",
                                        children=[
                                            dcc.Graph(
                                                id="graph-sklearn-svm",
                                                figure=dict(
                                                    layout=dict(
                                                        plot_bgcolor="#282b38", paper_bgcolor="#282b38"
                                                    )
                                                ),
                                            ),
                                        ],
                                    ),
                                ],
                            ),
                        ),
                    ),
                ],
            )
        ],
    )
| 51.401015
| 133
| 0.221608
| 1,323
| 30,378
| 5.07483
| 0.112623
| 0.044236
| 0.032172
| 0.035448
| 0.935061
| 0.925082
| 0.925082
| 0.921656
| 0.921656
| 0.898421
| 0
| 0.024398
| 0.691026
| 30,378
| 590
| 134
| 51.488136
| 0.690923
| 0.110968
| 0
| 0.861723
| 0
| 0
| 0.09634
| 0.005239
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006012
| true
| 0
| 0.008016
| 0.006012
| 0.02004
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
738bc3f3de08716357c1eaee6fac2d3e33b8dc60
| 11,121
|
py
|
Python
|
evaluation.py
|
constantin50/separator
|
e630df1045a455458d143cc070ef05d15af196fc
|
[
"MIT"
] | null | null | null |
evaluation.py
|
constantin50/separator
|
e630df1045a455458d143cc070ef05d15af196fc
|
[
"MIT"
] | null | null | null |
evaluation.py
|
constantin50/separator
|
e630df1045a455458d143cc070ef05d15af196fc
|
[
"MIT"
] | null | null | null |
def evaluate(separatice, test):
    """Evaluate a clause separator against a labelled test set.

    Args:
        separatice: object exposing ``into_clauses(text) -> list[str]``
            (parameter name kept as-is for backward compatibility).
        test: mapping of input sentence -> expected list of clauses.

    Returns:
        float: accuracy in [0.0, 1.0]. An empty test set scores 1.0
        (nothing to get wrong) instead of raising ZeroDivisionError.

    Side effects:
        Prints each misclassified input, then the final accuracy.
    """
    if not test:
        # Guard: the original raised ZeroDivisionError on an empty dict.
        print('accuracy:', 1.0)
        return 1.0
    errors = 0
    # Iterate key/value pairs instead of keys plus a second dict lookup.
    for key, expected in test.items():
        if separatice.into_clauses(key) != expected:
            print(key)  # surface the failing input for manual inspection
            errors += 1
    accuracy = 1 - errors / len(test)
    print('accuracy:', accuracy)
    # Also return the score so callers can use it programmatically;
    # existing callers that ignored the (None) return are unaffected.
    return accuracy
test = {'какие места есть в корпусах, где можно купить еды?' : ['какие места есть в корпусах', 'где можно купить еды?'], 'Сравнивать карателей с пёселями - это оскорбление пёселей.' : ['Сравнивать карателей с пёселями','это оскорбление пёселей.'],
'Люди идут мимо стелы к дворцу, в котором по слухам заседает именинник' : ['Люди идут мимо стелы к дворцу','в котором по слухам заседает именинник'],
'Ненавижу кабачки, их изобрели специально для того, чтобы их сажали, не зная, зачем.' : ['Ненавижу кабачки', 'их изобрели специально для того','их сажали, не зная, зачем.'],
'поставить ограждение без охраны, причем нужных цветов в нужной пропорции' : ['поставить ограждение без охраны', 'причем нужных цветов в нужной пропорции'],
'И ведь есть люди, которые на полном серьёзе думают, что каждому участнику акции платят по 50€.' : ['И ведь есть люди','которые на полном серьёзе думают', 'каждому участнику акции платят по 50€.'],
'В Лиде каждый день собирается, но про неё почему то почти не пишут.' : ['В Лиде каждый день собирается','про неё почему то почти не пишут.'],
'Как же они бояться свои морды засветит,это трындец' : ['Как же они бояться свои морды засветит', 'это трындец'],
'Так вот на что, собственно, говоря идут мои налоги.' : ['Так вот на что, собственно', 'говоря идут мои налоги.'],
'А перед этим надо согласовать слова, которые будете употреблять для согласования.' : ['А перед этим надо согласовать слова', 'которые будете употреблять для согласования.'],
'я испортил книгу библиотеки, что будет' : ['я испортил книгу библиотеки', 'что будет'],
'что если я не сдам книгу в библиотеку вовремя, мне выпишут штраф?' : ['что если я не сдам книгу в библиотеку вовремя', 'мне выпишут штраф?'],
'мой друг не смог сдать учебник вовремя, что с ним будет?' : ['мой друг не смог сдать учебник вовремя', 'что с ним будет?'],
'я сдал все экзамены без троек, могу ли я переветись на бесплатное обучение?' : ['я сдал все экзамены без троек', 'могу ли я переветись на бесплатное обучение?'],
'где брать справку о том, что я действительно студент вуза?' : ['где брать справку о том', 'я действительно студент вуза?'],
'я хочу получить справку, которая подтверждает, что я студент ггф!' : ['я хочу получить справку','которая подтверждает','я студент ггф!'],
'я потерял зачетку перед экзаменом, сколько времени уйдет на ее восстановление' : ['я потерял зачетку перед экзаменом','сколько времени уйдет на ее восстановление'],
'что делать, если мне проставили неправильную оценку в личном кабинете?!' : ['что делать','если мне проставили неправильную оценку в личном кабинете?!'],
'как найти место, где студенческие объединение проводит мероприятие' : ['как найти место', 'где студенческие объединение проводит мероприятие'],
'что делать, если согласно расписанию ты не можешь посещать ни один курс по выбору, но обязан получить зачет хотя бы одному из них' : ['что делать', 'если согласно расписанию ты не можешь посещать ни один курс по выбору', 'обязан получить зачет хотя бы одному из них'],
'я общался бы с ним, если бы он был умнее' : ['я общался бы с ним', 'он был умнее'],
'могу ли я получать социальную стипендию, я сирота' : ['могу ли я получать социальную стипендию', 'я сирота'],
'я стал жертвой кражи, могу ли я получить материальную помощь' : ['я стал жертвой кражи', 'могу ли я получить материальную помощь'],
'как понять по номеру аудитории, где она находится' : ['как понять по номеру аудитории', 'где она находится'],
'есть ли столовая в старом корпусе, если да, то как её найти' : ['есть ли столовая в старом корпусе, если да', 'то как её найти'],
'как мне узнать, где находиться деканат моего факультета' : ['как мне узнать', 'где находиться деканат моего факультета'],
'у нас сократили часы в учебном плане, имеем ли мы право на скидку на обучение' : ['у нас сократили часы в учебном плане', 'имеем ли мы право на скидку на обучение'],
'кто отвечает за то, какие спецкурсы будут в этом году, и может ли студент как-то повлиять' : ['кто отвечает за то', 'какие спецкурсы будут в этом году', 'и может ли студент как-то повлиять'],
'для любого эпсилона большего нуля существует такая дельта больше нуля, что \"хвост\" функции не будет выходить за границы этого эпсилона.' : ['для любого эпсилона большего нуля существует такая дельта больше нуля', '\"хвост\" функции не будет выходить за границы этого эпсилона.'],
'Вот где где, а в академе, по-моему, самые упитанные белки, у них кормушки пустые не видел, белки мимо пробегают' : ['Вот где где', 'в академе, по-моему, самые упитанные белки','у них кормушки пустые не видел','белки мимо пробегают'],
'На фоне требований снизить стоимость обучения и отмены ряда практик, результирующая онлайн-учеба не самая положительная' : ['На фоне требований снизить стоимость обучения и отмены ряда практик','результирующая онлайн-учеба не самая положительная'],
'могу ли я взять деньги у сбербанка 350000 рублей на 20 лет чтобы пенсионый фонд ежемесечно погошал этот кредит в течении этих лет' : ['могу ли я взять деньги у сбербанка 350000 рублей на 20 лет', 'пенсионый фонд ежемесечно погошал этот кредит в течении этих лет'],
'Сегодня вечером к нам приезжает новый студент из Египта, в понедельник ему будет нужна помощь в покупке симкарты, обмене денег и в оформлении документов в университете, кто сможет помочь утром(9-10)?' : ['Сегодня вечером к нам приезжает новый студент из Египта', 'в понедельник ему будет нужна помощь в покупке симкарты, обмене денег и в оформлении документов в университете','кто сможет помочь утром(9-10)?'],
'Не-а, вероятно можно с помощью csv или pandas такое сделать, если не один раз подобным образом вставлять нужно будет' : ['Не-а', 'вероятно', 'можно с помощью csv или pandas такое сделать', 'если не один раз подобным образом вставлять нужно будет'],
'нас там нормам не учат, филологов ещё вроде немного на пунктуацию и орфографию проверяют' : ['нас там нормам не учат', 'филологов ещё вроде немного на пунктуацию и орфографию проверяют'],
'так, значит, итоги, я попробовала fasttext+logreg, получилось неплохо, я 2-3 реплики из каждого класса отделили в тест, классификатор не ошибается.' : ['так, значит, итоги', 'я попробовала fasttext+logreg', 'получилось неплохо','я 2-3 реплики из каждого класса отделили в тест','классификатор не ошибается.'],
'привет, с 2 курса у нас каждый семестр есть курсы на выбор, и плюс есть курсы для всех гуманитариев' : ['привет', 'с 2 курса у нас каждый семестр есть курсы на выбор','и плюс есть курсы для всех гуманитариев'],
'да, верно, но есть проблема в том, что эта штука работает нестабильно, а результаты разные из-за разных моделей.' : ['да', 'верно', 'есть проблема в том', 'эта штука работает нестабильно', 'результаты разные из-за разных моделей.'],
'ладно, вроде туман в голове начинает рассеиваться, надо как-то всё переварить.' : ['ладно', 'вроде туман в голове начинает рассеиваться','надо как-то всё переварить.'],
'Есть 1млн коротких строк англ языка(допустим из библии), юзер начинает вводить строку и надо подобрать наиболее подходящую по смыслу.' : ['Есть 1млн коротких строк англ языка(допустим из библии)','юзер начинает вводить строку','надо подобрать наиболее подходящую по смыслу.'],
'Очень прошу прощения, чуть-чуть не расчитала впемя и не успела код доделать до пар, поздно вечером пришлю.' : ['Очень прошу прощения', 'чуть-чуть не расчитала впемя', 'не успела код доделать до пар', 'поздно вечером пришлю.'],
'Энкодер можешь на гпу запускать, а про approximate nearest neighbor search на гпу я не слышал, но можно попробовать погуглить.' : ['Энкодер можешь на гпу запускать', 'про approximate nearest neighbor search на гпу я не слышал', 'можно попробовать погуглить.'],
'да, окей, сесть прописать в кракострочной перспективе быстрее, чем отлаживать почти \"черный ящик\"' : ['да, окей', 'сесть прописать в кракострочной перспективе быстрее', 'чем отлаживать почти \"черный ящик\"'],
'у меня есть вариант как запускать эту штуку быстрее, ещё не оттестировала, завтра к утру мб будет слегка обновленный код или не будет.' : ['у меня есть вариант как запускать эту штуку быстрее', 'ещё не оттестировала', 'завтра к утру мб будет слегка обновленный код или не будет.'],
'какие мероприятия проходят в НГУ, где можно посмотреть список?' : ['какие мероприятия проходят в НГУ','где можно посмотреть список?'],
'для экспериментов и тренировки это прямо находка, очень много времени экономит' : ['для экспериментов и тренировки это прямо находка', 'очень много времени экономит'],
'Вы можете обратить внимание на пару по выч. методам на 2 потоке без аудитории, она будет проходить дистанционно' : ['Вы можете обратить внимание на пару по выч. методам на 2 потоке без аудитории', 'она будет проходить дистанционно'],
'Сейчас в Минске можно увидеть, как те, кто не поступили задерживают тех, кто поступил.' : ['Сейчас в Минске можно увидеть, как те', 'кто не поступили задерживают тех', 'кто поступил.'],
'так вот код для классификатора плюс файл с датасетом, нужно соблюдать имена столбцов: class - название класса интента, class_id - номер класса, text - сама реплика.' : ['так вот код для классификатора плюс файл с датасетом, нужно соблюдать имена столбцов', 'class - название класса интента', 'class_id - номер класса', 'text - сама реплика.'],
'фу тут тараканы, горячей воды нет, комнаты маленькие' : ['фу тут тараканы', 'горячей воды нет', 'комнаты маленькие'],'Господь сотворил целые числа; остальное — дело рук человека.' : ['Господь сотворил целые числа', 'остальное — дело рук человека.'],
'и ещё, пиши раз в день, пожалуйста, о прогрессе, например, \"за сегодня было добавлено столько-то фраз такого типа и столько-то синсетов было наполнено\".' :['и ещё', 'пиши раз в день', 'пожалуйста', 'о прогрессе, например', '"за сегодня было добавлено столько-то фраз такого типа', 'столько-то синсетов было наполнено".'],
'Ребят, где находится магаз?':['Ребят', 'где находится магаз?'],'Иван Васильевич, а что сказал Петр Иванвович?':['Иван Васильевич', 'что сказал Петр Иванвович?'],'Чел, ты потерялся...':['Чел','ты потерялся...'],'Иван, Петр сказал, что ты не очень прилежный студент!':['Иван', 'Петр сказал', 'ты не очень прилежный студент!'],
'Я собрался с ребятами, мы решили, что хотели бы создать студенческое объединение.':['Я собрался с ребятами', 'мы решили', 'хотели бы создать студенческое объединение.'],
'Уже сейчас студенты Санкт-Петербурга проявили интерес к данному мероприятию, и я считаю, что студентам НГУ подобные инициативы также будут интересны.':['Уже сейчас студенты Санкт-Петербурга проявили интерес к данному мероприятию', 'и я считаю', 'студентам НГУ подобные инициативы также будут интересны.'],
'Во-первых, кто ты такой чтобы мне такое говорить, во-вторых, он уже сказал, что здесь нет юридического основания для спора!':['Во-первых', 'кто ты такой', 'мне такое говорить', 'во-вторых', 'он уже сказал', 'здесь нет юридического основания для спора!']}
| 171.092308
| 412
| 0.748404
| 1,640
| 11,121
| 5.07561
| 0.327439
| 0.005766
| 0.006728
| 0.003604
| 0.925276
| 0.873618
| 0.824123
| 0.77655
| 0.694137
| 0.608361
| 0
| 0.004155
| 0.156011
| 11,121
| 64
| 413
| 173.765625
| 0.882271
| 0
| 0
| 0
| 0
| 0.278689
| 0.889291
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016393
| false
| 0
| 0
| 0
| 0.016393
| 0.032787
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
738fefb50375a6a158fc2242b9003a272f001b21
| 14,746
|
py
|
Python
|
tests/integration/test_user.py
|
andrew-chang-dewitt/hoops-api
|
3530c5127c35742aad84df8d6a5286b9f5ad3608
|
[
"MIT"
] | null | null | null |
tests/integration/test_user.py
|
andrew-chang-dewitt/hoops-api
|
3530c5127c35742aad84df8d6a5286b9f5ad3608
|
[
"MIT"
] | 10
|
2021-11-02T23:31:56.000Z
|
2021-12-07T03:41:12.000Z
|
tests/integration/test_user.py
|
andrew-chang-dewitt/hoops
|
3530c5127c35742aad84df8d6a5286b9f5ad3608
|
[
"MIT"
] | null | null | null |
"""Tests for /user routes."""
from uuid import UUID
from unittest import main, IsolatedAsyncioTestCase as TestCase
from db_wrapper.model import sql
# internal test dependencies
from tests.helpers.application import (
get_test_client,
get_token_header,
)
class TestRoutePostRoot(TestCase):
    """Tests for `POST /user`.

    Integration tests: get_test_client() yields an (http client, database
    wrapper) pair — presumably backed by a live test database; confirm in
    tests/helpers/application.
    """

    async def test_valid_request(self) -> None:
        """Testing a valid request's response."""
        # Fixture: payload for a brand-new user.
        new_user = {
            "handle": "new_user",
            "password": "@ new p4s5w0rd",
            "full_name": "A Full Name",
            "preferred_name": "Nickname",
        }

        async with get_test_client() as clients:
            client, database = clients

            response = await client.post(
                "/user",
                json=new_user)

            with self.subTest(
                    msg="Responds with a status code of 201."):
                self.assertEqual(201, response.status_code)

            with self.subTest(
                    msg="Responds with newly created User's handle, names, & ID."
            ):
                body = response.json()

                with self.subTest():
                    # The returned id must parse as a UUID.
                    self.assertTrue(UUID(body["id"]))
                with self.subTest():
                    self.assertEqual(body["handle"], new_user["handle"])
                with self.subTest():
                    self.assertEqual(body["full_name"], new_user["full_name"])
                with self.subTest():
                    self.assertEqual(body["preferred_name"],
                                     new_user["preferred_name"])

            with self.subTest(
                    msg="Response body does not include new password."):
                self.assertNotIn("password", body.keys())

            with self.subTest(
                    msg="New User is in the database."):
                body = response.json()
                new_id = UUID(body["id"])

                # Query the row directly to verify persistence; the id is
                # passed through sql.Literal, not string interpolation.
                await database.connect()
                query_result = await database.execute_and_return(sql.SQL("""
                    SELECT * FROM hoops_user
                    WHERE id = {new_id};
                """).format(new_id=sql.Literal(new_id)))
                await database.disconnect()

                result = query_result[0]

                with self.subTest(
                        msg="Given handle & database handle match."):
                    self.assertEqual(result["handle"], new_user["handle"])
                with self.subTest(
                        msg="Given full name & database full name match."):
                    self.assertEqual(
                        result["full_name"], new_user["full_name"])
                with self.subTest(
                        msg="Given preferred name & matches db."):
                    self.assertEqual(
                        result["preferred_name"], new_user["preferred_name"])
                with self.subTest(
                        msg="Given password is encrypted before storing."):
                    # Stored value must differ from the plaintext password.
                    self.assertNotEqual(
                        result["password"], new_user["password"])

    async def test_user_already_exists(self) -> None:
        """Returns 409 if user already exists."""
        new_user = {
            "handle": "new_user",
            "password": "@ new p4s5w0rd",
            "full_name": "A Full Name",
            "preferred_name": "Nickname",
        }

        async with get_test_client() as clients:
            client, database = clients

            # create existing user
            await database.connect()
            await database.execute("""
                INSERT INTO hoops_user(handle, full_name, password)
                VALUES ('new_user', 'A Name', 'password');
            """)
            await database.disconnect()

            # attempt to add new user with same handle
            response = await client.post(
                "/user",
                json=new_user)

            self.assertEqual(409, response.status_code)
class TestRouteGetRoot(TestCase):
    """Tests for `GET /user`."""

    async def test_valid_request(self) -> None:
        """Testing a valid request's response."""
        # Seed one user directly in the database; RETURNING id captures
        # the generated primary key for the auth header below.
        query = """
            INSERT INTO
                hoops_user(handle, full_name, preferred_name, password)
            VALUES
                ('user', 'A Full Name', 'Nickname', '@ new p4s5w0rd')
            RETURNING id;
        """

        async with get_test_client() as clients:
            client, database = clients

            await database.connect()
            result = await database.execute_and_return(query)
            await database.disconnect()

            user_id = result[0]["id"]

            # Authenticated request on behalf of the seeded user.
            response = await client.get(
                "/user",
                headers={
                    **get_token_header(user_id),
                    "accept": "application/json"})

            with self.subTest(
                    msg="Responds with a status code of 200."):
                self.assertEqual(200, response.status_code)

            with self.subTest(
                    msg="Responds with Content-Type: application/json Header."
            ):
                self.assertEqual(response.headers.get(
                    'content-type'), 'application/json')

            with self.subTest(
                    msg="Responds with authorized User's data."):
                body = response.json()

                with self.subTest():
                    self.assertEqual(body["id"], str(user_id))
                with self.subTest():
                    self.assertEqual(body['handle'], "user")
                with self.subTest():
                    self.assertEqual(body['full_name'], "A Full Name")
                with self.subTest():
                    self.assertEqual(body['preferred_name'], "Nickname")

            with self.subTest(
                    msg="Response doesn't include the User's password."):
                # Key access itself must raise if the field is absent.
                with self.assertRaises(KeyError):
                    body["password"]  # pylint: disable=pointless-statement
class TestRoutePutRoot(TestCase):
    """Tests for `PUT /user`."""

    async def test_valid_request(self) -> None:
        """Testing a valid request's response."""
        # Seed one user; RETURNING id captures the generated primary key.
        query = """
            INSERT INTO
                hoops_user(handle, full_name, preferred_name, password)
            VALUES
                ('user', 'A Full Name', 'Nickname', '@ new p4s5w0rd')
            RETURNING id;
        """

        async with get_test_client() as clients:
            client, database = clients

            await database.connect()
            result = await database.execute_and_return(query)
            await database.disconnect()

            user_id = result[0]["id"]

            # Update only the handle; other fields must be preserved.
            response = await client.put(
                "/user",
                headers={
                    **get_token_header(user_id),
                    "accept": "application/json"},
                json={"handle": "new_handle"})

            with self.subTest(
                    msg="Responds with a status code of 200."):
                self.assertEqual(200, response.status_code)

            with self.subTest(
                    msg="Responds with User's updated information."
            ):
                body = response.json()

                with self.subTest():
                    self.assertEqual(body["id"], str(user_id))
                with self.subTest():
                    self.assertEqual(body["handle"], "new_handle")
                with self.subTest():
                    self.assertEqual(body["full_name"], "A Full Name")
                with self.subTest():
                    self.assertEqual(body["preferred_name"],
                                     "Nickname")

            with self.subTest(
                    msg="Response body does not include new password."):
                self.assertNotIn("password", body.keys())

            with self.subTest(
                    msg="Changes to user show in database."):
                body = response.json()

                # Verify the update was persisted, not just echoed back.
                await database.connect()
                query_result = await database.execute_and_return(sql.SQL("""
                    SELECT handle FROM hoops_user
                    WHERE id = {user_id};
                """).format(user_id=sql.Literal(user_id)))
                await database.disconnect()

                result = query_result[0]

                with self.subTest(
                        msg="Given handle & database handle match."):
                    self.assertEqual(result["handle"], "new_handle")

    async def test_cant_update_password(self) -> None:
        """PUT /user can't update a User's password."""
        query = """
            INSERT INTO
                hoops_user(handle, full_name, preferred_name, password)
            VALUES
                ('user', 'A Full Name', 'Nickname', '@ new p4s5w0rd')
            RETURNING id;
        """

        async with get_test_client() as clients:
            client, database = clients

            await database.connect()
            result = await database.execute_and_return(query)
            await database.disconnect()

            user_id = result[0]["id"]

            # Password changes must go through PUT /user/password instead;
            # this route is expected to reject the field with 422.
            response = await client.put(
                "/user",
                headers={
                    **get_token_header(user_id),
                    "accept": "application/json"},
                json={"password": "this won't work"})

            with self.subTest(
                    msg="Responds with a status code of 422."):
                self.assertEqual(422, response.status_code)
class TestRoutePutPassword(TestCase):
    """Tests for `PUT /user/password`."""

    async def test_valid_request(self) -> None:
        """Testing a valid request's response."""
        # Seed one user; RETURNING id captures the generated primary key.
        query = """
            INSERT INTO
                hoops_user(handle, full_name, preferred_name, password)
            VALUES
                ('user', 'A Full Name', 'Nickname', '@ new p4s5w0rd')
            RETURNING id;
        """

        async with get_test_client() as clients:
            client, database = clients

            await database.connect()
            result = await database.execute_and_return(query)
            await database.disconnect()

            user_id = result[0]["id"]

            # The new password is sent as a bare JSON string body.
            response = await client.put(
                "/user/password",
                headers={
                    **get_token_header(user_id),
                    "accept": "application/json"},
                json="updated password")

            with self.subTest(
                    msg="Responds with a status code of 200."):
                self.assertEqual(200, response.status_code)

            with self.subTest(
                    msg="Responds with User's information."
            ):
                body = response.json()

                with self.subTest():
                    self.assertEqual(body["id"], str(user_id))
                with self.subTest():
                    self.assertEqual(body["handle"], "user")
                with self.subTest():
                    self.assertEqual(body["full_name"], "A Full Name")
                with self.subTest():
                    self.assertEqual(body["preferred_name"],
                                     "Nickname")

            with self.subTest(
                    msg="Response body does not include new password."):
                self.assertNotIn("password", body.keys())

            with self.subTest(
                    msg="Changes to user show in database."):
                body = response.json()

                # Check the stored hash matches the new plaintext via
                # crypt(); assumes the pgcrypto extension is installed in
                # the test database — confirm in the schema setup.
                await database.connect()
                query_result = await database.execute_and_return(sql.SQL("""
                    SELECT
                        (password = crypt({password}, password))
                        AS pwmatch
                    FROM hoops_user
                    WHERE
                        id = {user_id};
                """).format(
                    user_id=sql.Literal(user_id),
                    password=sql.Literal("updated password")))
                await database.disconnect()

                result = query_result[0]

                with self.subTest(
                        msg="Database contains updated password."):
                    self.assertTrue(result["pwmatch"])
class TestRouteDeleteRoot(TestCase):
    """Tests for `DELETE /user`."""

    async def test_valid_request(self) -> None:
        """Testing a valid request's response."""
        # Seed one user; RETURNING id captures the generated primary key.
        query = """
            INSERT INTO
                hoops_user(handle, full_name, preferred_name, password)
            VALUES
                ('user', 'A Full Name', 'Nickname', '@ new p4s5w0rd')
            RETURNING id;
        """

        async with get_test_client() as clients:
            client, database = clients

            await database.connect()
            result = await database.execute_and_return(query)
            await database.disconnect()

            user_id = result[0]["id"]

            response = await client.delete(
                "/user",
                headers={
                    **get_token_header(user_id),
                    "accept": "application/json"})

            with self.subTest(
                    msg="Responds with a status code of 200."):
                self.assertEqual(200, response.status_code)

            with self.subTest(
                    msg="Responds with deleted User's information."
            ):
                # The route echoes the deleted row back to the caller.
                body = response.json()

                with self.subTest():
                    self.assertEqual(body["id"], str(user_id))
                with self.subTest():
                    self.assertEqual(body["handle"], "user")
                with self.subTest():
                    self.assertEqual(body["full_name"], "A Full Name")
                with self.subTest():
                    self.assertEqual(body["preferred_name"],
                                     "Nickname")

            with self.subTest(
                    msg="Response body does not include new password."):
                self.assertNotIn("password", body.keys())

            with self.subTest(
                    msg="User is removed from database."):
                body = response.json()

                # A direct SELECT must now return zero rows.
                await database.connect()
                query_result = await database.execute_and_return(sql.SQL("""
                    SELECT *
                    FROM hoops_user
                    WHERE id = {user_id};
                """).format(
                    user_id=sql.Literal(user_id)))
                await database.disconnect()

                self.assertEqual(len(query_result), 0)
if __name__ == "__main__":
    # Allow running this test module directly (outside a test runner).
    main()
| 35.7046
| 78
| 0.496881
| 1,382
| 14,746
| 5.178003
| 0.106368
| 0.053661
| 0.098519
| 0.067915
| 0.790945
| 0.779486
| 0.775573
| 0.762018
| 0.745109
| 0.717859
| 0
| 0.008109
| 0.397871
| 14,746
| 412
| 79
| 35.791262
| 0.797838
| 0.018785
| 0
| 0.742857
| 0
| 0
| 0.262551
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 1
| 0
| false
| 0.092063
| 0.012698
| 0
| 0.028571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
fb4efd87acd5e3b455476f92cd2ab7b92c4af889
| 433,568
|
py
|
Python
|
source/module_troops.py
|
WuphonsReach/BeggarsDiplomacy
|
e6b7f3790e28f458eca0a42250a6bbf41bef8d30
|
[
"MIT"
] | null | null | null |
source/module_troops.py
|
WuphonsReach/BeggarsDiplomacy
|
e6b7f3790e28f458eca0a42250a6bbf41bef8d30
|
[
"MIT"
] | null | null | null |
source/module_troops.py
|
WuphonsReach/BeggarsDiplomacy
|
e6b7f3790e28f458eca0a42250a6bbf41bef8d30
|
[
"MIT"
] | null | null | null |
import random
from header_common import *
from header_items import *
from header_troops import *
from header_skills import *
from ID_factions import *
from ID_items import *
from ID_scenes import *
from compiler import *
####################################################################################################################
# Each troop contains the following fields:
# 1) Troop id (string): used for referencing troops in other files. The prefix trp_ is automatically added before each troop-id .
# 2) Toop name (string).
# 3) Plural troop name (string).
# 4) Troop flags (int). See header_troops.py for a list of available flags
# 5) Scene (int) (only applicable to heroes) For example: scn_reyvadin_castle|entry(1) puts troop in reyvadin castle's first entry point
# 6) Reserved (int). Put constant "reserved" or 0.
# 7) Faction (int)
# 8) Inventory (list): Must be a list of items
# 9) Attributes (int): Example usage:
# str_6|agi_6|int_4|cha_5|level(5)
# 10) Weapon proficiencies (int): Example usage:
# wp_one_handed(55)|wp_two_handed(90)|wp_polearm(36)|wp_archery(80)|wp_crossbow(24)|wp_throwing(45)
# The function wp(x) will create random weapon proficiencies close to value x.
# To make an expert archer with other weapon proficiencies close to 60 you can use something like:
# wp_archery(160) | wp(60)
# 11) Skills (int): See header_skills.py to see a list of skills. Example:
# knows_ironflesh_3|knows_power_strike_2|knows_athletics_2|knows_riding_2
# 12) Face code (int): You can obtain the face code by pressing ctrl+E in face generator screen
# 13) Face code (int)(2) (only applicable to regular troops, can be omitted for heroes):
# The game will create random faces between Face code 1 and face code 2 for generated troops
# 14) Troop image (string): If this variable is set, the troop will use an image rather than its 3D visual during the conversations
# town_1 Sargoth
# town_2 Tihr
# town_3 Veluca
# town_4 Suno
# town_5 Jelkala
# town_6 Praven
# town_7 Uxkhal
# town_8 Reyvadin
# town_9 Khudan
# town_10 Tulga
# town_11 Curaw
# town_12 Wercheg
# town_13 Rivacheg
# town_14 Halmar
####################################################################################################################
# Some constant and function declarations to be used below...
# wp_one_handed () | wp_two_handed () | wp_polearm () | wp_archery () | wp_crossbow () | wp_throwing ()
def wp(x):
    """Return a weapon-proficiency bitfield with every class set to x.

    All six proficiencies (one-handed, two-handed, polearm, archery,
    crossbow, throwing) receive the same value.  An earlier revision
    randomized each value around x; that scaffolding (the unused spread
    `r = 10 + int(x / 10)` and the commented-out randrange lines) was
    dead code and has been removed — behavior is unchanged.

    Args:
        x (int): proficiency value applied to all six weapon classes.

    Returns:
        int: OR-combined proficiency flags.
    """
    n = 0
    n |= wp_one_handed(x)
    n |= wp_two_handed(x)
    n |= wp_polearm(x)
    n |= wp_archery(x)
    n |= wp_crossbow(x)
    n |= wp_throwing(x)
    return n
def wpe(m, a, c, t):
    """Weapon proficiencies with one shared melee value.

    Args:
        m: proficiency applied to all melee classes
           (one-handed, two-handed, polearm).
        a: archery proficiency.
        c: crossbow proficiency.
        t: throwing proficiency.

    Returns:
        int: OR-combined proficiency flags.
    """
    return (wp_one_handed(m)
            | wp_two_handed(m)
            | wp_polearm(m)
            | wp_archery(a)
            | wp_crossbow(c)
            | wp_throwing(t))
def wpex(o, w, p, a, c, t):
    """Fully explicit weapon proficiencies, one argument per weapon class.

    Args:
        o: one-handed proficiency.
        w: two-handed proficiency.
        p: polearm proficiency.
        a: archery proficiency.
        c: crossbow proficiency.
        t: throwing proficiency.

    Returns:
        int: OR-combined proficiency flags.
    """
    # Pair each flag builder with its value and fold them together.
    assignments = (
        (wp_one_handed, o),
        (wp_two_handed, w),
        (wp_polearm, p),
        (wp_archery, a),
        (wp_crossbow, c),
        (wp_throwing, t),
    )
    flags = 0
    for builder, value in assignments:
        flags |= builder(value)
    return flags
def wp_melee(x):
    """Return melee-only weapon proficiencies centered on x.

    One-handed gets a +20 bonus and polearm +10 relative to two-handed;
    ranged proficiencies (archery, crossbow, throwing) are left unset.
    The unused spread `r = 10 + int(x / 10)` and the commented-out
    randomization lines were dead code and have been removed — behavior
    is unchanged.

    Args:
        x (int): base melee proficiency value.

    Returns:
        int: OR-combined melee proficiency flags.
    """
    n = 0
    n |= wp_one_handed(x + 20)
    n |= wp_two_handed(x)
    n |= wp_polearm(x + 10)
    return n
# Skills
# Shared skill/attribute bundles, OR-combined from the flag constants in
# header_skills / header_troops; reused by many troop records below.

# Baseline package for generic single-player troops.
knows_common = knows_riding_1|knows_trade_2|knows_inventory_management_2|knows_prisoner_management_1|knows_leadership_1
# Multiplayer troops get every support skill maxed (level 10).
knows_common_multiplayer = knows_trade_10|knows_inventory_management_10|knows_prisoner_management_10|knows_leadership_10|knows_spotting_10|knows_pathfinding_10|knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10
# Default attribute spreads.
def_attrib = str_7 | agi_5 | int_4 | cha_4
def_attrib_multiplayer = int_30 | cha_30
# Role-specific skill packages for lords and companion NPC archetypes.
knows_lord_1 = knows_riding_3|knows_trade_2|knows_inventory_management_2|knows_tactics_4|knows_prisoner_management_4|knows_leadership_7
knows_warrior_npc = knows_weapon_master_2|knows_ironflesh_1|knows_athletics_1|knows_power_strike_2|knows_riding_2|knows_shield_1|knows_inventory_management_2
knows_merchant_npc = knows_riding_2|knows_trade_3|knows_inventory_management_3 #knows persuasion
knows_tracker_npc = knows_weapon_master_1|knows_athletics_2|knows_spotting_2|knows_pathfinding_2|knows_tracking_2|knows_ironflesh_1|knows_inventory_management_2
lord_attrib = str_20|agi_20|int_20|cha_20|level(38)
# Knight tiers 1..5: attributes and skills scale up together per tier.
knight_attrib_1 = str_15|agi_14|int_8|cha_16|level(22)
knight_attrib_2 = str_16|agi_16|int_10|cha_18|level(26)
knight_attrib_3 = str_18|agi_17|int_12|cha_20|level(30)
knight_attrib_4 = str_19|agi_19|int_13|cha_22|level(35)
knight_attrib_5 = str_20|agi_20|int_15|cha_25|level(41)
knight_skills_1 = knows_riding_3|knows_ironflesh_2|knows_power_strike_3|knows_athletics_1|knows_tactics_2|knows_prisoner_management_1|knows_leadership_3
knight_skills_2 = knows_riding_4|knows_ironflesh_3|knows_power_strike_4|knows_athletics_2|knows_tactics_3|knows_prisoner_management_2|knows_leadership_5
knight_skills_3 = knows_riding_5|knows_ironflesh_4|knows_power_strike_5|knows_athletics_3|knows_tactics_4|knows_prisoner_management_2|knows_leadership_6
knight_skills_4 = knows_riding_6|knows_ironflesh_5|knows_power_strike_6|knows_athletics_4|knows_tactics_5|knows_prisoner_management_3|knows_leadership_7
knight_skills_5 = knows_riding_7|knows_ironflesh_6|knows_power_strike_7|knows_athletics_5|knows_tactics_6|knows_prisoner_management_3|knows_leadership_9
#These face codes are generated by the in-game face generator.
#Enable edit mode and press ctrl+E in face generator screen to obtain face codes.
#
# The pair (face_1, face_2) given on each troop entry spans the range of
# random faces that troop can spawn with. For each culture there are two
# base variants, each in five age steps (younger/young/middle/old/older);
# within a variant only the leading age bits differ, the rest of the code
# (facial features, hair, etc.) is identical. Treat these hex blobs as
# opaque: regenerate them with the in-game editor, never edit them by hand.

# Placeholder values for the "scene" and "reserved" slots of troop tuples.
reserved = 0
no_scene = 0
swadian_face_younger_1 = 0x0000000000000001124000000020000000000000001c00800000000000000000
swadian_face_young_1 = 0x0000000400000001124000000020000000000000001c00800000000000000000
swadian_face_middle_1 = 0x0000000800000001124000000020000000000000001c00800000000000000000
swadian_face_old_1 = 0x0000000d00000001124000000020000000000000001c00800000000000000000
swadian_face_older_1 = 0x0000000fc0000001124000000020000000000000001c00800000000000000000
swadian_face_younger_2 = 0x00000000000062c76ddcdf7feefbffff00000000001efdbc0000000000000000
swadian_face_young_2 = 0x00000003c00062c76ddcdf7feefbffff00000000001efdbc0000000000000000
swadian_face_middle_2 = 0x00000007c00062c76ddcdf7feefbffff00000000001efdbc0000000000000000
swadian_face_old_2 = 0x0000000bc00062c76ddcdf7feefbffff00000000001efdbc0000000000000000
swadian_face_older_2 = 0x0000000fc00062c76ddcdf7feefbffff00000000001efdbc0000000000000000
# NOTE: vaegir *_1 variants are identical to the swadian *_1 codes above.
vaegir_face_younger_1 = 0x0000000000000001124000000020000000000000001c00800000000000000000
vaegir_face_young_1 = 0x0000000400000001124000000020000000000000001c00800000000000000000
vaegir_face_middle_1 = 0x0000000800000001124000000020000000000000001c00800000000000000000
vaegir_face_old_1 = 0x0000000d00000001124000000020000000000000001c00800000000000000000
vaegir_face_older_1 = 0x0000000fc0000001124000000020000000000000001c00800000000000000000
vaegir_face_younger_2 = 0x000000003f00230c4deeffffffffffff00000000001efff90000000000000000
vaegir_face_young_2 = 0x00000003bf00230c4deeffffffffffff00000000001efff90000000000000000
vaegir_face_middle_2 = 0x00000007bf00230c4deeffffffffffff00000000001efff90000000000000000
vaegir_face_old_2 = 0x0000000cbf00230c4deeffffffffffff00000000001efff90000000000000000
vaegir_face_older_2 = 0x0000000ff100230c4deeffffffffffff00000000001efff90000000000000000
khergit_face_younger_1 = 0x0000000009003109207000000000000000000000001c80470000000000000000
khergit_face_young_1 = 0x00000003c9003109207000000000000000000000001c80470000000000000000
khergit_face_middle_1 = 0x00000007c9003109207000000000000000000000001c80470000000000000000
khergit_face_old_1 = 0x0000000b89003109207000000000000000000000001c80470000000000000000
khergit_face_older_1 = 0x0000000fc9003109207000000000000000000000001c80470000000000000000
khergit_face_younger_2 = 0x000000003f0061cd6d7ffbdf9df6ebee00000000001ffb7f0000000000000000
khergit_face_young_2 = 0x00000003bf0061cd6d7ffbdf9df6ebee00000000001ffb7f0000000000000000
khergit_face_middle_2 = 0x000000077f0061cd6d7ffbdf9df6ebee00000000001ffb7f0000000000000000
khergit_face_old_2 = 0x0000000b3f0061cd6d7ffbdf9df6ebee00000000001ffb7f0000000000000000
khergit_face_older_2 = 0x0000000fff0061cd6d7ffbdf9df6ebee00000000001ffb7f0000000000000000
# NOTE: nord *_1 variants are also identical to the swadian *_1 codes.
nord_face_younger_1 = 0x0000000000000001124000000020000000000000001c00800000000000000000
nord_face_young_1 = 0x0000000400000001124000000020000000000000001c00800000000000000000
nord_face_middle_1 = 0x0000000800000001124000000020000000000000001c00800000000000000000
nord_face_old_1 = 0x0000000d00000001124000000020000000000000001c00800000000000000000
nord_face_older_1 = 0x0000000fc0000001124000000020000000000000001c00800000000000000000
nord_face_younger_2 = 0x00000000310023084deeffffffffffff00000000001efff90000000000000000
nord_face_young_2 = 0x00000003b10023084deeffffffffffff00000000001efff90000000000000000
nord_face_middle_2 = 0x00000008310023084deeffffffffffff00000000001efff90000000000000000
nord_face_old_2 = 0x0000000c710023084deeffffffffffff00000000001efff90000000000000000
nord_face_older_2 = 0x0000000ff10023084deeffffffffffff00000000001efff90000000000000000
rhodok_face_younger_1 = 0x0000000009002003140000000000000000000000001c80400000000000000000
rhodok_face_young_1 = 0x0000000449002003140000000000000000000000001c80400000000000000000
rhodok_face_middle_1 = 0x0000000849002003140000000000000000000000001c80400000000000000000
rhodok_face_old_1 = 0x0000000cc9002003140000000000000000000000001c80400000000000000000
rhodok_face_older_1 = 0x0000000fc9002003140000000000000000000000001c80400000000000000000
# NOTE: rhodok *_2 variants reuse the swadian *_2 codes.
rhodok_face_younger_2 = 0x00000000000062c76ddcdf7feefbffff00000000001efdbc0000000000000000
rhodok_face_young_2 = 0x00000003c00062c76ddcdf7feefbffff00000000001efdbc0000000000000000
rhodok_face_middle_2 = 0x00000007c00062c76ddcdf7feefbffff00000000001efdbc0000000000000000
rhodok_face_old_2 = 0x0000000bc00062c76ddcdf7feefbffff00000000001efdbc0000000000000000
rhodok_face_older_2 = 0x0000000fc00062c76ddcdf7feefbffff00000000001efdbc0000000000000000
# Generic (culture-neutral) male faces.
man_face_younger_1 = 0x0000000000000001124000000020000000000000001c00800000000000000000
man_face_young_1 = 0x0000000400000001124000000020000000000000001c00800000000000000000
man_face_middle_1 = 0x0000000800000001124000000020000000000000001c00800000000000000000
man_face_old_1 = 0x0000000d00000001124000000020000000000000001c00800000000000000000
man_face_older_1 = 0x0000000fc0000001124000000020000000000000001c00800000000000000000
man_face_younger_2 = 0x000000003f0052064deeffffffffffff00000000001efff90000000000000000
man_face_young_2 = 0x00000003bf0052064deeffffffffffff00000000001efff90000000000000000
man_face_middle_2 = 0x00000007bf0052064deeffffffffffff00000000001efff90000000000000000
man_face_old_2 = 0x0000000bff0052064deeffffffffffff00000000001efff90000000000000000
man_face_older_2 = 0x0000000fff0052064deeffffffffffff00000000001efff90000000000000000
# Aliases: merchants, refugees, girls and bandits reuse the generic ranges.
merchant_face_1 = man_face_young_1
merchant_face_2 = man_face_older_2
woman_face_1 = 0x0000000000000001000000000000000000000000001c00000000000000000000
woman_face_2 = 0x00000003bf0030067ff7fbffefff6dff00000000001f6dbf0000000000000000
swadian_woman_face_1 = 0x0000000180102006124925124928924900000000001c92890000000000000000
swadian_woman_face_2 = 0x00000001bf1000061db6d75db6b6dbad00000000001c92890000000000000000
khergit_woman_face_1 = 0x0000000180103006124925124928924900000000001c92890000000000000000
khergit_woman_face_2 = 0x00000001af1030025b6eb6dd6db6dd6d00000000001eedae0000000000000000
refugee_face1 = woman_face_1
refugee_face2 = woman_face_2
girl_face1 = woman_face_1
girl_face2 = woman_face_2
mercenary_face_1 = 0x0000000000000000000000000000000000000000001c00000000000000000000
mercenary_face_2 = 0x0000000cff00730b6db6db6db7fbffff00000000001efffe0000000000000000
vaegir_face1 = vaegir_face_young_1
vaegir_face2 = vaegir_face_older_2
bandit_face1 = man_face_young_1
bandit_face2 = man_face_older_2
# Undead codes are shorter (128-bit vs 256-bit) than the others — presumably
# an older face-code format kept for the legacy undead troops; TODO confirm.
undead_face1 = 0x00000000002000000000000000000000
undead_face2 = 0x000000000020010000001fffffffffff
#NAMES:
#
# Convenience equipment-guarantee masks: OR of the individual tf_guarantee_*
# flags (defined earlier in the file) so a troop spawns with every listed
# equipment slot filled. The _wo_ranged variant is identical except that it
# omits tf_guarantee_ranged.
tf_guarantee_all = tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield|tf_guarantee_ranged
tf_guarantee_all_wo_ranged = tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield
troops = [
["player","Player","Player",tf_hero|tf_unmoveable_in_party_window,no_scene,reserved,fac_player_faction,
[],
str_4|agi_4|int_4|cha_4,wp(15),0,0x000000018000000136db6db6db6db6db00000000001db6db0000000000000000],
["multiplayer_profile_troop_male","multiplayer_profile_troop_male","multiplayer_profile_troop_male", tf_hero|tf_guarantee_all, 0, 0,fac_commoners,
[itm_leather_jerkin, itm_leather_boots],
0, 0, 0, 0x000000018000000136db6db6db6db6db00000000001db6db0000000000000000],
["multiplayer_profile_troop_female","multiplayer_profile_troop_female","multiplayer_profile_troop_female", tf_hero|tf_female|tf_guarantee_all, 0, 0,fac_commoners,
[itm_tribal_warrior_outfit, itm_leather_boots],
0, 0, 0, 0x000000018000000136db6db6db6db6db00000000001db6db0000000000000000],
["temp_troop","Temp Troop","Temp Troop",tf_hero,no_scene,reserved,fac_commoners,[],def_attrib,0,knows_common|knows_inventory_management_10,0],
## ["game","Game","Game",tf_hero,no_scene,reserved,fac_commoners,[],def_attrib,0,knows_common,0],
## ["unarmed_troop","Unarmed Troop","Unarmed Troops",tf_hero,no_scene,reserved,fac_commoners,[itm_arrows,itm_short_bow],def_attrib|str_14,0,knows_common|knows_power_draw_2,0],
####################################################################################################################
# Troops before this point are hardwired into the game and their order should not be changed!
####################################################################################################################
["find_item_cheat","find_item_cheat","find_item_cheat",tf_hero|tf_is_merchant,no_scene,reserved,fac_commoners,[],def_attrib,0,knows_common|knows_inventory_management_10,0],
["random_town_sequence","Random Town Sequence","Random Town Sequence",tf_hero,no_scene,reserved,fac_commoners,[],def_attrib,0,knows_common|knows_inventory_management_10,0],
["tournament_participants","Tournament Participants","Tournament Participants",tf_hero,no_scene,reserved,fac_commoners,[],def_attrib,0,knows_common|knows_inventory_management_10,0],
["tutorial_maceman","Maceman","Maceman",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_tutorial_club,itm_leather_jerkin,itm_hide_boots],
str_6|agi_6|level(1),wp(50),knows_common,mercenary_face_1,mercenary_face_2],
["tutorial_archer","Archer","Archer",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_ranged,no_scene,reserved,fac_commoners,
[itm_tutorial_short_bow,itm_tutorial_arrows,itm_linen_tunic,itm_hide_boots],
str_6|agi_6|level(5),wp(100),knows_common|knows_power_draw_4,mercenary_face_1,mercenary_face_2],
["tutorial_swordsman","Swordsman","Swordsman",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_tutorial_sword,itm_leather_vest,itm_hide_boots],
str_6|agi_6|level(5),wp(80),knows_common,mercenary_face_1,mercenary_face_2],
["novice_fighter","Novice Fighter","Novice Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_6|agi_6|level(5),wp(60),knows_common,mercenary_face_1, mercenary_face_2],
["regular_fighter","Regular Fighter","Regular Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_8|agi_8|level(11),wp(90),knows_common|knows_ironflesh_1|knows_power_strike_1|knows_athletics_1|knows_riding_1|knows_shield_2,mercenary_face_1, mercenary_face_2],
["veteran_fighter","Veteran Fighter","Veteran Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,0,fac_commoners,
[itm_hide_boots],
str_10|agi_10|level(17),wp(110),knows_common|knows_ironflesh_3|knows_power_strike_2|knows_athletics_2|knows_riding_2|knows_shield_3,mercenary_face_1, mercenary_face_2],
["champion_fighter","Champion Fighter","Champion Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_12|agi_11|level(22),wp(140),knows_common|knows_ironflesh_4|knows_power_strike_3|knows_athletics_3|knows_riding_3|knows_shield_4,mercenary_face_1, mercenary_face_2],
["arena_training_fighter_1","Novice Fighter","Novice Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_6|agi_6|level(5),wp(60),knows_common,mercenary_face_1, mercenary_face_2],
["arena_training_fighter_2","Novice Fighter","Novice Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_7|agi_6|level(7),wp(70),knows_common,mercenary_face_1, mercenary_face_2],
["arena_training_fighter_3","Regular Fighter","Regular Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_8|agi_7|level(9),wp(80),knows_common,mercenary_face_1, mercenary_face_2],
["arena_training_fighter_4","Regular Fighter","Regular Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_8|agi_8|level(11),wp(90),knows_common,mercenary_face_1, mercenary_face_2],
["arena_training_fighter_5","Regular Fighter","Regular Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_9|agi_8|level(13),wp(100),knows_common,mercenary_face_1, mercenary_face_2],
["arena_training_fighter_6","Veteran Fighter","Veteran Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_10|agi_9|level(15),wp(110),knows_common,mercenary_face_1, mercenary_face_2],
["arena_training_fighter_7","Veteran Fighter","Veteran Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_10|agi_10|level(17),wp(120),knows_common,mercenary_face_1, mercenary_face_2],
["arena_training_fighter_8","Veteran Fighter","Veteran Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_11|agi_10|level(19),wp(130),knows_common,mercenary_face_1, mercenary_face_2],
["arena_training_fighter_9","Champion Fighter","Champion Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_12|agi_11|level(21),wp(140),knows_common,mercenary_face_1, mercenary_face_2],
["arena_training_fighter_10","Champion Fighter","Champion Fighters",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_hide_boots],
str_12|agi_12|level(23),wp(150),knows_common,mercenary_face_1, mercenary_face_2],
["cattle","Cattle","Cattle",0,no_scene,reserved,fac_neutral, [], def_attrib|level(1),wp(60),0,mercenary_face_1, mercenary_face_2],
#soldiers:
#This troop is the troop marked as soldiers_begin
["farmer","Farmer","Farmers",tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_cleaver,itm_dagger,itm_knife,itm_pitch_fork,itm_sickle,itm_club,itm_stones,itm_leather_cap,itm_felt_hat,itm_felt_hat,itm_linen_tunic,itm_coarse_tunic,itm_nomad_boots,itm_wrapping_boots],
def_attrib|level(4),wp(60),knows_common,man_face_middle_1, man_face_old_2],
["townsman","Townsman","Townsmen",tf_guarantee_boots|tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_cleaver,itm_knife,itm_dagger,itm_club,itm_quarter_staff,itm_dagger,itm_stones,itm_leather_cap,itm_linen_tunic,itm_coarse_tunic,itm_leather_apron,itm_nomad_boots,itm_wrapping_boots],
def_attrib|level(4),wp(60),knows_common,mercenary_face_1, mercenary_face_2],
["watchman","Watchman","Watchmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield,no_scene,reserved,fac_commoners,
[itm_bolts,itm_spiked_club,itm_fighting_pick,itm_sword_medieval_a,itm_boar_spear,itm_hunting_crossbow,itm_light_crossbow,itm_tab_shield_round_a,itm_tab_shield_round_b,itm_padded_cloth,itm_leather_jerkin,itm_leather_cap,itm_padded_coif,itm_footman_helmet,itm_nomad_boots,itm_wrapping_boots],
def_attrib|level(9),wp(75),knows_common|knows_shield_1,mercenary_face_1, mercenary_face_2],
["caravan_guard","Caravan Guard","Caravan Guards",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_horse|tf_guarantee_shield,no_scene,0,fac_commoners,
[itm_spear,itm_fighting_pick,itm_sword_medieval_a,itm_voulge,itm_shortened_voulge,itm_tab_shield_round_b,itm_tab_shield_round_c,itm_leather_jerkin,itm_leather_vest,itm_hide_boots,itm_padded_coif,itm_nasal_helmet,itm_footman_helmet,itm_saddle_horse],
def_attrib|level(14),wp(85),knows_common|knows_riding_2|knows_ironflesh_1|knows_shield_3,mercenary_face_1, mercenary_face_2],
["mercenary_swordsman","Mercenary Swordsman","Mercenary Swordsmen",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_shield,no_scene,reserved,fac_commoners,
[itm_bastard_sword_a,itm_sword_medieval_b,itm_sword_medieval_b_small,itm_tab_shield_heater_c,itm_mail_hauberk,itm_haubergeon,itm_leather_boots,itm_mail_chausses,itm_kettle_hat,itm_mail_coif,itm_flat_topped_helmet, itm_helmet_with_neckguard],
def_attrib|level(20),wp(100),knows_common|knows_riding_3|knows_ironflesh_3|knows_shield_3|knows_power_strike_3,mercenary_face_1, mercenary_face_2],
["hired_blade","Hired Blade","Hired Blades",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_helmet|tf_guarantee_shield,no_scene,reserved,fac_commoners,
[itm_bastard_sword_b,itm_sword_medieval_c,itm_tab_shield_heater_cav_a,itm_haubergeon,itm_mail_chausses,itm_iron_greaves,itm_plate_boots,itm_guard_helmet,itm_great_helmet,itm_bascinet, itm_leather_gloves],
def_attrib|level(25),wp(130),knows_common|knows_riding_3|knows_athletics_5|knows_shield_5|knows_power_strike_5|knows_ironflesh_5,mercenary_face_1, mercenary_face_2],
["mercenary_crossbowman","Mercenary Crossbowman","Mercenary Crossbowmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_ranged,no_scene,reserved,fac_commoners,
[itm_bolts,itm_spiked_club,itm_fighting_pick,itm_sword_medieval_a,itm_boar_spear,itm_crossbow,itm_tab_shield_pavise_a,itm_tab_shield_round_b,itm_padded_cloth,itm_leather_jerkin,itm_leather_cap,itm_padded_coif,itm_footman_helmet,itm_nomad_boots,itm_wrapping_boots],
def_attrib|level(19),wp_one_handed (90) | wp_two_handed (90) | wp_polearm (90) | wp_archery (90) | wp_crossbow (130) | wp_throwing (90),knows_common|knows_athletics_5|knows_shield_1,mercenary_face_1, mercenary_face_2],
["mercenary_horseman","Mercenary Horseman","Mercenary Horsemen",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield,no_scene,reserved,fac_commoners,
[itm_lance,itm_bastard_sword_a,itm_sword_medieval_b,itm_tab_shield_heater_c,itm_mail_shirt,itm_haubergeon,itm_leather_boots,itm_norman_helmet,itm_mail_coif,itm_helmet_with_neckguard,itm_saddle_horse,itm_courser],
def_attrib|level(20),wp(100),knows_common|knows_riding_4|knows_ironflesh_3|knows_shield_2|knows_power_strike_3,mercenary_face_1, mercenary_face_2],
["mercenary_cavalry","Mercenary Cavalry","Mercenary Cavalry",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield,no_scene,reserved,fac_commoners,
[itm_heavy_lance,itm_bastard_sword_a,itm_sword_medieval_b,itm_tab_shield_heater_c,itm_cuir_bouilli,itm_banded_armor,itm_hide_boots,itm_kettle_hat,itm_mail_coif,itm_flat_topped_helmet,itm_helmet_with_neckguard,itm_warhorse,itm_hunter],
def_attrib|level(25),wp(130),knows_common|knows_riding_5|knows_ironflesh_4|knows_shield_5|knows_power_strike_4,mercenary_face_1, mercenary_face_2],
["mercenaries_end","mercenaries_end","mercenaries_end",0,no_scene,reserved,fac_commoners,
[],
def_attrib|level(4),wp(60),knows_common,mercenary_face_1, mercenary_face_2],
#peasant - retainer - footman - man-at-arms - knight
["swadian_recruit","Swadian Recruit","Swadian Recruits",tf_guarantee_armor,0,0,fac_kingdom_1,
[itm_scythe,itm_hatchet,itm_pickaxe,itm_club,itm_stones,itm_tab_shield_heater_a,itm_leather_cap,itm_felt_hat,itm_felt_hat,
itm_shirt,itm_coarse_tunic,itm_leather_apron,itm_nomad_boots,itm_wrapping_boots],
def_attrib|level(4),wp(60),knows_common,swadian_face_younger_1, swadian_face_middle_2],
["swadian_militia","Swadian Militia","Swadian Militia",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield,0,0,fac_kingdom_1,
[itm_bolts,itm_spiked_club,itm_fighting_pick,itm_boar_spear,itm_hunting_crossbow,itm_tab_shield_heater_a,
itm_padded_cloth,itm_red_gambeson,itm_arming_cap,itm_arming_cap,itm_ankle_boots,itm_wrapping_boots],
def_attrib|level(9),wp(75),knows_common,swadian_face_young_1, swadian_face_old_2],
["swadian_footman","Swadian Footman","Swadian Footmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield,0,0,fac_kingdom_1,
[itm_spear,itm_fighting_pick,itm_sword_medieval_b_small,itm_sword_medieval_a,itm_tab_shield_heater_b,
itm_mail_with_tunic_red,itm_ankle_boots,itm_mail_coif,itm_norman_helmet],
def_attrib|level(14),wp_melee(85),knows_common|knows_ironflesh_2|knows_shield_2|knows_athletics_2|knows_power_strike_2,swadian_face_young_1, swadian_face_old_2],
["swadian_infantry","Swadian Infantry","Swadian Infantry",tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_1,
[itm_pike,itm_fighting_pick,itm_bastard_sword_a,itm_sword_medieval_a,itm_sword_medieval_b_small,itm_tab_shield_heater_c,
itm_mail_with_surcoat,itm_haubergeon,itm_mail_chausses,itm_leather_boots,itm_segmented_helmet,itm_flat_topped_helmet,itm_helmet_with_neckguard],
def_attrib|level(20),wp_melee(105),knows_common|knows_riding_3|knows_ironflesh_2|knows_power_strike_2|knows_shield_3|knows_athletics_3,swadian_face_middle_1, swadian_face_old_2],
["swadian_sergeant","Swadian Sergeant","Swadian Sergeants",tf_mounted|tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_1,
[itm_awlpike,itm_awlpike_long,itm_bastard_sword_b,itm_morningstar,itm_sword_medieval_c,itm_tab_shield_heater_d,
itm_coat_of_plates,itm_brigandine_red,itm_mail_boots,itm_iron_greaves,itm_flat_topped_helmet,itm_guard_helmet,itm_mail_mittens,itm_gauntlets],
def_attrib|level(25),wp_melee(135),knows_common|knows_shield_4|knows_ironflesh_4|knows_power_strike_4|knows_athletics_4,swadian_face_middle_1, swadian_face_older_2],
["swadian_skirmisher","Swadian Skirmisher","Swadian Skirmishers",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_1,
[itm_bolts,itm_light_crossbow,itm_hunting_crossbow,itm_club,itm_voulge,itm_tab_shield_heater_a,
itm_red_gambeson,itm_padded_cloth,itm_ankle_boots,itm_arming_cap,itm_arming_cap],
def_attrib|level(14),wp(80),knows_common|knows_riding_2|knows_ironflesh_1,swadian_face_young_1, swadian_face_middle_2],
["swadian_crossbowman","Swadian Crossbowman","Swadian Crossbowmen",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_1,
[itm_bolts,itm_crossbow,itm_light_crossbow,itm_fighting_pick,itm_sword_medieval_a,itm_shortened_voulge,itm_tab_shield_heater_b,
itm_leather_jerkin,itm_red_gambeson,itm_leather_boots,itm_ankle_boots,itm_norman_helmet,itm_segmented_helmet],
def_attrib|level(19),wp_one_handed (90) | wp_two_handed (90) | wp_polearm (90) | wp_archery (90) | wp_crossbow (100) | wp_throwing (90),knows_common|knows_riding_2|knows_ironflesh_1|knows_athletics_1,swadian_face_young_1, swadian_face_old_2],
["swadian_sharpshooter","Swadian Sharpshooter","Swadian Sharpshooters",tf_guarantee_ranged|tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_1,
[itm_bolts,itm_arrows,itm_crossbow,itm_crossbow,itm_heavy_crossbow,itm_sword_medieval_b_small,itm_sword_medieval_a,itm_long_voulge,itm_tab_shield_heater_c,
itm_haubergeon,itm_arena_armor_red,itm_leather_boots,itm_mail_chausses,itm_kettle_hat,itm_helmet_with_neckguard,itm_leather_gloves],
str_14 | agi_10 | int_4 | cha_4|level(24),wp_one_handed (100) | wp_two_handed (100) | wp_polearm (100) | wp_archery (100) | wp_crossbow (120) | wp_throwing (100),knows_common|knows_power_draw_3|knows_ironflesh_1|knows_power_strike_1|knows_athletics_2,swadian_face_middle_1, swadian_face_older_2],
["swadian_man_at_arms","Swadian Man at Arms","Swadian Men at Arms",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield,0,0,fac_kingdom_1,
[itm_lance,itm_fighting_pick,itm_bastard_sword_b,itm_sword_medieval_b,itm_sword_medieval_c_small,itm_tab_shield_heater_cav_a,
itm_haubergeon,itm_mail_with_surcoat,itm_mail_chausses,itm_norman_helmet,itm_mail_coif,itm_flat_topped_helmet,itm_helmet_with_neckguard,itm_warhorse,itm_warhorse,itm_hunter],
def_attrib|level(21),wp_melee(100),knows_common|knows_riding_4|knows_ironflesh_2|knows_shield_2|knows_power_strike_3,swadian_face_young_1, swadian_face_old_2],
["swadian_knight","Swadian Knight","Swadian Knights",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield,0,0,fac_kingdom_1,
[itm_heavy_lance,itm_sword_two_handed_b,itm_sword_medieval_d_long,itm_morningstar,itm_morningstar,itm_sword_medieval_d_long,itm_tab_shield_heater_cav_b,
itm_coat_of_plates_red,itm_cuir_bouilli,itm_plate_boots,itm_guard_helmet,itm_great_helmet,itm_bascinet,itm_charger,itm_warhorse,itm_gauntlets,itm_mail_mittens],
def_attrib|level(28),wp_one_handed (150) | wp_two_handed (130) | wp_polearm (130) | wp_archery (75) | wp_crossbow (75) | wp_throwing (75),knows_common|knows_riding_5|knows_shield_5|knows_ironflesh_5|knows_power_strike_5,swadian_face_middle_1, swadian_face_older_2],
#SB : leather_jerkin -> arena_tunic_red
["swadian_messenger","Swadian Messenger","Swadian Messengers",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse|tf_guarantee_ranged,0,0,fac_kingdom_1,
[itm_sword_medieval_a,itm_arena_tunic_red,itm_leather_boots,itm_courser,itm_leather_gloves,itm_light_crossbow,itm_bolts],
str_7 | agi_21 | int_4 | cha_4|level(25),wp(130),knows_common|knows_riding_7|knows_horse_archery_5,swadian_face_young_1, swadian_face_old_2],
["swadian_deserter","Swadian Deserter","Swadian Deserters",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_deserters,
[itm_bolts,itm_light_crossbow,itm_hunting_crossbow,itm_dagger,itm_club,itm_shortened_voulge,itm_voulge,itm_long_voulge,itm_wooden_shield,itm_leather_jerkin,itm_padded_cloth,itm_hide_boots,itm_padded_coif,itm_nasal_helmet,itm_footman_helmet],
def_attrib|level(14),wp(80),knows_common|knows_riding_2|knows_ironflesh_1,swadian_face_young_1, swadian_face_old_2],
["swadian_prison_guard","Prison Guard","Prison Guards",tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_1,
[itm_awlpike,itm_pike,itm_great_sword,itm_morningstar,itm_sword_medieval_b,itm_tab_shield_heater_c,itm_coat_of_plates,itm_plate_armor,itm_plate_boots,itm_guard_helmet,itm_helmet_with_neckguard,itm_bascinet,itm_guard_helmet,itm_leather_gloves],
def_attrib|level(25),wp(130),knows_common|knows_shield_3|knows_ironflesh_3|knows_power_strike_3,swadian_face_young_1, swadian_face_old_2],
["swadian_castle_guard","Castle Guard","Castle Guards",tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_1,
[itm_awlpike,itm_pike,itm_great_sword,itm_morningstar,itm_sword_medieval_b,itm_tab_shield_heater_c,itm_tab_shield_heater_d,itm_coat_of_plates,itm_plate_armor,itm_plate_boots,itm_guard_helmet,itm_helmet_with_neckguard,itm_bascinet,itm_guard_helmet,itm_leather_gloves],
def_attrib|level(25),wp(130),knows_common|knows_shield_3|knows_ironflesh_3|knows_power_strike_3,swadian_face_young_1, swadian_face_old_2],
# Vaegir watchman?
["vaegir_recruit","Vaegir Recruit","Vaegir Recruits",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_2,
[itm_scythe,itm_hatchet,itm_cudgel,itm_axe,itm_stones,itm_tab_shield_kite_a, itm_tab_shield_kite_a,
itm_linen_tunic, itm_rawhide_coat,itm_nomad_boots],
def_attrib|level(4),wp(60),knows_common, vaegir_face_younger_1, vaegir_face_middle_2],
["vaegir_footman","Vaegir Footman","Vaegir Footmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield,0,0,fac_kingdom_2,
[itm_spiked_club,itm_hand_axe,itm_sword_viking_1,itm_two_handed_axe,itm_tab_shield_kite_a,itm_tab_shield_kite_b,itm_spear,itm_nomad_cap,itm_vaegir_fur_cap,itm_rawhide_coat,itm_nomad_armor,itm_nomad_boots],
def_attrib|level(9),wp(75),knows_common, vaegir_face_young_1, vaegir_face_middle_2],
["vaegir_skirmisher","Vaegir Skirmisher","Vaegir Skirmishers",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_2,
[itm_arrows,itm_spiked_mace,itm_axe,itm_sword_khergit_1,itm_short_bow,itm_short_bow,itm_hunting_bow,itm_javelin,itm_javelin,itm_steppe_cap,itm_nomad_cap,itm_leather_vest,itm_leather_vest,itm_nomad_armor,itm_nomad_boots],
str_10 | agi_5 | int_4 | cha_4|level(14),wp(60),knows_ironflesh_1|knows_power_draw_1|knows_power_throw_1,vaegir_face_young_1, vaegir_face_old_2],
["vaegir_archer","Vaegir Archer","Vaegir Archers",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_2,
[itm_arrows,itm_axe,itm_sword_khergit_1,itm_nomad_bow,itm_nomad_bow,itm_short_bow,
itm_leather_jerkin,itm_leather_vest,itm_nomad_boots,itm_vaegir_spiked_helmet,itm_vaegir_fur_helmet,itm_vaegir_fur_cap,itm_nomad_cap],
str_12 | agi_5 | int_4 | cha_4|level(19),wp_one_handed (70) | wp_two_handed (70) | wp_polearm (70) | wp_archery (110) | wp_crossbow (70) | wp_throwing (70),knows_ironflesh_1|knows_power_draw_3|knows_athletics_2|knows_power_throw_1,vaegir_face_young_1, vaegir_face_older_2],
["vaegir_marksman","Vaegir Marksman","Vaegir Marksmen",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_2,
[itm_barbed_arrows,itm_axe,itm_voulge,itm_sword_khergit_2,itm_strong_bow,itm_war_bow,itm_strong_bow,
itm_leather_vest,itm_studded_leather_coat,itm_leather_boots,itm_vaegir_lamellar_helmet,itm_vaegir_spiked_helmet,itm_vaegir_fur_helmet],
str_14 | agi_5 | int_4 | cha_4|level(24),wp_one_handed (80) | wp_two_handed (80) | wp_polearm (80) | wp_archery (140) | wp_crossbow (80) | wp_throwing (80),knows_ironflesh_2|knows_power_draw_5|knows_athletics_3|knows_power_throw_1,vaegir_face_young_1, vaegir_face_older_2],
["vaegir_veteran","Vaegir Veteran","Vaegir Veterans",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield,0,0,fac_kingdom_2,
[itm_spiked_mace,itm_two_handed_axe,itm_sword_viking_1,itm_tab_shield_kite_b,itm_tab_shield_kite_c,itm_spear,
itm_steppe_cap,itm_vaegir_spiked_helmet,itm_vaegir_fur_helmet,itm_vaegir_fur_cap,itm_leather_jerkin,itm_studded_leather_coat,itm_nomad_boots,itm_saddle_horse],
def_attrib|level(14),wp_melee(85),knows_athletics_2|knows_ironflesh_1|knows_power_strike_2|knows_shield_2,vaegir_face_young_1, vaegir_face_old_2],
["vaegir_infantry","Vaegir Infantry","Vaegir Infantries",tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_2,
[itm_pike,itm_two_handed_battle_axe_2,itm_sword_viking_2,itm_sword_khergit_2,itm_tab_shield_kite_c,itm_spear,
itm_mail_hauberk,itm_lamellar_vest,itm_leather_boots,itm_vaegir_lamellar_helmet,itm_vaegir_spiked_helmet,itm_vaegir_fur_helmet],
def_attrib|level(19),wp_melee(100),knows_athletics_3|knows_ironflesh_2|knows_power_strike_3|knows_shield_2,vaegir_face_young_1, vaegir_face_older_2],
["vaegir_guard","Vaegir Guard","Vaegir Guards",tf_mounted|tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_2,
[itm_ashwood_pike,itm_fighting_axe,itm_bardiche,itm_two_handed_battle_axe_2,itm_battle_axe,itm_fighting_axe,itm_tab_shield_kite_d,
itm_banded_armor,itm_lamellar_vest,itm_lamellar_armor,itm_mail_chausses,itm_iron_greaves,itm_vaegir_war_helmet,itm_vaegir_war_helmet,itm_vaegir_lamellar_helmet,itm_leather_gloves],
def_attrib|level(24),wp_melee(130),knows_riding_2|knows_athletics_4|knows_shield_2|knows_ironflesh_3|knows_power_strike_4,vaegir_face_middle_1, vaegir_face_older_2],
["vaegir_horseman","Vaegir Horseman","Vaegir Horsemen",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield,0,0,fac_kingdom_2,
[itm_battle_axe,itm_sword_khergit_2,itm_lance,itm_tab_shield_kite_cav_a,itm_spear,
itm_studded_leather_coat,itm_lamellar_vest,itm_leather_boots,itm_vaegir_lamellar_helmet,itm_vaegir_spiked_helmet,itm_vaegir_fur_helmet,itm_steppe_horse,itm_hunter],
def_attrib|level(21),wp(100),knows_riding_3|knows_ironflesh_3|knows_power_strike_3,vaegir_face_young_1, vaegir_face_older_2],
["vaegir_knight","Vaegir Knight","Vaegir Knights",tf_mounted|tf_guarantee_boots|tf_guarantee_gloves|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield,0,0,fac_kingdom_2,
[itm_bardiche,itm_great_bardiche,itm_war_axe,itm_fighting_axe,itm_lance,itm_lance,itm_tab_shield_kite_cav_b,
itm_banded_armor,itm_lamellar_vest,itm_lamellar_armor,itm_mail_boots,itm_plate_boots,itm_vaegir_war_helmet,itm_vaegir_war_helmet,itm_vaegir_lamellar_helmet,itm_hunter, itm_warhorse_steppe,itm_leather_gloves],
def_attrib|level(26),wp_one_handed (120) | wp_two_handed (140) | wp_polearm (120) | wp_archery (120) | wp_crossbow (120) | wp_throwing (120),knows_riding_4|knows_shield_2|knows_ironflesh_4|knows_power_strike_4,vaegir_face_middle_1, vaegir_face_older_2],
#SB : sword_medieval_b -> fighting_axe, itm_leather_jerkin -> itm_studded_leather_coat
["vaegir_messenger","Vaegir Messenger","Vaegir Messengers",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse|tf_guarantee_ranged,0,0,fac_kingdom_2,
[itm_fighting_axe,itm_studded_leather_coat,itm_leather_boots,itm_courser,itm_leather_gloves,itm_short_bow,itm_arrows],
str_7 | agi_21 | int_4 | cha_4|level(25),wp(130),knows_common|knows_riding_7|knows_horse_archery_5|knows_power_draw_5,vaegir_face_young_1, vaegir_face_older_2],
["vaegir_deserter","Vaegir Deserter","Vaegir Deserters",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_deserters,
[itm_arrows,itm_spiked_mace,itm_axe,itm_falchion,itm_short_bow,itm_short_bow,itm_hunting_bow,itm_javelin,itm_javelin,itm_steppe_cap,itm_nomad_cap,itm_leather_vest,itm_leather_vest,itm_nomad_armor,itm_nomad_boots],
str_10 | agi_5 | int_4 | cha_4|level(14),wp(80),knows_ironflesh_1|knows_power_draw_1,vaegir_face_young_1, vaegir_face_older_2],
["vaegir_prison_guard","Prison Guard","Prison Guards", tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_2,
[itm_ashwood_pike,itm_battle_fork,itm_bardiche,itm_battle_axe,itm_fighting_axe,itm_tab_shield_kite_b,itm_studded_leather_coat,itm_lamellar_armor,itm_mail_chausses,itm_iron_greaves,itm_nordic_helmet,itm_nordic_helmet,itm_nordic_helmet,itm_spiked_helmet,itm_leather_gloves],
def_attrib|level(24),wp(130),knows_athletics_3|knows_shield_2|knows_ironflesh_3,vaegir_face_middle_1, vaegir_face_older_2],
["vaegir_castle_guard","Castle Guard","Castle Guards", tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_2,
[itm_ashwood_pike,itm_battle_fork,itm_bardiche,itm_battle_axe,itm_fighting_axe,itm_tab_shield_kite_d,itm_studded_leather_coat,itm_lamellar_armor,itm_mail_chausses,itm_iron_greaves,itm_nordic_helmet,itm_nordic_helmet,itm_nordic_helmet,itm_spiked_helmet,itm_leather_gloves],
def_attrib|level(24),wp(130),knows_athletics_3|knows_shield_2|knows_ironflesh_3,vaegir_face_middle_1, vaegir_face_older_2],
["khergit_tribesman","Khergit Tribesman","Khergit Tribesmen",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_3,
[itm_arrows,itm_club,itm_spear,itm_hunting_bow,
itm_steppe_cap,itm_nomad_cap_b,itm_leather_vest,itm_steppe_armor,itm_nomad_boots,itm_khergit_leather_boots],
def_attrib|level(5),wp(50),knows_common|knows_riding_3|knows_power_draw_2|knows_horse_archery_2,khergit_face_younger_1, khergit_face_old_2],
["khergit_skirmisher","Khergit Skirmisher","Khergit Skirmishers",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_horse|tf_guarantee_ranged,0,0,fac_kingdom_3,
[itm_arrows,itm_sword_khergit_1,itm_winged_mace,itm_spear,itm_nomad_bow,itm_javelin,itm_tab_shield_small_round_a,
itm_steppe_cap,itm_nomad_cap_b,itm_leather_steppe_cap_a,itm_khergit_armor,itm_steppe_armor,itm_leather_vest,itm_nomad_boots,itm_khergit_leather_boots,itm_steppe_horse,itm_saddle_horse],
def_attrib|level(10),wp_one_handed (60) | wp_two_handed (60) | wp_polearm (60) | wp_archery (80) | wp_crossbow (60) | wp_throwing (80),knows_common|knows_riding_4|knows_power_draw_3|knows_power_throw_1|knows_horse_archery_3,khergit_face_younger_1, khergit_face_old_2],
["khergit_horseman","Khergit Horseman","Khergit Horsemen",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_ranged|tf_guarantee_horse,0,0,fac_kingdom_3,
[itm_arrows,itm_light_lance,itm_nomad_bow,itm_sword_khergit_2,itm_tab_shield_small_round_a,itm_tab_shield_small_round_b,itm_spear,
itm_leather_steppe_cap_a, itm_leather_steppe_cap_b,itm_nomad_robe,itm_nomad_vest,itm_khergit_leather_boots,itm_hide_boots,itm_spiked_helmet,itm_nomad_cap,itm_steppe_horse,itm_hunter],
def_attrib|level(14),wp(80),knows_common|knows_riding_5|knows_power_draw_4|knows_ironflesh_2|knows_power_throw_2|knows_horse_archery_3|knows_shield_1,khergit_face_young_1, khergit_face_older_2],
["khergit_horse_archer","Khergit Horse Archer","Khergit Horse Archers",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_ranged|tf_guarantee_horse,0,0,fac_kingdom_3,
[itm_arrows,itm_sword_khergit_2,itm_winged_mace,itm_spear,itm_khergit_bow,itm_tab_shield_small_round_a,itm_tab_shield_small_round_a,itm_tab_shield_small_round_b,itm_bodkin_arrows,itm_arrows,itm_javelin,
itm_leather_steppe_cap_b,itm_nomad_cap_b,itm_tribal_warrior_outfit,itm_nomad_robe,itm_khergit_leather_boots,itm_tab_shield_small_round_a,itm_tab_shield_small_round_b,itm_steppe_horse],
def_attrib|level(14),wp_one_handed (80) | wp_two_handed (80) | wp_polearm (80) | wp_archery (110) | wp_crossbow (80) | wp_throwing (110),knows_riding_5|knows_power_draw_3|knows_ironflesh_1|knows_horse_archery_4|knows_power_throw_3,khergit_face_young_1, khergit_face_older_2],
["khergit_veteran_horse_archer","Khergit Veteran Horse Archer","Khergit Veteran Horse Archers",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_3,
[itm_sword_khergit_3,itm_winged_mace,itm_spear,itm_khergit_bow,itm_khergit_bow,itm_khergit_bow,
itm_nomad_bow,itm_arrows,itm_khergit_arrows,itm_khergit_arrows,itm_khergit_arrows,itm_javelin,itm_tab_shield_small_round_b,itm_tab_shield_small_round_c,
itm_khergit_cavalry_helmet,itm_khergit_cavalry_helmet,itm_leather_warrior_cap,itm_lamellar_vest_khergit,itm_tribal_warrior_outfit,itm_khergit_leather_boots,itm_leather_gloves,itm_steppe_horse,itm_courser],
def_attrib|level(21),wp_one_handed (90) | wp_two_handed (90) | wp_polearm (90) | wp_archery (130) | wp_crossbow (90) | wp_throwing (130),knows_riding_7|knows_power_draw_5|knows_ironflesh_3|knows_horse_archery_7|knows_power_throw_4|knows_shield_1,khergit_face_middle_1, khergit_face_older_2],
["khergit_lancer","Khergit Lancer","Khergit Lancers",tf_mounted|tf_guarantee_all_wo_ranged,0,0,fac_kingdom_3,
[itm_sword_khergit_4,itm_spiked_mace,itm_one_handed_war_axe_b,itm_hafted_blade_a,itm_hafted_blade_b,itm_heavy_lance,itm_lance,
itm_khergit_guard_helmet,itm_khergit_cavalry_helmet,itm_khergit_war_helmet,itm_lamellar_vest_khergit,itm_lamellar_armor,itm_khergit_leather_boots,itm_splinted_leather_greaves,itm_leather_gloves,itm_scale_gauntlets,itm_tab_shield_small_round_b,itm_tab_shield_small_round_c,itm_courser,itm_warhorse_steppe,itm_warhorse_steppe,itm_warhorse_steppe],
def_attrib|level(23),wp_one_handed (110) | wp_two_handed (110) | wp_polearm (150) | wp_archery (110) | wp_crossbow (110) | wp_throwing (110),knows_riding_7|knows_power_strike_4|knows_power_draw_4|knows_power_throw_2|knows_ironflesh_4|knows_horse_archery_1|knows_shield_2,khergit_face_middle_1, khergit_face_older_2],
#SB : leather_jerkin-> nomad_robe, short_bow -> khergit_bow, arrows -> khergit_arrows
["khergit_messenger","Khergit Messenger","Khergit Messengers",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse|tf_guarantee_ranged,0,0,fac_kingdom_3,
[itm_sword_khergit_2,itm_nomad_robe,itm_leather_boots,itm_courser,itm_leather_gloves,itm_khergit_bow,itm_khergit_arrows],
str_7 | agi_21 | int_4 | cha_4|level(25),wp(125),knows_common|knows_riding_7|knows_horse_archery_5|knows_power_draw_5,khergit_face_young_1, khergit_face_older_2],
["khergit_deserter","Khergit Deserter","Khergit Deserters",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_deserters,
[itm_arrows,itm_spiked_mace,itm_axe,itm_sword_khergit_1,itm_short_bow,itm_short_bow,itm_hunting_bow,itm_javelin,itm_javelin,itm_steppe_cap,itm_nomad_cap_b,itm_khergit_armor,itm_steppe_armor,itm_tribal_warrior_outfit,itm_nomad_boots],
str_10 | agi_5 | int_4 | cha_4|level(14),wp(80),knows_ironflesh_1|knows_power_draw_1,khergit_face_young_1, khergit_face_older_2],
["khergit_prison_guard","Prison Guard","Prison Guards", tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_3,
[itm_sword_khergit_3,itm_tab_shield_small_round_b,itm_tab_shield_small_round_a,itm_lamellar_vest_khergit,itm_lamellar_armor,itm_khergit_leather_boots,itm_iron_greaves,itm_khergit_guard_helmet,itm_khergit_cavalry_helmet,itm_leather_warrior_cap],
def_attrib|level(24),wp(130),knows_athletics_5|knows_shield_2|knows_ironflesh_5,khergit_face_middle_1, khergit_face_older_2],
["khergit_castle_guard","Castle Guard","Castle Guards", tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_3,
[itm_sword_khergit_4,itm_tab_shield_small_round_b,itm_tab_shield_small_round_a,itm_lamellar_vest_khergit,itm_lamellar_armor,itm_khergit_leather_boots,itm_iron_greaves,itm_khergit_guard_helmet,itm_khergit_cavalry_helmet,itm_leather_warrior_cap],
def_attrib|level(24),wp(130),knows_athletics_5|knows_shield_2|knows_ironflesh_5,khergit_face_middle_1, khergit_face_older_2],
["nord_recruit","Nord Recruit","Nord Recruits",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_4,
[itm_axe,itm_hatchet,itm_spear,itm_tab_shield_round_a,itm_tab_shield_round_a,
itm_blue_tunic,itm_coarse_tunic,itm_hide_boots,itm_nomad_boots],
def_attrib|level(6),wp(50),knows_power_strike_1|knows_power_throw_1|knows_riding_1|knows_athletics_1,nord_face_younger_1, nord_face_old_2],
["nord_footman","Nord Footman","Nord Footmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield,0,0,fac_kingdom_4,
[itm_fighting_axe,itm_one_handed_war_axe_a,itm_spear,itm_tab_shield_round_a,itm_tab_shield_round_b,itm_javelin,itm_throwing_axes,
itm_leather_cap,itm_skullcap,itm_nomad_vest,itm_leather_boots,itm_nomad_boots],
def_attrib|level(10),wp(70),knows_ironflesh_2|knows_power_strike_2|knows_power_throw_2|knows_riding_2|knows_athletics_2|knows_shield_1,nord_face_young_1, nord_face_old_2],
["nord_trained_footman","Nord Trained Footman","Nord Trained Footmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield|tf_guarantee_helmet,0,0,fac_kingdom_4,
[itm_one_handed_war_axe_a,itm_one_handed_war_axe_b,itm_one_handed_battle_axe_a,itm_tab_shield_round_b,
itm_skullcap,itm_nasal_helmet,itm_nordic_footman_helmet,itm_byrnie,itm_studded_leather_coat,itm_leather_boots],
def_attrib|level(14),wp(100),knows_ironflesh_3|knows_power_strike_3|knows_power_throw_2|knows_riding_2|knows_athletics_3|knows_shield_2,nord_face_young_1, nord_face_old_2],
["nord_warrior","Nord Warrior","Nord Warriors",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield|tf_guarantee_helmet,0,0,fac_kingdom_4,
[itm_sword_viking_1,itm_one_handed_war_axe_b,itm_one_handed_battle_axe_a,itm_tab_shield_round_c,itm_javelin,
itm_nordic_footman_helmet,itm_nordic_fighter_helmet,itm_mail_shirt,itm_studded_leather_coat,itm_hunter_boots,itm_leather_boots],
def_attrib|level(19),wp(115),knows_ironflesh_4|knows_power_strike_4|knows_power_throw_3|knows_riding_2|knows_athletics_4|knows_shield_3,nord_face_young_1, nord_face_older_2],
["nord_veteran","Nord Veteran","Nord Veterans",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield|tf_guarantee_helmet,0,0,fac_kingdom_4,
[itm_sword_viking_2,itm_sword_viking_2_small,itm_one_handed_battle_axe_b,itm_spiked_mace,itm_tab_shield_round_d,itm_javelin,itm_throwing_axes,
itm_nordic_helmet,itm_nordic_fighter_helmet,itm_mail_hauberk,itm_mail_shirt,itm_splinted_leather_greaves,itm_leather_boots,itm_leather_gloves],
def_attrib|level(24),wp(145),knows_ironflesh_5|knows_power_strike_5|knows_power_throw_4|knows_riding_3|knows_athletics_5|knows_shield_4,nord_face_young_1, nord_face_older_2],
["nord_champion","Nord Huscarl","Nord Huscarls",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield|tf_guarantee_helmet,0,0,fac_kingdom_4,
[itm_sword_viking_3,itm_sword_viking_3_small,itm_great_axe,itm_one_handed_battle_axe_c,itm_tab_shield_round_e,itm_throwing_spears,itm_heavy_throwing_axes,itm_heavy_throwing_axes,
itm_nordic_huscarl_helmet,itm_nordic_warlord_helmet,itm_banded_armor,itm_mail_boots,itm_mail_chausses,itm_mail_mittens],
def_attrib|level(28),wp(170),knows_ironflesh_7|knows_power_strike_7|knows_power_throw_5|knows_riding_2|knows_athletics_7|knows_shield_6,nord_face_middle_1, nord_face_older_2],
["nord_huntsman","Nord Huntsman","Nord Huntsmen",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_4,
[itm_arrows,itm_rawhide_coat,itm_hatchet,itm_hunting_bow,itm_hide_boots],
str_10 | agi_5 | int_4 | cha_4|level(11),wp_one_handed (60) | wp_two_handed (60) | wp_polearm (60) | wp_archery (70) | wp_crossbow (60) | wp_throwing (60),knows_ironflesh_1|knows_power_draw_1|knows_athletics_2,nord_face_young_1, nord_face_old_2],
["nord_archer","Nord Archer","Nord Archers",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_4,
[itm_arrows,itm_axe,itm_short_bow,itm_padded_leather,itm_leather_jerkin,itm_padded_leather,itm_leather_boots,itm_nasal_helmet,itm_nordic_archer_helmet,itm_leather_cap],
str_11 | agi_5 | int_4 | cha_4|level(15),wp_one_handed (80) | wp_two_handed (80) | wp_polearm (80) | wp_archery (95) | wp_crossbow (80) | wp_throwing (80),knows_ironflesh_2|knows_power_draw_3|knows_athletics_5,nord_face_young_1, nord_face_old_2],
["nord_veteran_archer","Nord Veteran Archer","Nord Veteran Archers",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_4,
[itm_bodkin_arrows,itm_sword_viking_2,itm_fighting_axe,itm_two_handed_axe,itm_long_bow,itm_mail_shirt,itm_mail_shirt,itm_byrnie,itm_leather_boots,itm_nordic_archer_helmet,itm_nordic_veteran_archer_helmet],
str_12 | agi_5 | int_4 | cha_4|level(19),wp_one_handed (95) | wp_two_handed (95) | wp_polearm (95) | wp_archery (120) | wp_crossbow (95) | wp_throwing (95),knows_power_strike_3|knows_ironflesh_4|knows_power_draw_5|knows_athletics_7,nord_face_middle_1, nord_face_older_2],
#SB : leather_jerkin -> arena_tunic_blue, short_bow -> long_bow
["nord_messenger","Nord Messenger","Nord Messengers",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse|tf_guarantee_ranged,0,0,fac_kingdom_4,
[itm_sword_viking_2,itm_arena_tunic_blue,itm_leather_boots,itm_courser,itm_leather_gloves,itm_long_bow,itm_arrows],
str_7 | agi_21 | int_4 | cha_4|level(25),wp(130),knows_common|knows_riding_7|knows_horse_archery_5|knows_power_draw_5,nord_face_young_1, nord_face_older_2],
["nord_deserter","Nord Deserter","Nord Deserters",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_deserters,
[itm_arrows,itm_spiked_mace,itm_axe,itm_falchion,itm_short_bow,itm_short_bow,itm_hunting_bow,itm_javelin,itm_javelin,itm_steppe_cap,itm_nomad_cap,itm_leather_vest,itm_leather_vest,itm_nomad_armor,itm_nomad_boots],
str_10 | agi_5 | int_4 | cha_4|level(14),wp(80),knows_ironflesh_1|knows_power_draw_1,nord_face_young_1, nord_face_older_2],
["nord_prison_guard","Prison Guard","Prison Guards", tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_4,
[itm_ashwood_pike,itm_battle_fork,itm_battle_axe,itm_fighting_axe,itm_tab_shield_round_d,itm_mail_hauberk,itm_mail_chausses,itm_iron_greaves,itm_nordic_helmet,itm_nordic_helmet,itm_nordic_helmet,itm_spiked_helmet,itm_leather_gloves],
def_attrib|level(24),wp(130),knows_athletics_3|knows_shield_2|knows_ironflesh_3,nord_face_middle_1, nord_face_older_2],
["nord_castle_guard","Castle Guard","Castle Guards", tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_4,
[itm_ashwood_pike,itm_battle_fork,itm_battle_axe,itm_fighting_axe,itm_tab_shield_round_d,itm_tab_shield_round_e,itm_mail_hauberk,itm_heraldic_mail_with_tabard,itm_mail_chausses,itm_iron_greaves,itm_nordic_helmet,itm_nordic_helmet,itm_nordic_helmet,itm_spiked_helmet,itm_leather_gloves],
def_attrib|level(24),wp(130),knows_athletics_3|knows_shield_2|knows_ironflesh_3,nord_face_middle_1, nord_face_older_2],
["rhodok_tribesman","Rhodok Tribesman","Rhodok Tribesmen",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_5,
[itm_pitch_fork,itm_tab_shield_pavise_a,
itm_shirt,itm_coarse_tunic,itm_wrapping_boots,itm_nomad_boots,itm_head_wrappings,itm_straw_hat],
def_attrib|level(4),wp(55),knows_common|knows_power_draw_2|knows_ironflesh_1,rhodok_face_younger_1, rhodok_face_old_2],
["rhodok_spearman","Rhodok Spearman","Rhodok Spearmen",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_5,
[itm_spear,itm_pike,itm_spear,itm_tab_shield_pavise_a,itm_falchion,
itm_felt_hat_b,itm_common_hood,itm_leather_armor,itm_arena_tunic_green,itm_wrapping_boots,itm_nomad_boots],
def_attrib|level(9),wp(80),knows_common|knows_ironflesh_2|knows_shield_1|knows_power_strike_2|knows_athletics_1,rhodok_face_young_1, rhodok_face_old_2],
["rhodok_trained_spearman","Rhodok Trained Spearman","Rhodok Trained Spearmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield,0,0,fac_kingdom_5,
[itm_pike,itm_war_spear,itm_tab_shield_pavise_b,
itm_footman_helmet,itm_padded_coif,itm_aketon_green,itm_aketon_green,itm_ragged_outfit,itm_nomad_boots,itm_leather_boots],
def_attrib|level(14),wp_one_handed (105) | wp_two_handed (105) | wp_polearm (115) | wp_archery (105) | wp_crossbow (105) | wp_throwing (105),knows_common|knows_ironflesh_3|knows_shield_2|knows_power_strike_2|knows_athletics_2,rhodok_face_young_1, rhodok_face_older_2],
["rhodok_veteran_spearman","Rhodok Veteran Spearman","Rhodok Veteran Spearmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_shield,0,0,fac_kingdom_5,
[itm_ashwood_pike,itm_glaive,itm_tab_shield_pavise_c,
itm_kettle_hat,itm_mail_coif,itm_mail_with_tunic_green,itm_leather_boots,itm_splinted_leather_greaves,itm_leather_gloves],
def_attrib|level(19),wp_one_handed (115) | wp_two_handed (115) | wp_polearm (130) | wp_archery (115) | wp_crossbow (115) | wp_throwing (115),knows_common|knows_ironflesh_5|knows_shield_3|knows_power_strike_4|knows_athletics_3,rhodok_face_young_1, rhodok_face_older_2],
["rhodok_sergeant","Rhodok Sergeant","Rhodok Sergeants",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_shield|tf_guarantee_gloves,0,0,fac_kingdom_5,
[itm_glaive,itm_military_hammer,itm_military_cleaver_c,itm_tab_shield_pavise_d,
itm_full_helm, itm_bascinet_3,itm_bascinet_2,itm_surcoat_over_mail,itm_surcoat_over_mail,itm_heraldic_mail_with_surcoat,itm_mail_chausses,itm_leather_gloves,itm_mail_mittens],
def_attrib|level(25),wp_one_handed (130) | wp_two_handed (115) | wp_polearm (155) | wp_archery (115) | wp_crossbow (115) | wp_throwing (115),knows_common|knows_ironflesh_6|knows_shield_5|knows_power_strike_5|knows_athletics_5,rhodok_face_middle_1, rhodok_face_older_2],
["rhodok_crossbowman","Rhodok Crossbowman","Rhodok Crossbowmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_ranged,0,0,fac_kingdom_5,
[itm_sword_medieval_a,itm_falchion,itm_club_with_spike_head,itm_tab_shield_pavise_a,itm_crossbow,itm_bolts,
itm_arena_tunic_green,itm_felt_hat_b,itm_common_hood,itm_nomad_boots,itm_wrapping_boots],
def_attrib|level(10),wp(85),knows_common|knows_ironflesh_2|knows_shield_1|knows_power_strike_1|knows_athletics_2,rhodok_face_young_1, rhodok_face_older_2],
["rhodok_trained_crossbowman","Rhodok Trained Crossbowman","Rhodok Trained Crossbowmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_ranged|tf_guarantee_shield,0,0,fac_kingdom_5,
[itm_sword_medieval_a,itm_sword_medieval_b_small,itm_club_with_spike_head,itm_tab_shield_pavise_a,itm_crossbow,itm_bolts,
itm_common_hood,itm_leather_armor,itm_arena_armor_green,itm_nomad_boots],
def_attrib|level(15),wp_one_handed (90) | wp_two_handed (90) | wp_polearm (90) | wp_archery (90) | wp_crossbow (105) | wp_throwing (90),knows_common|knows_ironflesh_1|knows_shield_2|knows_power_strike_2|knows_athletics_3,rhodok_face_young_1, rhodok_face_older_2],
["rhodok_veteran_crossbowman","Rhodok Veteran Crossbowman","Rhodok Veteran Crossbowmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_ranged|tf_guarantee_shield,0,0,fac_kingdom_5,
[itm_sword_medieval_a,itm_sword_medieval_b_small,itm_fighting_pick,itm_club_with_spike_head,itm_tab_shield_pavise_b,itm_tab_shield_pavise_c,itm_heavy_crossbow,itm_bolts,
itm_leather_cap,itm_felt_hat_b,itm_aketon_green,itm_leather_boots],
def_attrib|level(20),wp_one_handed (100) | wp_two_handed (100) | wp_polearm (100) | wp_archery (100) | wp_crossbow (120) | wp_throwing (100),knows_common|knows_ironflesh_2|knows_shield_3|knows_power_strike_3|knows_athletics_4,rhodok_face_middle_1, rhodok_face_older_2],
["rhodok_sharpshooter","Rhodok Sharpshooter","Rhodok Sharpshooters",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_ranged|tf_guarantee_shield,0,0,fac_kingdom_5,
[itm_sword_medieval_b,itm_military_pick,itm_military_hammer,itm_tab_shield_pavise_c,itm_sniper_crossbow,itm_steel_bolts,
itm_kettle_hat,itm_mail_coif,itm_mail_with_tunic_green,itm_leather_boots,itm_splinted_leather_greaves],
str_14 | agi_5 | int_4 | cha_4|level(25),wp_one_handed (110) | wp_two_handed (110) | wp_polearm (110) | wp_archery (100) | wp_crossbow (140) | wp_throwing (100),knows_common|knows_ironflesh_3|knows_shield_4|knows_power_strike_4|knows_athletics_6,rhodok_face_middle_1, rhodok_face_older_2],
#SB : short_bow, arrows -> light_crossbow & bolts, leather_jerkin -> arena_tunic_green
["rhodok_messenger","Rhodok Messenger","Rhodok Messengers",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse|tf_guarantee_ranged,0,0,fac_kingdom_5,
[itm_sword_medieval_b,itm_arena_tunic_green,itm_leather_boots,itm_courser,itm_leather_gloves,itm_light_crossbow,itm_steel_bolts],
def_attrib|agi_21|level(25),wp(130),knows_common|knows_riding_7|knows_horse_archery_5|knows_power_draw_5,rhodok_face_middle_1, rhodok_face_older_2],
["rhodok_deserter","Rhodok Deserter","Rhodok Deserters",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_deserters,
[itm_arrows,itm_spiked_mace,itm_axe,itm_falchion,itm_short_bow,itm_short_bow,itm_hunting_bow,itm_javelin,itm_javelin,itm_steppe_cap,itm_nomad_cap,itm_leather_vest,itm_leather_vest,itm_nomad_armor,itm_nomad_boots],
def_attrib|str_10|level(14),wp(80),knows_ironflesh_1|knows_power_draw_1,rhodok_face_middle_1, rhodok_face_older_2],
["rhodok_prison_guard","Prison Guard","Prison Guards", tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_5,
[itm_ashwood_pike,itm_battle_fork,itm_battle_axe,itm_fighting_axe,itm_tab_shield_pavise_b,itm_bascinet_2,itm_surcoat_over_mail,itm_mail_chausses,itm_iron_greaves,itm_leather_gloves],
def_attrib|level(24),wp(130),knows_athletics_3|knows_shield_2|knows_ironflesh_3,rhodok_face_middle_1, rhodok_face_older_2],
["rhodok_castle_guard","Castle Guard","Castle Guards", tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_5,
[itm_ashwood_pike,itm_battle_fork,itm_battle_axe,itm_fighting_axe,itm_tab_shield_pavise_c,itm_bascinet_2,itm_surcoat_over_mail,itm_mail_chausses,itm_iron_greaves,itm_leather_gloves],
def_attrib|level(24),wp(130),knows_athletics_3|knows_shield_2|knows_ironflesh_3,rhodok_face_middle_1, rhodok_face_older_2],
#peasant - retainer - footman - man-at-arms - knight
["sarranid_recruit","Sarranid Recruit","Sarranid Recruits",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_6,
[itm_scythe,itm_hatchet,itm_pickaxe,itm_club,itm_stones,itm_tab_shield_heater_a,itm_sarranid_felt_hat,itm_turban,itm_sarranid_boots_a,
itm_sarranid_cloth_robe, itm_sarranid_cloth_robe_b],
def_attrib|level(4),wp(60),knows_common|knows_athletics_1,swadian_face_younger_1, swadian_face_middle_2],
["sarranid_footman","Sarranid Footman","Sarranid Footmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield,0,0,fac_kingdom_6,
[itm_bamboo_spear,itm_arabian_sword_a,itm_tab_shield_kite_a,itm_desert_turban,
itm_skirmisher_armor,itm_turban,itm_sarranid_boots_a,itm_sarranid_boots_b],
def_attrib|level(9),wp(75),knows_common|knows_athletics_2,swadian_face_young_1, swadian_face_old_2],
["sarranid_veteran_footman","Sarranid Veteran Footman","Sarranid Veteran Footmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield|tf_guarantee_helmet,0,0,fac_kingdom_6,
[itm_bamboo_spear,itm_arabian_sword_a,itm_arabian_sword_b,itm_tab_shield_kite_b,
itm_sarranid_boots_b,itm_sarranid_warrior_cap,itm_sarranid_leather_armor,itm_jarid,itm_arabian_sword_a,itm_mace_3],
def_attrib|level(14),wp_one_handed (85) | wp_two_handed (85) | wp_polearm (85) | wp_archery (75) | wp_crossbow (75) | wp_throwing (100),knows_common|knows_athletics_2|knows_power_throw_2|knows_ironflesh_1|knows_power_strike_2|knows_shield_2,swadian_face_young_1, swadian_face_old_2],
["sarranid_infantry","Sarranid Infantry","Sarranid Infantries",tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_6,
[itm_sarranid_mail_shirt,itm_sarranid_mail_coif,itm_jarid,itm_sarranid_boots_c,itm_sarranid_boots_b,itm_sarranid_axe_a,itm_arabian_sword_b,itm_mace_3,itm_spear,itm_tab_shield_kite_c],
def_attrib|level(20),wp_one_handed (105) | wp_two_handed (105) | wp_polearm (105) | wp_archery (75) | wp_crossbow (75) | wp_throwing (110),knows_common|knows_riding_3|knows_ironflesh_2|knows_power_strike_3|knows_shield_3 | knows_power_throw_3|knows_athletics_3,swadian_face_middle_1, swadian_face_old_2],
["sarranid_guard","Sarranid Guard","Sarranid Guards",tf_mounted|tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_6,
[itm_military_pick,itm_sarranid_two_handed_axe_a,itm_jarid,itm_scimitar_b,itm_war_spear,itm_mace_4,itm_sarranid_boots_d, itm_sarranid_boots_c,itm_arabian_armor_b,itm_sarranid_mail_coif,itm_sarranid_veiled_helmet,itm_mail_mittens,itm_leather_gloves,itm_tab_shield_kite_d],
def_attrib|level(25),wp_one_handed (135) | wp_two_handed (135) | wp_polearm (135) | wp_archery (75) | wp_crossbow (75) | wp_throwing (140),knows_common|knows_shield_3|knows_ironflesh_3|knows_power_strike_4|knows_power_throw_4|knows_athletics_5,swadian_face_middle_1, swadian_face_older_2],
["sarranid_skirmisher","Sarranid Skirmisher","Sarranid Skirmishers",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_6,
[itm_turban,itm_desert_turban,itm_skirmisher_armor,itm_jarid,itm_jarid,itm_arabian_sword_a,itm_spiked_club,itm_tab_shield_small_round_a,itm_sarranid_warrior_cap,itm_sarranid_boots_a],
def_attrib|level(14),wp(80),knows_common|knows_riding_2|knows_power_throw_2|knows_ironflesh_1|knows_athletics_3,swadian_face_young_1, swadian_face_middle_2],
["sarranid_archer","Sarranid Archer","Sarranid Archers",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_6,
[itm_arrows,itm_arrows,itm_nomad_bow,itm_arabian_sword_a,itm_archers_vest,itm_sarranid_boots_b,itm_sarranid_helmet1,itm_sarranid_warrior_cap,itm_turban,itm_desert_turban],
def_attrib|level(19),wp_one_handed (90) | wp_two_handed (90) | wp_polearm (90) | wp_archery (100) | wp_crossbow (90) | wp_throwing (100),knows_common|knows_power_draw_3|knows_ironflesh_2|knows_power_throw_3|knows_athletics_4,swadian_face_young_1, swadian_face_old_2],
["sarranid_master_archer","Sarranid Master Archer","Sarranid Master Archers",tf_guarantee_ranged|tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_6,
[itm_barbed_arrows,itm_barbed_arrows,itm_arabian_sword_b,itm_mace_3,itm_strong_bow,itm_nomad_bow,
itm_arabian_armor_b,itm_sarranid_boots_c,itm_sarranid_boots_b,itm_sarranid_mail_coif],
str_14 | agi_5 | int_4 | cha_4|level(24),wp_one_handed (100) | wp_two_handed (100) | wp_polearm (100) | wp_archery (130) | wp_crossbow (100) | wp_throwing (130),knows_common|knows_power_draw_4|knows_power_throw_4|knows_ironflesh_3|knows_athletics_5,swadian_face_middle_1, swadian_face_older_2],
["sarranid_horseman","Sarranid Horseman","Sarranid Horsemen",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield,0,0,fac_kingdom_6,
[itm_lance,itm_arabian_sword_b,itm_scimitar_b,itm_mace_4,itm_tab_shield_small_round_b,
itm_sarranid_mail_shirt,itm_sarranid_boots_c,itm_sarranid_boots_b, itm_sarranid_horseman_helmet,itm_leather_gloves,itm_arabian_horse_a,itm_courser,itm_hunter],
def_attrib|level(20),wp_melee(100),knows_common|knows_riding_4|knows_ironflesh_2|knows_shield_2|knows_power_strike_3,swadian_face_young_1, swadian_face_old_2],
["sarranid_mamluke","Sarranid Mamluke","Sarranid Mamlukes",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield,0,0,fac_kingdom_6,
[itm_heavy_lance,itm_scimitar_b,itm_sarranid_two_handed_mace_1,itm_sarranid_cavalry_sword,itm_tab_shield_small_round_c,
itm_mamluke_mail,itm_sarranid_boots_d,itm_sarranid_boots_c,itm_sarranid_veiled_helmet,itm_arabian_horse_b,itm_warhorse_sarranid,itm_scale_gauntlets,itm_mail_mittens],
def_attrib|level(27),wp_one_handed (150) | wp_two_handed (130) | wp_polearm (130) | wp_archery (75) | wp_crossbow (75) | wp_throwing (110),knows_common|knows_riding_6|knows_shield_5|knows_ironflesh_5|knows_power_strike_5,swadian_face_middle_1, swadian_face_older_2],
#SB : fix misc filler troop's faction
["sarranid_messenger","Sarranid Messenger","Sarranid Messengers",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse|tf_guarantee_ranged,0,0,fac_kingdom_6,
[itm_lance,itm_arabian_sword_b,itm_scimitar_b,itm_mace_4,itm_tab_shield_small_round_b,
itm_sarranid_mail_shirt,itm_mail_chausses,itm_sarranid_helmet1,itm_courser,itm_hunter],
def_attrib|level(20),wp_melee(100),knows_common|knows_riding_7|knows_ironflesh_2|knows_shield_2|knows_power_strike_3,swadian_face_young_1, swadian_face_old_2],
["sarranid_deserter","Sarranid Deserter","Sarranid Deserters",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_deserters,
[itm_lance,itm_arabian_sword_b,itm_scimitar_b,itm_mace_4,itm_tab_shield_small_round_b,
itm_sarranid_mail_shirt,itm_mail_chausses,itm_desert_turban,itm_arabian_horse_a],
def_attrib|level(20),wp_melee(100),knows_common|knows_riding_4|knows_ironflesh_2|knows_shield_2|knows_power_strike_3,swadian_face_young_1, swadian_face_old_2],
["sarranid_prison_guard","Prison Guard","Prison Guards",tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_6,
[itm_arabian_sword_d,itm_scimitar_b,itm_war_spear,itm_mace_4,itm_sarranid_boots_c,itm_arabian_armor_b,itm_sarranid_mail_coif,itm_sarranid_helmet1,itm_sarranid_horseman_helmet,itm_mail_boots,itm_iron_greaves,itm_mail_mittens,itm_leather_gloves,itm_tab_shield_kite_d],
def_attrib|level(25),wp_melee(135)|wp_throwing(100),knows_common|knows_shield_3|knows_ironflesh_3|knows_power_strike_3,swadian_face_middle_1, swadian_face_older_2],
["sarranid_castle_guard","Castle Guard","Castle Guards",tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_6,
[itm_arabian_sword_d,itm_scimitar_b,itm_war_spear,itm_mace_4,itm_sarranid_boots_c, itm_sarranid_boots_d,itm_arabian_armor_b,itm_sarranid_mail_coif,itm_sarranid_helmet1,itm_sarranid_horseman_helmet,itm_mail_boots,itm_iron_greaves,itm_mail_mittens,itm_leather_gloves,itm_tab_shield_kite_d],
def_attrib|level(25),wp_melee(135)|wp_throwing(100),knows_common|knows_shield_3|knows_ironflesh_3|knows_power_strike_3,swadian_face_middle_1, swadian_face_older_2],
["looter","Looter","Looters",0,0,0,fac_outlaws,
[itm_hatchet,itm_club,itm_butchering_knife,itm_falchion,itm_rawhide_coat,itm_stones,itm_nomad_armor,itm_nomad_armor,itm_woolen_cap,itm_woolen_cap,itm_nomad_boots,itm_wrapping_boots],
def_attrib|level(4),wp(20),knows_common,bandit_face1, bandit_face2],
["bandit","Bandit","Bandits",tf_guarantee_armor,0,0,fac_outlaws,
[itm_arrows,itm_spiked_mace,itm_sword_viking_1,itm_short_bow,itm_falchion,itm_nordic_shield,itm_rawhide_coat,itm_leather_cap,itm_leather_jerkin,itm_nomad_armor,itm_nomad_boots,itm_wrapping_boots,itm_saddle_horse],
def_attrib|level(10),wp(60),knows_common|knows_power_draw_1,bandit_face1, bandit_face2],
["brigand","Brigand","Brigands",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_horse,0,0,fac_outlaws,
[itm_arrows,itm_spiked_mace,itm_sword_viking_1,itm_falchion,itm_wooden_shield,itm_hide_covered_round_shield,itm_long_bow,itm_leather_cap,itm_leather_jerkin,itm_nomad_boots,itm_saddle_horse],
def_attrib|level(16),wp(90),knows_common|knows_power_draw_3,bandit_face1, bandit_face2],
["mountain_bandit","Mountain Bandit","Mountain Bandits",tf_guarantee_armor|tf_guarantee_boots,0,0,fac_outlaws,
[itm_arrows,itm_sword_viking_1,itm_spear,itm_winged_mace,itm_maul,itm_falchion,itm_short_bow,itm_javelin,itm_fur_covered_shield,itm_hide_covered_round_shield,
itm_felt_hat,itm_head_wrappings,itm_skullcap,itm_ragged_outfit,itm_rawhide_coat,itm_leather_armor,itm_hide_boots,itm_nomad_boots,itm_wooden_shield,itm_nordic_shield],
def_attrib|level(11),wp(90),knows_common|knows_power_draw_2,rhodok_face_young_1, rhodok_face_old_2],
["forest_bandit","Forest Bandit","Forest Bandits",tf_guarantee_armor|tf_guarantee_ranged|tf_guarantee_boots,0,0,fac_outlaws,
[itm_arrows,itm_axe,itm_hatchet,itm_quarter_staff,itm_short_bow,itm_hunting_bow,
itm_common_hood,itm_black_hood,itm_shirt,itm_padded_leather,itm_leather_jerkin,itm_ragged_outfit,itm_hide_boots,itm_leather_boots],
def_attrib|level(11),wp(90),knows_common|knows_power_draw_3,swadian_face_young_1, swadian_face_old_2],
["sea_raider","Sea Raider","Sea Raiders",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_shield,0,0,fac_outlaws,
[itm_arrows,itm_sword_viking_1,itm_sword_viking_2,itm_fighting_axe,itm_battle_axe,itm_spear,itm_nordic_shield,itm_nordic_shield,itm_nordic_shield,itm_wooden_shield,itm_long_bow,itm_javelin,itm_throwing_axes,
itm_nordic_helmet,itm_nordic_helmet,itm_nasal_helmet,itm_nomad_vest,itm_byrnie,itm_mail_shirt,itm_leather_boots, itm_nomad_boots],
def_attrib|level(16),wp(110),knows_ironflesh_2|knows_power_strike_2|knows_power_draw_3|knows_power_throw_2|knows_riding_1|knows_athletics_2,nord_face_young_1, nord_face_old_2],
["steppe_bandit","Steppe Bandit","Steppe Bandits",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_horse|tf_guarantee_ranged|tf_mounted,0,0,fac_outlaws,
[itm_arrows,itm_sword_khergit_1,itm_winged_mace,itm_spear, itm_light_lance,itm_nomad_bow,itm_nomad_bow,itm_short_bow,itm_jarid,itm_leather_steppe_cap_a,itm_leather_steppe_cap_b,itm_nomad_cap,itm_nomad_cap_b,itm_khergit_armor,itm_steppe_armor,itm_leather_vest,itm_hide_boots,itm_nomad_boots,itm_leather_covered_round_shield,itm_leather_covered_round_shield,itm_saddle_horse,itm_steppe_horse,itm_steppe_horse],
def_attrib|level(12),wp(100),knows_riding_2|knows_horse_archery_3|knows_power_draw_3,khergit_face_young_1, khergit_face_old_2],
["taiga_bandit","Taiga Bandit","Taiga Bandits",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_ranged,0,0,fac_outlaws,
[itm_arrows,itm_sword_khergit_1,itm_winged_mace,itm_spear, itm_light_lance,itm_nomad_bow,itm_nomad_bow,itm_short_bow,itm_jarid,itm_javelin,itm_vaegir_fur_cap,itm_leather_steppe_cap_c,itm_nomad_armor,itm_leather_jerkin,itm_hide_boots,itm_nomad_boots,itm_leather_covered_round_shield,itm_leather_covered_round_shield],
def_attrib|level(15),wp(110),knows_common|knows_power_draw_4|knows_power_throw_3,vaegir_face_young_1, vaegir_face_old_2],
["desert_bandit","Desert Bandit","Desert Bandits",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_horse|tf_mounted,0,0,fac_outlaws,
[itm_arrows,itm_arabian_sword_a,itm_winged_mace,itm_spear, itm_light_lance,itm_jarid,itm_nomad_bow,itm_short_bow,itm_jarid,itm_sarranid_cloth_robe, itm_sarranid_cloth_robe, itm_skirmisher_armor, itm_desert_turban, itm_turban,itm_leather_steppe_cap_b,itm_leather_covered_round_shield,itm_leather_covered_round_shield,itm_saddle_horse,itm_arabian_horse_a],
def_attrib|level(12),wp(100),knows_riding_2|knows_horse_archery_3|knows_power_draw_3,khergit_face_young_1, khergit_face_old_2],
["black_khergit_horseman","Black Khergit Horseman","Black Khergit Horsemen",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_horse,0,0,fac_black_khergits,
[itm_arrows,itm_sword_khergit_2,itm_scimitar,itm_scimitar,itm_winged_mace,itm_spear,itm_lance,itm_khergit_bow,itm_khergit_bow,itm_nomad_bow,itm_nomad_bow,itm_steppe_cap,itm_nomad_cap,itm_khergit_war_helmet,itm_khergit_war_helmet,itm_mail_hauberk,itm_lamellar_armor,itm_hide_boots,itm_plate_covered_round_shield,itm_plate_covered_round_shield,itm_saddle_horse,itm_steppe_horse],
def_attrib|level(21),wp(100),knows_riding_3|knows_ironflesh_3|knows_horse_archery_3|knows_power_draw_3,khergit_face_young_1, khergit_face_old_2],
["manhunter","Manhunter","Manhunters",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield,0,0,fac_manhunters,
[itm_mace_3,itm_winged_mace,itm_nasal_helmet,itm_padded_cloth,itm_aketon_green,itm_aketon_green,itm_wooden_shield,itm_nomad_boots,itm_wrapping_boots,itm_sumpter_horse],
def_attrib|level(10),wp(50),knows_common|knows_riding_3|knows_power_strike_2|knows_athletics_2,bandit_face1, bandit_face2],
## ["deserter","Deserter","Deserters",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_swadian_deserters,
## [itm_arrows,itm_spear,itm_fighting_pick,itm_short_bow,itm_sword,itm_voulge,itm_nordic_shield,itm_round_shield,itm_kettle_hat,itm_leather_cap,itm_padded_cloth,itm_leather_armor,itm_scale_armor,itm_saddle_horse],
## def_attrib|level(12),wp(60),knows_common,bandit_face1, bandit_face2],
#fac_slavers
## ["slave_keeper","Slave Keeper","Slave Keepers",tf_guarantee_armor ,0,0,fac_slavers,
## [itm_cudgel,itm_club,itm_woolen_cap,itm_rawhide_coat,itm_coarse_tunic,itm_nomad_armor,itm_nordic_shield,itm_nomad_boots,itm_wrapping_boots,itm_sumpter_horse],
## def_attrib|level(10),wp(60),knows_common,bandit_face1, bandit_face2],
["slave_driver","Slave Driver","Slave Drivers",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse ,0,0,fac_slavers,
[itm_club_with_spike_head,itm_segmented_helmet,itm_tribal_warrior_outfit,itm_nordic_shield,itm_leather_boots,itm_leather_gloves,itm_khergit_leather_boots,itm_steppe_horse],
def_attrib|level(14),wp(80),knows_common|knows_riding_2|knows_athletics_1,bandit_face1, bandit_face2],
["slave_hunter","Slave Hunter","Slave Hunters",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield ,0,0,fac_slavers,
[itm_winged_mace,itm_maul,itm_kettle_hat,itm_mail_shirt,itm_tab_shield_round_c,itm_leather_boots,itm_leather_gloves,itm_courser],
def_attrib|level(18),wp(90),knows_common|knows_riding_3|knows_athletics_2,bandit_face1, bandit_face2],
["slave_crusher","Slave Crusher","Slave Crushers",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield ,0,0,fac_slavers,
[itm_sledgehammer,itm_spiked_mace,itm_mail_hauberk,itm_bascinet_2,itm_bascinet_3,itm_mail_mittens,itm_tab_shield_round_d,itm_mail_chausses,itm_splinted_leather_greaves,itm_hunter],
def_attrib|level(22),wp(110),knows_common|knows_riding_4|knows_power_strike_3,bandit_face1, bandit_face2],
["slaver_chief","Slaver Chief","Slaver Chiefs",tf_mounted|tf_guarantee_all_wo_ranged,0,0,fac_slavers,
[itm_military_hammer,itm_warhammer,itm_brigandine_red,itm_steel_shield,itm_scale_gauntlets,itm_mail_mittens,itm_guard_helmet,itm_plate_boots,itm_mail_boots,itm_warhorse],
def_attrib|level(26),wp(130),knows_common|knows_riding_4|knows_power_strike_5,bandit_face1, bandit_face2],
#Rhodok tribal, Hunter, warrior, veteran, warchief
# ["undead_walker","undead_walker","undead_walkers",tf_undead|tf_allways_fall_dead,0,0,fac_undeads,
# [],
# def_attrib|level(3),wp(60),knows_common,undead_face1, undead_face2],
# ["undead_horseman","undead_horseman","undead_horsemen",tf_undead|tf_allways_fall_dead|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse,0,0,fac_undeads,
# [],
# def_attrib|level(19),wp(100),knows_common,undead_face1, undead_face2],
# ["undead_nomad","undead_nomad","undead_nomads",tf_undead|tf_allways_fall_dead|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_horse,0,0,fac_black_khergits,
# [],
# def_attrib|level(21),wp(100),knows_common|knows_riding_4,khergit_face1, khergit_face2],
# ["undead","undead","undead",tf_undead|tf_allways_fall_dead,0,0,fac_undeads,
# [],
# def_attrib|level(3),wp(60),knows_common,undead_face1, undead_face2],
# ["hell_knight","hell_knight","hell_knights",tf_undead|tf_allways_fall_dead|tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_undeads,
# [],
# def_attrib|level(23),wp(100),knows_common|knows_riding_3,undead_face1, undead_face2],
["follower_woman","Camp Follower","Camp Follower",tf_female|tf_guarantee_armor,0,0,fac_commoners,
[itm_bolts,itm_light_crossbow,itm_short_bow,itm_crossbow,itm_nordic_shield,itm_hide_covered_round_shield,itm_hatchet,itm_hand_axe,itm_voulge,itm_fighting_pick,itm_club,itm_dagger,itm_throwing_knives,itm_dress,itm_woolen_dress, itm_skullcap, itm_wrapping_boots],
def_attrib|level(5),wp(70),knows_common,refugee_face1,refugee_face2],
["hunter_woman","Huntress","Huntresses",tf_female|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_commoners,
[itm_bolts,itm_arrows,itm_light_crossbow,itm_short_bow,itm_crossbow,itm_nordic_shield,itm_hide_covered_round_shield,itm_hatchet,itm_hand_axe,itm_voulge,itm_fighting_pick,itm_club,itm_dress,itm_leather_jerkin, itm_skullcap, itm_wrapping_boots],
def_attrib|level(10),wp(85),knows_common|knows_power_strike_1,refugee_face1,refugee_face2],
["fighter_woman","Camp Defender","Camp Defenders",tf_female|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_commoners,
[itm_bolts,itm_arrows,itm_light_crossbow,itm_short_bow,itm_crossbow,itm_fur_covered_shield,itm_hide_covered_round_shield,itm_hatchet,itm_shortened_voulge,itm_mail_shirt,itm_byrnie, itm_skullcap, itm_wrapping_boots, itm_mail_coif, itm_mail_boots],
def_attrib|level(16),wp(100),knows_common|knows_riding_3|knows_power_strike_2|knows_athletics_2|knows_ironflesh_1,refugee_face1,refugee_face2],
["sword_sister","Sword Sister","Sword Sisters",tf_female|tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_shield|tf_guarantee_helmet|tf_guarantee_horse,0,0,fac_commoners,
[itm_bolts,itm_sword_medieval_b,itm_sword_khergit_3,itm_plate_covered_round_shield,itm_tab_shield_small_round_c, itm_crossbow,itm_plate_armor,itm_coat_of_plates,itm_plate_boots,itm_guard_helmet,itm_helmet_with_neckguard,itm_mail_coif,itm_courser,itm_leather_gloves],
def_attrib|level(22),wp(140),knows_common|knows_power_strike_3|knows_riding_5|knows_athletics_3|knows_ironflesh_2|knows_shield_2,refugee_face1,refugee_face2],
["refugee","Refugee","Refugees",tf_female|tf_guarantee_armor,0,0,fac_commoners,
[itm_knife,itm_pitch_fork,itm_sickle,itm_hatchet,itm_club,itm_dress,itm_robe,itm_woolen_dress, itm_headcloth, itm_woolen_hood, itm_wrapping_boots],
def_attrib|level(1),wp(45),knows_common,refugee_face1,refugee_face2],
["peasant_woman","Peasant Woman","Peasant Women",tf_female|tf_guarantee_armor,0,0,fac_commoners,
[itm_knife,itm_pitch_fork,itm_sickle,itm_hatchet,itm_club,itm_dagger,itm_throwing_knives,itm_dress,itm_woolen_dress, itm_headcloth, itm_woolen_hood, itm_wrapping_boots],
def_attrib|level(1),wp(40),knows_common,refugee_face1,refugee_face2],
["caravan_master","Caravan Master","Caravan Masters",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_horse,0,0,fac_commoners,
[itm_sword_medieval_c,itm_fur_coat,itm_hide_boots,itm_saddle_horse,
itm_saddle_horse,itm_saddle_horse,itm_saddle_horse,
itm_leather_jacket, itm_leather_cap],
def_attrib|level(9),wp(100),knows_common|knows_riding_4|knows_ironflesh_3,mercenary_face_1, mercenary_face_2],
["kidnapped_girl","Kidnapped Girl","Kidnapped Girls",tf_hero|tf_randomize_face|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_commoners,
[itm_dress,itm_leather_boots],
def_attrib|level(2),wp(50),knows_common|knows_riding_2,woman_face_1, woman_face_2],
#This troop is the troop marked as soldiers_end and town_walkers_begin
["town_walker_1","Townsman","Townsmen",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_commoners,
[itm_short_tunic, itm_linen_tunic,itm_fur_coat, itm_coarse_tunic, itm_tabard, itm_leather_vest, itm_arena_tunic_white, itm_leather_apron, itm_shirt, itm_arena_tunic_green, itm_arena_tunic_blue, itm_woolen_hose, itm_nomad_boots, itm_blue_hose, itm_hide_boots, itm_ankle_boots, itm_leather_boots, itm_fur_hat, itm_leather_cap, itm_straw_hat, itm_felt_hat],
def_attrib|level(4),wp(60),knows_common,man_face_young_1, man_face_old_2],
["town_walker_2","Townswoman","Townswomen",tf_female|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_commoners,
[itm_blue_dress, itm_dress, itm_woolen_dress, itm_peasant_dress, itm_woolen_hose, itm_blue_hose, itm_wimple_a, itm_wimple_with_veil, itm_female_hood],
def_attrib|level(2),wp(40),knows_common,woman_face_1,woman_face_2],
["khergit_townsman","Townsman","Townsmen",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_6,
[itm_sarranid_felt_hat,itm_turban,itm_wrapping_boots,itm_khergit_leather_boots,itm_sarranid_cloth_robe, itm_sarranid_cloth_robe_b],
def_attrib|level(4),wp(60),knows_common,swadian_face_younger_1, swadian_face_middle_2],
["khergit_townswoman","Townswoman","Townswomen",tf_female|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_commoners,
[itm_blue_dress, itm_dress, itm_woolen_dress, itm_peasant_dress, itm_woolen_hose, itm_blue_hose, itm_wimple_a, itm_wimple_with_veil, itm_female_hood],
def_attrib|level(2),wp(40),knows_common,woman_face_1,woman_face_2],
["sarranid_townsman","Townsman","Townsmen",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_6,
[itm_sarranid_felt_hat,itm_turban,itm_wrapping_boots,itm_sarranid_boots_a,itm_sarranid_cloth_robe, itm_sarranid_cloth_robe_b],
def_attrib|level(4),wp(60),knows_common,swadian_face_younger_1, swadian_face_middle_2],
["sarranid_townswoman","Townswoman","Townswomen",tf_female|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_commoners,
[itm_sarranid_common_dress, itm_sarranid_common_dress_b,itm_woolen_hose,itm_sarranid_boots_a, itm_sarranid_felt_head_cloth, itm_sarranid_felt_head_cloth_b],
def_attrib|level(2),wp(40),knows_common,woman_face_1,woman_face_2],
#This troop is the troop marked as town_walkers_end and village_walkers_begin
["village_walker_1","Villager","Villagers",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_commoners,
[itm_short_tunic, itm_linen_tunic, itm_coarse_tunic, itm_leather_vest, itm_leather_apron, itm_shirt, itm_woolen_hose, itm_nomad_boots, itm_blue_hose, itm_hide_boots, itm_ankle_boots, itm_leather_boots, itm_fur_hat, itm_leather_cap, itm_straw_hat, itm_felt_hat],
def_attrib|level(4),wp(60),knows_common,man_face_younger_1, man_face_older_2],
["village_walker_2","Villager","Villagers",tf_female|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_commoners,
[itm_blue_dress, itm_dress, itm_woolen_dress, itm_peasant_dress, itm_woolen_hose, itm_blue_hose, itm_wimple_a, itm_wimple_with_veil, itm_female_hood],
def_attrib|level(2),wp(40),knows_common,woman_face_1,woman_face_2],
#This troop is the troop marked as village_walkers_end and spy_walkers_begin
["spy_walker_1","Townsman","Townsmen",tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_commoners,
[itm_short_tunic, itm_linen_tunic, itm_coarse_tunic, itm_tabard, itm_leather_vest, itm_robe, itm_leather_apron, itm_shirt, itm_woolen_hose, itm_nomad_boots, itm_blue_hose, itm_hide_boots, itm_ankle_boots, itm_leather_boots, itm_fur_hat, itm_leather_cap, itm_straw_hat, itm_felt_hat],
def_attrib|level(4),wp(60),knows_common,man_face_middle_1, man_face_old_2],
["spy_walker_2","Townswoman","Townswomen",tf_female|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_commoners,
[itm_blue_dress, itm_dress, itm_woolen_dress, itm_peasant_dress, itm_woolen_hose, itm_blue_hose, itm_wimple_a, itm_wimple_with_veil, itm_female_hood],
def_attrib|level(2),wp(40),knows_common,woman_face_1,woman_face_2],
# Ryan END
#This troop is the troop marked as spy_walkers_end
# Zendar
["tournament_master","Tournament Master","Tournament Master",tf_hero, scn_zendar_center|entry(1),reserved, fac_commoners,[itm_nomad_armor,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common,0x000000000008414401e28f534c8a2d09],
["trainer","Trainer","Trainer",tf_hero, scn_zendar_center|entry(2),reserved, fac_commoners,[itm_leather_jerkin,itm_hide_boots],def_attrib|level(2),wp(20),knows_common,0x00000000000430c701ea98836781647f],
["Constable_Hareck","Constable Hareck","Constable Hareck",tf_hero, scn_zendar_center|entry(5),reserved, fac_commoners,[itm_leather_jacket,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,0x00000000000c41c001fb15234eb6dd3f],
# Ryan BEGIN
["Ramun_the_slave_trader","Ramun, the slave trader","Ramun, the slave trader",tf_hero, no_scene,reserved, fac_commoners,[itm_leather_jacket,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,0x0000000fd5105592385281c55b8e44eb00000000001d9b220000000000000000],
["guide","Quick Jimmy","Quick Jimmy",tf_hero, no_scene,0, fac_commoners,[itm_coarse_tunic,itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, 0x00000000000c318301f24e38a36e38e3],
# Ryan END
["Xerina","Xerina","Xerina",tf_hero|tf_female, scn_the_happy_boar|entry(5),reserved, fac_commoners,[itm_leather_jerkin,itm_hide_boots],def_attrib|str_15|agi_15|level(39),wp(312),knows_power_strike_5|knows_ironflesh_5|knows_riding_6|knows_power_draw_4|knows_athletics_8|knows_shield_3,0x00000001ac0820074920561d0b51e6ed00000000001d40ed0000000000000000],
["Dranton","Dranton","Dranton",tf_hero, scn_the_happy_boar|entry(2),reserved, fac_commoners,[itm_leather_vest,itm_hide_boots],def_attrib|str_15|agi_14|level(42),wp(324),knows_power_strike_5|knows_ironflesh_7|knows_riding_4|knows_power_draw_4|knows_athletics_4|knows_shield_3,0x0000000a460c3002470c50f3502879f800000000001ce0a00000000000000000],
["Kradus","Kradus","Kradus",tf_hero, scn_the_happy_boar|entry(3),reserved, fac_commoners,[itm_padded_leather,itm_hide_boots],def_attrib|str_15|agi_14|level(43),wp(270),knows_power_strike_5|knows_ironflesh_7|knows_riding_4|knows_power_draw_4|knows_athletics_4|knows_shield_3,0x0000000f5b1052c61ce1a9521db1375200000000001ed31b0000000000000000],
#Tutorial
["tutorial_trainer","Training Ground Master","Training Ground Master",tf_hero, 0, 0, fac_commoners,[itm_robe,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common,0x000000000008414401e28f534c8a2d09],
["tutorial_student_1","{!}tutorial_student_1","{!}tutorial_student_1",tf_guarantee_boots|tf_guarantee_armor, 0, 0, fac_neutral,
[itm_practice_sword, itm_practice_shield, itm_leather_jerkin,itm_padded_leather,itm_leather_armor,itm_ankle_boots,itm_padded_coif,itm_footman_helmet],
def_attrib|level(2),wp(20),knows_common, swadian_face_young_1, swadian_face_old_2],
["tutorial_student_2","{!}tutorial_student_2","{!}tutorial_student_2",tf_guarantee_boots|tf_guarantee_armor, 0, 0, fac_neutral,
[itm_practice_sword, itm_practice_shield, itm_leather_jerkin,itm_padded_leather,itm_leather_armor,itm_ankle_boots,itm_padded_coif,itm_footman_helmet],
def_attrib|level(2),wp(20),knows_common, swadian_face_young_1, swadian_face_old_2],
["tutorial_student_3","{!}tutorial_student_3","{!}tutorial_student_3",tf_guarantee_boots|tf_guarantee_armor, 0, 0, fac_neutral,
[itm_practice_staff, itm_leather_jerkin,itm_padded_leather,itm_leather_armor,itm_ankle_boots,itm_padded_coif,itm_footman_helmet],
def_attrib|level(2),wp(20),knows_common, swadian_face_young_1, swadian_face_old_2],
["tutorial_student_4","{!}tutorial_student_4","{!}tutorial_student_4",tf_guarantee_boots|tf_guarantee_armor, 0, 0, fac_neutral,
[itm_practice_staff, itm_leather_jerkin,itm_padded_leather,itm_leather_armor,itm_ankle_boots,itm_padded_coif,itm_footman_helmet],
def_attrib|level(2),wp(20),knows_common, swadian_face_young_1, swadian_face_old_2],
#Sargoth
#name candidates (presumably for Sargoth-area lords — verify): halkard, hardawk, lord_taucard, lord_caupard, lord_paugard
#Salt mine
["Galeas","Galeas","Galeas",tf_hero, 0, reserved, fac_commoners,[itm_leather_jacket,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,0x000000000004718201c073191a9bb10c],
#Dhorak keep
["farmer_from_bandit_village","Farmer","Farmers",tf_guarantee_armor,no_scene,reserved,fac_commoners,
[itm_linen_tunic,itm_coarse_tunic,itm_shirt,itm_nomad_boots,itm_wrapping_boots],
def_attrib|level(4),wp(60),knows_common,man_face_middle_1, man_face_older_2],
#SB : semi-random arena training rewards
["trainer_1","Trainer","Trainer",tf_hero, scn_training_ground_ranged_melee_1|entry(6),reserved, fac_commoners,[itm_practice_sword,itm_leather_jerkin,itm_hide_boots],def_attrib|level(2),wp(20),knows_common,0x0000000d0d1030c74ae8d661b651c6840000000000000e220000000000000000],
["trainer_2","Trainer","Trainer",tf_hero, scn_training_ground_ranged_melee_2|entry(6),reserved, fac_commoners,[itm_arena_axe,itm_nomad_vest,itm_hide_boots],def_attrib|level(2),wp(20),knows_common,0x0000000e5a04360428ec253846640b5d0000000000000ee80000000000000000],
["trainer_3","Trainer","Trainer",tf_hero, scn_training_ground_ranged_melee_3|entry(6),reserved, fac_commoners,[itm_practice_crossbow,itm_padded_leather,itm_hide_boots],def_attrib|level(2),wp(20),knows_common,0x0000000e4a0445822ca1a11ab1e9eaea0000000000000f510000000000000000],
["trainer_4","Trainer","Trainer",tf_hero, scn_training_ground_ranged_melee_4|entry(6),reserved, fac_commoners,[itm_heavy_practice_sword,itm_leather_jerkin,itm_hide_boots],def_attrib|level(2),wp(20),knows_common,0x0000000e600452c32ef8e5bb92cf1c970000000000000fc20000000000000000],
["trainer_5","Trainer","Trainer",tf_hero, scn_training_ground_ranged_melee_5|entry(6),reserved, fac_commoners,[itm_arena_lance,itm_leather_vest,itm_hide_boots],def_attrib|level(2),wp(20),knows_common,0x0000000e77082000150049a34c42ec960000000000000e080000000000000000],
# Ransom brokers.
["ransom_broker_1","Ransom_Broker","Ransom_Broker",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_leather_vest,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["ransom_broker_2","Ransom_Broker","Ransom_Broker",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_tabard,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["ransom_broker_3","Ransom_Broker","Ransom_Broker",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_leather_vest,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["ransom_broker_4","Ransom_Broker","Ransom_Broker",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_short_tunic,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["ransom_broker_5","Ransom_Broker","Ransom_Broker",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_gambeson,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["ransom_broker_6","Ransom_Broker","Ransom_Broker",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_blue_gambeson,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["ransom_broker_7","Ransom_Broker","Ransom_Broker",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_red_gambeson,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["ransom_broker_8","Ransom_Broker","Ransom_Broker",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_fur_coat,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["ransom_broker_9","Ransom_Broker","Ransom_Broker",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_leather_vest,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["ransom_broker_10","Ransom_Broker","Ransom_Broker",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_leather_jacket,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
# Tavern traveler.
["tavern_traveler_1","Traveller","Traveller",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_fur_coat,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["tavern_traveler_2","Traveller","Traveller",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_tabard,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["tavern_traveler_3","Traveller","Traveller",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_leather_vest,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["tavern_traveler_4","Traveller","Traveller",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_blue_gambeson,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["tavern_traveler_5","Traveller","Traveller",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_short_tunic,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["tavern_traveler_6","Traveller","Traveller",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_fur_coat,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["tavern_traveler_7","Traveller","Traveller",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_leather_jacket,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["tavern_traveler_8","Traveller","Traveller",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_tabard,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["tavern_traveler_9","Traveller","Traveller",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_fur_coat,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["tavern_traveler_10","Traveller","Traveller",tf_hero|tf_randomize_face, 0, reserved, fac_commoners,[itm_leather_jacket,itm_hide_boots],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
# Tavern bookseller.
["tavern_bookseller_1","Book_Merchant","Book_Merchant",tf_hero|tf_is_merchant|tf_randomize_face, 0, reserved, fac_commoners,[itm_fur_coat,itm_hide_boots,
itm_book_tactics, itm_book_persuasion, itm_book_wound_treatment_reference, itm_book_leadership,
itm_book_intelligence, itm_book_training_reference, itm_book_surgery_reference],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2],
["tavern_bookseller_2","Book_Merchant","Book_Merchant",tf_hero|tf_is_merchant|tf_randomize_face, 0, reserved, fac_commoners,[itm_fur_coat,itm_hide_boots,
itm_book_wound_treatment_reference, itm_book_leadership, itm_book_intelligence, itm_book_trade,
itm_book_engineering, itm_book_weapon_mastery],def_attrib|level(5),wp(20),knows_common,merchant_face_1, merchant_face_2],
# Tavern minstrel.
["tavern_minstrel_1","Wandering Minstrel","Minstrel",tf_hero|tf_randomize_face|tf_guarantee_shield|tf_guarantee_armor|tf_guarantee_boots, 0, reserved, fac_commoners,[itm_leather_jacket, itm_hide_boots, itm_lute],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2], #lute
["tavern_minstrel_2","Wandering Bard","Minstrel",tf_hero|tf_randomize_face|tf_guarantee_shield|tf_guarantee_armor|tf_guarantee_boots, 0, reserved, fac_commoners,[itm_tunic_with_green_cape, itm_hide_boots, itm_lyre],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2], #early harp/lyre
["tavern_minstrel_3","Wandering Ashik","Minstrel",tf_hero|tf_randomize_face|tf_guarantee_shield|tf_guarantee_armor|tf_guarantee_boots, 0, reserved, fac_commoners,[itm_nomad_robe, itm_hide_boots, itm_lute],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2], #lute/oud or rebab
["tavern_minstrel_4","Wandering Skald","Minstrel",tf_hero|tf_randomize_face|tf_guarantee_shield|tf_guarantee_armor|tf_guarantee_boots, 0, reserved, fac_commoners,[itm_fur_coat, itm_hide_boots, itm_lyre],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2], #No instrument or lyre
["tavern_minstrel_5","Wandering Troubadour","Minstrel",tf_hero|tf_randomize_face|tf_guarantee_shield|tf_guarantee_armor|tf_guarantee_boots, 0, reserved, fac_commoners,[itm_short_tunic, itm_hide_boots, itm_lute],def_attrib|level(5),wp(20),knows_common,merchant_face_1,merchant_face_2], #Lute or Byzantine/Occitan lyra
#NPC system changes begin
#Companions
["kingdom_heroes_including_player_begin", "kingdom_heroes_including_player_begin", "kingdom_heroes_including_player_begin", tf_hero, 0,reserved, fac_kingdom_1,[], lord_attrib,wp(220),knows_lord_1, 0x000000000010918a01f248377289467d],
["npc1","Borcha","Borcha",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_khergit_armor,itm_nomad_boots,itm_knife],
str_8|agi_7|int_12|cha_7|level(3),wp(60),knows_tracker_npc|
knows_ironflesh_1|knows_power_strike_1|knows_pathfinding_3|knows_athletics_2|knows_tracking_1|knows_riding_2,
0x00000004bf086143259d061a9046e23500000000001db52c0000000000000000],
["npc2","Marnid","Marnid", tf_hero|tf_unmoveable_in_party_window, 0,reserved, fac_commoners,[itm_linen_tunic,itm_hide_boots,itm_club],
str_7|agi_7|int_11|cha_6|level(1),wp(40),knows_merchant_npc|
knows_trade_2|knows_weapon_master_1|knows_ironflesh_1|knows_wound_treatment_1|knows_athletics_2|knows_first_aid_1|knows_leadership_1,
0x000000019d004001570b893712c8d28d00000000001dc8990000000000000000],
["npc3","Ymira","Ymira",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_dress,itm_woolen_hose,itm_knife],
str_6|agi_9|int_11|cha_6|level(1),wp(20),knows_merchant_npc|
knows_wound_treatment_1|knows_trade_1|knows_first_aid_3|knows_surgery_1|knows_athletics_1|knows_riding_1,
0x0000000083040001583b6db8dec5925b00000000001d80980000000000000000],
["npc4","Rolf","Rolf",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_leather_jerkin,itm_nomad_boots, itm_sword_medieval_a],
str_10|agi_9|int_13|cha_10|level(10),wp(110),knows_warrior_npc|
knows_weapon_master_2|knows_power_strike_2|knows_riding_2|knows_athletics_2|knows_power_throw_2|knows_first_aid_1|knows_surgery_1|knows_tactics_2|knows_leadership_2,
0x000000057f1074002c75c6a8a58ad72e00000000001e1a890000000000000000],
["npc5","Baheshtur","Baheshtur",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_nomad_vest,itm_nomad_boots, itm_sword_khergit_1],
str_9|agi_9|int_12|cha_7|level(5),wp(90),knows_warrior_npc|
knows_riding_2|knows_horse_archery_3|knows_power_draw_3|knows_leadership_2|knows_weapon_master_1,
0x000000088910318b5c6f972328324a6200000000001cd3310000000000000000],
["npc6","Firentis","Firentis",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_tabard,itm_nomad_boots, itm_sword_medieval_a],
str_10|agi_12|int_10|cha_5|level(6),wp(105),knows_warrior_npc|
knows_riding_2|knows_weapon_master_2|knows_power_strike_2|knows_athletics_3|knows_trainer_1|knows_leadership_1,
0x00000002050052036a1895d0748f3ca30000000000000f0b0000000000000000],
["npc7","Deshavi","Deshavi",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_ragged_outfit,itm_wrapping_boots, itm_hunting_bow, itm_arrows, itm_quarter_staff],
str_8|agi_9|int_10|cha_6|level(2),wp(80),knows_tracker_npc|
knows_tracking_2|knows_athletics_2|knows_spotting_1|knows_pathfinding_1|knows_power_draw_2,
0x00000001fc08400533a15297634d44f400000000001e02db0000000000000000],
["npc8","Matheld","Matheld",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_tribal_warrior_outfit,itm_nomad_boots, itm_sword_viking_1],
str_9|agi_10|int_9|cha_10|level(7),wp(90),knows_warrior_npc|
knows_weapon_master_3|knows_power_strike_2|knows_athletics_2|knows_leadership_3|knows_tactics_1,
0x00000005800c000637db8314e331e76e00000000001c46db0000000000000000],
["npc9","Alayen","Alayen",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_tabard,itm_nomad_boots, itm_sword_medieval_b_small],
str_11|agi_8|int_7|cha_8|level(2),wp(100),knows_warrior_npc|
knows_weapon_master_1|knows_riding_1|knows_athletics_1|knows_leadership_1|knows_tactics_1|knows_power_strike_1,
0x000000030100300f499d5b391b6db8d300000000001dc2e10000000000000000],
["npc10","Bunduk","Bunduk",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_padded_leather,itm_nomad_boots, itm_crossbow, itm_bolts, itm_pickaxe],
str_12|agi_8|int_9|cha_11|level(9),wp(105),knows_warrior_npc|
knows_weapon_master_3|knows_tactics_1|knows_leadership_1|knows_ironflesh_3|knows_trainer_2|knows_first_aid_2,
0x0000000a3f081006572c91c71c8d46cb00000000001e468a0000000000000000],
["npc11","Katrin","Katrin",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_leather_apron, itm_falchion, itm_wrapping_boots],
str_8|agi_11|int_10|cha_10|level(8),wp(70),knows_merchant_npc|
knows_weapon_master_1|knows_first_aid_1|knows_wound_treatment_2|knows_ironflesh_3|knows_inventory_management_5,
0x0000000d7f0400035915aa226b4d975200000000001ea49e0000000000000000],
["npc12","Jeremus","Jeremus",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_pilgrim_disguise,itm_nomad_boots, itm_staff],
str_8|agi_7|int_13|cha_7|level(4),wp(30), knows_merchant_npc|
knows_ironflesh_1|knows_power_strike_1|knows_surgery_4|knows_wound_treatment_3|knows_first_aid_3,
0x000000078000500e4f8ba62a9cd5d36d00000000001e36250000000000000000],
["npc13","Nizar","Nizar",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_nomad_robe,itm_nomad_boots, itm_scimitar, itm_courser],
str_7|agi_7|int_12|cha_8|level(3),wp(80),knows_warrior_npc|
knows_riding_2|knows_leadership_2|knows_athletics_2|knows_ironflesh_2|knows_power_strike_1|knows_weapon_master_1,
0x00000004bf0475c85f4e9592de4e574c00000000001e369c0000000000000000],
["npc14","Lezalit","Lezalit",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_nobleman_outfit,itm_nomad_boots, itm_sword_medieval_b_small],
str_9|agi_8|int_11|cha_8|level(5),wp(100),knows_warrior_npc|
knows_trainer_4|knows_weapon_master_3|knows_leadership_2|knows_power_strike_1,
0x00000001a410259144d5d1d6eb55e96a00000000001db0db0000000000000000],
["npc15","Artimenner","Artimenner",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_rich_outfit,itm_nomad_boots, itm_sword_medieval_b_small],
str_9|agi_9|int_12|cha_8|level(7),wp(80),knows_warrior_npc|
knows_tactics_2|knows_engineer_4|knows_trade_3|knows_tracking_1|knows_spotting_1,
0x0000000f2e1021862b4b9123594eab5300000000001d55360000000000000000],
["npc16","Klethi","Klethi",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_peasant_dress,itm_nomad_boots, itm_dagger, itm_throwing_knives],
str_7|agi_11|int_8|cha_7|level(2),wp(80),knows_tracker_npc|
knows_power_throw_3|knows_athletics_2|knows_power_strike_1,
0x00000000000c100739ce9c805d2f381300000000001cc7ad0000000000000000],
#NPC system changes end
#governors olgrel rasevas Horse Bodywear Footwear_in Footwear_out Armor Weapon Shield Headwear
["kingdom_1_lord", "King Harlaus", "Harlaus", tf_hero, 0,reserved, fac_kingdom_1,[itm_charger, itm_rich_outfit, itm_blue_hose, itm_plate_boots, itm_plate_armor, itm_gauntlets, itm_bastard_sword_b, itm_tab_shield_heater_cav_b, itm_great_helmet], knight_attrib_5,wp(220),knight_skills_5|knows_trainer_5, 0x0000000f45041105241acd2b5a66a86900000000001e98310000000000000000,swadian_face_older_2],
["kingdom_2_lord", "King Yaroglek", "Yaroglek", tf_hero, 0,reserved, fac_kingdom_2,[itm_hunter, itm_courtly_outfit, itm_leather_boots, itm_plate_boots, itm_heraldic_mail_with_surcoat, itm_gauntlets, itm_military_pick, itm_tab_shield_kite_cav_b, itm_vaegir_mask], knight_attrib_5,wp(220),knight_skills_5|knows_trainer_4, 0x0000000ec50001400a2269f919dee11700000000001cc57d0000000000000000, vaegir_face_old_2],
["kingdom_3_lord", "Sanjar Khan", "Sanjar", tf_hero, 0,reserved, fac_kingdom_3,[itm_courser, itm_nomad_robe, itm_leather_boots, itm_splinted_greaves, itm_khergit_guard_armor, itm_lamellar_gauntlets, itm_sword_khergit_3, itm_tab_shield_small_round_c, itm_guard_helmet], knight_attrib_5,wp(220),knight_skills_5|knows_trainer_6, 0x0000000cee0051cc44be2d14d370c65c00000000001ed6df0000000000000000,khergit_face_old_2],
["kingdom_4_lord", "King Ragnar", "Ragnar", tf_hero, 0,reserved, fac_kingdom_4,[itm_hunter, itm_nobleman_outfit, itm_leather_boots, itm_mail_boots, itm_cuir_bouilli, itm_gauntlets, itm_great_axe, itm_tab_shield_round_e, itm_nordic_helmet], knight_attrib_5,wp(220),knight_skills_5|knows_trainer_4, 0x0000000e2c0c028a068e8c18557b12a500000000001c0fe80000000000000000, nord_face_older_2],
["kingdom_5_lord", "King Graveth", "Graveth", tf_hero, 0,reserved, fac_kingdom_5,[itm_warhorse, itm_tabard, itm_leather_boots, itm_splinted_leather_greaves, itm_heraldic_mail_with_tabard, itm_gauntlets, itm_bastard_sword_b, itm_tab_shield_heater_cav_b, itm_spiked_helmet], knight_attrib_4,wp(220),knight_skills_4|knows_trainer_5, 0x0000000efc04119225848dac5d50d62400000000001d48b80000000000000000, rhodok_face_old_2],
["kingdom_6_lord", "Sultan Hakim", "Hakim", tf_hero, 0,reserved, fac_kingdom_6,[itm_warhorse_sarranid, itm_mamluke_mail, itm_sarranid_boots_c, itm_sarranid_mail_coif, itm_mail_mittens, itm_sarranid_cavalry_sword, itm_tab_shield_small_round_c], knight_attrib_4,wp(220),knight_skills_5|knows_trainer_5, 0x0000000a4b103354189c71d6d386e8ac00000000001e24eb0000000000000000, rhodok_face_old_2],
# Imbrea Belinda Ruby Qaelmas Rose Willow
# Alin Ganzo Zelka Rabugti
# Qlurzach Ruhbus Givea_alsev Belanz Bendina
# Dunga Agatha Dibus Crahask
# Horse Bodywear Armor Footwear_in Footwear_out Headwear Weapon Shield
#Swadian civilian clothes: itm_courtly_outfit itm_gambeson itm_blue_gambeson itm_red_gambeson itm_nobleman_outfit itm_rich_outfit itm_short_tunic itm_tabard
#Older knights with higher skills moved to top
["knight_1_1", "Count Klargus", "Klargus", tf_hero, 0, reserved, fac_kingdom_1, [itm_saddle_horse, itm_courtly_outfit, itm_heraldic_mail_with_surcoat, itm_nomad_boots, itm_splinted_greaves, itm_great_helmet, itm_sword_medieval_c, itm_scale_gauntlets, itm_tab_shield_heater_cav_a], knight_attrib_5,wp(230),knight_skills_5|knows_trainer_1|knows_trainer_3, 0x0000000c3e08601414ab4dc6e39296b200000000001e231b0000000000000000, swadian_face_older_2],
["knight_1_2", "Count Delinard", "Delinard", tf_hero, 0, reserved, fac_kingdom_1, [itm_courser, itm_red_gambeson, itm_heraldic_mail_with_surcoat, itm_nomad_boots, itm_iron_greaves, itm_guard_helmet, itm_gauntlets, itm_bastard_sword_a, itm_tab_shield_heater_cav_b], knight_attrib_5,wp(240),knight_skills_5, 0x0000000c0f0c320627627238dcd6599400000000001c573d0000000000000000, swadian_face_young_2],
["knight_1_3", "Count Haringoth", "Haringoth", tf_hero, 0, reserved, fac_kingdom_1, [itm_warhorse, itm_nobleman_outfit, itm_coat_of_plates, itm_leather_boots, itm_splinted_leather_greaves, itm_flat_topped_helmet, itm_gauntlets, itm_bastard_sword_b, itm_tab_shield_heater_d], knight_attrib_5,wp(260),knight_skills_5|knows_trainer_3, 0x0000000cb700210214ce89db276aa2f400000000001d36730000000000000000, swadian_face_young_2],
["knight_1_4", "Count Clais", "Clais", tf_hero, 0, reserved, fac_kingdom_1, [itm_saddle_horse, itm_short_tunic, itm_heraldic_mail_with_surcoat, itm_leather_boots, itm_mail_chausses, itm_winged_great_helmet, itm_gauntlets, itm_bastard_sword_a, itm_sword_two_handed_a, itm_tab_shield_heater_d], knight_attrib_5,wp(180),knight_skills_5|knows_trainer_4, 0x0000000c370c1194546469ca6c4e450e00000000001ebac40000000000000000, swadian_face_older_2],
["knight_1_5", "Count Deglan", "Deglan", tf_hero, 0, reserved, fac_kingdom_1, [itm_hunter, itm_rich_outfit, itm_mail_hauberk,itm_woolen_hose, itm_mail_chausses, itm_guard_helmet, itm_gauntlets, itm_sword_medieval_c, itm_tab_shield_heater_d], knight_attrib_4,wp(200),knight_skills_4|knows_trainer_6, 0x0000000c0c1064864ba34e2ae291992b00000000001da8720000000000000000, swadian_face_older_2],
["knight_1_6", "Count Tredian", "Tredian", tf_hero, 0, reserved, fac_kingdom_1, [itm_hunter, itm_tabard, itm_heraldic_mail_with_surcoat, itm_leather_boots, itm_mail_boots, itm_winged_great_helmet, itm_gauntlets, itm_bastard_sword_b, itm_sword_two_handed_b, itm_tab_shield_heater_cav_b], knight_attrib_5,wp(240),knight_skills_4|knows_trainer_4, 0x0000000c0a08038736db74c6a396a8e500000000001db8eb0000000000000000, swadian_face_older_2],
["knight_1_7", "Count Grainwad", "Grainwad", tf_hero, 0, reserved, fac_kingdom_1, [itm_hunter, itm_tabard, itm_heraldic_mail_with_surcoat, itm_leather_boots, itm_mail_boots, itm_flat_topped_helmet, itm_gauntlets, itm_bastard_sword_b, itm_sword_two_handed_b, itm_tab_shield_heater_cav_b], knight_attrib_5,wp(290),knight_skills_4|knows_trainer_4, 0x0000000c1e001500589dae4094aa291c00000000001e37a80000000000000000, swadian_face_young_2],
["knight_1_8", "Count Ryis", "Ryis", tf_hero, 0, reserved, fac_kingdom_1, [itm_warhorse, itm_nobleman_outfit, itm_coat_of_plates, itm_leather_boots, itm_splinted_leather_greaves, itm_winged_great_helmet, itm_gauntlets,itm_bastard_sword_b, itm_sword_two_handed_a, itm_tab_shield_heater_d], knight_attrib_4,wp(250),knight_skills_4, 0x0000000c330855054aa9aa431a48d74600000000001ed5240000000000000000, swadian_face_older_2],
#Swadian younger knights
["knight_1_9", "Count Plais", "Plais", tf_hero, 0, reserved, fac_kingdom_1, [itm_steppe_horse, itm_gambeson, itm_heraldic_mail_with_surcoat, itm_blue_hose, itm_mail_boots, itm_nasal_helmet, itm_scale_gauntlets, itm_fighting_pick, itm_tab_shield_heater_c], knight_attrib_3,wp(160),knight_skills_3, 0x0000000c0f08000458739a9a1476199800000000001fb6f10000000000000000, swadian_face_old_2],
["knight_1_10", "Count Mirchaud", "Mirchaud", tf_hero, 0, reserved, fac_kingdom_1, [itm_courser, itm_blue_gambeson, itm_mail_hauberk, itm_woolen_hose, itm_mail_chausses, itm_guard_helmet, itm_gauntlets, itm_sword_two_handed_b, itm_tab_shield_heater_cav_b], knight_attrib_3,wp(190),knight_skills_3, 0x0000000c0610351048e325361d7236cd00000000001d532a0000000000000000, swadian_face_older_2],
["knight_1_11", "Count Stamar", "Stamar", tf_hero, 0, reserved, fac_kingdom_1, [itm_courser, itm_red_gambeson, itm_heraldic_mail_with_surcoat, itm_nomad_boots, itm_iron_greaves, itm_guard_helmet, itm_gauntlets, itm_bastard_sword_a, itm_tab_shield_heater_cav_b], knight_attrib_3,wp(220),knight_skills_3, 0x0000000c03104490280a8cb2a24196ab00000000001eb4dc0000000000000000, swadian_face_older_2],
["knight_1_12", "Count Meltor", "Meltor", tf_hero, 0, reserved, fac_kingdom_1, [itm_saddle_horse, itm_rich_outfit, itm_heraldic_mail_with_surcoat, itm_nomad_boots, itm_mail_boots, itm_guard_helmet, itm_gauntlets, itm_fighting_pick, itm_tab_shield_heater_c], knight_attrib_3,wp(130),knight_skills_3, 0x0000000c2a0805442b2c6cc98c8dbaac00000000001d389b0000000000000000, swadian_face_older_2],
["knight_1_13", "Count Beranz", "Beranz", tf_hero, 0, reserved, fac_kingdom_1, [itm_saddle_horse, itm_ragged_outfit, itm_heraldic_mail_with_surcoat, itm_nomad_boots, itm_splinted_greaves, itm_guard_helmet, itm_gauntlets, itm_sword_medieval_c, itm_sword_two_handed_a, itm_tab_shield_heater_c], knight_attrib_2,wp(160),knight_skills_2, 0x0000000c380c30c2392a8e5322a5392c00000000001e5c620000000000000000, swadian_face_older_2],
["knight_1_14", "Count Rafard", "Rafard", tf_hero, 0, reserved, fac_kingdom_1, [itm_saddle_horse, itm_short_tunic, itm_heraldic_mail_with_tabard, itm_leather_boots, itm_mail_chausses, itm_nasal_helmet, itm_scale_gauntlets, itm_bastard_sword_a, itm_tab_shield_heater_cav_a], knight_attrib_2,wp(190),knight_skills_3|knows_trainer_6, 0x0000000c3f10000532d45203954e192200000000001e47630000000000000000, swadian_face_older_2],
["knight_1_15", "Count Regas", "Regas", tf_hero, 0, reserved, fac_kingdom_1, [itm_hunter, itm_rich_outfit, itm_mail_hauberk, itm_woolen_hose, itm_mail_chausses, itm_great_helmet, itm_gauntlets, itm_sword_viking_3, itm_sword_two_handed_a, itm_tab_shield_heater_d], knight_attrib_4,wp(140),knight_skills_2, 0x0000000c5c0840034895654c9b660c5d00000000001e34530000000000000000, swadian_face_young_2],
["knight_1_16", "Count Devlian", "Devlian", tf_hero, 0, reserved, fac_kingdom_1, [itm_saddle_horse, itm_courtly_outfit, itm_heraldic_mail_with_surcoat, itm_nomad_boots, itm_splinted_greaves, itm_great_helmet, itm_gauntlets, itm_sword_medieval_c, itm_tab_shield_heater_c], knight_attrib_1,wp(130),knight_skills_2, 0x000000095108144657a1ba3ad456e8cb00000000001e325a0000000000000000, swadian_face_young_2],
["knight_1_17", "Count Rafarch", "Rafarch", tf_hero, 0, reserved, fac_kingdom_1, [itm_steppe_horse, itm_gambeson, itm_heraldic_mail_with_surcoat, itm_blue_hose, itm_mail_boots, itm_nasal_helmet, itm_scale_gauntlets, itm_fighting_pick, itm_tab_shield_heater_cav_b], knight_attrib_2,wp(190),knight_skills_1|knows_trainer_4, 0x0000000c010c42c14d9d6918bdb336e200000000001dd6a30000000000000000, swadian_face_young_2],
["knight_1_18", "Count Rochabarth", "Rochabarth", tf_hero, 0, reserved, fac_kingdom_1, [itm_courser, itm_blue_gambeson, itm_mail_hauberk, itm_woolen_hose, itm_mail_chausses, itm_winged_great_helmet, itm_gauntlets, itm_sword_two_handed_a, itm_tab_shield_heater_cav_a], knight_attrib_3,wp(210),knight_skills_1, 0x0000000c150045c6365d8565932a8d6400000000001ec6940000000000000000, swadian_face_young_2],
["knight_1_19", "Count Despin", "Despin", tf_hero, 0, reserved, fac_kingdom_1, [itm_saddle_horse, itm_rich_outfit, itm_heraldic_mail_with_surcoat, itm_nomad_boots, itm_mail_boots, itm_great_helmet, itm_gauntlets, itm_fighting_pick, itm_sword_two_handed_a, itm_tab_shield_heater_cav_a], knight_attrib_1,wp(120),knight_skills_1, 0x00000008200012033d9b6d4a92ada53500000000001cc1180000000000000000, swadian_face_young_2],
["knight_1_20", "Count Montewar", "Montewar", tf_hero, 0, reserved, fac_kingdom_1, [itm_saddle_horse, itm_ragged_outfit, itm_heraldic_mail_with_surcoat, itm_nomad_boots, itm_splinted_greaves, itm_great_helmet, itm_gauntlets, itm_sword_medieval_c, itm_sword_two_handed_a, itm_tab_shield_heater_cav_a], knight_attrib_2,wp(150),knight_skills_1, 0x0000000c4d0840d24a9b2ab4ac2a332400000000001d34db0000000000000000, swadian_face_young_2],
# ["knight_1_21", "Lord Swadian 21", "knight_1_7", tf_hero, 0, reserved, fac_kingdom_1, [itm_saddle_horse, itm_ragged_outfit, itm_heraldic_mail_with_surcoat, itm_nomad_boots, itm_splinted_greaves, itm_great_helmet, itm_gauntlets, itm_sword_medieval_c, itm_sword_two_handed_a, itm_tab_shield_heater_cav_a], knight_attrib_2,wp(150),knight_skills_2, 0x0000000c4d0840d24a9b2ab4ac2a332400000000001d34db0000000000000000, swadian_face_young_2],
# ["knight_1_22", "Lord Swadian 22", "knight_1_8", tf_hero, 0, reserved, fac_kingdom_1, [itm_saddle_horse, itm_short_tunic, itm_heraldic_mail_with_surcoat, itm_leather_boots, itm_mail_chausses, itm_winged_great_helmet, itm_gauntlets, itm_bastard_sword_a, itm_sword_two_handed_a, itm_tab_shield_heater_d], knight_attrib_3,wp(180),knight_skills_3|knows_trainer_4, 0x0000000c370c1194546469ca6c4e450e00000000001ebac40000000000000000, swadian_face_older_2],
# ["knight_1_23", "Lord Swadian 23", "knight_1_9", tf_hero, 0, reserved, fac_kingdom_1, [itm_hunter, itm_rich_outfit, itm_mail_hauberk, itm_woolen_hose, itm_mail_chausses, itm_guard_helmet, itm_gauntlets, itm_sword_medieval_c, itm_tab_shield_heater_d], knight_attrib_4,wp(200),knight_skills_4|knows_trainer_6, 0x0000000c0c1064864ba34e2ae291992b00000000001da8720000000000000000, swadian_face_older_2],
# ["knight_1_24", "Lord Swadian 24", "knight_1_0", tf_hero, 0, reserved, fac_kingdom_1, [itm_hunter, itm_tabard, itm_heraldic_mail_with_surcoat, itm_leather_boots, itm_mail_boots, itm_winged_great_helmet, itm_gauntlets, itm_bastard_sword_b, itm_sword_two_handed_b, itm_tab_shield_heater_cav_b], knight_attrib_5,wp(240),knight_skills_5|knows_trainer_5, 0x0000000c0a08038736db74c6a396a8e500000000001db8eb0000000000000000, swadian_face_older_2],
["knight_2_1", "Boyar Vuldrat", "Vuldrat", tf_hero, 0, reserved, fac_kingdom_2, [itm_saddle_horse, itm_fur_coat, itm_vaegir_elite_armor, itm_nomad_boots, itm_splinted_leather_greaves, itm_vaegir_noble_helmet, itm_mail_mittens, itm_sword_viking_3, itm_tab_shield_kite_c], knight_attrib_1,wp(130),knight_skills_1|knows_trainer_3, 0x00000005590011c33d9b6d4a92ada53500000000001cc1180000000000000000, vaegir_face_middle_2],
["knight_2_2", "Boyar Naldera", "Naldera", tf_hero, 0, reserved, fac_kingdom_2, [itm_saddle_horse, itm_rich_outfit, itm_lamellar_armor, itm_woolen_hose, itm_mail_chausses, itm_vaegir_noble_helmet, itm_mail_mittens, itm_shortened_military_scythe, itm_tab_shield_kite_cav_a], knight_attrib_2,wp(160),knight_skills_2, 0x0000000c2a0015d249b68b46a98e176400000000001d95a40000000000000000, vaegir_face_old_2],
["knight_2_3", "Boyar Meriga", "Meriga", tf_hero, 0, reserved, fac_kingdom_2, [itm_warhorse_steppe, itm_short_tunic, itm_mail_hauberk, itm_woolen_hose, itm_mail_chausses, itm_vaegir_lamellar_helmet, itm_lamellar_gauntlets, itm_great_bardiche, itm_tab_shield_kite_cav_b], knight_attrib_3,wp(190),knight_skills_3, 0x0000000c131031c546a38a2765b4c86000000000001e58d30000000000000000, vaegir_face_older_2],
["knight_2_4", "Boyar Khavel", "Khavel", tf_hero, 0, reserved, fac_kingdom_2, [itm_saddle_horse, itm_courtly_outfit, itm_lamellar_armor, itm_leather_boots, itm_mail_boots, itm_vaegir_noble_helmet, itm_lamellar_gauntlets, itm_bastard_sword_b, itm_tab_shield_kite_cav_b], knight_attrib_4,wp(220),knight_skills_4, 0x0000000c2f0832c748f272540d8ab65900000000001d34e60000000000000000, vaegir_face_older_2],
["knight_2_5", "Boyar Doru", "Doru", tf_hero, 0, reserved, fac_kingdom_2, [itm_warhorse_steppe, itm_rich_outfit, itm_haubergeon, itm_leather_boots, itm_mail_chausses, itm_vaegir_noble_helmet, itm_scale_gauntlets, itm_bastard_sword_b, itm_tab_shield_kite_d], knight_attrib_5,wp(250),knight_skills_5, 0x0000000e310061435d76bb5f55bad9ad00000000001ed8ec0000000000000000, vaegir_face_older_2],
["knight_2_6", "Boyar Belgaru", "Belgaru", tf_hero, 0, reserved, fac_kingdom_2, [itm_saddle_horse, itm_nomad_vest, itm_vaegir_elite_armor, itm_woolen_hose, itm_mail_chausses, itm_vaegir_lamellar_helmet, itm_mail_mittens, itm_sword_viking_3, itm_tab_shield_kite_c], knight_attrib_1,wp(130),knight_skills_1|knows_trainer_3, 0x0000000a0100421038da7157aa4e430a00000000001da8bc0000000000000000, vaegir_face_middle_2],
["knight_2_7", "Boyar Ralcha", "Ralcha", tf_hero, 0, reserved, fac_kingdom_2, [itm_steppe_horse, itm_leather_jacket, itm_mail_hauberk, itm_leather_boots, itm_mail_boots, itm_vaegir_noble_helmet, itm_lamellar_gauntlets, itm_great_bardiche, itm_tab_shield_kite_cav_a], knight_attrib_2,wp(160),knight_skills_2|knows_trainer_4, 0x0000000c04100153335ba9390b2d277500000000001d89120000000000000000, vaegir_face_old_2],
["knight_2_8", "Boyar Vlan", "Vlan", tf_hero, 0, reserved, fac_kingdom_2, [itm_hunter, itm_nomad_robe, itm_nomad_vest, itm_woolen_hose, itm_mail_chausses, itm_vaegir_noble_helmet, itm_lamellar_gauntlets, itm_shortened_military_scythe, itm_tab_shield_kite_d], knight_attrib_3,wp(200),knight_skills_3|knows_trainer_5, 0x0000000c00046581234e8da2cdd248db00000000001f569c0000000000000000, vaegir_face_older_2],
["knight_2_9", "Boyar Mleza", "Mleza", tf_hero, 0, reserved, fac_kingdom_2, [itm_saddle_horse, itm_rich_outfit, itm_vaegir_elite_armor, itm_leather_boots, itm_mail_chausses, itm_vaegir_lamellar_helmet, itm_lamellar_gauntlets, itm_great_bardiche, itm_tab_shield_kite_d], knight_attrib_4,wp(230),knight_skills_4, 0x0000000c160451d2136469c4d9b159ad00000000001e28f10000000000000000, vaegir_face_older_2],
["knight_2_10", "Boyar Nelag", "Nelag", tf_hero, 0, reserved, fac_kingdom_2, [itm_warhorse_steppe, itm_fur_coat, itm_lamellar_armor, itm_woolen_hose, itm_mail_boots, itm_vaegir_noble_helmet, itm_scale_gauntlets, itm_military_pick, itm_tab_shield_kite_cav_b], knight_attrib_5,wp(260),knight_skills_5|knows_trainer_6, 0x0000000f7c00520e66b76edd5cd5eb6e00000000001f691e0000000000000000, vaegir_face_older_2],
["knight_2_11", "Boyar Crahask", "Crahask", tf_hero, 0, reserved, fac_kingdom_2, [itm_saddle_horse, itm_leather_jacket, itm_vaegir_elite_armor, itm_nomad_boots, itm_splinted_leather_greaves, itm_vaegir_noble_helmet, itm_scale_gauntlets, itm_sword_viking_3, itm_tab_shield_kite_cav_a], knight_attrib_1,wp(130),knight_skills_1, 0x0000000c1d0821d236acd6991b74d69d00000000001e476c0000000000000000, vaegir_face_middle_2],
["knight_2_12", "Boyar Bracha", "Bracha", tf_hero, 0, reserved, fac_kingdom_2, [itm_saddle_horse, itm_rich_outfit, itm_lamellar_armor, itm_woolen_hose, itm_mail_chausses, itm_vaegir_noble_helmet, itm_mail_mittens, itm_great_bardiche, itm_tab_shield_kite_cav_a], knight_attrib_2,wp(170),knight_skills_2, 0x0000000c0f04024b2509d5d53944c6a300000000001d5b320000000000000000, vaegir_face_old_2],
["knight_2_13", "Boyar Druli", "Druli", tf_hero, 0, reserved, fac_kingdom_2, [itm_hunter, itm_short_tunic, itm_mail_hauberk, itm_woolen_hose, itm_mail_chausses, itm_vaegir_lamellar_helmet, itm_lamellar_gauntlets, itm_great_bardiche, itm_tab_shield_kite_cav_b], knight_attrib_3,wp(190),knight_skills_3, 0x0000000c680432d3392230cb926d56ca00000000001da69b0000000000000000, vaegir_face_older_2],
["knight_2_14", "Boyar Marmun", "Marmun", tf_hero, 0, reserved, fac_kingdom_2, [itm_saddle_horse, itm_courtly_outfit, itm_lamellar_armor, itm_leather_boots, itm_mail_boots, itm_vaegir_noble_helmet, itm_lamellar_gauntlets, itm_shortened_military_scythe, itm_tab_shield_kite_cav_b], knight_attrib_4,wp(220),knight_skills_4|knows_trainer_6, 0x0000000c27046000471bd2e93375b52c00000000001dd5220000000000000000, vaegir_face_older_2],
["knight_2_15", "Boyar Gastya", "Gastya", tf_hero, 0, reserved, fac_kingdom_2, [itm_hunter, itm_rich_outfit, itm_haubergeon, itm_leather_boots, itm_mail_chausses, itm_vaegir_lamellar_helmet, itm_lamellar_gauntlets, itm_bastard_sword_b, itm_shortened_military_scythe, itm_tab_shield_kite_cav_b], knight_attrib_5,wp(250),knight_skills_5, 0x0000000de50052123b6bb36de5d6eb7400000000001dd72c0000000000000000, vaegir_face_older_2],
["knight_2_16", "Boyar Harish", "Harish", tf_hero, 0, reserved, fac_kingdom_2, [itm_saddle_horse, itm_nomad_vest, itm_vaegir_elite_armor, itm_woolen_hose, itm_mail_chausses, itm_vaegir_noble_helmet, itm_mail_mittens, itm_great_bardiche, itm_tab_shield_kite_c], knight_attrib_1,wp(120),knight_skills_1, 0x000000085f00000539233512e287391d00000000001db7200000000000000000, vaegir_face_middle_2],
["knight_2_17", "Boyar Taisa", "Taisa", tf_hero, 0, reserved, fac_kingdom_2, [itm_steppe_horse, itm_leather_jacket, itm_mail_hauberk, itm_leather_boots, itm_mail_boots, itm_vaegir_noble_helmet, itm_scale_gauntlets, itm_great_bardiche, itm_tab_shield_kite_cav_a], knight_attrib_2,wp(150),knight_skills_2, 0x0000000a070c4387374bd19addd2a4ab00000000001e32cc0000000000000000, vaegir_face_old_2],
["knight_2_18", "Boyar Valishin", "Valishin", tf_hero, 0, reserved, fac_kingdom_2, [itm_hunter, itm_nomad_robe, itm_nomad_vest, itm_woolen_hose, itm_mail_chausses, itm_vaegir_lamellar_helmet, itm_lamellar_gauntlets, itm_great_bardiche, itm_tab_shield_kite_cav_a], knight_attrib_3,wp(180),knight_skills_3, 0x0000000b670012c23d9b6d4a92ada53500000000001cc1180000000000000000, vaegir_face_older_2],
["knight_2_19", "Boyar Rudin", "Rudin", tf_hero, 0, reserved, fac_kingdom_2, [itm_saddle_horse, itm_rich_outfit, itm_vaegir_elite_armor, itm_leather_boots, itm_mail_chausses, itm_vaegir_noble_helmet, itm_scale_gauntlets, itm_fighting_pick, itm_shortened_military_scythe, itm_tab_shield_kite_d], knight_attrib_4,wp(210),knight_skills_4|knows_trainer_4, 0x0000000e070050853b0a6e4994ae272a00000000001db4e10000000000000000, vaegir_face_older_2],
["knight_2_20", "Boyar Kumipa", "Kumipa", tf_hero, 0, reserved, fac_kingdom_2, [itm_warhorse_steppe, itm_fur_coat, itm_lamellar_armor, itm_woolen_hose, itm_mail_boots, itm_vaegir_lamellar_helmet, itm_lamellar_gauntlets, itm_great_bardiche, itm_tab_shield_kite_cav_b], knight_attrib_5,wp(240),knight_skills_5|knows_trainer_5, 0x0000000f800021c63b0a6e4994ae272a00000000001db4e10000000000000000, vaegir_face_older_2],
#Reference list of Khergit civilian clothing items (for equipping lords out of armor): itm_leather_vest, itm_nomad_vest, itm_nomad_robe, itm_lamellar_vest, itm_tribal_warrior_outfit
["knight_3_1", "Alagur Noyan", "Alagur", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_leather_vest, itm_studded_leather_coat,itm_nomad_boots, itm_mail_boots, itm_khergit_guard_helmet, itm_lamellar_gauntlets, itm_leather_gloves, itm_sword_khergit_3, itm_tab_shield_small_round_c, itm_khergit_bow, itm_arrows], knight_attrib_1,wp(130),knight_skills_1|knows_trainer_3|knows_power_draw_4, 0x000000043000318b54b246b7094dc39c00000000001d31270000000000000000, khergit_face_middle_2],
["knight_3_2", "Tonju Noyan", "Tonju", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_nomad_vest, itm_lamellar_armor, itm_hide_boots, itm_mail_boots, itm_khergit_cavalry_helmet, itm_lamellar_gauntlets, itm_leather_gloves, itm_khergit_sword_two_handed_b, itm_tab_shield_small_round_b, itm_khergit_bow, itm_arrows], knight_attrib_2,wp(160),knight_skills_2|knows_power_draw_4, 0x0000000c280461004929b334ad632aa200000000001e05120000000000000000, khergit_face_old_2],
["knight_3_3", "Belir Noyan", "Belir", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_nomad_robe, itm_lamellar_armor,itm_nomad_boots, itm_splinted_leather_greaves, itm_khergit_guard_helmet, itm_lamellar_gauntlets, itm_fighting_pick, itm_tab_shield_small_round_c, itm_khergit_bow, itm_arrows], knight_attrib_3,wp(190),knight_skills_3|knows_trainer_5|knows_power_draw_4, 0x0000000e880062c53b0a6e4994ae272a00000000001db4e10000000000000000, khergit_face_older_2],
["knight_3_4", "Asugan Noyan", "Asugan", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_lamellar_vest_khergit, itm_khergit_elite_armor, itm_hide_boots, itm_splinted_greaves, itm_khergit_cavalry_helmet, itm_lamellar_gauntlets, itm_khergit_sword_two_handed_b, itm_lance, itm_tab_shield_small_round_c], knight_attrib_4,wp(220),knight_skills_4|knows_power_draw_4, 0x0000000c23085386391b5ac72a96d95c00000000001e37230000000000000000, khergit_face_older_2],
["knight_3_5", "Brula Noyan", "Brula", tf_hero, 0, reserved, fac_kingdom_3, [itm_warhorse_steppe, itm_ragged_outfit, itm_lamellar_vest_khergit, itm_hide_boots, itm_mail_boots, itm_khergit_guard_helmet, itm_lamellar_gauntlets, itm_sword_khergit_3, itm_lance, itm_tab_shield_small_round_c], knight_attrib_5,wp(250),knight_skills_5|knows_power_draw_4, 0x0000000efe0051ca4b377b4964b6eb6500000000001f696c0000000000000000, khergit_face_older_2],
["knight_3_6", "Imirza Noyan", "Imirza", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_tribal_warrior_outfit,itm_hide_boots, itm_splinted_leather_greaves, itm_khergit_cavalry_helmet, itm_lamellar_gauntlets, itm_sword_khergit_4,itm_lance, itm_tab_shield_small_round_b], knight_attrib_1,wp(130),knight_skills_1|knows_power_draw_4, 0x00000006f600418b54b246b7094dc31a00000000001d37270000000000000000, khergit_face_middle_2],
["knight_3_7", "Urumuda Noyan","Urumuda", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_leather_vest,itm_leather_boots, itm_hide_boots, itm_skullcap, itm_khergit_guard_helmet, itm_lamellar_gauntlets, itm_sword_khergit_3, itm_tab_shield_small_round_b], knight_attrib_2,wp(160),knight_skills_2|knows_power_draw_4, 0x0000000bdd00510a44be2d14d370c65c00000000001ed6df0000000000000000, khergit_face_old_2],
["knight_3_8", "Kramuk Noyan", "Kramuk", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_nomad_vest, itm_lamellar_armor, itm_woolen_hose, itm_splinted_greaves, itm_khergit_cavalry_helmet, itm_lamellar_gauntlets, itm_great_bardiche, itm_tab_shield_small_round_c], knight_attrib_3,wp(190),knight_skills_3|knows_power_draw_4, 0x0000000abc00518b5af4ab4b9c8e596400000000001dc76d0000000000000000, khergit_face_older_2],
["knight_3_9", "Chaurka Noyan","Chaurka", tf_hero, 0, reserved, fac_kingdom_3, [itm_hunter, itm_nomad_robe, itm_lamellar_vest_khergit, itm_leather_boots, itm_splinted_leather_greaves, itm_khergit_guard_helmet, itm_lamellar_gauntlets, itm_khergit_sword_two_handed_b, itm_tab_shield_small_round_c], knight_attrib_4,wp(220),knight_skills_4|knows_power_draw_4, 0x0000000a180441c921a30ea68b54971500000000001e54db0000000000000000, khergit_face_older_2],
["knight_3_10", "Sebula Noyan","Sebula", tf_hero, 0, reserved, fac_kingdom_3, [itm_warhorse_steppe, itm_lamellar_vest_khergit, itm_lamellar_armor, itm_hide_boots, itm_mail_chausses, itm_khergit_guard_helmet, itm_lamellar_gauntlets, itm_sword_khergit_4, itm_khergit_sword_two_handed_b, itm_tab_shield_small_round_c], knight_attrib_5,wp(250),knight_skills_5|knows_trainer_6|knows_power_draw_4, 0x0000000a3b00418c5b36c686d920a76100000000001c436f0000000000000000, khergit_face_older_2],
["knight_3_11", "Tulug Noyan", "Tulug", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_leather_vest, itm_studded_leather_coat, itm_nomad_boots, itm_mail_boots, itm_khergit_cavalry_helmet, itm_leather_gloves, itm_sword_khergit_4, itm_tab_shield_small_round_b, itm_khergit_bow, itm_arrows], knight_attrib_1,wp(150),knight_skills_1|knows_power_draw_4, 0x00000007d100534b44962d14d370c65c00000000001ed6df0000000000000000, khergit_face_middle_2],
["knight_3_12", "Nasugei Noyan", "Nasugei", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_nomad_vest, itm_lamellar_armor, itm_hide_boots, itm_mail_boots, itm_khergit_guard_helmet, itm_leather_gloves, itm_sword_khergit_3, itm_tab_shield_small_round_b], knight_attrib_2,wp(190),knight_skills_2|knows_power_draw_4, 0x0000000bf400610c5b33d3c9258edb6c00000000001eb96d0000000000000000, khergit_face_old_2],
["knight_3_13", "Urubay Noyan","Urubay", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_nomad_robe, itm_lamellar_vest_khergit, itm_nomad_boots, itm_splinted_leather_greaves, itm_khergit_cavalry_helmet, itm_lamellar_gauntlets, itm_fighting_pick, itm_tab_shield_small_round_c, itm_khergit_bow, itm_arrows], knight_attrib_3,wp(200),knight_skills_3|knows_trainer_3|knows_power_draw_4, 0x0000000bfd0061c65b6eb33b25d2591d00000000001f58eb0000000000000000, khergit_face_older_2],
["knight_3_14", "Hugu Noyan", "Hugu", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_lamellar_vest_khergit, itm_hide_boots, itm_splinted_greaves, itm_khergit_guard_helmet, itm_lamellar_gauntlets, itm_shortened_military_scythe, itm_tab_shield_small_round_c, itm_khergit_bow, itm_arrows], knight_attrib_4,wp(300),knight_skills_4|knows_trainer_6|knows_power_draw_4, 0x0000000b6900514144be2d14d370c65c00000000001ed6df0000000000000000, khergit_face_older_2],
["knight_3_15", "Tansugai Noyan", "Tansugai", tf_hero, 0, reserved, fac_kingdom_3, [itm_warhorse_steppe, itm_ragged_outfit, itm_lamellar_vest_khergit, itm_hide_boots, itm_mail_boots, itm_khergit_cavalry_helmet, itm_sword_khergit_4, itm_khergit_sword_two_handed_b, itm_tab_shield_small_round_c], knight_attrib_5,wp(240),knight_skills_5|knows_trainer_4|knows_power_draw_4, 0x0000000c360c524b6454465b59b9d93500000000001ea4860000000000000000, khergit_face_older_2],
["knight_3_16", "Tirida Noyan","Tirida", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_tribal_warrior_outfit, itm_khergit_elite_armor, itm_hide_boots, itm_splinted_leather_greaves, itm_khergit_guard_helmet, itm_leather_gloves, itm_khergit_sword_two_handed_a, itm_lance, itm_tab_shield_small_round_b, itm_khergit_bow, itm_arrows], knight_attrib_1,wp(120),knight_skills_1|knows_power_draw_4, 0x0000000c350c418438ab85b75c61b8d300000000001d21530000000000000000, khergit_face_middle_2],
["knight_3_17", "Ulusamai Noyan", "Ulusamai", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_leather_vest, itm_lamellar_vest_khergit, itm_leather_boots, itm_mail_boots, itm_khergit_guard_helmet, itm_leather_gloves, itm_great_bardiche, itm_tab_shield_small_round_c, itm_khergit_bow, itm_arrows], knight_attrib_2,wp(150),knight_skills_2|knows_power_draw_4, 0x0000000c3c0821c647264ab6e68dc4d500000000001e42590000000000000000, khergit_face_old_2],
["knight_3_18", "Karaban Noyan", "Karaban", tf_hero, 0, reserved, fac_kingdom_3, [itm_courser, itm_nomad_vest, itm_khergit_elite_armor, itm_hide_boots, itm_splinted_greaves, itm_khergit_guard_helmet, itm_scale_gauntlets, itm_war_axe, itm_tab_shield_small_round_c, itm_lance, itm_khergit_bow, itm_arrows], knight_attrib_3,wp(180),knight_skills_3|knows_trainer_4|knows_power_draw_4, 0x0000000c0810500347ae7acd0d3ad74a00000000001e289a0000000000000000, khergit_face_older_2],
["knight_3_19", "Akadan Noyan","Akadan", tf_hero, 0, reserved, fac_kingdom_3, [itm_hunter, itm_nomad_robe, itm_lamellar_vest_khergit, itm_leather_boots, itm_splinted_leather_greaves, itm_khergit_cavalry_helmet, itm_lamellar_gauntlets, itm_sword_khergit_4, itm_shortened_military_scythe, itm_tab_shield_small_round_c], knight_attrib_4,wp(210),knight_skills_4|knows_trainer_5|knows_power_draw_4, 0x0000000c1500510528f50d52d20b152300000000001d66db0000000000000000, khergit_face_older_2],
["knight_3_20", "Dundush Noyan","Dundush", tf_hero, 0, reserved, fac_kingdom_3, [itm_warhorse_steppe, itm_lamellar_vest, itm_khergit_elite_armor, itm_hide_boots, itm_mail_chausses, itm_khergit_guard_helmet, itm_scale_gauntlets, itm_khergit_sword_two_handed_a, itm_tab_shield_small_round_c, itm_lance, itm_khergit_bow, itm_arrows], knight_attrib_5,wp(240),knight_skills_5|knows_power_draw_4, 0x0000000f7800620d66b76edd5cd5eb6e00000000001f691e0000000000000000, khergit_face_older_2],
["knight_4_1", "Jarl Aedin", "Aedin", tf_hero, 0, reserved, fac_kingdom_4, [itm_rich_outfit, itm_banded_armor, itm_woolen_hose, itm_mail_boots, itm_nordic_huscarl_helmet, itm_mail_mittens, itm_great_axe, itm_tab_shield_round_d, itm_throwing_axes], knight_attrib_1,wp(130),knight_skills_1, 0x0000000c13002254340eb1d91159392d00000000001eb75a0000000000000000, nord_face_middle_2],
["knight_4_2", "Jarl Irya", "Irya", tf_hero, 0, reserved, fac_kingdom_4, [ itm_short_tunic, itm_banded_armor, itm_blue_hose, itm_splinted_greaves, itm_nordic_warlord_helmet, itm_scale_gauntlets, itm_one_handed_battle_axe_c, itm_tab_shield_round_d, itm_throwing_axes], knight_attrib_2,wp(160),knight_skills_2|knows_trainer_3, 0x0000000c1610218368e29744e9a5985b00000000001db2a10000000000000000, nord_face_old_2],
["knight_4_3", "Jarl Olaf", "Olaf", tf_hero, 0, reserved, fac_kingdom_4, [itm_warhorse, itm_rich_outfit, itm_heraldic_mail_with_tabard, itm_nomad_boots, itm_mail_chausses, itm_scale_gauntlets, itm_nordic_warlord_helmet, itm_great_axe, itm_tab_shield_round_e, itm_throwing_axes], knight_attrib_3,wp(190),knight_skills_3, 0x0000000c03040289245a314b744b30a400000000001eb2a90000000000000000, nord_face_older_2],
["knight_4_4", "Jarl Reamald", "Reamald", tf_hero, 0, reserved, fac_kingdom_4, [itm_hunter, itm_leather_vest, itm_banded_armor, itm_woolen_hose, itm_mail_boots, itm_scale_gauntlets, itm_nordic_huscarl_helmet, itm_fighting_pick, itm_tab_shield_round_e, itm_throwing_axes], knight_attrib_4,wp(210),knight_skills_4, 0x0000000c3f1001ca3d6955b26a8939a300000000001e39b60000000000000000, nord_face_older_2],
["knight_4_5", "Jarl Turya", "Turya", tf_hero, 0, reserved, fac_kingdom_4, [ itm_fur_coat, itm_heraldic_mail_with_surcoat, itm_leather_boots, itm_splinted_leather_greaves, itm_scale_gauntlets, itm_nordic_huscarl_helmet, itm_bastard_sword_b, itm_tab_shield_round_e, itm_throwing_axes, itm_throwing_axes], knight_attrib_5,wp(250),knight_skills_5, 0x0000000ff508330546dc4a59422d450c00000000001e51340000000000000000, nord_face_older_2],
["knight_4_6", "Jarl Gundur", "Gundur", tf_hero, 0, reserved, fac_kingdom_4, [ itm_nomad_robe, itm_banded_armor, itm_nomad_boots, itm_mail_chausses, itm_nordic_warlord_helmet, itm_mail_mittens, itm_war_axe, itm_tab_shield_round_d], knight_attrib_1,wp(130),knight_skills_1, 0x00000005b00011813d9b6d4a92ada53500000000001cc1180000000000000000, nord_face_middle_2],
["knight_4_7", "Jarl Harald", "Harald", tf_hero, 0, reserved, fac_kingdom_4, [ itm_fur_coat, itm_studded_leather_coat, itm_nomad_boots, itm_mail_boots, itm_nordic_warlord_helmet, itm_mail_mittens, itm_sword_viking_3, itm_shortened_military_scythe, itm_tab_shield_round_d], knight_attrib_2,wp(160),knight_skills_2|knows_trainer_4, 0x00000006690002873d9b6d4a92ada53500000000001cc1180000000000000000, nord_face_old_2],
["knight_4_8", "Jarl Knudarr", "Knudarr", tf_hero, 0, reserved, fac_kingdom_4, [ itm_rich_outfit, itm_mail_and_plate, itm_woolen_hose, itm_mail_chausses, itm_segmented_helmet, itm_scale_gauntlets, itm_war_axe, itm_tab_shield_round_e, itm_throwing_axes], knight_attrib_3,wp(190),knight_skills_3, 0x0000000f830051c53b026e4994ae272a00000000001db4e10000000000000000, nord_face_older_2],
["knight_4_9", "Jarl Haeda", "Haeda", tf_hero, 0, reserved, fac_kingdom_4, [itm_warhorse, itm_nomad_robe, itm_haubergeon, itm_blue_hose, itm_mail_boots, itm_guard_helmet, itm_scale_gauntlets, itm_arrows, itm_long_bow, itm_one_handed_battle_axe_c, itm_tab_shield_round_e], knight_attrib_4,wp(220),knight_skills_4|knows_trainer_5|knows_power_draw_4, 0x00000000080c54c1345bd21349b1b67300000000001c90c80000000000000000, nord_face_older_2],
["knight_4_10", "Jarl Turegor", "Turegor", tf_hero, 0, reserved, fac_kingdom_4, [itm_hunter, itm_courtly_outfit, itm_coat_of_plates, itm_nomad_boots, itm_splinted_greaves, itm_scale_gauntlets, itm_winged_great_helmet,itm_great_axe, itm_tab_shield_round_e], knight_attrib_5,wp(250),knight_skills_5|knows_trainer_6, 0x000000084b0002063d9b6d4a92ada53500000000001cc1180000000000000000, nord_face_older_2],
["knight_4_11", "Jarl Logarson", "Logarson", tf_hero, 0, reserved, fac_kingdom_4, [ itm_rich_outfit, itm_banded_armor, itm_woolen_hose, itm_mail_boots, itm_nordic_helmet, itm_mail_mittens, itm_great_bardiche, itm_tab_shield_round_d], knight_attrib_1,wp(140),knight_skills_1, 0x000000002d100005471d4ae69ccacb1d00000000001dca550000000000000000, nord_face_middle_2],
["knight_4_12", "Jarl Aeric", "Aeric", tf_hero, 0, reserved, fac_kingdom_4, [ itm_short_tunic, itm_banded_armor, itm_blue_hose, itm_splinted_greaves, itm_nordic_huscarl_helmet, itm_mail_mittens, itm_one_handed_battle_axe_c, itm_tab_shield_round_d], knight_attrib_2,wp(200),knight_skills_2, 0x0000000b9500020824936cc51cb5bb2500000000001dd4d80000000000000000, nord_face_old_2],
["knight_4_13", "Jarl Faarn", "Faarn", tf_hero, 0, reserved, fac_kingdom_4, [itm_warhorse, itm_rich_outfit, itm_heraldic_mail_with_tabard, itm_nomad_boots, itm_mail_chausses, itm_scale_gauntlets, itm_nordic_warlord_helmet, itm_war_axe, itm_tab_shield_round_e], knight_attrib_3,wp(250),knight_skills_3|knows_trainer_3, 0x0000000a300012c439233512e287391d00000000001db7200000000000000000, nord_face_older_2],
["knight_4_14", "Jarl Bulba", "Bulba", tf_hero, 0, reserved, fac_kingdom_4, [ itm_leather_vest, itm_banded_armor, itm_woolen_hose, itm_mail_boots, itm_nordic_helmet, itm_scale_gauntlets, itm_fighting_pick, itm_tab_shield_round_e, itm_throwing_axes], knight_attrib_4,wp(200),knight_skills_4, 0x0000000c0700414f2cb6aa36ea50a69d00000000001dc55c0000000000000000, nord_face_older_2],
["knight_4_15", "Jarl Rayeck", "Rayeck", tf_hero, 0, reserved, fac_kingdom_4, [itm_hunter, itm_leather_jacket, itm_heraldic_mail_with_tabard, itm_leather_boots, itm_scale_gauntlets, itm_splinted_leather_greaves, itm_nordic_huscarl_helmet, itm_shortened_military_scythe, itm_tab_shield_round_e], knight_attrib_5,wp(290),knight_skills_5|knows_trainer_6, 0x0000000d920801831715d1aa9221372300000000001ec6630000000000000000, nord_face_older_2],
["knight_4_16", "Jarl Dirigun", "Dirigun", tf_hero, 0, reserved, fac_kingdom_4, [ itm_nomad_robe, itm_banded_armor, itm_nomad_boots, itm_mail_chausses, itm_nordic_huscarl_helmet, itm_mail_mittens, itm_war_axe, itm_tab_shield_round_d, itm_throwing_axes], knight_attrib_1,wp(120),knight_skills_1, 0x000000099700124239233512e287391d00000000001db7200000000000000000, nord_face_middle_2],
["knight_4_17", "Jarl Marayirr", "Marayirr", tf_hero, 0, reserved, fac_kingdom_4, [ itm_fur_coat, itm_banded_armor, itm_nomad_boots, itm_mail_boots, itm_nordic_warlord_helmet, itm_mail_mittens, itm_sword_viking_3, itm_tab_shield_round_d, itm_throwing_axes], knight_attrib_2,wp(150),knight_skills_2|knows_trainer_4, 0x0000000c2f0442036d232a2324b5b81400000000001e55630000000000000000, nord_face_old_2],
["knight_4_18", "Jarl Gearth", "Gearth", tf_hero, 0, reserved, fac_kingdom_4, [ itm_rich_outfit, itm_mail_and_plate, itm_woolen_hose, itm_mail_chausses, itm_segmented_helmet, itm_scale_gauntlets, itm_sword_viking_3, itm_shortened_military_scythe, itm_tab_shield_round_d], knight_attrib_3,wp(180),knight_skills_3, 0x0000000c0d00118866e22e3d9735a72600000000001eacad0000000000000000, nord_face_older_2],
["knight_4_19", "Jarl Surdun", "Surdun", tf_hero, 0, reserved, fac_kingdom_4, [itm_warhorse, itm_nomad_robe, itm_haubergeon, itm_blue_hose, itm_mail_boots, itm_guard_helmet, itm_scale_gauntlets, itm_one_handed_battle_axe_c, itm_tab_shield_round_e, itm_throwing_axes], knight_attrib_4,wp(210),knight_skills_4|knows_trainer_5, 0x0000000c0308225124e26d4a6295965a00000000001d23e40000000000000000, nord_face_older_2],
["knight_4_20", "Jarl Gerlad", "Gerlad", tf_hero, 0, reserved, fac_kingdom_4, [itm_hunter, itm_courtly_outfit, itm_coat_of_plates, itm_nomad_boots, itm_splinted_greaves, itm_scale_gauntlets, itm_winged_great_helmet,itm_great_axe, itm_tab_shield_round_e, itm_throwing_axes], knight_attrib_5,wp(240),knight_skills_5, 0x0000000f630052813b6bb36de5d6eb7400000000001dd72c0000000000000000, nord_face_older_2],
["knight_5_1", "Count Matheas", "Matheas", tf_hero, 0, reserved, fac_kingdom_5, [itm_saddle_horse, itm_tabard, itm_heraldic_mail_with_surcoat, itm_leather_boots, itm_mail_boots, itm_guard_helmet, itm_leather_gloves, itm_fighting_pick, itm_tab_shield_heater_c], knight_attrib_1,wp(130),knight_skills_1|knows_trainer_3, 0x0000000a1b0c00483adcbaa5ac9a34a200000000001ca2d40000000000000000, rhodok_face_middle_2],
["knight_5_2", "Count Gutlans", "Gutlans", tf_hero, 0, reserved, fac_kingdom_5, [itm_courser, itm_red_gambeson, itm_heraldic_mail_with_tabard, itm_leather_boots, itm_mail_boots, itm_nasal_helmet, itm_leather_gloves, itm_military_pick, itm_sword_two_handed_a, itm_tab_shield_heater_c], knight_attrib_2,wp(160),knight_skills_2|knows_trainer_4, 0x0000000c390c659229136db45a75251300000000001f16930000000000000000, rhodok_face_old_2],
["knight_5_3", "Count Laruqen", "Laruqen", tf_hero, 0, reserved, fac_kingdom_5, [itm_hunter, itm_short_tunic, itm_mail_and_plate, itm_nomad_boots, itm_splinted_leather_greaves, itm_kettle_hat, itm_gauntlets, itm_shortened_military_scythe, itm_tab_shield_heater_d], knight_attrib_3,wp(190),knight_skills_3, 0x0000000c2f10415108b1aacba27558d300000000001d329c0000000000000000, rhodok_face_older_2],
["knight_5_4", "Count Raichs", "Raichs", tf_hero, 0, reserved, fac_kingdom_5, [itm_hunter, itm_leather_jacket, itm_brigandine_red, itm_woolen_hose, itm_splinted_greaves, itm_flat_topped_helmet, itm_gauntlets, itm_bastard_sword_a, itm_tab_shield_heater_d], knight_attrib_4,wp(220),knight_skills_4, 0x0000000c3c005110345c59d56975ba1200000000001e24e40000000000000000, rhodok_face_older_2],
["knight_5_5", "Count Reland", "Reland", tf_hero, 0, reserved, fac_kingdom_5, [itm_hunter, itm_rich_outfit, itm_heraldic_mail_with_tabard, itm_leather_boots, itm_mail_boots, itm_great_helmet, itm_gauntlets, itm_shortened_military_scythe, itm_tab_shield_heater_d], knight_attrib_5,wp(250),knight_skills_5, 0x0000000c060400c454826e471092299a00000000001d952d0000000000000000, rhodok_face_older_2],
["knight_5_6", "Count Tarchias", "Tarchias", tf_hero, 0, reserved, fac_kingdom_5, [itm_sumpter_horse, itm_ragged_outfit, itm_heraldic_mail_with_tabard, itm_woolen_hose, itm_splinted_greaves, itm_gauntlets, itm_skullcap, itm_sword_two_handed_b, itm_tab_shield_heater_c], knight_attrib_1,wp(130),knight_skills_1, 0x000000001100000648d24d36cd964b1d00000000001e2dac0000000000000000, rhodok_face_middle_2],
["knight_5_7", "Count Gharmall", "Gharmall", tf_hero, 0, reserved, fac_kingdom_5, [itm_saddle_horse, itm_coarse_tunic, itm_heraldic_mail_with_surcoat, itm_leather_boots, itm_mail_chausses, itm_gauntlets, itm_nasal_helmet, itm_bastard_sword_a, itm_tab_shield_heater_c], knight_attrib_2,wp(160),knight_skills_2, 0x0000000c3a0455c443d46e4c8b91291a00000000001ca51b0000000000000000, rhodok_face_old_2],
["knight_5_8", "Count Talbar", "Talbar", tf_hero, 0, reserved, fac_kingdom_5, [itm_saddle_horse, itm_courtly_outfit, itm_heraldic_mail_with_tabard, itm_woolen_hose, itm_mail_boots, itm_nasal_helmet, itm_gauntlets, itm_military_pick, itm_sword_two_handed_b, itm_tab_shield_heater_c], knight_attrib_3,wp(190),knight_skills_3|knows_trainer_3, 0x0000000c2c0844d42914d19b2369b4ea00000000001e331b0000000000000000, rhodok_face_older_2],
["knight_5_9", "Count Rimusk", "Rimusk", tf_hero, 0, reserved, fac_kingdom_5, [itm_warhorse, itm_leather_jacket, itm_heraldic_mail_with_tabard, itm_leather_boots, itm_splinted_leather_greaves, itm_kettle_hat, itm_gauntlets, itm_great_bardiche, itm_tab_shield_heater_d], knight_attrib_4,wp(220),knight_skills_4|knows_trainer_6, 0x00000000420430c32331b5551c4724a100000000001e39a40000000000000000, rhodok_face_older_2],
["knight_5_10", "Count Falsevor", "Falsevor", tf_hero, 0, reserved, fac_kingdom_5, [itm_warhorse, itm_rich_outfit, itm_heraldic_mail_with_tabard, itm_blue_hose, itm_mail_chausses, itm_great_helmet, itm_gauntlets, itm_bastard_sword_a, itm_tab_shield_heater_d], knight_attrib_5,wp(250),knight_skills_5|knows_trainer_4, 0x00000008e20011063d9b6d4a92ada53500000000001cc1180000000000000000, rhodok_face_older_2],
["knight_5_11", "Count Etrosq", "Etrosq", tf_hero, 0, reserved, fac_kingdom_5, [itm_saddle_horse, itm_tabard, itm_heraldic_mail_with_surcoat, itm_leather_boots, itm_mail_boots, itm_skullcap, itm_leather_gloves, itm_fighting_pick, itm_tab_shield_heater_c], knight_attrib_1,wp(130),knight_skills_1, 0x0000000c170c14874752adb6eb3228d500000000001c955c0000000000000000, rhodok_face_middle_2],
["knight_5_12", "Count Kurnias", "Kurnias", tf_hero, 0, reserved, fac_kingdom_5, [itm_courser, itm_red_gambeson, itm_heraldic_mail_with_tabard, itm_leather_boots, itm_mail_boots, itm_nasal_helmet, itm_leather_gloves, itm_military_pick, itm_tab_shield_heater_c], knight_attrib_2,wp(160),knight_skills_2|knows_trainer_5, 0x0000000c080c13d056ec8da85e3126ed00000000001d4ce60000000000000000, rhodok_face_old_2],
["knight_5_13", "Count Tellrog", "Tellrog", tf_hero, 0, reserved, fac_kingdom_5, [itm_hunter, itm_short_tunic, itm_mail_and_plate, itm_nomad_boots, itm_splinted_leather_greaves, itm_winged_great_helmet, itm_gauntlets, itm_sword_two_handed_a, itm_tab_shield_heater_d], knight_attrib_3,wp(190),knight_skills_3, 0x0000000cbf10100562a4954ae731588a00000000001d6b530000000000000000, rhodok_face_older_2],
["knight_5_14", "Count Tribidan", "Tribidan", tf_hero, 0, reserved, fac_kingdom_5, [itm_hunter, itm_leather_jacket, itm_brigandine_red, itm_woolen_hose, itm_splinted_greaves, itm_flat_topped_helmet, itm_gauntlets, itm_bastard_sword_a, itm_tab_shield_heater_d], knight_attrib_4,wp(220),knight_skills_4, 0x0000000c330805823baa77556c4e331a00000000001cb9110000000000000000, rhodok_face_older_2],
["knight_5_15", "Count Gerluchs", "Gerluchs", tf_hero, 0, reserved, fac_kingdom_5, [itm_hunter, itm_rich_outfit, itm_heraldic_mail_with_tabard, itm_leather_boots, itm_mail_boots, itm_great_helmet, itm_gauntlets, itm_sword_two_handed_a, itm_tab_shield_heater_d], knight_attrib_5,wp(250),knight_skills_5, 0x0000000d51000106370c4d4732b536de00000000001db9280000000000000000, rhodok_face_older_2],
["knight_5_16", "Count Fudreim", "Fudreim", tf_hero, 0, reserved, fac_kingdom_5, [itm_sumpter_horse, itm_ragged_outfit, itm_heraldic_mail_with_tabard, itm_woolen_hose, itm_splinted_greaves, itm_guard_helmet, itm_leather_gloves, itm_fighting_pick, itm_tab_shield_heater_c], knight_attrib_1,wp(120),knight_skills_1, 0x0000000c06046151435b5122a37756a400000000001c46e50000000000000000, rhodok_face_middle_2],
["knight_5_17", "Count Nealcha", "Nealcha", tf_hero, 0, reserved, fac_kingdom_5, [itm_saddle_horse, itm_coarse_tunic, itm_heraldic_mail_with_surcoat, itm_leather_boots, itm_mail_chausses, itm_nasal_helmet, itm_leather_gloves, itm_bastard_sword_a, itm_tab_shield_heater_c], knight_attrib_2,wp(150),knight_skills_2, 0x0000000c081001d3465c89a6a452356300000000001cda550000000000000000, rhodok_face_old_2],
["knight_5_18", "Count Fraichin", "Fraichin", tf_hero, 0, reserved, fac_kingdom_5, [itm_saddle_horse, itm_courtly_outfit, itm_heraldic_mail_with_tabard, itm_woolen_hose, itm_mail_boots, itm_nasal_helmet, itm_gauntlets, itm_military_pick, itm_tab_shield_heater_d], knight_attrib_3,wp(180),knight_skills_3, 0x0000000a3d0c13c3452aa967276dc95c00000000001dad350000000000000000, rhodok_face_older_2],
["knight_5_19", "Count Trimbau", "Trimbau", tf_hero, 0, reserved, fac_kingdom_5, [itm_warhorse, itm_leather_jacket, itm_heraldic_mail_with_tabard, itm_leather_boots, itm_splinted_leather_greaves, itm_kettle_hat, itm_gauntlets, itm_fighting_pick, itm_sword_two_handed_a, itm_tab_shield_heater_d], knight_attrib_4,wp(210),knight_skills_4|knows_trainer_5, 0x0000000038043194092ab4b2d9adb44c00000000001e072c0000000000000000, rhodok_face_older_2],
["knight_5_20", "Count Reichsin", "Reichsin", tf_hero, 0, reserved, fac_kingdom_5, [itm_warhorse, itm_rich_outfit, itm_heraldic_mail_with_tabard, itm_blue_hose, itm_mail_chausses, itm_great_helmet, itm_gauntlets, itm_bastard_sword_b, itm_tab_shield_heater_d], knight_attrib_5,wp(240),knight_skills_5|knows_trainer_6, 0x000000003600420515a865b45c64d64c00000000001d544b0000000000000000, rhodok_face_older_2],
["knight_6_1", "Emir Uqais", "Uqais", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_a, itm_mamluke_mail, itm_sarranid_boots_c, itm_mail_boots, itm_sarranid_warrior_cap, itm_leather_gloves, itm_heavy_lance, itm_sarranid_cavalry_sword, itm_tab_shield_small_round_c], knight_attrib_1,wp(130),knight_skills_1|knows_trainer_3, 0x00000000600c2084486195383349eae500000000001d16a30000000000000000, rhodok_face_middle_2],
["knight_6_2", "Emir Hamezan", "Hamezan", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_b, itm_sarranid_elite_armor, itm_sarranid_boots_c, itm_mail_boots, itm_sarranid_warrior_cap, itm_leather_gloves, itm_lance, itm_military_pick, itm_sword_two_handed_a, itm_tab_shield_small_round_c], knight_attrib_2,wp(160),knight_skills_2|knows_trainer_4, 0x00000001380825d444cb68b92b8d3b1d00000000001dd71e0000000000000000, rhodok_face_old_2],
["knight_6_3", "Emir Atis", "Atis", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_a, itm_mamluke_mail, itm_nomad_boots, itm_sarranid_warrior_cap, itm_shortened_military_scythe, itm_lamellar_gauntlets, itm_tab_shield_small_round_c], knight_attrib_3,wp(190),knight_skills_3, 0x000000002208428579723147247ad4e500000000001f14d40000000000000000, rhodok_face_older_2],
["knight_6_4", "Emir Nuwas", "Nuwas", tf_hero, 0, reserved, fac_kingdom_6, [itm_hunter, itm_sarranid_mail_shirt, itm_sarranid_boots_c, itm_sarranid_mail_coif, itm_sarranid_cavalry_sword, itm_lamellar_gauntlets, itm_lance, itm_tab_shield_small_round_c], knight_attrib_4,wp(220),knight_skills_4, 0x00000009bf084285050caa7d285be51a00000000001d11010000000000000000, rhodok_face_older_2],
["knight_6_5", "Emir Mundhalir", "Mundhalir", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_b, itm_sarranid_cavalry_robe, itm_sarranid_boots_c, itm_sarranid_veiled_helmet, itm_shortened_military_scythe, itm_tab_shield_small_round_c], knight_attrib_5,wp(250),knight_skills_5, 0x000000002a084003330175aae175da9c00000000001e02150000000000000000, rhodok_face_older_2],
["knight_6_6", "Emir Ghanawa", "Ghanawa", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_a, itm_sarranid_elite_armor, itm_sarranid_boots_c, itm_splinted_greaves, itm_sarranid_helmet1, itm_lance, itm_sarranid_cavalry_sword, itm_tab_shield_small_round_c], knight_attrib_1,wp(130),knight_skills_1, 0x00000001830043834733294c89b128e200000000001259510000000000000000, rhodok_face_middle_2],
["knight_6_7", "Emir Nuam", "Nuam", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_b, itm_sarranid_mail_shirt, itm_sarranid_boots_c, itm_sarranid_mail_coif, itm_sarranid_cavalry_sword, itm_lamellar_gauntlets, itm_tab_shield_small_round_c], knight_attrib_2,wp(160),knight_skills_2, 0x0000000cbf10434020504bbbda9135d500000000001f62380000000000000000, rhodok_face_old_2],
["knight_6_8", "Emir Dhiyul", "Dhiyul", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_a, itm_mamluke_mail, itm_sarranid_boots_c, itm_mail_boots, itm_sarranid_helmet1, itm_military_pick, itm_lance, itm_sarranid_cavalry_sword, itm_tab_shield_small_round_c], knight_attrib_3,wp(190),knight_skills_3|knows_trainer_3, 0x0000000190044003336dcd3ca2cacae300000000001f47640000000000000000, rhodok_face_older_2],
["knight_6_9", "Emir Lakhem", "Lakhem", tf_hero, 0, reserved, fac_kingdom_6, [itm_warhorse_sarranid, itm_sarranid_mail_shirt, itm_sarranid_boots_c, itm_sarranid_helmet1, itm_lamellar_gauntlets, itm_lance, itm_tab_shield_small_round_c], knight_attrib_4,wp(220),knight_skills_4|knows_trainer_6, 0x0000000dde0040c4549dd5ca6f4dd56500000000001e291b0000000000000000, rhodok_face_older_2],
["knight_6_10", "Emir Ghulassen", "Ghulassen", tf_hero, 0, reserved, fac_kingdom_6, [itm_warhorse_sarranid, itm_sarranid_cavalry_robe, itm_sarranid_boots_c, itm_sarranid_boots_c, itm_sarranid_helmet1, itm_lamellar_gauntlets, itm_lance, itm_sarranid_cavalry_sword, itm_tab_shield_small_round_c], knight_attrib_5,wp(250),knight_skills_5|knows_trainer_4, 0x00000001a60441c66ce99256b4ad4b3300000000001d392c0000000000000000, rhodok_face_older_2],
["knight_6_11", "Emir Azadun", "Azadun", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_a, itm_sarranid_mail_shirt, itm_sarranid_boots_c, itm_sarranid_boots_c, itm_sarranid_mail_coif, itm_leather_gloves, itm_fighting_pick, itm_tab_shield_small_round_c], knight_attrib_1,wp(130),knight_skills_1, 0x0000000fff08134726c28af8dc96e4da00000000001e541d0000000000000000, rhodok_face_middle_2],
["knight_6_12", "Emir Quryas", "Quryas", tf_hero, 0, reserved, fac_kingdom_6, [itm_courser, itm_mamluke_mail, itm_sarranid_boots_c, itm_mail_boots, itm_sarranid_helmet1, itm_lance, itm_military_pick, itm_tab_shield_small_round_c], knight_attrib_2,wp(160),knight_skills_2|knows_trainer_5, 0x0000000035104084635b74ba5491a7a400000000001e46d60000000000000000, rhodok_face_old_2],
["knight_6_13", "Emir Amdar", "Amdar", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_b, itm_sarranid_mail_shirt, itm_sarranid_boots_c, itm_sarranid_boots_c, itm_sarranid_helmet1, itm_lamellar_gauntlets, itm_sword_two_handed_a, itm_tab_shield_small_round_c], knight_attrib_3,wp(190),knight_skills_3, 0x00000000001021435b734d4ad94eba9400000000001eb8eb0000000000000000, rhodok_face_older_2],
["knight_6_14", "Emir Hiwan", "Hiwan", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_b, itm_sarranid_elite_armor, itm_sarranid_boots_c, itm_sarranid_boots_c, itm_sarranid_mail_coif, itm_lance, itm_sarranid_cavalry_sword, itm_tab_shield_small_round_c], knight_attrib_4,wp(220),knight_skills_4, 0x000000000c0c45c63a5b921ac22db8e200000000001cca530000000000000000, rhodok_face_older_2],
["knight_6_15", "Emir Muhnir", "Muhnir", tf_hero, 0, reserved, fac_kingdom_6, [itm_hunter, itm_sarranid_mail_shirt, itm_sarranid_boots_c, itm_mail_boots, itm_sarranid_helmet1, itm_sword_two_handed_a, itm_tab_shield_small_round_c], knight_attrib_5,wp(250),knight_skills_5, 0x000000001b0c4185369a6938cecde95600000000001f25210000000000000000, rhodok_face_older_2],
["knight_6_16", "Emir Ayyam", "Ayyam", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_a, itm_mamluke_mail, itm_sarranid_boots_c, itm_sarranid_boots_c, itm_sarranid_mail_coif, itm_leather_gloves, itm_lance, itm_fighting_pick, itm_tab_shield_small_round_c], knight_attrib_1,wp(120),knight_skills_1, 0x00000007770841c80a01e1c5eb51ffff00000000001f12d80000000000000000, rhodok_face_middle_2],
["knight_6_17", "Emir Raddoun", "Raddoun", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_b, itm_sarranid_mail_shirt, itm_sarranid_boots_c, itm_sarranid_boots_c, itm_sarranid_mail_coif, itm_leather_gloves, itm_sarranid_cavalry_sword, itm_tab_shield_small_round_c], knight_attrib_2,wp(150),knight_skills_2, 0x000000007f0462c32419f47a1aba8bcf00000000001e7e090000000000000000, rhodok_face_old_2],
["knight_6_18", "Emir Tilimsan", "Tilimsan", tf_hero, 0, reserved, fac_kingdom_6, [itm_arabian_horse_a, itm_sarranid_elite_armor, itm_sarranid_boots_c, itm_mail_boots, itm_sarranid_helmet1, itm_lance, itm_military_pick, itm_tab_shield_small_round_c], knight_attrib_3,wp(180),knight_skills_3, 0x000000003410410070d975caac91aca500000000001c27530000000000000000, rhodok_face_older_2],
["knight_6_19", "Emir Dhashwal", "Dhashwal", tf_hero, 0, reserved, fac_kingdom_6, [itm_warhorse_sarranid, itm_sarranid_mail_shirt, itm_sarranid_boots_c, itm_sarranid_boots_c, itm_sarranid_mail_coif, itm_lamellar_gauntlets, itm_fighting_pick, itm_sword_two_handed_a, itm_tab_shield_small_round_c], knight_attrib_4,wp(210),knight_skills_4|knows_trainer_5, 0x000000018a08618016ac36bc8b6e4a9900000000001dd45d0000000000000000, rhodok_face_older_2],
["knight_6_20", "Emir Biliya", "Biliya", tf_hero, 0, reserved, fac_kingdom_6, [itm_warhorse_sarranid, itm_sarranid_cavalry_robe, itm_sarranid_boots_c, itm_sarranid_boots_c, itm_sarranid_veiled_helmet, itm_lance, itm_sarranid_cavalry_sword, itm_tab_shield_small_round_c], knight_attrib_5,wp(240),knight_skills_5|knows_trainer_6, 0x00000001bd0040c0281a899ac956b94b00000000001ec8910000000000000000, rhodok_face_older_2],
["kingdom_1_pretender", "Lady Isolla of Suno", "Isolla", tf_hero|tf_female|tf_unmoveable_in_party_window, 0,reserved, fac_kingdom_1,[itm_charger, itm_rich_outfit, itm_blue_hose, itm_iron_greaves, itm_mail_shirt, itm_sword_medieval_c_small, itm_tab_shield_small_round_c, itm_bascinet], lord_attrib,wp(220),knight_skills_5, 0x00000000ef00000237dc71b90c31631200000000001e371b0000000000000000],
#Claims pre-Salic descent (her stated justification for the throne)
["kingdom_2_pretender", "Prince Valdym the Bastard", "Valdym", tf_hero|tf_unmoveable_in_party_window, 0,reserved, fac_kingdom_2,[itm_hunter, itm_courtly_outfit, itm_leather_boots, itm_mail_chausses, itm_lamellar_armor, itm_military_pick, itm_tab_shield_heater_b, itm_flat_topped_helmet], lord_attrib,wp(220),knight_skills_5, 0x00000000200412142452ed631b30365c00000000001c94e80000000000000000, vaegir_face_middle_2],
#had his patrimony falsified
["kingdom_3_pretender", "Dustum Khan", "Dustum", tf_hero|tf_unmoveable_in_party_window, 0,reserved, fac_kingdom_3,[itm_courser, itm_nomad_robe, itm_leather_boots, itm_splinted_greaves, itm_khergit_guard_armor, itm_sword_khergit_2, itm_tab_shield_small_round_c, itm_segmented_helmet], lord_attrib,wp(220),knight_skills_5, 0x000000065504310b30d556b51238f66100000000001c256d0000000000000000, khergit_face_middle_2],
#Claims to be a member of the ruling family (basis of his claim)
["kingdom_4_pretender", "Lethwin Far-Seeker", "Lethwin", tf_hero|tf_unmoveable_in_party_window, 0,reserved, fac_kingdom_4,[itm_hunter, itm_tabard, itm_leather_boots, itm_mail_boots, itm_brigandine_red, itm_sword_medieval_c, itm_tab_shield_heater_cav_a, itm_kettle_hat], lord_attrib,wp(220),knight_skills_5, 0x00000004340c01841d89949529a6776a00000000001c910a0000000000000000, nord_face_young_2],
#dispossessed and wronged
["kingdom_5_pretender", "Lord Kastor of Veluca", "Kastor", tf_hero|tf_unmoveable_in_party_window, 0,reserved, fac_kingdom_5,[itm_warhorse, itm_nobleman_outfit, itm_leather_boots, itm_splinted_leather_greaves, itm_mail_hauberk, itm_sword_medieval_c, itm_tab_shield_heater_d, itm_spiked_helmet], lord_attrib,wp(220),knight_skills_5, 0x0000000bed1031051da9abc49ecce25e00000000001e98680000000000000000, rhodok_face_old_2],
#republican
["kingdom_6_pretender", "Arwa the Pearled One", "Arwa", tf_hero|tf_female|tf_unmoveable_in_party_window, 0,reserved, fac_kingdom_6,[itm_arabian_horse_b, itm_sarranid_mail_shirt, itm_sarranid_boots_c, itm_sarranid_cavalry_sword, itm_tab_shield_small_round_c], lord_attrib,wp(220),knight_skills_5, 0x000000050b003004072d51c293a9a70b00000000001dd6a90000000000000000],
## ["kingdom_1_lord_a", "Kingdom 1 Lord A", "Kingdom 1 Lord A", tf_hero, 0,reserved, fac_kingdom_1,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_b", "Kingdom 1 Lord B", "Kingdom 1 Lord B", tf_hero, 0,reserved, fac_kingdom_2,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_c", "Kingdom 1 Lord C", "Kingdom 1 Lord C", tf_hero, 0,reserved, fac_kingdom_3,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_d", "Kingdom 1 Lord D", "Kingdom 1 Lord D", tf_hero, 0,reserved, fac_kingdom_1,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_e", "Kingdom 1 Lord E", "Kingdom 1 Lord E", tf_hero, 0,reserved, fac_kingdom_1,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_f", "Kingdom 1 Lord F", "Kingdom 1 Lord F", tf_hero, 0,reserved, fac_kingdom_1,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_g", "Kingdom 1 Lord G", "Kingdom 1 Lord G", tf_hero, 0,reserved, fac_kingdom_1,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_h", "Kingdom 1 Lord H", "Kingdom 1 Lord H", tf_hero, 0,reserved, fac_kingdom_2,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_i", "Kingdom 1 Lord I", "Kingdom 1 Lord I", tf_hero, 0,reserved, fac_kingdom_2,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_j", "Kingdom 1 Lord J", "Kingdom 1 Lord J", tf_hero, 0,reserved, fac_kingdom_2,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_k", "Kingdom 1 Lord K", "Kingdom 1 Lord K", tf_hero, 0,reserved, fac_kingdom_2,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_l", "Kingdom 1 Lord L", "Kingdom 1 Lord L", tf_hero, 0,reserved, fac_kingdom_3,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_m", "Kingdom 1 Lord M", "Kingdom 1 Lord M", tf_hero, 0,reserved, fac_kingdom_3,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
## ["kingdom_1_lord_n", "Kingdom 1 Lord N", "Kingdom 1 Lord N", tf_hero, 0,reserved, fac_kingdom_3,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x00000000000c710201fa51b7286db721],
# ["town_1_ruler_a", "King Harlaus", "King Harlaus", tf_hero, scn_town_1_castle|entry(9),reserved, fac_swadians,[itm_saddle_horse,itm_courtly_outfit,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x000000000010908101e36db44b75b6dd],
# ["town_2_ruler_a", "Duke Taugard", "Duke Taugard", tf_hero, scn_town_2_castle|entry(9),reserved, fac_swadians,[itm_saddle_horse,itm_courtly_outfit,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x000000000000310401e06db86375f6da],
# ["town_3_ruler_a", "Count Grimar", "Count Grimar", tf_hero, scn_town_3_castle|entry(9),reserved, fac_swadians,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x000000000004430301e46136eb75bc0a],
# ["town_4_ruler_a", "Count Haxalye", "Count Haxalye", tf_hero, scn_town_4_castle|entry(9),reserved, fac_swadians,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x000000000010918701e77136e905bc0e
# ["town_5_ruler_a", "Count Belicha", "Count Belicha", tf_hero, scn_town_5_castle|entry(9),reserved, fac_swadians,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x00000000000421c801e7713729c5b8ce],
# ["town_6_ruler_a", "Count Nourbis", "Count Nourbis", tf_hero, scn_town_6_castle|entry(9),reserved, fac_swadians,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x00000000000c640501e371b72bcdb724],
# ["town_7_ruler_a", "Count Rhudolg", "Count Rhudolg", tf_hero, scn_town_7_castle|entry(9),reserved, fac_swadians,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x00000000000c710201fa51b7286db721],
# ["town_8_ruler_b", "King Yaroglek", "King_yaroglek", tf_hero, scn_town_8_castle|entry(9),reserved, fac_vaegirs,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x000000000000128801f294ca6d66d555],
# ["town_9_ruler_b", "Count Aolbrug", "Count_Aolbrug", tf_hero, scn_town_9_castle|entry(9),reserved, fac_vaegirs,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x000000000004234401f26a271c8d38ea],
# ["town_10_ruler_b","Count Rasevas", "Count_Rasevas", tf_hero, scn_town_10_castle|entry(9),reserved, fac_vaegirs,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x00000000001032c201f38e269372471c],
# ["town_11_ruler_b","Count Leomir", "Count_Leomir", tf_hero, scn_town_11_castle|entry(9),reserved, fac_vaegirs,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x00000000000c538001f55148936d3895],
# ["town_12_ruler_b","Count Haelbrad","Count_Haelbrad",tf_hero, scn_town_12_castle|entry(9),reserved, fac_vaegirs,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x00000000000410c701f38598ac8aaaab],
# ["town_13_ruler_b","Count Mira", "Count_Mira", tf_hero, scn_town_13_castle|entry(9),reserved, fac_vaegirs,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x000000000004204401f390c515555594],
# ["town_14_ruler_b","Count Camechaw","Count_Camechaw",tf_hero, scn_town_14_castle|entry(9),reserved, fac_vaegirs,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_a", "Kingdom 2 Lord A", "Kingdom 2 Lord A", tf_hero, 0,reserved, fac_kingdom_10,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_b", "Kingdom 2 Lord B", "Kingdom 2 Lord B", tf_hero, 0,reserved, fac_kingdom_11,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_c", "Kingdom 2 Lord C", "Kingdom 2 Lord C", tf_hero, 0,reserved, fac_kingdom_12,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_d", "Kingdom 2 Lord D", "Kingdom 2 Lord D", tf_hero, 0,reserved, fac_kingdom_10,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_e", "Kingdom 2 Lord E", "Kingdom 2 Lord E", tf_hero, 0,reserved, fac_kingdom_10,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_f", "Kingdom 2 Lord F", "Kingdom 2 Lord F", tf_hero, 0,reserved, fac_kingdom_10,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_g", "Kingdom 2 Lord G", "Kingdom 2 Lord G", tf_hero, 0,reserved, fac_kingdom_10,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_h", "Kingdom 2 Lord H", "Kingdom 2 Lord H", tf_hero, 0,reserved, fac_kingdom_11,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_i", "Kingdom 2 Lord I", "Kingdom 2 Lord I", tf_hero, 0,reserved, fac_kingdom_11,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_j", "Kingdom 2 Lord J", "Kingdom 2 Lord J", tf_hero, 0,reserved, fac_kingdom_11,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_k", "Kingdom 2 Lord K", "Kingdom 2 Lord K", tf_hero, 0,reserved, fac_kingdom_10,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_l", "Kingdom 2 Lord L", "Kingdom 2 Lord L", tf_hero, 0,reserved, fac_kingdom_12,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_m", "Kingdom 2 Lord M", "Kingdom 2 Lord M", tf_hero, 0,reserved, fac_kingdom_12,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
## ["kingdom_2_lord_n", "Kingdom 2 Lord N", "Kingdom 2 Lord N", tf_hero, 0,reserved, fac_kingdom_12,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots,itm_coat_of_plates],lord_attrib|level(38),wp(220),knows_common, 0x000000000008318101f390c515555594],
#Royal family members
["knight_1_1_wife","Error - knight_1_1_wife should not appear in game","knight_1_1_wife",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_commoners, [itm_lady_dress_ruby , itm_turret_hat_ruby, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000055910200107632d675a92b92d00000000001e45620000000000000000],
#Swadian ladies - eight mothers, eight daughters, four sisters
["kingdom_1_lady_1","Lady Anna","Anna",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000055910200107632d675a92b92d00000000001e45620000000000000000],
["kingdom_1_lady_2","Lady Nelda","Nelda",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000054f08100232636aa90d6e194b00000000001e43130000000000000000],
["knight_1_lady_3","Lady Bela","Bela",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000018f0410064854c742db74b52200000000001d448b0000000000000000],
["knight_1_lady_4","Lady Elina","Elina",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000204200629b131e90d6a8ae400000000001e28dd0000000000000000],
["kingdom_l_lady_5","Lady Constanis","Constanis",tf_hero|tf_randomize_face|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1, swadian_woman_face_2],
["kingdom_1_lady_6","Lady Vera","Vera",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000d0820011693b142ca6a271a00000000001db6920000000000000000],
["kingdom_1_lady_7","Lady Auberina","Auberina",tf_hero|tf_randomize_face|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1, swadian_woman_face_2],
["kingdom_1_lady_8","Lady Tibal","Tibal",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000001900000542ac4e76d5d0d35300000000001e26a40000000000000000],
["kingdom_1_lady_9","Lady Magar","Magar",tf_hero|tf_randomize_face|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1, swadian_woman_face_2],
["kingdom_1_lady_10","Lady Thedosa","Thedosa",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000003a00200646a129464baaa6db00000000001de7a00000000000000000],
["kingdom_1_lady_11","Lady Melisar","Melisar",tf_hero|tf_randomize_face|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1, swadian_woman_face_2],
["kingdom_1_lady_12","Lady Irena","Irena",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000003f04100148d245d6526d456b00000000001e3b350000000000000000],
["kingdom_l_lady_13","Lady Philenna","Philenna",tf_hero|tf_randomize_face|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1, swadian_woman_face_2],
["kingdom_1_lady_14","Lady Sonadel","Sonadel",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000003a0c3003358a56d51c8e399400000000000944dc0000000000000000],
["kingdom_1_lady_15","Lady Boadila","Boadila",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1, swadian_woman_face_2],
["kingdom_1_lady_16","Lady Elys","Elys",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000003b080003531e8932e432bb5a000000000008db6a0000000000000000],
["kingdom_1_lady_17","Lady Johana","Johana",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000000c000446e4b4c2cc5234d200000000001ea3120000000000000000],
["kingdom_1_lady_18","Lady Bernatys","Bernatys",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x0000000000083006465800000901161200000000001e38cc0000000000000000],
["kingdom_1_lady_19","Lady Enricata","Enricata",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1],
["kingdom_1_lady_20","Lady Gaeta","Gaeta",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_1, [itm_lady_dress_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_2],
#Vaegir ladies
["kingdom_2_lady_1","Lady Junitha","Junitha",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000007c0101002588caf17142ab93d00000000001ddfa40000000000000000],
["kingdom_2_lady_2","Lady Katia","Katia",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000008c00c20032aa5ae36b4259b9300000000001da6a50000000000000000],
["kingdom_2_lady_3","Lady Seomis","Seomis",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x0000000007080004782a6cc4ecae4d1e00000000001eb6e30000000000000000],
["kingdom_2_lady_4","Lady Drina","Drina",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000054008200638db99d89eccbd3500000000001ec91d0000000000000000],
["kingdom_2_lady_5","Lady Nesha","Nesha",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000007c0101002588caf17142ab93d00000000001ddfa40000000000000000],
["kingdom_2_lady_6","Lady Tabath","Tabath",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000008c00c20032aa5ae36b4259b9300000000001da6a50000000000000000],
["kingdom_2_lady_7","Lady Pelaeka","Pelaeka",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x0000000007080004782a6cc4ecae4d1e00000000001eb6e30000000000000000],
["kingdom_2_lady_8","Lady Haris","Haris",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000054008200638db99d89eccbd3500000000001ec91d0000000000000000],
["kingdom_2_lady_9","Lady Vayen","Vayen",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000007c0101002588caf17142ab93d00000000001ddfa40000000000000000],
["kingdom_2_lady_10","Lady Joaka","Joaka",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000008c00c20032aa5ae36b4259b9300000000001da6a50000000000000000],
["kingdom_2_lady_11","Lady Tejina","Tejina",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x0000000007080004782a6cc4ecae4d1e00000000001eb6e30000000000000000],
["kingdom_2_lady_12","Lady Olekseia","Olekseia",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000054008200638db99d89eccbd3500000000001ec91d0000000000000000],
["kingdom_2_lady_13","Lady Myntha","Myntha",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000007c0101002588caf17142ab93d00000000001ddfa40000000000000000],
["kingdom_2_lady_14","Lady Akilina","Akilina",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000008c00c20032aa5ae36b4259b9300000000001da6a50000000000000000],
["kingdom_2_lady_15","Lady Sepana","Sepana",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x0000000007080004782a6cc4ecae4d1e00000000001eb6e30000000000000000],
["kingdom_2_lady_16","Lady Iarina","Iarina",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000054008200638db99d89eccbd3500000000001ec91d0000000000000000],
["kingdom_2_lady_17","Lady Sihavan","Sihavan",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000007c0101002588caf17142ab93d00000000001ddfa40000000000000000],
["kingdom_2_lady_18","Lady Erenchina","Erenchina",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000008c00c20032aa5ae36b4259b9300000000001da6a50000000000000000],
["kingdom_2_lady_19","Lady Tamar","Tamar",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [ itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x0000000007080004782a6cc4ecae4d1e00000000001eb6e30000000000000000],
["kingdom_2_lady_20","Lady Valka","Valka",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_2, [itm_green_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000054008200638db99d89eccbd3500000000001ec91d0000000000000000],
["kingdom_3_lady_1","Lady Borge","Borge",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, khergit_woman_face_1],
["kingdom_3_lady_2","Lady Tuan","Tuan",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_green_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000008ec0820062ce4d246b38e632e00000000001d52910000000000000000],
["kingdom_3_lady_3","Lady Mahraz","Mahraz",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [itm_red_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, khergit_woman_face_2],
["kingdom_3_lady_4","Lady Ayasu","Ayasu",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_red_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000002a0c200348a28f2a54aa391c00000000001e46d10000000000000000],
["kingdom_3_lady_5","Lady Ravin","Ravin",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_green_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000056e082002471c91c8aa2a130b00000000001d48a40000000000000000],
["kingdom_3_lady_6","Lady Ruha","Ruha",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_green_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000056e082002471c91c8aa2a130b00000000001d48a40000000000000000],
["kingdom_3_lady_7","Lady Chedina","Chedina",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000320c30023ce23a145a8f27a300000000001ea6dc0000000000000000],
["kingdom_3_lady_8","Lady Kefra","Kefra",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000320c30023ce23a145a8f27a300000000001ea6dc0000000000000000],
["kingdom_3_lady_9","Lady Nirvaz","Nirvaz",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000001940c3006019c925165d1129b00000000001d13240000000000000000],
["kingdom_3_lady_10","Lady Dulua","Dulua",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000008ec0820062ce4d246b38e632e00000000001d52910000000000000000],
["kingdom_3_lady_11","Lady Selik","Selik",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000019b083005389591941379b8d100000000001e63150000000000000000],
["kingdom_3_lady_12","Lady Thalatha","Thalatha",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000002a0c200348a28f2a54aa391c00000000001e46d10000000000000000],
["kingdom_3_lady_13","Lady Yasreen","Yasreen",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000056e082002471c91c8aa2a130b00000000001d48a40000000000000000],
["kingdom_3_lady_14","Lady Nadha","Nadha",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, khergit_woman_face_1],
["kingdom_3_lady_15","Lady Zenur","Zenur",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, khergit_woman_face_2],
["kingdom_3_lady_16","Lady Arjis","Zenur",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000001ad003001628c54b05d2e48b200000000001d56e60000000000000000],
["kingdom_3_lady_17","Lady Atjahan", "Atjahan",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000001a700300265cb6db15d6db6da00000000001f82180000000000000000],
["kingdom_3_lady_18","Lady Qutala","Qutala",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000008ec0820062ce4d246b38e632e00000000001d52910000000000000000],
["kingdom_3_lady_19","Lady Hindal","Hindal",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000320c30023ce23a145a8f27a300000000001ea6dc0000000000000000],
["kingdom_3_lady_20","Lady Mechet","Mechet",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_3, [ itm_brown_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000002a0c200348a28f2a54aa391c00000000001e46d10000000000000000],
["kingdom_4_lady_1","Lady Jadeth","Jadeth",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_court_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000054b100003274d65d2d239eb1300000000001d49080000000000000000],
["kingdom_4_lady_2","Lady Miar","Miar",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_court_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000058610000664d3693664f0c54b00000000001d332d0000000000000000],
["kingdom_4_lady_3","Lady Dria","Dria",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_peasant_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000000c000469a4d5cda4b1349c00000000001cd6600000000000000000],
["kingdom_4_lady_4","Lady Glunde","Glunde",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_peasant_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000000000021564d196e2aa279400000000001dc4ed0000000000000000],
["kingdom_4_lady_5","Lady Loeka","Loeka",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_court_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000054b100003274d65d2d239eb1300000000001d49080000000000000000],
["kingdom_4_lady_6","Lady Bryn","Bryn",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_court_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000058610000664d3693664f0c54b00000000001d332d0000000000000000],
["kingdom_4_lady_7","Lady Eir","Eir",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_peasant_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000000c000469a4d5cda4b1349c00000000001cd6600000000000000000],
["knight_4_2b_daughter_1","Lady Thera","Thera",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_peasant_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000000000021564d196e2aa279400000000001dc4ed0000000000000000],
["kingdom_4_lady_9","Lady Hild","Hild",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_court_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000054b100003274d65d2d239eb1300000000001d49080000000000000000],
["knight_4_2c_wife_1","Lady Endegrid","Endegrid",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_court_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000058610000664d3693664f0c54b00000000001d332d0000000000000000],
["kingdom_4_lady_11","Lady Herjasa","Herjasa",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_peasant_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000000c000469a4d5cda4b1349c00000000001cd6600000000000000000],
["knight_4_2c_daughter","Lady Svipul","Svipul",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_peasant_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000000000021564d196e2aa279400000000001dc4ed0000000000000000],
["knight_4_1b_wife","Lady Ingunn","Ingunn",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_court_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000054b100003274d65d2d239eb1300000000001d49080000000000000000],
["kingdom_4_lady_14","Lady Kaeteli","Kaeteli",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_court_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000058610000664d3693664f0c54b00000000001d332d0000000000000000],
["knight_4_1b_daughter","Lady Eilif","Eilif",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_peasant_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000000c000469a4d5cda4b1349c00000000001cd6600000000000000000],
["knight_4_2b_daughter_2","Lady Gudrun","Gudrun",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_peasant_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000000000021564d196e2aa279400000000001dc4ed0000000000000000],
["kingdom_4_lady_17","Lady Bergit","Bergit",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_court_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000054b100003274d65d2d239eb1300000000001d49080000000000000000],
["knight_4_2c_wife_2","Lady Aesa","Aesa",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_court_dress , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000058610000664d3693664f0c54b00000000001d332d0000000000000000],
["knight_4_1c_daughter","Lady Alfrun","Alfrun",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_peasant_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000000c000469a4d5cda4b1349c00000000001cd6600000000000000000],
["kingdom_4_lady_20","Lady Afrid","Afrid",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_4, [ itm_peasant_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000000000021564d196e2aa279400000000001dc4ed0000000000000000],
["kingdom_5_lady_1","Lady Brina","Brina",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000007e900200416ed96e88b8d595a00000000001cb8ac0000000000000000],
["kingdom_5_lady_2","Lady Aliena","Aliena",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000057008200222d432cf6d4a2ae300000000001d37a10000000000000000],
["kingdom_5_lady_3","Lady Aneth","Aneth",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_ruby , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000001b9002002364dd8aa5475d76400000000001db8d30000000000000000],
["kingdom_5_lady_4","Lady Reada","Reada",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_ruby , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000057a0000014123dae69e8e48e200000000001e08db0000000000000000],
["kingdom_5_5_wife","Lady Saraten","Saraten",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1],
["kingdom_5_2b_wife_1","Lady Baotheia","Baotheia",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [itm_lady_dress_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000bf0400035913aa236b4d975a00000000001eb69c0000000000000000],
["kingdom_5_1c_daughter_1","Lady Eleandra","Eleandra",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_ruby , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000001b9002002364dd8aa5475d76400000000001db8d30000000000000000],
["kingdom_5_2c_daughter_1","Lady Meraced","Meraced",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_ruby , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000057a0000014123dae69e8e48e200000000001e08db0000000000000000],
["kingdom_5_1c_wife_1","Lady Adelisa","Adelisa",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000007e900200416ed96e88b8d595a00000000001cb8ac0000000000000000],
["kingdom_5_2c_wife_1","Lady Calantina","Calantina",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000057008200222d432cf6d4a2ae300000000001d37a10000000000000000],
["kingdom_5_1c_daughter_2","Lady Forbesa","Forbesa",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_ruby , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000001b9002002364dd8aa5475d76400000000001db8d30000000000000000],
["kingdom_5_2c_daughter_2","Lady Claudora","Claudora",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_ruby , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000057a0000014123dae69e8e48e200000000001e08db0000000000000000],
["kingdom_5_1b_wife","Lady Anais","Anais",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000007e900200416ed96e88b8d595a00000000001cb8ac0000000000000000],
["kingdom_5_2b_wife_2","Lady Miraeia","Miraeia",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000057008200222d432cf6d4a2ae300000000001d37a10000000000000000],
["kingdom_5_1c_daughter_3","Lady Agasia","Agasia",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_ruby , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000001b9002002364dd8aa5475d76400000000001db8d30000000000000000],
["kingdom_5_lady_16","Lady Geneiava","Geneiava",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_ruby , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000057a0000014123dae69e8e48e200000000001e08db0000000000000000],
["kingdom_5_1c_wife_2","Lady Gwenael","Gwenael",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000007e900200416ed96e88b8d595a00000000001cb8ac0000000000000000],
["kingdom_5_2c_wife_2","Lady Ysueth","Ysueth",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000057008200222d432cf6d4a2ae300000000001d37a10000000000000000],
["kingdom_5_1c_daughter_4","Lady Ellian","Ellian",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_ruby , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000001b9002002364dd8aa5475d76400000000001db8d30000000000000000],
["kingdom_5_lady_20","Lady Timethi","Timethi",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_5, [ itm_lady_dress_ruby , itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000057a0000014123dae69e8e48e200000000001e08db0000000000000000],
#Sarranid ladies
["kingdom_6_lady_1","Lady Rayma","Rayma",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress, itm_sarranid_head_cloth, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000055910200107632d675a92b92d00000000001e45620000000000000000],
["kingdom_6_lady_2","Lady Thanaikha","Thanaikha",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress_b, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000054f08100232636aa90d6e194b00000000001e43130000000000000000],
["kingdom_6_lady_3","Lady Sulaha","Sulaha",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000018f0410064854c742db74b52200000000001d448b0000000000000000],
["kingdom_6_lady_4","Lady Shatha","Shatha",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000204200629b131e90d6a8ae400000000001e28dd0000000000000000],
["kingdom_6_lady_5","Lady Bawthan","Bawthan",tf_hero|tf_randomize_face|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1, swadian_woman_face_2],
["kingdom_6_lady_6","Lady Mahayl","Mahayl",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress_b, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000d0820011693b142ca6a271a00000000001db6920000000000000000],
["kingdom_6_lady_7","Lady Isna","Isna",tf_hero|tf_randomize_face|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1, swadian_woman_face_2],
["kingdom_6_lady_8","Lady Siyafan","Siyafan",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress_b, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000001900000542ac4e76d5d0d35300000000001e26a40000000000000000],
["kingdom_6_lady_9","Lady Ifar","Ifar",tf_hero|tf_randomize_face|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress_b, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1, swadian_woman_face_2],
["kingdom_6_lady_10","Lady Yasmin","Yasmin",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000003a00200646a129464baaa6db00000000001de7a00000000000000000],
["kingdom_6_lady_11","Lady Dula","Dula",tf_hero|tf_randomize_face|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1, swadian_woman_face_2],
["kingdom_6_lady_12","Lady Ruwa","Ruwa",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress_b, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000003f04100148d245d6526d456b00000000001e3b350000000000000000],
["kingdom_6_lady_13","Lady Luqa","Luqa",tf_hero|tf_randomize_face|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress_b, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1, swadian_woman_face_2],
["kingdom_6_lady_14","Lady Zandina","Zandina",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000003a0c3003358a56d51c8e399400000000000944dc0000000000000000],
["kingdom_6_lady_15","Lady Lulya","Lulya",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress_b, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1, swadian_woman_face_2],
["kingdom_6_lady_16","Lady Zahara","Zahara",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000003b080003531e8932e432bb5a000000000008db6a0000000000000000],
["kingdom_6_lady_17","Lady Safiya","Safiya",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress_b, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x00000000000c000446e4b4c2cc5234d200000000001ea3120000000000000000],
["kingdom_6_lady_18","Lady Khalisa","Khalisa",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x0000000000083006465800000901161200000000001e38cc0000000000000000],
["kingdom_6_lady_19","Lady Janab","Janab",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress_b, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_1],
["kingdom_6_lady_20","Lady Sur","Sur",tf_hero|tf_female|tf_unmoveable_in_party_window,0,reserved,fac_kingdom_6, [itm_sarranid_lady_dress, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, swadian_woman_face_2],
# ["kingdom_11_lord_daughter","kingdom_11_lord_daughter","kingdom_11_lord_daughter",tf_hero|tf_female,0,reserved,fac_kingdom_10, [ itm_lady_dress_blue , itm_turret_hat_blue, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000008300701c08d34a450ce43],
# ["kingdom_13_lord_daughter","kingdom_13_lord_daughter","kingdom_13_lord_daughter",tf_hero|tf_female,0,reserved,fac_kingdom_10, [ itm_lady_dress_green, itm_turret_hat_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000008000401db10a45b41d6d8],
## ["kingdom_1_lady_a","kingdom_1_lady_a","kingdom_1_lady_a",tf_hero|tf_female,0,reserved,fac_kingdom_1, [ itm_lady_dress_blue , itm_turret_hat_blue, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000008500201d8ad93708e4694],
## ["kingdom_1_lady_b","kingdom_1_lady_b","kingdom_1_lady_b",tf_hero|tf_female,0,reserved,fac_kingdom_1, [ itm_lady_dress_ruby , itm_turret_hat_ruby, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000004000101c3ae68e0e944ac],
## ["kingdom_2_lady_a","Kingdom 2 Lady a","Kingdom 2 Lady a",tf_hero|tf_female,0,reserved,fac_kingdom_2, [ itm_lady_dress_green, itm_turret_hat_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000008100501d8ad93708e4694],
## ["kingdom_2_lady_b","Kingdom 2 Lady b","Kingdom 2 Lady b",tf_hero|tf_female,0,reserved,fac_kingdom_2, [ itm_lady_dress_blue , itm_turret_hat_blue, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000004000401d8ad93708e4694],
## ["kingdom_3_lady_a","Kingdom 3 Lady a","Kingdom 3 Lady a",tf_hero|tf_female,0,reserved,fac_kingdom_3, [ itm_lady_dress_ruby , itm_turret_hat_ruby, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000010500301d8ad93708e4694],
##
## ["kingdom_3_lady_b","Kingdom 3 Lady b","Kingdom 3 Lady b",tf_hero|tf_female,0,reserved,fac_kingdom_3, [ itm_lady_dress_ruby , itm_turret_hat_ruby, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000000100601d8b08d76d14a24],
## ["kingdom_4_lady_a","Kingdom 4 Lady a","Kingdom 4 Lady a",tf_hero|tf_female,0,reserved,fac_kingdom_4, [ itm_lady_dress_green, itm_turret_hat_green, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000010500601d8ad93708e4694],
## ["kingdom_4_lady_b","Kingdom 4 Lady b","Kingdom 4 Lady b",tf_hero|tf_female,0,reserved,fac_kingdom_4, [ itm_lady_dress_blue , itm_turret_hat_blue, itm_leather_boots], def_attrib|level(2),wp(50),knows_common|knows_riding_2, 0x000000000008500201d8ad93708e4694],
["heroes_end", "{!}heroes end", "{!}heroes end", tf_hero, 0,reserved, fac_neutral,[itm_saddle_horse,itm_leather_jacket,itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, 0x000000000008318101f390c515555594],
#Merchants — equipment column order: horse, weapon, armor, boots, head_wear
## ["merchant_1", "merchant_1_F", "merchant_1_F",tf_hero|tf_female, 0,0, fac_kingdom_1,[itm_courser, itm_fighting_axe, itm_leather_jerkin, itm_leather_boots, itm_straw_hat], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000008200201e54c137a940c91],
## ["merchant_2", "merchant_2", "merchant_2", tf_hero, 0,0, fac_kingdom_2,[itm_saddle_horse, itm_arming_sword, itm_light_leather, itm_woolen_hose, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000000000601db6db6db6db6db],
## ["merchant_3", "merchant_3", "merchant_3", tf_hero, 0,0, fac_kingdom_3,[itm_courser, itm_nordic_sword, itm_leather_jerkin, itm_woolen_hose, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000008100701db6db6db6db6db],
## ["merchant_4", "merchant_4_F", "merchant_4_F",tf_hero|tf_female, 0,0, fac_kingdom_4,[itm_saddle_horse, itm_falchion, itm_light_leather, itm_blue_hose, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000010500401e54c137a945c91],
## ["merchant_5", "merchant_5", "merchant_5", tf_hero, 0,0, fac_kingdom_5,[itm_saddle_horse, itm_sword, itm_ragged_outfit, itm_hide_boots, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000008038001e54c135a945c91],
## ["merchant_6", "merchant_6", "merchant_6", tf_hero, 0,0, fac_kingdom_1,[itm_saddle_horse, itm_scimitar, itm_leather_jerkin, itm_leather_boots, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000000248e01e54c1b5a945c91],
## ["merchant_7", "merchant_7_F", "merchant_7_F",tf_hero|tf_female, 0,0, fac_kingdom_2,[itm_hunter, itm_arming_sword, itm_padded_leather, itm_blue_hose, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000004200601c98ad39c97557a],
## ["merchant_8", "merchant_8", "merchant_8", tf_hero, 0,0, fac_kingdom_3,[itm_saddle_horse, itm_nordic_sword, itm_light_leather, itm_leather_boots, itm_woolen_hood], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x00000000001095ce01d6aad3a497557a],
## ["merchant_9", "merchant_9", "merchant_9", tf_hero, 0,0, fac_kingdom_4,[itm_saddle_horse, itm_sword, itm_padded_leather, itm_hide_boots, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000010519601ec26ae99898697],
## ["merchant_10","merchant_10","merchant_10",tf_hero, 0,0, fac_merchants,[itm_hunter, itm_bastard_sword, itm_light_leather, itm_woolen_hose, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x00000000000884c401f6837d3294e28a],
## ["merchant_11","merchant_11","merchant_11",tf_hero, 0,0, fac_merchants,[itm_saddle_horse, itm_sword, itm_leather_jacket, itm_woolen_hose, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x00000000000c450501e289dd2c692694],
## ["merchant_12","merchant_12","merchant_12",tf_hero, 0,0, fac_merchants,[itm_hunter, itm_falchion, itm_leather_jerkin, itm_hide_boots, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x00000000000c660a01e5af3cb2763401],
## ["merchant_13","merchant_13","merchant_13",tf_hero, 0,0, fac_merchants,[itm_sumpter_horse, itm_nordic_sword, itm_padded_leather, itm_leather_boots, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x00000000001001d601ec912a89e4d534],
## ["merchant_14","merchant_14","merchant_14",tf_hero, 0,0, fac_merchants,[itm_courser, itm_bastard_sword, itm_light_leather, itm_hide_boots, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000004335601ea2c04a8b6a394],
## ["merchant_15","merchant_15","merchant_15",tf_hero, 0,0, fac_merchants,[itm_saddle_horse, itm_sword, itm_padded_leather, itm_woolen_hose, itm_fur_hat], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000008358e01dbf27b6436089d],
## ["merchant_16","merchant_16_F","merchant_16_F",tf_hero|tf_female, 0,0, fac_merchants,[itm_hunter, itm_bastard_sword, itm_light_leather, itm_hide_boots, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x00000000000c300101db0b9921494add],
## ["merchant_17","merchant_17","merchant_17",tf_hero, 0,0, fac_merchants,[itm_saddle_horse, itm_sword, itm_leather_jacket, itm_blue_hose, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000008740f01e945c360976a0a],
## ["merchant_18","merchant_18","merchant_18",tf_hero, 0,0, fac_merchants,[itm_saddle_horse, itm_nordic_sword, itm_padded_leather, itm_leather_boots, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000008020c01fc2db3b4c97685],
## ["merchant_19","merchant_19","merchant_19",tf_hero, 0,0, fac_merchants,[itm_saddle_horse, itm_falchion, itm_leather_jerkin, itm_woolen_hose, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000008118301f02af91892725b],
## ["merchant_20","merchant_20_F","merchant_20_F",tf_hero|tf_female, 0,0, fac_merchants,[itm_courser, itm_arming_sword, itm_padded_leather, itm_leather_boots, ], def_attrib|level(15),wp(100),knows_inventory_management_10, 0x000000000010500401f6837d27688212],
#Seneschals
["town_1_seneschal", "{!}Town 1 Seneschal", "{!}Town 1 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_coarse_tunic, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c218501ef4f5d2ccb0026],
["town_2_seneschal", "{!}Town 2 Seneschal", "{!}Town 2 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000c03cc01cc34a9a467fdfd],
["town_3_seneschal", "{!}Town 3 Seneschal", "{!}Town 3 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_coarse_tunic, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c500e01dbb2115a55f3cd],
["town_4_seneschal", "{!}Town 4 Seneschal", "{!}Town 4 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000008035201e6eebaf3f3eb2b],
["town_5_seneschal", "{!}Town 5 Seneschal", "{!}Town 5 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_jerkin, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000000249101e7898999ac54c6],
["town_6_seneschal", "{!}Town 6 Seneschal", "{!}Town 6 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_red_gambeson, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common, 0x000000000010360b01cef8b57553d34e],
["town_7_seneschal", "{!}Town 7 Seneschal", "{!}Town7 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_jerkin, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000000018101f9487aa831dce4],
["town_8_seneschal", "{!}Town 8 Seneschal", "{!}Town 8 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_red_gambeson, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common, 0x000000000004715201ea236c60a2bcae],
["town_9_seneschal", "{!}Town 9 Seneschal", "{!}Town 9 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_coarse_tunic, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["town_10_seneschal", "{!}Town 10 Seneschal", "{!}Town 10 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_jerkin, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000010230c01ef41badb50465e],
["town_11_seneschal", "{!}Town 11 Seneschal", "{!}Town 11 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_jacket, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common, 0x000000000008061301fb89acfb95332f],
["town_12_seneschal", "{!}Town 12 Seneschal", "{!}Town 12 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_coarse_tunic, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c13ce01dc4723ab936c82],
["town_13_seneschal", "{!}Town 13 Seneschal", "{!}Town 13 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_jerkin, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000008548e01d952a9b25d6d5a],
["town_14_seneschal", "{!}Town 14 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004728b01c293c694944b05],
["town_15_seneschal", "{!}Town 15 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004728b01c293c694944b05],
["town_16_seneschal", "{!}Town 16 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004728b01c293c694944b05],
["town_17_seneschal", "{!}Town 17 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004728b01c293c694944b05],
["town_18_seneschal", "{!}Town 18 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004728b01c293c694944b05],
["town_19_seneschal", "{!}Town 19 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004728b01c293c694944b05],
["town_20_seneschal", "{!}Town 20 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004728b01c293c694944b05],
["town_21_seneschal", "{!}Town 21 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004728b01c293c694944b05],
["town_22_seneschal", "{!}Town 22 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004728b01c293c694944b05],
["castle_1_seneschal", "{!}Castle 1 Seneschal", "{!}Castle 1 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_coarse_tunic, itm_hide_boots], def_attrib|level(2),wp(20),knows_common, 0x000000000010360b01cef8b57553d34e],
["castle_2_seneschal", "{!}Castle 2 Seneschal", "{!}Castle 2 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_nomad_armor, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000008061301fb89acfb95332f],
["castle_3_seneschal", "{!}Castle 3 Seneschal", "{!}Castle 3 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x000000000008548e01d952a9b25d6d5a],
["castle_4_seneschal", "{!}Castle 4 Seneschal", "{!}Castle 4 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_linen_tunic, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004715201ea236c60a2bcae],
["castle_5_seneschal", "{!}Castle 5 Seneschal", "{!}Castle 5 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_leather_jerkin, itm_hide_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c500e01dbb2115a55f3cd],
["castle_6_seneschal", "{!}Castle 6 Seneschal", "{!}Castle 6 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_coarse_tunic, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c03cc01cc34a9a467fdfd],
["castle_7_seneschal", "{!}Castle 7 Seneschal", "{!}Castle 7 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000c13ce01dc4723ab936c82],
["castle_8_seneschal", "{!}Castle 8 Seneschal", "{!}Castle 8 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_hide_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c218501ef4f5d2ccb0026],
["castle_9_seneschal", "{!}Castle 9 Seneschal", "{!}Castle 9 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_leather_jacket, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x000000000008035201e6eebaf3f3eb2b],
["castle_10_seneschal", "{!}Castle 10 Seneschal", "{!}Castle 10 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_11_seneschal", "{!}Castle 11 Seneschal", "{!}Castle 11 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_12_seneschal", "{!}Castle 2 Seneschal", "{!}Castle 2 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_nomad_armor, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000008061301fb89acfb95332f],
["castle_13_seneschal", "{!}Castle 3 Seneschal", "{!}Castle 3 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x000000000008548e01d952a9b25d6d5a],
["castle_14_seneschal", "{!}Castle 4 Seneschal", "{!}Castle 4 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_linen_tunic, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004715201ea236c60a2bcae],
["castle_15_seneschal", "{!}Castle 5 Seneschal", "{!}Castle 5 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_leather_jerkin, itm_hide_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c500e01dbb2115a55f3cd],
["castle_16_seneschal", "{!}Castle 6 Seneschal", "{!}Castle 6 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_coarse_tunic, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c03cc01cc34a9a467fdfd],
["castle_17_seneschal", "{!}Castle 7 Seneschal", "{!}Castle 7 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000c13ce01dc4723ab936c82],
["castle_18_seneschal", "{!}Castle 8 Seneschal", "{!}Castle 8 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_hide_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c218501ef4f5d2ccb0026],
["castle_19_seneschal", "{!}Castle 9 Seneschal", "{!}Castle 9 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_leather_jacket, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x000000000008035201e6eebaf3f3eb2b],
["castle_20_seneschal", "{!}Castle 20 Seneschal", "{!}Castle 20 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_21_seneschal", "{!}Castle 11 Seneschal", "{!}Castle 11 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_22_seneschal", "{!}Castle 2 Seneschal", "{!}Castle 2 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_nomad_armor, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000008061301fb89acfb95332f],
["castle_23_seneschal", "{!}Castle 3 Seneschal", "{!}Castle 3 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x000000000008548e01d952a9b25d6d5a],
["castle_24_seneschal", "{!}Castle 4 Seneschal", "{!}Castle 4 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_linen_tunic, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004715201ea236c60a2bcae],
["castle_25_seneschal", "{!}Castle 5 Seneschal", "{!}Castle 5 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_leather_jerkin, itm_hide_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c500e01dbb2115a55f3cd],
["castle_26_seneschal", "{!}Castle 6 Seneschal", "{!}Castle 6 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_coarse_tunic, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c03cc01cc34a9a467fdfd],
["castle_27_seneschal", "{!}Castle 7 Seneschal", "{!}Castle 7 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000c13ce01dc4723ab936c82],
["castle_28_seneschal", "{!}Castle 8 Seneschal", "{!}Castle 8 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_hide_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c218501ef4f5d2ccb0026],
["castle_29_seneschal", "{!}Castle 9 Seneschal", "{!}Castle 9 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_leather_jacket, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x000000000008035201e6eebaf3f3eb2b],
["castle_30_seneschal", "{!}Castle 20 Seneschal", "{!}Castle 20 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_31_seneschal", "{!}Castle 11 Seneschal", "{!}Castle 11 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_32_seneschal", "{!}Castle 2 Seneschal", "{!}Castle 2 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_nomad_armor, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000008061301fb89acfb95332f],
["castle_33_seneschal", "{!}Castle 3 Seneschal", "{!}Castle 3 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x000000000008548e01d952a9b25d6d5a],
["castle_34_seneschal", "{!}Castle 4 Seneschal", "{!}Castle 4 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_linen_tunic, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000004715201ea236c60a2bcae],
["castle_35_seneschal", "{!}Castle 5 Seneschal", "{!}Castle 5 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_leather_jerkin, itm_hide_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c500e01dbb2115a55f3cd],
["castle_36_seneschal", "{!}Castle 6 Seneschal", "{!}Castle 6 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_coarse_tunic, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c03cc01cc34a9a467fdfd],
["castle_37_seneschal", "{!}Castle 7 Seneschal", "{!}Castle 7 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000c13ce01dc4723ab936c82],
["castle_38_seneschal", "{!}Castle 8 Seneschal", "{!}Castle 8 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_hide_boots], def_attrib|level(2),wp(20),knows_common, 0x00000000000c218501ef4f5d2ccb0026],
["castle_39_seneschal", "{!}Castle 9 Seneschal", "{!}Castle 9 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_leather_jacket, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x000000000008035201e6eebaf3f3eb2b],
["castle_40_seneschal", "{!}Castle 20 Seneschal", "{!}Castle 20 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_41_seneschal", "{!}Castle 20 Seneschal", "{!}Castle 20 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_42_seneschal", "{!}Castle 20 Seneschal", "{!}Castle 20 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_43_seneschal", "{!}Castle 20 Seneschal", "{!}Castle 20 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_44_seneschal", "{!}Castle 20 Seneschal", "{!}Castle 20 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_45_seneschal", "{!}Castle 20 Seneschal", "{!}Castle 20 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_46_seneschal", "{!}Castle 20 Seneschal", "{!}Castle 20 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_47_seneschal", "{!}Castle 20 Seneschal", "{!}Castle 20 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
["castle_48_seneschal", "{!}Castle 20 Seneschal", "{!}Castle 20 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000000440c601e1cd45cfb38550],
# Arena masters: one "Tournament Master" per town, placed at entry point 52 of that town's arena scene.
["town_1_arena_master", "Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_1_arena|entry(52),reserved, fac_commoners,[itm_coarse_tunic, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_2_arena_master", "Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_2_arena|entry(52),reserved, fac_commoners,[itm_linen_tunic, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_3_arena_master", "Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_3_arena|entry(52),reserved, fac_commoners,[itm_nomad_armor, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_4_arena_master", "Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_4_arena|entry(52),reserved, fac_commoners,[itm_coarse_tunic, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_5_arena_master", "Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_5_arena|entry(52),reserved, fac_commoners,[itm_linen_tunic, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_6_arena_master", "Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_6_arena|entry(52),reserved, fac_commoners,[itm_leather_jerkin, itm_leather_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_7_arena_master", "Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_7_arena|entry(52),reserved, fac_commoners,[itm_padded_leather, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_8_arena_master", "Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_8_arena|entry(52),reserved, fac_commoners,[itm_linen_tunic, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_9_arena_master", "Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_9_arena|entry(52),reserved, fac_commoners,[itm_padded_leather, itm_leather_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_10_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_10_arena|entry(52),reserved, fac_commoners,[itm_nomad_armor, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_11_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_11_arena|entry(52),reserved, fac_commoners,[itm_coarse_tunic, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_12_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_12_arena|entry(52),reserved, fac_commoners,[itm_leather_jerkin, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_13_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_13_arena|entry(52),reserved, fac_commoners,[itm_coarse_tunic, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_14_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_14_arena|entry(52),reserved, fac_commoners,[itm_padded_leather, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_15_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_15_arena|entry(52),reserved, fac_commoners,[itm_padded_leather, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_16_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_16_arena|entry(52),reserved, fac_commoners,[itm_fur_coat, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_17_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_17_arena|entry(52),reserved, fac_commoners,[itm_padded_leather, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_18_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_18_arena|entry(52),reserved, fac_commoners,[itm_padded_leather, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_19_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_19_arena|entry(52),reserved, fac_commoners,[itm_padded_leather, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_20_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_20_arena|entry(52),reserved, fac_commoners,[itm_fur_coat, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_21_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_21_arena|entry(52),reserved, fac_commoners,[itm_padded_leather, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
["town_22_arena_master","Tournament Master","{!}Tournament Master",tf_hero|tf_randomize_face, scn_town_22_arena|entry(52),reserved, fac_commoners,[itm_padded_leather, itm_hide_boots], def_attrib|level(2),wp(20),knows_common,man_face_middle_1, man_face_older_2],
# Underground — the "town crook" NPC entries below are commented out (disabled).
## ["town_1_crook","Town 1 Crook","Town 1 Crook",tf_hero, 0,0, fac_neutral,[itm_linen_tunic, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, 0x000000000004428401f46e44a27144e3],
## ["town_2_crook","Town 2 Crook","Town 2 Crook",tf_hero|tf_female, 0,0, fac_neutral,[itm_lady_dress_ruby, itm_turret_hat_ruby ],def_attrib|level(2),wp(20),knows_inventory_management_10, 0x000000000004300101c36db6db6db6db],
## ["town_3_crook","Town 3 Crook","Town 3 Crook",tf_hero, 0,0, fac_neutral,[itm_leather_apron, itm_hide_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, 0x00000000000c530701f17944a25164e1],
## ["town_4_crook","Town 4 Crook","Town 4 Crook",tf_hero, 0,0, fac_neutral,[itm_coarse_tunic, itm_hide_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, 0x00000000000c840501f36db6db7134db],
## ["town_5_crook","Town 5 Crook","Town 5 Crook",tf_hero, 0,0, fac_neutral,[itm_red_gambeson, itm_blue_hose ],def_attrib|level(5),wp(20),knows_inventory_management_10, 0x00000000000c000601f36db6db7134db],
## ["town_6_crook","Town 6 Crook","Town 6 Crook",tf_hero, 0,0, fac_neutral,[itm_coarse_tunic, itm_hide_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, 0x00000000000c10c801db6db6dd7598aa],
## ["town_7_crook","Town 7 Crook","Town 7 Crook",tf_hero|tf_female, 0,0, fac_neutral,[itm_woolen_dress, itm_woolen_hood ],def_attrib|level(5),wp(20),knows_inventory_management_10, 0x000000000010214101de2f64db6db58d],
##
## ["town_8_crook","Town 8 Crook","Town 8 Crook",tf_hero, 0,0, fac_neutral,[itm_leather_jacket, itm_leather_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, 0x000000000010318401c96db4db6db58d],
## ["town_9_crook","Town 9 Crook","Town 9 Crook",tf_hero, 0,0, fac_neutral,[itm_linen_tunic, itm_hide_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, 0x000000000008520501f16db4db6db58d],
## ["town_10_crook","Town 10 Crook","Town 10 Crook",tf_hero, 0,0, fac_neutral,[itm_coarse_tunic, itm_nomad_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, 0x000000000008600701f35144db6db8a2],
## ["town_11_crook","Town 11 Crook","Town 11 Crook",tf_hero|tf_female, 0,0, fac_neutral,[itm_blue_dress, itm_wimple_with_veil ],def_attrib|level(5),wp(20),knows_inventory_management_10, 0x000000000008408101f386c4db4dd514],
## ["town_12_crook","Town 12 Crook","Town 12 Crook",tf_hero, 0,0, fac_neutral,[itm_coarse_tunic, itm_hide_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, 0x00000000000870c501f386c4f34dbaa1],
## ["town_13_crook","Town 13 Crook","Town 13 Crook",tf_hero, 0,0, fac_neutral,[itm_blue_gambeson, itm_nomad_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, 0x00000000000c114901f245caf34dbaa1],
## ["town_14_crook","Town 14 Crook","Town 14 Crook",tf_hero|tf_female, 0,0, fac_neutral,[itm_woolen_dress, itm_turret_hat_ruby ],def_attrib|level(5),wp(20),knows_inventory_management_10, 0x00000000001021c001f545a49b6eb2bc],
# Armor merchants
# NOTE: disabled leftover marker below — arena_masters_end aliased to zendar_armorer in an earlier version.
#arena_masters_end = zendar_armorer
["town_1_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_linen_tunic, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_2_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face|tf_female|tf_is_merchant, 0, 0, fac_commoners,[itm_woolen_dress, itm_straw_hat ],def_attrib|level(2),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_3_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_arena_tunic_red, itm_hide_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_4_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_red_gambeson, itm_leather_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_5_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_linen_tunic, itm_nomad_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_6_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_fur_coat, itm_nomad_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_7_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_leather_jerkin, itm_blue_hose ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_8_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_padded_leather, itm_leather_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_9_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_blue_gambeson, itm_nomad_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_10_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_leather_jerkin, itm_hide_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_11_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_fur_coat, itm_leather_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_12_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_red_gambeson, itm_nomad_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_13_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_leather_jacket, itm_hide_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_14_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face|tf_female|tf_is_merchant, 0, 0, fac_commoners,[itm_woolen_dress, itm_headcloth ],def_attrib|level(5),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_15_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_blue_gambeson, itm_leather_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_16_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_fur_coat, itm_nomad_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_17_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_fur_coat, itm_hide_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_18_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face|tf_female|tf_is_merchant, 0, 0, fac_commoners,[itm_woolen_dress, itm_headcloth ],def_attrib|level(5),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_19_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_blue_gambeson, itm_leather_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_20_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_fur_coat, itm_nomad_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_21_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_fur_coat, itm_hide_boots ],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_22_armorer","Armorer", "{!}Armorer", tf_hero|tf_randomize_face|tf_female|tf_is_merchant, 0, 0, fac_commoners,[itm_sarranid_common_dress, itm_sarranid_head_cloth ],def_attrib|level(5),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
# Weapon merchants
["town_1_weaponsmith", "Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face|tf_female|tf_is_merchant, 0, 0, fac_commoners,[itm_linen_tunic, itm_hide_boots,itm_straw_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_2_weaponsmith", "Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_shirt, itm_nomad_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_3_weaponsmith", "Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_fur_coat, itm_hide_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_4_weaponsmith", "Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_shirt, itm_hide_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_5_weaponsmith", "Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_leather_jerkin, itm_wrapping_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_6_weaponsmith", "Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_linen_tunic, itm_hide_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_7_weaponsmith", "Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_shirt, itm_hide_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_8_weaponsmith", "Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face|tf_female|tf_is_merchant, 0, 0, fac_commoners,[itm_woolen_dress, itm_wrapping_boots,itm_straw_hat],def_attrib|level(5),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_9_weaponsmith", "Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_leather_jerkin, itm_leather_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_10_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_linen_tunic, itm_hide_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_11_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_leather_jacket, itm_woolen_hose],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_12_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_shirt, itm_hide_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_13_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_arena_tunic_red, itm_wrapping_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_14_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_arena_tunic_blue, itm_wrapping_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_15_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_leather_jacket, itm_woolen_hose],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_16_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_shirt, itm_hide_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_17_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_arena_tunic_green, itm_wrapping_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_18_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_linen_tunic, itm_wrapping_boots],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_19_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_leather_jacket, itm_sarranid_boots_a],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_20_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_shirt, itm_sarranid_boots_a],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_21_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_arena_tunic_green, itm_sarranid_boots_a],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
["town_22_weaponsmith","Weaponsmith","{!}Weaponsmith",tf_hero|tf_randomize_face| tf_is_merchant, 0, 0, fac_commoners,[itm_linen_tunic, itm_sarranid_boots_a],def_attrib|level(5),wp(20),knows_inventory_management_10, mercenary_face_1, mercenary_face_2],
# Tavern keepers
["town_1_tavernkeeper", "Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face, scn_town_1_tavern|entry(9),0, fac_commoners,[itm_leather_apron, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_common, mercenary_face_1, mercenary_face_2],
["town_2_tavernkeeper", "Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face, scn_town_2_tavern|entry(9),0, fac_commoners,[itm_leather_apron, itm_leather_boots],def_attrib|level(2),wp(20),knows_common, mercenary_face_1, mercenary_face_2],
["town_3_tavernkeeper", "Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face|tf_female, scn_town_3_tavern|entry(9),0, fac_commoners,[itm_woolen_dress, itm_hide_boots],def_attrib|level(2),wp(20),knows_common, woman_face_1, woman_face_2],
["town_4_tavernkeeper", "Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face, scn_town_4_tavern|entry(9),0, fac_commoners,[itm_leather_apron, itm_leather_boots],def_attrib|level(2),wp(20),knows_common, mercenary_face_1, mercenary_face_2],
["town_5_tavernkeeper", "Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face, scn_town_5_tavern|entry(9),0, fac_commoners,[itm_leather_apron, itm_hide_boots],def_attrib|level(2),wp(20),knows_common, mercenary_face_1, mercenary_face_2],
["town_6_tavernkeeper", "Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face|tf_female, scn_town_6_tavern|entry(9),0, fac_commoners,[itm_woolen_dress, itm_hide_boots],def_attrib|level(2),wp(20),knows_common, woman_face_1, woman_face_2],
["town_7_tavernkeeper", "Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face|tf_female, scn_town_7_tavern|entry(9),0, fac_commoners,[itm_woolen_dress, itm_leather_boots, itm_headcloth],def_attrib|level(2),wp(20),knows_common, woman_face_1, woman_face_2],
["town_8_tavernkeeper", "Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face, scn_town_8_tavern|entry(9),0, fac_commoners,[itm_leather_apron, itm_leather_boots],def_attrib|level(2),wp(20),knows_common, mercenary_face_1, mercenary_face_2],
["town_9_tavernkeeper", "Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face|tf_female, scn_town_9_tavern|entry(9),0, fac_commoners,[itm_woolen_dress, itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, woman_face_1, woman_face_2],
["town_10_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face|tf_female, scn_town_10_tavern|entry(9),0, fac_commoners,[itm_woolen_dress, itm_hide_boots],def_attrib|level(2),wp(20),knows_common, woman_face_1, woman_face_2],
["town_11_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face|tf_female, scn_town_11_tavern|entry(9),0, fac_commoners,[itm_woolen_dress, itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, woman_face_1, woman_face_2],
["town_12_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face, scn_town_12_tavern|entry(9),0, fac_commoners,[itm_leather_apron, itm_hide_boots],def_attrib|level(2),wp(20),knows_common, mercenary_face_1, mercenary_face_2],
["town_13_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face|tf_female, scn_town_13_tavern|entry(9),0, fac_commoners,[itm_woolen_dress, itm_hide_boots, itm_headcloth],def_attrib|level(2),wp(20),knows_common, woman_face_1, woman_face_2],
["town_14_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face, scn_town_14_tavern|entry(9),0, fac_commoners,[itm_shirt, itm_leather_boots],def_attrib|level(2),wp(20),knows_common, mercenary_face_1, mercenary_face_2],
["town_15_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face|tf_female, scn_town_15_tavern|entry(9),0, fac_commoners,[itm_woolen_dress, itm_nomad_boots],def_attrib|level(2),wp(20),knows_common, woman_face_1, woman_face_2],
["town_16_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face, scn_town_16_tavern|entry(9),0, fac_commoners,[itm_leather_apron, itm_hide_boots],def_attrib|level(2),wp(20),knows_common, mercenary_face_1, mercenary_face_2],
["town_17_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face|tf_female, scn_town_17_tavern|entry(9),0, fac_commoners,[itm_woolen_dress, itm_hide_boots, itm_headcloth],def_attrib|level(2),wp(20),knows_common, woman_face_1, woman_face_2],
["town_18_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face, scn_town_18_tavern|entry(9),0, fac_commoners,[itm_shirt, itm_leather_boots],def_attrib|level(2),wp(20),knows_common, mercenary_face_1, mercenary_face_2],
["town_19_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face|tf_female, scn_town_19_tavern|entry(9),0, fac_commoners,[itm_sarranid_dress_a, itm_sarranid_boots_a],def_attrib|level(2),wp(20),knows_common, woman_face_1, woman_face_2],
["town_20_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face, scn_town_20_tavern|entry(9),0, fac_commoners,[itm_sarranid_cloth_robe, itm_sarranid_boots_a],def_attrib|level(2),wp(20),knows_common, mercenary_face_1, mercenary_face_2],
["town_21_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face|tf_female, scn_town_21_tavern|entry(9),0, fac_commoners,[itm_sarranid_common_dress, itm_sarranid_boots_a, itm_headcloth],def_attrib|level(2),wp(20),knows_common, woman_face_1, woman_face_2],
["town_22_tavernkeeper","Tavern_Keeper","{!}Tavern_Keeper",tf_hero|tf_randomize_face, scn_town_22_tavern|entry(9),0, fac_commoners,[itm_sarranid_cloth_robe_b, itm_sarranid_boots_a],def_attrib|level(2),wp(20),knows_common, mercenary_face_1, mercenary_face_2],
#Goods Merchants
# One goods merchant per town (1-22), spawned in the town's store scene at
# entry point 9. All carry the tf_is_merchant flag and the
# knows_inventory_management_10 skill set. Female merchants additionally set
# tf_female and use the woman_face_* key pair.
["town_1_merchant", "Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_1_store|entry(9),0, fac_commoners, [itm_coarse_tunic, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_2_merchant", "Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_2_store|entry(9),0, fac_commoners, [itm_leather_apron, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_3_merchant", "Merchant","{!}Merchant",tf_female|tf_hero|tf_randomize_face|tf_is_merchant, scn_town_3_store|entry(9),0, fac_commoners, [itm_dress, itm_leather_boots, itm_straw_hat ],def_attrib|level(2),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_4_merchant", "Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_4_store|entry(9),0, fac_commoners, [itm_leather_apron, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_5_merchant", "Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_5_store|entry(9),0, fac_commoners, [itm_nomad_armor, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
# NOTE(review): town_6_merchant sets tf_female but uses the man_face_* key
# pair, unlike every other female merchant here — possibly unintended; verify.
["town_6_merchant", "Merchant","{!}Merchant",tf_female|tf_hero|tf_randomize_face|tf_is_merchant, scn_town_6_store|entry(9),0, fac_commoners, [itm_woolen_dress, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_7_merchant", "Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_7_store|entry(9),0, fac_commoners, [itm_leather_jerkin,itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_8_merchant", "Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_8_store|entry(9),0, fac_commoners, [itm_leather_apron, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_9_merchant", "Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_9_store|entry(9),0, fac_commoners, [itm_leather_apron, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_10_merchant","Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_10_store|entry(9),0, fac_commoners, [itm_leather_jerkin,itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_11_merchant","Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_11_store|entry(9),0, fac_commoners, [itm_leather_apron, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_12_merchant","Merchant","{!}Merchant",tf_female|tf_hero|tf_randomize_face|tf_is_merchant, scn_town_12_store|entry(9),0, fac_commoners, [itm_woolen_dress, itm_leather_boots, itm_female_hood ],def_attrib|level(2),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_13_merchant","Merchant","{!}Merchant",tf_female|tf_hero|tf_randomize_face|tf_is_merchant, scn_town_13_store|entry(9),0, fac_commoners, [itm_dress, itm_leather_boots, itm_straw_hat ],def_attrib|level(2),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_14_merchant","Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_14_store|entry(9),0, fac_commoners, [itm_leather_apron, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_15_merchant","Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_15_store|entry(9),0, fac_commoners, [itm_leather_apron, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_16_merchant","Merchant","{!}Merchant",tf_female|tf_hero|tf_randomize_face|tf_is_merchant, scn_town_16_store|entry(9),0, fac_commoners, [itm_woolen_dress, itm_leather_boots, itm_female_hood ],def_attrib|level(2),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_17_merchant","Merchant","{!}Merchant",tf_female|tf_hero|tf_randomize_face|tf_is_merchant, scn_town_17_store|entry(9),0, fac_commoners, [itm_dress, itm_leather_boots, itm_straw_hat ],def_attrib|level(2),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_18_merchant","Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_18_store|entry(9),0, fac_commoners, [itm_leather_apron, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_19_merchant","Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_19_store|entry(9),0, fac_commoners, [itm_leather_apron, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_20_merchant","Merchant","{!}Merchant",tf_female|tf_hero|tf_randomize_face|tf_is_merchant, scn_town_20_store|entry(9),0, fac_commoners, [itm_sarranid_common_dress_b, itm_sarranid_boots_a, itm_sarranid_felt_head_cloth_b ],def_attrib|level(2),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_21_merchant","Merchant","{!}Merchant",tf_female|tf_hero|tf_randomize_face|tf_is_merchant, scn_town_21_store|entry(9),0, fac_commoners, [itm_sarranid_dress_a, itm_sarranid_boots_a, itm_sarranid_felt_head_cloth ],def_attrib|level(2),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_22_merchant","Merchant","{!}Merchant", tf_hero|tf_randomize_face|tf_is_merchant, scn_town_22_store|entry(9),0, fac_commoners, [itm_leather_apron, itm_leather_boots ],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
# Named merchant in the salt-mine scene (entry 1). Unlike the town merchants
# he has no tf_randomize_face and uses a single hard-coded face code instead
# of a min/max face-key pair.
["salt_mine_merchant","Barezan","Barezan", tf_hero|tf_is_merchant, scn_salt_mine|entry(1),0, fac_commoners, [itm_leather_apron, itm_leather_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, 0x00000000000c528601ea69b6e46dbdb6],
# Horse Merchants
# One horse merchant per town (1-22). Scene/entry slots are 0 here —
# presumably these troops are positioned by script; verify against
# module_scripts before relying on that.
# FIX: town_1_horse_merchant previously had def_attrib|level(2) while every
# other horse merchant in this section uses level(5) — a copy-paste slip
# from the goods-merchant entries above; normalized to level(5).
["town_1_horse_merchant","Horse Merchant","{!}Town 1 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant|tf_female, 0, 0, fac_commoners,[itm_blue_dress, itm_blue_hose, itm_female_hood], def_attrib|level(5),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_2_horse_merchant","Horse Merchant","{!}Town 2 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_linen_tunic, itm_nomad_boots,], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_3_horse_merchant","Horse Merchant","{!}Town 3 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_nomad_armor, itm_hide_boots], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_4_horse_merchant","Horse Merchant","{!}Town 4 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_leather_jerkin, itm_nomad_boots], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_5_horse_merchant","Horse Merchant","{!}Town 5 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant|tf_female, 0, 0, fac_commoners,[itm_dress, itm_woolen_hose, itm_woolen_hood], def_attrib|level(5),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_6_horse_merchant","Horse Merchant","{!}Town 6 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_coarse_tunic, itm_hide_boots], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_7_horse_merchant","Horse Merchant","{!}Town 7 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_coarse_tunic, itm_leather_boots], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_8_horse_merchant","Horse Merchant","{!}Town 8 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_coarse_tunic, itm_hide_boots], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_9_horse_merchant","Horse Merchant","{!}Town 9 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_leather_jerkin, itm_woolen_hose], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_10_horse_merchant","Horse Merchant","{!}Town 10 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant|tf_female, 0, 0, fac_commoners,[itm_blue_dress, itm_blue_hose, itm_straw_hat], def_attrib|level(5),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_11_horse_merchant","Horse Merchant","{!}Town 11 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_nomad_armor, itm_leather_boots], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_12_horse_merchant","Horse Merchant","{!}Town 12 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_leather_jacket, itm_hide_boots], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_13_horse_merchant","Horse Merchant","{!}Town 13 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_14_horse_merchant","Horse Merchant","{!}Town 14 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant|tf_female, 0, 0, fac_commoners,[itm_peasant_dress, itm_blue_hose, itm_headcloth], def_attrib|level(5),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_15_horse_merchant","Horse Merchant","{!}Town 15 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_nomad_armor, itm_leather_boots], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_16_horse_merchant","Horse Merchant","{!}Town 16 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_leather_jacket, itm_hide_boots], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_17_horse_merchant","Horse Merchant","{!}Town 17 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_18_horse_merchant","Horse Merchant","{!}Town 18 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant|tf_female, 0, 0, fac_commoners,[itm_peasant_dress, itm_blue_hose, itm_headcloth], def_attrib|level(5),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
["town_19_horse_merchant","Horse Merchant","{!}Town 15 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_nomad_armor, itm_sarranid_boots_a], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_20_horse_merchant","Horse Merchant","{!}Town 16 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_sarranid_cloth_robe, itm_sarranid_boots_a], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_21_horse_merchant","Horse Merchant","{!}Town 17 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant, 0, 0, fac_commoners,[itm_sarranid_cloth_robe_b, itm_sarranid_boots_a], def_attrib|level(5),wp(20),knows_inventory_management_10, man_face_young_1, man_face_older_2],
["town_22_horse_merchant","Horse Merchant","{!}Town 18 Horse Merchant",tf_hero|tf_randomize_face|tf_is_merchant|tf_female, 0, 0, fac_commoners,[itm_sarranid_common_dress_b, itm_blue_hose, itm_sarranid_felt_head_cloth_b], def_attrib|level(5),wp(20),knows_inventory_management_10, woman_face_1, woman_face_2],
#Town Mayors #itm_courtly_outfit itm_gambeson itm_blue_gambeson itm_red_gambeson itm_nobleman_outfit itm_rich_outfit
# Guild masters ("mayors") for towns 1-22, faction fac_neutral. Unlike the
# merchant sections, the sixth slot here is the `reserved` constant rather
# than 0 — presumably equivalent padding; verify against header definitions.
# Scene slot is 0 (placement handled elsewhere). The comment above lists the
# outfit items rotated through the entries.
["town_1_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_courtly_outfit, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_2_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_gambeson, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_3_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_4_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_fur_coat, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_5_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_nobleman_outfit, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_6_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_red_gambeson, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_7_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_rich_outfit, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_8_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_red_gambeson, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_9_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_courtly_outfit, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_10_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_leather_jerkin, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_11_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_leather_jacket, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_12_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_red_gambeson, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_13_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_nobleman_outfit, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_14_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_15_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_leather_jacket, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_16_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_fur_coat, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_17_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_nobleman_outfit, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_18_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_blue_gambeson, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
# Towns 19-22 wear Sarranid robes.
["town_19_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_sarranid_cloth_robe, itm_sarranid_boots_a], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_20_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_sarranid_cloth_robe, itm_sarranid_boots_a], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_21_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_sarranid_cloth_robe, itm_sarranid_boots_a], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["town_22_mayor", "Guild_Master", "{!}Guild_Master", tf_hero|tf_randomize_face, 0,reserved, fac_neutral,[ itm_sarranid_cloth_robe, itm_sarranid_boots_a], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
#Village stores
# Village elders for villages 1-67 (list continues past this chunk). All are
# tf_is_merchant level(2) commoners with knows_inventory_management_10 and
# old-man face keys; outfits rotate through a few peasant clothing sets.
# NOTE(review): every entry reuses the "{!}village_1_elder" label string —
# the "{!}" prefix keeps it out of translation/display, so this copy-paste
# is harmless, but the labels do not match the troop ids.
["village_1_elder", "Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots, itm_felt_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_2_elder", "Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_3_elder", "Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_4_elder", "Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_5_elder", "Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_6_elder", "Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_7_elder", "Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_fur_coat, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_8_elder", "Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_wrapping_boots, itm_felt_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_9_elder", "Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_10_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_11_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_12_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_13_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_14_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_15_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots, itm_felt_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_16_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots, itm_leather_warrior_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_17_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_fur_coat, itm_nomad_boots,itm_fur_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_18_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots, itm_leather_warrior_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_19_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots, itm_fur_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_20_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots, itm_leather_warrior_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_21_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_22_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_fur_coat, itm_nomad_boots,itm_fur_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_23_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots, itm_felt_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_24_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_25_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_fur_coat, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_26_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_27_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_wrapping_boots, itm_felt_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_28_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_29_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_30_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_31_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_32_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_33_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_34_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots,itm_fur_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_35_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_36_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_37_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_38_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_39_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_40_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_41_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_42_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_43_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_44_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots,itm_fur_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_45_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_46_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_47_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_48_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_49_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_fur_coat, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_50_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_51_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_52_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots,itm_fur_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_53_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots, itm_felt_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_54_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_55_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_56_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_57_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_wrapping_boots, itm_felt_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_58_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_fur_coat, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_59_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_60_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_61_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_62_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots,itm_fur_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_63_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots, itm_felt_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_64_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_65_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_fur_coat, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_66_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_67_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_wrapping_boots, itm_felt_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_68_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_69_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_70_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_71_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_72_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots,itm_fur_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_73_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots, itm_felt_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_74_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_75_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_76_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_fur_coat, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_77_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_wrapping_boots, itm_felt_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_78_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_79_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_80_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_81_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_82_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_83_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_fur_coat, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_84_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots,itm_fur_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_85_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_86_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_87_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_88_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_fur_coat, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_89_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_90_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_91_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_92_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_93_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_94_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe, itm_nomad_boots,itm_fur_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_95_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe_b, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_96_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_97_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_98_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_99_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_100_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_101_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe_b, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_102_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_103_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe, itm_wrapping_boots, itm_leather_cap],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_104_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_nomad_boots,itm_fur_hat],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_105_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe_b, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_106_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_coarse_tunic, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_107_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_108_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe_b, itm_hide_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_109_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_sarranid_cloth_robe_b, itm_nomad_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
["village_110_elder","Village_Elder", "{!}village_1_elder",tf_hero|tf_randomize_face|tf_is_merchant, 0,0, fac_commoners,[itm_robe, itm_wrapping_boots],def_attrib|level(2),wp(20),knows_inventory_management_10, man_face_old_1, man_face_older_2],
# Place extra merchants before this point
["merchants_end","merchants_end","merchants_end",tf_hero, 0,0, fac_commoners,[],def_attrib|level(2),wp(20),knows_inventory_management_10,0],
# Used for player enterprises (one master craftsman per town)
["town_1_master_craftsman", "{!}Town 1 Craftsman", "{!}Town 1 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_apron, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x000000003a0c629346edb2335a82b6e300000000000d634a0000000000000000],
["town_2_master_craftsman", "{!}Town 2 Craftsman", "{!}Town 2 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_padded_leather, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x0000000f010811c92d3295e46a96c72300000000001f5a980000000000000000],
["town_3_master_craftsman", "{!}Town 3 Craftsman", "{!}Town 3 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_coarse_tunic, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x000000001b083203151d2ad5648e52b400000000001b172e0000000000000000],
["town_4_master_craftsman", "{!}Town 4 Craftsman", "{!}Town 4 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_apron, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000001a10114f091b2c259cd4c92300000000000228dd0000000000000000],
["town_5_master_craftsman", "{!}Town 5 Craftsman", "{!}Town 5 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_jerkin, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000000d1044c578598cd92b5256db00000000001f23340000000000000000],
["town_6_master_craftsman", "{!}Town 6 Craftsman", "{!}Town 6 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_apron, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common, 0x000000001f046285493eaf1b048abcdb00000000001a8aad0000000000000000],
["town_7_master_craftsman", "{!}Town 7 Craftsman", "{!}Town 7 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_jerkin, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x000000002b0052c34c549225619356d400000000001cc6e60000000000000000],
["town_8_master_craftsman", "{!}Town 8 Craftsman", "{!}Town 8 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_apron, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common, 0x0000000fdb0c20465b6e51e8a12c82d400000000001e148c0000000000000000],
["town_9_master_craftsman", "{!}Town 9 Craftsman", "{!}Town 9 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_coarse_tunic, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x00000009f7005246071db236e296a45300000000001a8b0a0000000000000000],
["town_10_master_craftsman", "{!}Town 10 Craftsman", "{!}Town 10 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_jerkin, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x00000009f71012c2456a921aa379321a000000000012c6d90000000000000000],
["town_11_master_craftsman", "{!}Town 11 Craftsman", "{!}Town 11 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_apron, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common, 0x00000009f308514428db71b9ad70b72400000000001dc9140000000000000000],
["town_12_master_craftsman", "{!}Town 12 Seneschal", "{!}Town 12 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_coarse_tunic, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, 0x00000009e90825863853a5b91cd71a5b00000000000598db0000000000000000],
["town_13_master_craftsman", "{!}Town 13 Seneschal", "{!}Town 13 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_jerkin, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, 0x00000009fa0c708f274c8eb4c64e271300000000001eb69a0000000000000000],
["town_14_master_craftsman", "{!}Town 14 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_apron, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x00000007590c3206155c8b475a4e439a00000000001f489a0000000000000000],
["town_15_master_craftsman", "{!}Town 15 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_apron, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x00000007440022d04b2c6cb7d3723d5a00000000001dc90a0000000000000000],
["town_16_master_craftsman", "{!}Town 16 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_apron, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x00000007680c3586054b8e372e4db65c00000000001db7230000000000000000],
["town_17_master_craftsman", "{!}Town 17 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_apron, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x0000000766046186591b564cec85d2e200000000001e4cea0000000000000000],
["town_18_master_craftsman", "{!}Town 18 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_leather_apron, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x0000000e7e0075523a6aa9b6da61e8dd00000000001d96d30000000000000000],
["town_19_master_craftsman", "{!}Town 19 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_sarranid_cloth_robe, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000002408314852a432e88aaa42e100000000001e284e0000000000000000],
["town_20_master_craftsman", "{!}Town 20 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_sarranid_cloth_robe_b, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x000000001104449136e44cbd1c9352bc000000000005e8d10000000000000000],
["town_21_master_craftsman", "{!}Town 21 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_sarranid_cloth_robe, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000131032d3351c6e43226ec96c000000000005b5240000000000000000],
["town_22_master_craftsman", "{!}Town 22 Seneschal", "{!}Town 14 Seneschal", tf_hero|tf_is_merchant, 0,reserved, fac_neutral,[ itm_sarranid_cloth_robe_b, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, 0x00000000200c658a5723b1a3148dc455000000000015ab920000000000000000],
# Chests
["zendar_chest","{!}Zendar Chest","{!}Zendar Chest",tf_hero|tf_inactive, 0,reserved, fac_neutral,
[],def_attrib|level(18),wp(60),knows_common, 0],
["tutorial_chest_1","{!}Melee Weapons Chest","{!}Melee Weapons Chest",tf_hero|tf_inactive, 0,reserved, fac_neutral,[itm_tutorial_sword, itm_tutorial_axe, itm_tutorial_spear, itm_tutorial_club, itm_tutorial_battle_axe],def_attrib|level(18),wp(60),knows_common, 0],
["tutorial_chest_2","{!}Ranged Weapons Chest","{!}Ranged Weapons Chest",tf_hero|tf_inactive, 0,reserved, fac_neutral,[itm_tutorial_short_bow, itm_tutorial_arrows, itm_tutorial_crossbow, itm_tutorial_bolts, itm_tutorial_throwing_daggers],def_attrib|level(18),wp(60),knows_common, 0],
#SB : move samurai back to Rivacheg (other chests were inaccessible)
["bonus_chest_1","{!}Bonus Chest","{!}Bonus Chest",tf_hero|tf_inactive, 0,reserved, fac_neutral,[itm_strange_armor,itm_strange_short_sword,itm_strange_boots,itm_strange_sword,itm_strange_helmet,itm_strange_great_sword],def_attrib|level(18),wp(60),knows_common, 0],
["bonus_chest_2","{!}Bonus Chest","{!}Bonus Chest",tf_hero|tf_inactive, 0,reserved, fac_neutral,[(itm_bride_dress,imod_day_old),(itm_bride_crown,imod_deadly),(itm_bride_shoes,imod_stubborn),itm_torch,(itm_practice_bow_2,imod_tempered),(itm_practice_arrows_2,imod_large_bag)],def_attrib|level(18),wp(60),knows_common, 0],
["bonus_chest_3","{!}Bonus Chest","{!}Bonus Chest",tf_hero|tf_inactive, 0,reserved, fac_neutral,[itm_black_helmet,itm_black_armor,itm_black_greaves,(itm_horse_meat,imod_rotten)],def_attrib|level(18),wp(60),knows_common, 0],
["household_possessions","{!}household_possessions","{!}household_possessions",tf_hero|tf_inactive|tf_is_merchant, 0,reserved, fac_neutral,[],def_attrib|level(18),wp(60),knows_inventory_management_10, 0],
# These are used as arrays in the scripts. #SB : give full inventory
["temp_array_a","{!}temp_array_a","{!}temp_array_a",tf_hero|tf_inactive, 0,reserved, fac_neutral,[],def_attrib|level(18),wp(60),knows_inventory_management_10, 0],
["temp_array_b","{!}temp_array_b","{!}temp_array_b",tf_hero|tf_inactive, 0,reserved, fac_neutral,[],def_attrib|level(18),wp(60),knows_inventory_management_10, 0],
["temp_array_c","{!}temp_array_c","{!}temp_array_c",tf_hero|tf_inactive, 0,reserved, fac_neutral,[],def_attrib|level(18),wp(60),knows_inventory_management_10, 0],
["stack_selection_amounts","{!}stack_selection_amounts","{!}stack_selection_amounts",tf_hero|tf_inactive,0,reserved,fac_neutral,[],def_attrib,0,knows_common,0],
["stack_selection_ids","{!}stack_selection_ids","{!}stack_selection_ids",tf_hero|tf_inactive,0,reserved,fac_neutral,[],def_attrib,0,knows_common,0],
["notification_menu_types","{!}notification_menu_types","{!}notification_menu_types",tf_hero|tf_inactive,0,reserved,fac_neutral,[],def_attrib,0,knows_common,0],
["notification_menu_var1","{!}notification_menu_var1","{!}notification_menu_var1",tf_hero|tf_inactive,0,reserved,fac_neutral,[],def_attrib,0,knows_common,0],
["notification_menu_var2","{!}notification_menu_var2","{!}notification_menu_var2",tf_hero|tf_inactive,0,reserved,fac_neutral,[],def_attrib,0,knows_common,0],
["banner_background_color_array","{!}banner_background_color_array","{!}banner_background_color_array",tf_hero|tf_inactive,0,reserved,fac_neutral,[],def_attrib,0,knows_common,0],
["multiplayer_data","{!}multiplayer_data","{!}multiplayer_data",tf_hero|tf_inactive,0,reserved,fac_neutral,[],def_attrib,0,knows_common,0],
## ["black_khergit_guard","Black Khergit Guard","Black Khergit Guard",tf_mounted|tf_guarantee_ranged|tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_helmet|tf_guarantee_armor|tf_guarantee_horse,0,0,fac_black_khergits,
## [itm_arrows,itm_nomad_sabre,itm_scimitar,itm_winged_mace,itm_lance,itm_khergit_bow,itm_khergit_guard_helmet,itm_khergit_cavalry_helmet,itm_khergit_guard_boots,itm_khergit_guard_armor,itm_nomad_shield,itm_steppe_horse,itm_warhorse],
## def_attrib|level(28),wp(140),knows_riding_6|knows_ironflesh_4|knows_horse_archery_6|knows_power_draw_6,khergit_face1, khergit_face2],
# Add Extra Quest NPCs below this point
["local_merchant","Local Merchant","Local Merchants",tf_guarantee_boots|tf_guarantee_armor, 0,0, fac_commoners,[itm_leather_apron,itm_leather_boots,itm_butchering_knife],def_attrib|level(5),wp(40),knows_power_strike_1, merchant_face_1, merchant_face_2],
["tax_rebel","Peasant Rebel","Peasant Rebels",tf_guarantee_armor,0,reserved,fac_commoners,
[itm_cleaver,itm_knife,itm_pitch_fork,itm_sickle,itm_club,itm_stones,itm_leather_cap,itm_felt_hat,itm_felt_hat,itm_linen_tunic,itm_coarse_tunic,itm_nomad_boots,itm_wrapping_boots],
def_attrib|level(4),wp(60),knows_common,vaegir_face1, vaegir_face2],
["trainee_peasant","Peasant","Peasants",tf_guarantee_armor,0,reserved,fac_commoners,
[itm_cleaver,itm_knife,itm_pitch_fork,itm_sickle,itm_club,itm_stones,itm_leather_cap,itm_felt_hat,itm_felt_hat,itm_linen_tunic,itm_coarse_tunic,itm_nomad_boots,itm_wrapping_boots],
def_attrib|level(4),wp(60),knows_common,vaegir_face1, vaegir_face2],
["fugitive","Nervous Man","Nervous Men",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_commoners,
[itm_short_tunic,itm_linen_tunic,itm_coarse_tunic, itm_tabard, itm_leather_vest, itm_woolen_hose, itm_nomad_boots, itm_blue_hose, itm_wrapping_boots, itm_fur_hat, itm_leather_cap, itm_sword_medieval_b, itm_throwing_daggers],
def_attrib|str_24|agi_25|level(26),wp(180),knows_common|knows_power_throw_6|knows_power_strike_6|knows_ironflesh_9,man_face_middle_1, man_face_old_2],
#SB : adjust drunk swords, 1 from each faction type
["belligerent_drunk","Belligerent Drunk","Belligerent Drunks",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_commoners,
[itm_short_tunic,itm_linen_tunic,itm_coarse_tunic, itm_tabard, itm_leather_vest, itm_woolen_hose, itm_nomad_boots, itm_blue_hose, itm_wrapping_boots, itm_fur_hat, itm_leather_cap, itm_sword_viking_1, itm_sword_medieval_a, itm_sword_khergit_1, itm_arabian_sword_a,],
def_attrib|str_20|agi_8|level(15),wp(120),knows_common|knows_power_strike_2|knows_ironflesh_9, bandit_face1, bandit_face2],
["hired_assassin","Hired Assassin","Hired Assassin",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_commoners, #they look like belligerent drunks
[itm_short_tunic,itm_linen_tunic,itm_coarse_tunic, itm_tabard, itm_leather_vest, itm_woolen_hose, itm_nomad_boots, itm_blue_hose, itm_wrapping_boots, itm_fur_hat, itm_leather_cap, itm_sword_viking_3, itm_sword_medieval_d_long, itm_sword_khergit_4, itm_arabian_sword_d, itm_strange_sword],
def_attrib|str_20|agi_16|level(20),wp(180),knows_common|knows_power_strike_5|knows_ironflesh_3, bandit_face1, bandit_face2],
["fight_promoter","Rough-Looking Character","Rough-Looking Character",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_commoners,
[itm_short_tunic,itm_linen_tunic,itm_coarse_tunic, itm_tabard, itm_leather_vest, itm_woolen_hose, itm_nomad_boots, itm_blue_hose, itm_wrapping_boots, itm_fur_hat, itm_leather_cap, itm_sword_viking_1],
def_attrib|str_20|agi_16|level(20),wp(180),knows_common|knows_power_strike_5|knows_ironflesh_3, bandit_face1, bandit_face2],
["spy","Ordinary Townsman","Ordinary Townsmen", tf_unkillable|tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse,0,0,fac_neutral,
[itm_sword_viking_1,itm_leather_jerkin,itm_leather_boots,itm_courser,itm_leather_gloves],
def_attrib|agi_11|level(20),wp(130),knows_common,man_face_middle_1, man_face_older_2],
["spy_partner","Unremarkable Townsman","Unremarkable Townsmen", tf_unkillable|tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse,0,0,fac_neutral,
[itm_sword_medieval_b,itm_leather_jerkin,itm_leather_boots,itm_courser,itm_leather_gloves],
def_attrib|agi_11|level(10),wp(130),knows_common,vaegir_face1, vaegir_face2],
["nurse_for_lady","Nurse","Nurse",tf_female|tf_guarantee_armor,0,reserved,fac_commoners,
[itm_robe, itm_black_hood, itm_wrapping_boots],
def_attrib|level(4),wp(60),knows_common,woman_face_1, woman_face_2],
["temporary_minister","Minister","Minister",tf_guarantee_armor|tf_guarantee_boots,0,reserved,fac_commoners,
[itm_rich_outfit, itm_wrapping_boots],
def_attrib|level(4),wp(60),knows_common,man_face_middle_1, man_face_older_2],
## ["conspirator","Conspirator","Conspirators", tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse,0,0,fac_neutral,
## [itm_sword,itm_leather_jerkin,itm_leather_boots,itm_hunter,itm_leather_gloves],
## def_attrib|agi_11|level(10),wp(130),knows_common,vaegir_face1, vaegir_face2],
## ["conspirator_leader","Conspirator","Conspirators", tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse,0,0,fac_neutral,
## [itm_sword,itm_leather_jerkin,itm_leather_boots,itm_hunter,itm_leather_gloves],
## def_attrib|agi_11|level(10),wp(130),knows_common,vaegir_face1, vaegir_face2],
## ["peasant_rebel","Peasant Rebel","Peasant Rebels",tf_guarantee_armor,0,reserved,fac_peasant_rebels,
## [itm_cleaver,itm_knife,itm_pitch_fork,itm_sickle,itm_club,itm_stones,itm_leather_cap,itm_felt_hat,itm_felt_hat,itm_linen_tunic,itm_coarse_tunic,itm_nomad_boots,itm_wrapping_boots],
## def_attrib|level(4),wp(60),knows_common,vaegir_face1, vaegir_face2],
## ["noble_refugee","Noble Refugee","Noble Refugees",tf_guarantee_boots|tf_guarantee_armor,0,0,fac_noble_refugees,
## [itm_sword,itm_leather_jacket,itm_hide_boots, itm_saddle_horse, itm_leather_jacket, itm_leather_cap],
## def_attrib|level(9),wp(100),knows_common,swadian_face1, swadian_face2],
## ["noble_refugee_woman","Noble Refugee Woman","Noble Refugee Women",tf_female|tf_guarantee_armor|tf_guarantee_boots,0,0,fac_noble_refugees,
## [itm_knife,itm_dagger,itm_hunting_crossbow,itm_dress,itm_robe,itm_woolen_dress, itm_headcloth, itm_woolen_hood, itm_wrapping_boots],
## def_attrib|level(3),wp(45),knows_common,refugee_face1,refugee_face2],
["quick_battle_6_player", "{!}quick_battle_6_player", "{!}quick_battle_6_player", tf_hero, 0, reserved, fac_player_faction, [itm_padded_cloth,itm_nomad_boots, itm_splinted_leather_greaves, itm_skullcap, itm_sword_medieval_b, itm_crossbow, itm_bolts, itm_plate_covered_round_shield], knight_attrib_1,wp(130),knight_skills_1, 0x000000000008010b01f041a9249f65fd],
# Multiplayer AI (bot) troops
["swadian_crossbowman_multiplayer_ai","Swadian Crossbowman","Swadian Crossbowmen",tf_guarantee_all,0,0,fac_kingdom_1,
[itm_bolts,itm_crossbow,itm_sword_medieval_a,itm_tab_shield_heater_b,
itm_leather_jerkin,itm_leather_armor,itm_ankle_boots,itm_footman_helmet],
def_attrib|level(19),wp_melee(90)|wp_crossbow(100),knows_common|knows_ironflesh_4|knows_athletics_6|knows_shield_5|knows_power_strike_3,swadian_face_young_1, swadian_face_old_2],
["swadian_infantry_multiplayer_ai","Swadian Infantry","Swadian Infantry",tf_guarantee_all_wo_ranged,0,0,fac_kingdom_1,
[itm_pike,itm_bastard_sword_a,itm_tab_shield_heater_c,
itm_studded_leather_coat,itm_ankle_boots,itm_flat_topped_helmet],
def_attrib|level(19),wp_melee(105),knows_common|knows_ironflesh_5|knows_shield_4|knows_power_strike_5|knows_athletics_4,swadian_face_middle_1, swadian_face_old_2],
["swadian_man_at_arms_multiplayer_ai","Swadian Man at Arms","Swadian Men at Arms",tf_mounted|tf_guarantee_all_wo_ranged,0,0,fac_kingdom_1,
[itm_lance,itm_bastard_sword_a,itm_tab_shield_heater_cav_a,
itm_mail_with_surcoat,itm_hide_boots,itm_norman_helmet,itm_hunter],
def_attrib|level(19),wp_melee(100),knows_common|knows_riding_4|knows_ironflesh_4|knows_shield_4|knows_power_strike_4|knows_athletics_1,swadian_face_young_1, swadian_face_old_2],
["vaegir_archer_multiplayer_ai","Vaegir Archer","Vaegir Archers",tf_guarantee_all,0,0,fac_kingdom_2,
[itm_arrows,itm_scimitar,itm_nomad_bow,
itm_leather_vest,itm_nomad_boots,itm_spiked_helmet,itm_nomad_cap],
def_attrib|str_12|level(19),wp_melee(70)|wp_archery(110),knows_ironflesh_4|knows_power_draw_5|knows_athletics_6|knows_shield_2,vaegir_face_young_1, vaegir_face_older_2],
["vaegir_spearman_multiplayer_ai","Vaegir Spearman","Vaegir Spearmen",tf_guarantee_all_wo_ranged,0,0,fac_kingdom_2,
[itm_padded_leather,itm_nomad_boots,itm_spiked_helmet,itm_nomad_cap, itm_spear, itm_tab_shield_kite_b, itm_mace_1, itm_javelin],
def_attrib|str_12|level(19),wp_melee(90),knows_ironflesh_4|knows_athletics_6|knows_power_throw_3|knows_power_strike_3|knows_shield_2,vaegir_face_young_1, vaegir_face_older_2],
["vaegir_horseman_multiplayer_ai","Vaegir Horseman","Vaegir Horsemen",tf_mounted|tf_guarantee_all_wo_ranged,0,0,fac_kingdom_2,
[itm_battle_axe,itm_scimitar,itm_lance,itm_tab_shield_kite_cav_a,
itm_studded_leather_coat,itm_lamellar_vest,itm_nomad_boots,itm_spiked_helmet,itm_saddle_horse],
def_attrib|level(19),wp(100),knows_riding_4|knows_ironflesh_4|knows_power_strike_4|knows_shield_3,vaegir_face_young_1, vaegir_face_older_2],
["khergit_dismounted_lancer_multiplayer_ai","Khergit Dismounted Lancer","Khergit Dismounted Lancer",tf_guarantee_all_wo_ranged,0,0,fac_kingdom_3,
[itm_sword_khergit_4,itm_spiked_mace,itm_one_handed_war_axe_b,itm_one_handed_war_axe_a,itm_hafted_blade_a,itm_hafted_blade_b,itm_heavy_lance,itm_lance,
itm_khergit_cavalry_helmet,itm_khergit_war_helmet,itm_lamellar_vest_khergit,itm_tribal_warrior_outfit,itm_khergit_leather_boots,itm_splinted_leather_greaves,itm_leather_gloves,itm_mail_mittens,itm_tab_shield_small_round_b,itm_tab_shield_small_round_c],
def_attrib|level(19),wp(100),knows_riding_4|knows_power_strike_1|knows_power_draw_4|knows_power_throw_2|knows_ironflesh_1|knows_horse_archery_1,khergit_face_middle_1, khergit_face_older_2],
["khergit_veteran_horse_archer_multiplayer_ai","Khergit Horse Archer","Khergit Horse Archers",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_3,
[itm_sword_khergit_3,itm_khergit_bow,itm_khergit_arrows,itm_tab_shield_small_round_b,
itm_khergit_cavalry_helmet,itm_tribal_warrior_outfit,itm_khergit_leather_boots,itm_steppe_horse],
def_attrib|level(19),wp(90)|wp_archery(100),knows_riding_6|knows_power_draw_5|knows_shield_2|knows_horse_archery_5,khergit_face_middle_1, khergit_face_older_2],
["khergit_lancer_multiplayer_ai","Khergit Lancer","Khergit Lancers",tf_guarantee_all_wo_ranged,0,0,fac_kingdom_3,
[itm_sword_khergit_4,itm_spiked_mace,itm_one_handed_war_axe_b,itm_one_handed_war_axe_a,itm_hafted_blade_a,itm_hafted_blade_b,itm_heavy_lance,itm_lance,
itm_khergit_guard_helmet,itm_khergit_cavalry_helmet,itm_khergit_war_helmet,itm_lamellar_vest_khergit,itm_lamellar_armor,itm_khergit_leather_boots,itm_splinted_leather_greaves,itm_leather_gloves,itm_mail_mittens,itm_scale_gauntlets,itm_tab_shield_small_round_b,itm_tab_shield_small_round_c,itm_courser],
def_attrib|level(19),wp(100),knows_riding_7|knows_power_strike_2|knows_power_draw_4|knows_power_throw_2|knows_ironflesh_1|knows_horse_archery_1,khergit_face_middle_1, khergit_face_older_2],
["nord_veteran_multiplayer_ai","Nord Footman","Nord Footmen",tf_guarantee_all_wo_ranged,0,0,fac_kingdom_4,
[itm_sword_viking_2,itm_one_handed_battle_axe_b,itm_two_handed_axe,itm_tab_shield_round_d,itm_throwing_axes,
itm_nordic_helmet,itm_nordic_fighter_helmet,itm_mail_hauberk,itm_splinted_leather_greaves,itm_leather_boots,itm_leather_gloves],
def_attrib|level(19),wp(130),knows_ironflesh_3|knows_power_strike_5|knows_power_throw_3|knows_athletics_5|knows_shield_3,nord_face_young_1, nord_face_older_2],
["nord_scout_multiplayer_ai","Nord Scout","Nord Scouts",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_4,
[itm_javelin,itm_sword_viking_1,itm_two_handed_axe,itm_spear,itm_tab_shield_round_a,
itm_skullcap,itm_nordic_archer_helmet,itm_leather_jerkin,itm_leather_boots,itm_saddle_horse],
def_attrib|level(19),wp(100),knows_riding_5|knows_ironflesh_2|knows_power_strike_2|knows_shield_1|knows_horse_archery_2|knows_power_throw_3,nord_face_young_1, nord_face_older_2],
["nord_archer_multiplayer_ai","Nord Archer","Nord Archers",tf_guarantee_all,0,0,fac_kingdom_4,
[itm_arrows,itm_two_handed_axe,itm_sword_viking_2,itm_short_bow,
itm_leather_jerkin,itm_blue_tunic,itm_leather_boots,itm_nasal_helmet,itm_leather_cap],
def_attrib|str_11|level(19),wp_melee(80)|wp_archery(110),knows_ironflesh_4|knows_power_strike_2|knows_shield_1|knows_power_draw_5|knows_athletics_6,nord_face_young_1, nord_face_old_2],
["rhodok_veteran_crossbowman_multiplayer_ai","Rhodok Crossbowman","Rhodok Crossbowmen",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_fighting_pick,itm_club_with_spike_head,itm_maul,itm_tab_shield_pavise_c,itm_heavy_crossbow,itm_bolts,
itm_leather_cap,itm_padded_leather,itm_nomad_boots],
def_attrib|level(19),wp_melee(100)|wp_crossbow(120),knows_common|knows_ironflesh_4|knows_shield_5|knows_power_strike_3|knows_athletics_6,rhodok_face_middle_1, rhodok_face_older_2],
["rhodok_veteran_spearman_multiplayer_ai","Rhodok Spearman","Rhodok Spearmen",tf_guarantee_all_wo_ranged,0,0,fac_kingdom_5,
[itm_ashwood_pike,itm_war_spear,itm_pike,itm_club_with_spike_head,itm_sledgehammer,itm_tab_shield_pavise_c,itm_sword_medieval_a,
itm_leather_cap,itm_byrnie,itm_ragged_outfit,itm_nomad_boots],
def_attrib|level(19),wp(115),knows_common|knows_ironflesh_5|knows_shield_3|knows_power_strike_4|knows_athletics_3,rhodok_face_young_1, rhodok_face_older_2],
["rhodok_scout_multiplayer_ai","Rhodok Scout","Rhodok Scouts",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_5,
# TODO: replace this equipment set — it is a placeholder copied from the Nord Scout entry.
[itm_sword_medieval_a,itm_tab_shield_heater_cav_a,itm_light_lance,itm_skullcap,itm_aketon_green,
itm_ragged_outfit,itm_nomad_boots,itm_ankle_boots,itm_saddle_horse],
def_attrib|level(19),wp(100),knows_riding_5|knows_ironflesh_2|knows_power_strike_2|knows_shield_1|knows_horse_archery_2|knows_power_throw_3,rhodok_face_young_1, rhodok_face_older_2],
["sarranid_infantry_multiplayer_ai","Sarranid Infantry","Sarranid Infantries",tf_guarantee_shield|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet,0,0,fac_kingdom_6,
[itm_sarranid_mail_shirt,itm_sarranid_horseman_helmet,itm_sarranid_boots_b,itm_sarranid_boots_c,itm_splinted_leather_greaves,itm_arabian_sword_b,itm_mace_3,itm_spear,itm_tab_shield_kite_c],
def_attrib|level(20),wp_melee(105),knows_common|knows_riding_3|knows_ironflesh_2|knows_shield_3,swadian_face_middle_1, swadian_face_old_2],
["sarranid_archer_multiplayer_ai","Sarranid Archer","Sarranid Archers",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_6,
[itm_arrows,itm_nomad_bow,itm_arabian_sword_a,itm_archers_vest,itm_sarranid_boots_b,itm_sarranid_helmet1,itm_turban,itm_desert_turban],
def_attrib|level(19),wp_melee(90)|wp_archery(100),knows_common|knows_riding_2|knows_ironflesh_1,swadian_face_young_1, swadian_face_old_2],
["sarranid_horseman_multiplayer_ai","Sarranid Horseman","Sarranid Horsemen",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield,0,0,fac_kingdom_6,
[itm_lance,itm_arabian_sword_b,itm_scimitar_b,itm_mace_4,itm_tab_shield_small_round_b,
itm_sarranid_mail_shirt,itm_sarranid_boots_b,itm_sarranid_boots_c,itm_sarranid_horseman_helmet,itm_courser,itm_hunter],
def_attrib|level(20),wp_melee(100),knows_common|knows_riding_4|knows_ironflesh_2|knows_shield_2|knows_power_strike_3,swadian_face_young_1, swadian_face_old_2],
# Multiplayer (player-selectable) troops — each must carry only its base items, nothing else.
["swadian_crossbowman_multiplayer","Swadian Crossbowman","Swadian Crossbowmen",tf_guarantee_all,0,0,fac_kingdom_1,
[itm_bolts,itm_crossbow,itm_sword_medieval_b_small,itm_tab_shield_heater_a,itm_red_shirt,itm_ankle_boots],
str_14 | agi_15 |def_attrib_multiplayer|level(19),wpe(90,60,180,90),knows_common_multiplayer|knows_ironflesh_2|knows_athletics_4|knows_shield_5|knows_power_strike_2|knows_riding_1,swadian_face_young_1, swadian_face_old_2],
["swadian_infantry_multiplayer","Swadian Infantry","Swadian Infantry",tf_guarantee_all,0,0,fac_kingdom_1,
[itm_sword_medieval_a,itm_tab_shield_heater_a,itm_red_tunic,itm_ankle_boots],
str_15 | agi_15 |def_attrib_multiplayer|level(20),wpex(105,130,110,40,60,110),knows_common_multiplayer|knows_ironflesh_5|knows_shield_4|knows_power_strike_4|knows_power_throw_2|knows_athletics_6|knows_riding_1,swadian_face_middle_1, swadian_face_old_2],
["swadian_man_at_arms_multiplayer","Swadian Man at Arms","Swadian Men at Arms",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_1,
[itm_lance,itm_sword_medieval_a,itm_tab_shield_heater_a,
itm_red_tunic,itm_ankle_boots,itm_saddle_horse],
str_14 | agi_16 |def_attrib_multiplayer|level(20),wp_melee(110),knows_common_multiplayer|knows_riding_5|knows_ironflesh_3|knows_shield_2|knows_power_throw_2|knows_power_strike_3|knows_athletics_3,swadian_face_young_1, swadian_face_old_2],
# ["swadian_mounted_crossbowman_multiplayer","Swadian Mounted Crossbowman","Swadian Mounted Crossbowmen",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_1,
# [itm_bolts,itm_light_crossbow,itm_tab_shield_heater_cav_a,itm_bastard_sword_a,
# itm_red_shirt,itm_hide_boots,itm_saddle_horse],
# def_attrib_multiplayer|level(20),wp_melee(100)|wp_crossbow(120),knows_common_multiplayer|knows_riding_4|knows_shield_3|knows_ironflesh_3|knows_horse_archery_2|knows_power_strike_3|knows_athletics_2|knows_shield_2,swadian_face_young_1, swadian_face_old_2],
["vaegir_archer_multiplayer","Vaegir Archer","Vaegir Archers",tf_guarantee_all,0,0,fac_kingdom_2,
[itm_arrows,itm_mace_1,itm_nomad_bow,
itm_linen_tunic,itm_hide_boots],
str_14 | agi_14 |def_attrib_multiplayer|str_12|level(19),wpe(80,150,60,80),knows_common_multiplayer|knows_ironflesh_2|knows_power_draw_7|knows_athletics_3|knows_shield_2|knows_riding_1,vaegir_face_young_1, vaegir_face_older_2],
["vaegir_spearman_multiplayer","Vaegir Spearman","Vaegir spearman",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_ranged|tf_guarantee_shield,0,0,fac_kingdom_2,
[itm_spear, itm_tab_shield_kite_a, itm_mace_1,
itm_linen_tunic,itm_hide_boots],
str_15 | agi_15 |def_attrib_multiplayer|str_12|level(19),wpex(110,100,130,30,50,120),knows_common_multiplayer|knows_ironflesh_4|knows_shield_2|knows_power_throw_3|knows_power_strike_4|knows_athletics_6|knows_riding_1,vaegir_face_young_1, vaegir_face_older_2],
["vaegir_horseman_multiplayer","Vaegir Horseman","Vaegir Horsemen",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_2,
[itm_scimitar,itm_lance,itm_tab_shield_kite_cav_a,
itm_linen_tunic,itm_hide_boots,itm_saddle_horse],
str_16 | agi_15 |def_attrib_multiplayer|level(19),wpe(110,90,60,110),knows_common_multiplayer|knows_riding_5|knows_ironflesh_4|knows_power_strike_3|knows_shield_3|knows_power_throw_4|knows_horse_archery_1,vaegir_face_young_1, vaegir_face_older_2],
["khergit_veteran_horse_archer_multiplayer","Khergit Horse Archer","Khergit Horse Archers",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_3,
[itm_sword_khergit_1,itm_nomad_bow,itm_arrows,
itm_khergit_armor,itm_leather_steppe_cap_a,itm_hide_boots,itm_steppe_horse],
str_15 | agi_18 |def_attrib_multiplayer|level(21),wpe(70,142,60,100),knows_common_multiplayer|knows_riding_2|knows_power_draw_5|knows_horse_archery_3|knows_athletics_3|knows_shield_1,khergit_face_middle_1, khergit_face_older_2],
["khergit_infantry_multiplayer","Khergit Infantry","Khergit Infantries",tf_guarantee_all,0,0,fac_kingdom_3,
[itm_sword_khergit_1,itm_spear,itm_tab_shield_small_round_a,
itm_steppe_armor,itm_hide_boots,itm_leather_gloves],
str_14 | agi_15 |def_attrib_multiplayer|level(19),wp(110),knows_common_multiplayer|knows_ironflesh_3|knows_power_throw_3|knows_shield_4|knows_power_strike_3|knows_athletics_6|knows_riding_1,khergit_face_middle_1, khergit_face_older_2],
["khergit_lancer_multiplayer","Khergit Lancer","Khergit Lancers",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_3,
[itm_sword_khergit_1,itm_lance,itm_tab_shield_small_round_a,
itm_khergit_armor,itm_leather_steppe_cap_a,itm_hide_boots,itm_steppe_horse],
str_15 | agi_14 |def_attrib_multiplayer|level(21),wp(115),knows_common_multiplayer|knows_riding_6|knows_ironflesh_3|knows_power_throw_3|knows_shield_4|knows_power_strike_3|knows_athletics_4,khergit_face_middle_1, khergit_face_older_2],
["nord_archer_multiplayer","Nord Archer","Nord Archers",tf_guarantee_all,0,0,fac_kingdom_4,
[itm_arrows,itm_sword_viking_2_small,itm_short_bow,
itm_blue_tunic,itm_leather_boots],
str_15 | agi_14 |def_attrib_multiplayer|str_11|level(15),wpe(90,150,60,80),knows_common_multiplayer|knows_ironflesh_2|knows_power_strike_2|knows_shield_3|knows_power_draw_5|knows_athletics_3|knows_riding_1,nord_face_young_1, nord_face_old_2],
["nord_veteran_multiplayer","Nord Huscarl","Nord Huscarls",tf_guarantee_all,0,0,fac_kingdom_4,
[itm_sword_viking_1,itm_one_handed_war_axe_a,itm_tab_shield_round_a,
itm_blue_tunic,itm_leather_boots],
str_17 | agi_15 |def_attrib_multiplayer|level(24),wpex(110,135,100,40,60,140),knows_common_multiplayer|knows_ironflesh_4|knows_power_strike_5|knows_power_throw_4|knows_athletics_6|knows_shield_3|knows_riding_1,nord_face_young_1, nord_face_older_2],
["nord_scout_multiplayer","Nord Scout","Nord Scouts",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_4,
[itm_javelin,itm_sword_viking_1,itm_spear,itm_tab_shield_small_round_a,
itm_blue_tunic,itm_leather_boots,itm_saddle_horse],
str_16 | agi_15 |def_attrib_multiplayer|level(19),wp(105),knows_common_multiplayer|knows_riding_6|knows_ironflesh_2|knows_power_strike_2|knows_shield_1|knows_horse_archery_3|knows_power_throw_3|knows_athletics_3,vaegir_face_young_1, vaegir_face_older_2],
["rhodok_veteran_crossbowman_multiplayer","Rhodok Crossbowman","Rhodok Crossbowmen",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_crossbow,itm_bolts,itm_fighting_pick,itm_tab_shield_pavise_a,
itm_tunic_with_green_cape,itm_ankle_boots],
str_16 | agi_15 |def_attrib_multiplayer|level(20),wpe(100,60,180,90),knows_common_multiplayer|knows_ironflesh_2|knows_shield_2|knows_power_strike_2|knows_athletics_4|knows_riding_1,rhodok_face_middle_1, rhodok_face_older_2],
["rhodok_sergeant_multiplayer","Rhodok Sergeant","Rhodok Sergeants",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_fighting_pick,itm_tab_shield_pavise_a,itm_spear,
itm_green_tunic,itm_ankle_boots],
str_16 | agi_14 |def_attrib_multiplayer|level(20),wpex(110,100,140,30,50,110),knows_common_multiplayer|knows_ironflesh_4|knows_shield_5|knows_power_strike_4|knows_power_throw_1|knows_athletics_6|knows_riding_1,rhodok_face_middle_1, rhodok_face_older_2],
["rhodok_horseman_multiplayer","Rhodok Horseman","Rhodok Horsemen",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_sword_medieval_a,itm_tab_shield_heater_cav_a, itm_light_lance,
itm_green_tunic,itm_ankle_boots,itm_saddle_horse],
str_15 | agi_15 |def_attrib_multiplayer|level(20),wp(100),knows_common_multiplayer|knows_riding_4|knows_ironflesh_3|knows_shield_3|knows_power_strike_3|knows_power_throw_1|knows_athletics_3,rhodok_face_middle_1, rhodok_face_older_2],
["sarranid_archer_multiplayer","Sarranid Archer","Sarranid Archers",tf_guarantee_all,0,0,fac_kingdom_6,
[itm_arrows,itm_arabian_sword_a,itm_nomad_bow,
itm_sarranid_cloth_robe, itm_sarranid_boots_b],
str_15 | agi_16 |def_attrib_multiplayer|str_12|level(19),wpe(80,150,60,80),knows_common_multiplayer|knows_ironflesh_4|knows_power_draw_5|knows_athletics_3|knows_shield_2|knows_riding_1|knows_weapon_master_1,vaegir_face_young_1, vaegir_face_older_2],
["sarranid_footman_multiplayer","Sarranid Footman","Sarranid footman",tf_guarantee_all,0,0,fac_kingdom_6,
[itm_bamboo_spear, itm_tab_shield_kite_a, itm_arabian_sword_a,
itm_sarranid_cloth_robe, itm_sarranid_boots_b],
str_14 | agi_15 |def_attrib_multiplayer|str_12|level(19),wpex(110,100,130,30,50,120),knows_common_multiplayer|knows_ironflesh_4|knows_shield_2|knows_power_throw_3|knows_power_strike_4|knows_athletics_6|knows_riding_1,vaegir_face_young_1, vaegir_face_older_2],
["sarranid_mamluke_multiplayer","Sarranid Mamluke","Sarranid Mamluke",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_6,
[itm_arabian_sword_a,itm_lance,itm_tab_shield_small_round_a,
itm_sarranid_cloth_robe, itm_sarranid_boots_b,itm_saddle_horse],
str_15 | agi_14 |def_attrib_multiplayer|level(19),wpe(110,90,60,110),knows_common_multiplayer|knows_riding_5|knows_ironflesh_3|knows_power_strike_2|knows_shield_3|knows_power_throw_2|knows_weapon_master_1,vaegir_face_young_1, vaegir_face_older_2],
["multiplayer_end","{!}multiplayer_end","{!}multiplayer_end", 0, 0, 0, fac_kingdom_5, [], 0, 0, 0, 0, 0],
# Player history array: the log_array_* entries below are identical placeholder
# troops (dummy "Local Merchant" stats); presumably only their slot positions are
# used as field markers for game-log records — verify against the log scripts.
["log_array_entry_type", "{!}Local Merchant","{!}Local Merchant",tf_guarantee_boots|tf_guarantee_armor, 0,0, fac_commoners,[itm_leather_apron,itm_leather_boots,itm_butchering_knife],def_attrib|level(5),wp(40),knows_power_strike_1, merchant_face_1, merchant_face_2],
["log_array_entry_time", "{!}Local Merchant","{!}Local Merchant",tf_guarantee_boots|tf_guarantee_armor, 0,0, fac_commoners,[itm_leather_apron,itm_leather_boots,itm_butchering_knife],def_attrib|level(5),wp(40),knows_power_strike_1, merchant_face_1, merchant_face_2],
["log_array_actor", "{!}Local Merchant","{!}Local Merchant",tf_guarantee_boots|tf_guarantee_armor, 0,0, fac_commoners,[itm_leather_apron,itm_leather_boots,itm_butchering_knife],def_attrib|level(5),wp(40),knows_power_strike_1, merchant_face_1, merchant_face_2],
["log_array_center_object", "{!}Local Merchant","{!}Local Merchant",tf_guarantee_boots|tf_guarantee_armor, 0,0, fac_commoners,[itm_leather_apron,itm_leather_boots,itm_butchering_knife],def_attrib|level(5),wp(40),knows_power_strike_1, merchant_face_1, merchant_face_2],
["log_array_center_object_lord", "{!}Local Merchant","{!}Local Merchant",tf_guarantee_boots|tf_guarantee_armor, 0,0, fac_commoners,[itm_leather_apron,itm_leather_boots,itm_butchering_knife],def_attrib|level(5),wp(40),knows_power_strike_1, merchant_face_1, merchant_face_2],
["log_array_center_object_faction", "{!}Local Merchant","{!}Local Merchant",tf_guarantee_boots|tf_guarantee_armor, 0,0, fac_commoners,[itm_leather_apron,itm_leather_boots,itm_butchering_knife],def_attrib|level(5),wp(40),knows_power_strike_1, merchant_face_1, merchant_face_2],
["log_array_troop_object", "{!}Local Merchant","{!}Local Merchant",tf_guarantee_boots|tf_guarantee_armor, 0,0, fac_commoners,[itm_leather_apron,itm_leather_boots,itm_butchering_knife],def_attrib|level(5),wp(40),knows_power_strike_1, merchant_face_1, merchant_face_2],
["log_array_troop_object_faction", "{!}Local Merchant","{!}Local Merchant",tf_guarantee_boots|tf_guarantee_armor, 0,0, fac_commoners,[itm_leather_apron,itm_leather_boots,itm_butchering_knife],def_attrib|level(5),wp(40),knows_power_strike_1, merchant_face_1, merchant_face_2],
["log_array_faction_object", "{!}Local Merchant","{!}Local Merchant",tf_guarantee_boots|tf_guarantee_armor, 0,0, fac_commoners,[itm_leather_apron,itm_leather_boots,itm_butchering_knife],def_attrib|level(5),wp(40),knows_power_strike_1, merchant_face_1, merchant_face_2],
["quick_battle_troop_1","Rodrigo de Braganca","Rodrigo de Braganca", tf_hero,0,0,fac_kingdom_1,
[itm_long_hafted_knobbed_mace, itm_wooden_shield, itm_iron_staff, itm_throwing_daggers,
itm_felt_hat, itm_fur_coat, itm_light_leather_boots, itm_leather_gloves],
str_9|agi_15|int_12|cha_12|level(15),wpex(109,33,132,15,32,100),knows_riding_3|knows_athletics_5|knows_shield_3|knows_weapon_master_3|knows_power_throw_3|knows_power_strike_2|knows_ironflesh_3,0x0000000e240070cd598bb02b9556428c00000000001eabce0000000000000000, swadian_face_old_2],
["quick_battle_troop_2","Usiatra","Usiatra", tf_hero|tf_female,0,0,fac_kingdom_1,
[itm_nomad_bow, itm_barbed_arrows, itm_scimitar, itm_tab_shield_small_round_c, itm_sumpter_horse,
itm_leather_armor, itm_splinted_greaves],
str_12|agi_14|int_11|cha_18|level(22),wpex(182,113,112,159,82,115),knows_horse_archery_2|knows_riding_3|knows_athletics_4|knows_shield_2|knows_weapon_master_4|knows_power_draw_2|knows_power_throw_1|knows_power_strike_3|knows_ironflesh_4,0x000000007f004000719b69422165b71300000000001d5d1d0000000000000000, swadian_face_old_2],
["quick_battle_troop_3","Hegen","Hegen", tf_hero,0,0,fac_kingdom_1,
[itm_heavy_lance, itm_sword_two_handed_b, itm_sword_medieval_c, itm_tab_shield_heater_c, itm_warhorse,
itm_guard_helmet, itm_coat_of_plates, itm_mail_mittens, itm_mail_boots],
str_18|agi_16|int_12|cha_11|level(24),wpex(90,152,102,31,33,34),knows_riding_5|knows_athletics_5|knows_shield_3|knows_weapon_master_5|knows_power_strike_6|knows_ironflesh_6,0x000000018000324428db8a431491472400000000001e44a90000000000000000, swadian_face_old_2],
["quick_battle_troop_4","Konrad","Konrad", tf_hero,0,0,fac_kingdom_1,
[itm_sword_two_handed_a, itm_mace_4, itm_tab_shield_kite_d,
itm_bascinet_3, itm_scale_armor, itm_mail_mittens, itm_mail_boots],
str_18|agi_15|int_12|cha_12|level(24),wpex(130,150,130,30,50,90),knows_riding_2|knows_athletics_5|knows_shield_4|knows_weapon_master_5|knows_power_throw_3|knows_power_strike_6|knows_ironflesh_6,0x000000081700205434db6df4636db8e400000000001db6e30000000000000000, swadian_face_old_2],
["quick_battle_troop_5","Sverre","Sverre", tf_hero,0,0,fac_kingdom_1,
[itm_long_axe, itm_sword_viking_1, itm_light_throwing_axes, itm_tab_shield_round_d,
itm_nordic_fighter_helmet, itm_byrnie, itm_leather_gloves, itm_leather_boots],
str_15|agi_15|int_12|cha_12|level(21),wpex(110,130,110,80,15,110),knows_riding_1|knows_athletics_5|knows_shield_4|knows_weapon_master_5|knows_power_draw_2|knows_power_throw_4|knows_power_strike_5|knows_ironflesh_5,0x000000048a00024723134e24cb51c91b00000000001dc6aa0000000000000000, swadian_face_old_2],
["quick_battle_troop_6","Borislav","Borislav", tf_hero,0,0,fac_kingdom_1,
[itm_strong_bow, itm_barbed_arrows, itm_barbed_arrows, itm_shortened_spear,
itm_leather_warrior_cap, itm_leather_jerkin, itm_leather_gloves, itm_ankle_boots],
str_12|agi_15|int_15|cha_9|level(18),wpex(70,70,100,140,15,100),knows_horse_archery_2|knows_riding_2|knows_athletics_5|knows_weapon_master_3|knows_power_draw_4|knows_power_throw_3|knows_power_strike_2|knows_ironflesh_2,0x000000089e00444415136e36e34dc8e400000000001d46d90000000000000000, swadian_face_old_2],
["quick_battle_troop_7","Stavros","Stavros", tf_hero,0,0,fac_kingdom_1,
[itm_heavy_crossbow, itm_bolts, itm_sword_medieval_b_small, itm_tab_shield_pavise_c,
itm_nasal_helmet, itm_padded_leather, itm_leather_gloves, itm_leather_boots],
str_12|agi_15|int_15|cha_12|level(21),wpex(100,70,70,30,140,80),knows_horse_archery_2|knows_riding_2|knows_athletics_5|knows_shield_3|knows_weapon_master_5|knows_power_throw_2|knows_power_strike_4|knows_ironflesh_4,0x0000000e1400659226e34dcaa46e36db00000000001e391b0000000000000000, swadian_face_old_2],
["quick_battle_troop_8","Gamara","Gamara", tf_hero|tf_female,0,0,fac_kingdom_1,
[itm_throwing_spears, itm_throwing_spears, itm_scimitar, itm_leather_covered_round_shield,
itm_desert_turban, itm_skirmisher_armor, itm_leather_gloves, itm_sarranid_boots_b],
str_12|agi_15|int_12|cha_12|level(18),wpex(100,40,100,85,15,130),knows_horse_archery_2|knows_riding_2|knows_athletics_5|knows_shield_2|knows_weapon_master_4|knows_power_draw_2|knows_power_throw_4|knows_power_strike_2|knows_ironflesh_2,0x000000015400300118d36636db6dc8e400000000001db6db0000000000000000, swadian_face_old_2],
["quick_battle_troop_9","Aethrod","Aethrod", tf_hero,0,0,fac_kingdom_1,
[itm_nomad_bow, itm_barbed_arrows, itm_barbed_arrows, itm_scimitar_b,
itm_splinted_greaves, itm_lamellar_vest],
str_16|agi_21|int_12|cha_14|level(26),wpex(182,113,112,159,82,115),knows_horse_archery_2|knows_riding_2|knows_athletics_7|knows_shield_2|knows_weapon_master_4|knows_power_draw_7|knows_power_throw_3|knows_power_strike_3|knows_ironflesh_4,0x000000000000210536db6db6db6db6db00000000001db6db0000000000000000, swadian_face_old_2],
["quick_battle_troop_10","Zaira","Zaira", tf_hero|tf_female,0,0,fac_kingdom_1,
[itm_sarranid_cavalry_sword, itm_strong_bow, itm_bodkin_arrows, itm_bodkin_arrows, itm_arabian_horse_b,
itm_sarranid_felt_head_cloth_b, itm_sarranid_common_dress, itm_sarranid_boots_b],
str_13|agi_18|int_15|cha_9|level(18),wpex(126,19,23,149,41,26),knows_horse_archery_6|knows_riding_6|knows_weapon_master_2|knows_power_draw_4|knows_power_throw_1|knows_power_strike_4|knows_ironflesh_1,0x0000000502003001471a6a24dc6594cb00000000001da4840000000000000000, swadian_face_old_2],
["quick_battle_troop_11","Argo Sendnar","Argo Sendnar", tf_hero,0,0,fac_kingdom_1,
[itm_morningstar, itm_tab_shield_round_d, itm_war_spear, itm_courser,
itm_leather_gloves, itm_fur_hat, itm_leather_boots, itm_leather_jacket],
str_15|agi_12|int_14|cha_20|level(28),wpex(101,35,136,15,17,19),knows_riding_4|knows_athletics_2|knows_shield_4|knows_weapon_master_4|knows_power_strike_5|knows_ironflesh_5,0x0000000e800015125adb702de3459a9c00000000001ea6d00000000000000000, swadian_face_old_2],
["quick_battle_troops_end","{!}quick_battle_troops_end","{!}quick_battle_troops_end", 0, 0, 0, fac_kingdom_5, [], 0, 0, 0, 0, 0],
["tutorial_fighter_1","Novice Fighter","Fighters",tf_hero,0,0,fac_kingdom_2,
[itm_linen_tunic,itm_nomad_boots],
def_attrib|level(1),wp_melee(10),knows_athletics_1|knows_ironflesh_2|knows_shield_2,0x000000088c1073144252b1929a85569300000000000496a50000000000000000, vaegir_face_older_2],
["tutorial_fighter_2","Novice Fighter","Fighters",tf_hero,0,0,fac_kingdom_2,
[itm_green_tunic,itm_nomad_boots],
def_attrib|level(1),wp_melee(10),knows_athletics_1|knows_ironflesh_2|knows_shield_2,0x000000088b08049056ab56566135c46500000000001dda1b0000000000000000, vaegir_face_older_2],
["tutorial_fighter_3","Regular Fighter","Fighters",tf_hero,0,0,fac_kingdom_2,
[itm_green_tunic,itm_nomad_boots],
def_attrib|level(9),wp_melee(50),knows_athletics_1|knows_ironflesh_2|knows_shield_2,0x00000008bc00400654914a3b0d0de74d00000000001d584e0000000000000000, vaegir_face_older_2],
["tutorial_fighter_4","Veteran Fighter","Fighters",tf_hero,0,0,fac_kingdom_2,
[itm_linen_tunic,itm_nomad_boots],
def_attrib|level(16),wp_melee(110),knows_athletics_1|knows_ironflesh_3|knows_power_strike_2|knows_shield_2,0x000000089910324a495175324949671800000000001cd8ab0000000000000000, vaegir_face_older_2],
["tutorial_archer_1","Archer","Archers",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor,0,0,fac_kingdom_2,
[itm_leather_jerkin,itm_leather_vest,itm_nomad_boots,itm_vaegir_spiked_helmet,itm_vaegir_fur_helmet,itm_vaegir_fur_cap,itm_nomad_cap],
def_attrib|str_12|level(19),wp_melee(70)|wp_archery(110),knows_ironflesh_1|knows_power_draw_2|knows_athletics_2|knows_power_throw_1,vaegir_face_young_1, vaegir_face_older_2],
["tutorial_master_archer","Archery Trainer","Archery Trainer",tf_hero,0,0,fac_kingdom_2,
[itm_linen_tunic,itm_nomad_boots],
def_attrib|str_12|level(19),wp_melee(70)|wp_archery(110),knows_ironflesh_1|knows_power_draw_2|knows_athletics_2|knows_power_throw_1,0x0000000ea508540642f34d461d2d54a300000000001d5d9a0000000000000000, vaegir_face_older_2],
["tutorial_rider_1","Rider","{!}Vaegir Knights",tf_mounted|tf_guarantee_boots|tf_guarantee_gloves|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_horse|tf_guarantee_shield,0,0,fac_kingdom_2,
[itm_green_tunic,itm_hunter, itm_saddle_horse,itm_leather_gloves],
def_attrib|level(24),wp(130),knows_riding_4|knows_shield_2|knows_ironflesh_3|knows_power_strike_2,vaegir_face_middle_1, vaegir_face_older_2],
["tutorial_rider_2","Horse archer","{!}Khergit Horse Archers",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_ranged|tf_guarantee_horse,0,0,fac_kingdom_3,
[itm_tribal_warrior_outfit,itm_nomad_robe,itm_hide_boots,itm_tab_shield_small_round_a,itm_steppe_horse],
def_attrib|level(14),wp(80)|wp_archery(110),knows_riding_5|knows_power_draw_3|knows_ironflesh_1|knows_horse_archery_4|knows_power_throw_1,khergit_face_young_1, khergit_face_older_2],
["tutorial_master_horseman","Riding Trainer","Riding Trainer",tf_hero,0,0,fac_kingdom_2,
[itm_leather_vest,itm_nomad_boots],
def_attrib|str_12|level(19),wp_melee(70)|wp_archery(110),knows_ironflesh_1|knows_power_draw_2|knows_athletics_2|knows_power_throw_1,0x0000000ea0084140478a692894ba185500000000001d4af30000000000000000, vaegir_face_older_2],
["swadian_merchant", "Merchant of Praven", "{!}Prominent", tf_hero|tf_randomize_face, 0, reserved, fac_kingdom_4, [itm_sword_two_handed_a, itm_courtly_outfit, itm_leather_boots], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["vaegir_merchant", "Merchant of Reyvadin", "{!}Prominent", tf_hero|tf_randomize_face, 0, reserved, fac_kingdom_5, [itm_sword_two_handed_a, itm_nobleman_outfit, itm_woolen_hose], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["khergit_merchant", "Merchant of Tulga", "{!}Prominent", tf_hero|tf_randomize_face, 0, reserved, fac_kingdom_1, [itm_sword_two_handed_a, itm_red_gambeson, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["nord_merchant", "Merchant of Sargoth", "{!}Prominent", tf_hero|tf_randomize_face, 0, reserved, fac_kingdom_2, [itm_sword_two_handed_a, itm_red_gambeson, itm_nomad_boots], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["rhodok_merchant", "Merchant of Jelkala", "{!}Prominent", tf_hero|tf_randomize_face, 0, reserved, fac_kingdom_3, [itm_sword_two_handed_a, itm_leather_jerkin, itm_blue_hose], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["sarranid_merchant", "Merchant of Shariz", "{!}Prominent", tf_hero|tf_randomize_face, 0, reserved, fac_kingdom_6, [itm_sword_two_handed_a, itm_sarranid_cloth_robe, itm_sarranid_boots_a], def_attrib|level(2),wp(20),knows_common, man_face_middle_1, mercenary_face_2],
["startup_merchants_end","startup_merchants_end","startup_merchants_end",tf_hero, 0,0, fac_commoners,[],def_attrib|level(2),wp(20),knows_inventory_management_10,0],
["sea_raider_leader","Sea Raider Captain","Sea Raider Captains",tf_hero|tf_guarantee_all_wo_ranged,0,0,fac_outlaws,
[itm_arrows,itm_sword_viking_1,itm_sword_viking_2,itm_fighting_axe,itm_battle_axe,itm_spear,itm_nordic_shield,itm_nordic_shield,itm_nordic_shield,itm_wooden_shield,itm_long_bow,itm_javelin,itm_throwing_axes,
itm_nordic_helmet,itm_nordic_helmet,itm_nasal_helmet,itm_mail_shirt,itm_byrnie,itm_mail_hauberk,itm_leather_boots, itm_nomad_boots],
def_attrib|level(24),wp(110),knows_ironflesh_2|knows_power_strike_2|knows_power_draw_3|knows_power_throw_2|knows_riding_1|knows_athletics_2,nord_face_young_1, nord_face_old_2],
["looter_leader","Robber","Looters",tf_hero,0,0,fac_outlaws,
[itm_hatchet,itm_club,itm_butchering_knife,itm_falchion,itm_rawhide_coat,itm_stones,itm_nomad_armor,itm_nomad_armor,itm_woolen_cap,itm_woolen_cap,itm_nomad_boots,itm_wrapping_boots],
def_attrib|level(4),wp(20),knows_common,0x00000001b80032473ac49738206626b200000000001da7660000000000000000, bandit_face2],
["bandit_leaders_end","bandit_leaders_end","bandit_leaders_end",tf_hero, 0,0, fac_commoners,[],def_attrib|level(2),wp(20),knows_inventory_management_10,0],
["relative_of_merchant", "Merchant's Brother", "{!}Prominent",tf_hero,0,0,fac_kingdom_2,
[itm_linen_tunic,itm_nomad_boots],
def_attrib|level(1),wp_melee(10),knows_athletics_1|knows_ironflesh_2|knows_shield_2, 0x00000000320410022d2595495491afa400000000001d9ae30000000000000000, mercenary_face_2],
["relative_of_merchants_end","relative_of_merchants_end","relative_of_merchants_end",tf_hero, 0,0, fac_commoners,[],def_attrib|level(2),wp(20),knows_inventory_management_10,0],
["swadian_crossbowman_multiplayer_coop_tier_1","Swadian Crossbowman","Swadian Crossbowmen",tf_guarantee_all,0,0,fac_kingdom_1,
[itm_hunting_crossbow,itm_bolts,itm_fighting_pick,itm_tab_shield_heater_a,itm_arming_cap,itm_padded_cloth,itm_ankle_boots],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["swadian_infantry_multiplayer_coop_tier_1","Swadian Infantry","Swadian Infantry",tf_guarantee_all,0,0,fac_kingdom_1,
[itm_spiked_club,itm_tab_shield_heater_b,itm_felt_hat,itm_leather_apron,itm_wrapping_boots],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["swadian_man_at_arms_multiplayer_coop_tier_1","Swadian Man at Arms","Swadian Men at Arms",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_1,
[itm_light_lance,itm_sword_medieval_b_small,itm_tab_shield_heater_a,itm_leather_cap,itm_leather_gloves,itm_padded_cloth,itm_wrapping_boots,itm_warhorse],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["vaegir_archer_multiplayer_coop_tier_1","Vaegir Archer","Vaegir Archers",tf_guarantee_all,0,0,fac_kingdom_2,
[itm_arrows,itm_axe,itm_hunting_bow,itm_linen_tunic,itm_nomad_boots],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["vaegir_spearman_multiplayer_coop_tier_1","Vaegir Spearman","Vaegir spearman",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_ranged|tf_guarantee_shield,0,0,fac_kingdom_2,
[itm_tab_shield_kite_a, itm_axe,itm_rawhide_coat,itm_hide_boots],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["vaegir_horseman_multiplayer_coop_tier_1","Vaegir Horseman","Vaegir Horsemen",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_2,
[itm_spear,itm_tab_shield_kite_cav_a,itm_linen_tunic,itm_hide_boots,itm_hunter],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["khergit_veteran_horse_archer_multiplayer_coop_tier_1","Khergit Horse Archer","Khergit Horse Archers",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_3,
[itm_sword_khergit_1,itm_nomad_bow,itm_arrows,itm_steppe_armor,itm_hide_boots,itm_steppe_horse],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["khergit_infantry_multiplayer_coop_tier_1","Khergit Infantry","Khergit Infantries",tf_guarantee_all,0,0,fac_kingdom_3,
[itm_sword_khergit_1,itm_tab_shield_small_round_a,itm_steppe_armor,itm_hide_boots,itm_leather_gloves],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["khergit_lancer_multiplayer_coop_tier_1","Khergit Lancer","Khergit Lancers",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_3,
[itm_spear,itm_tab_shield_small_round_a,itm_steppe_armor,itm_steppe_cap,itm_hide_boots,itm_leather_gloves,itm_courser],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["nord_archer_multiplayer_coop_tier_1","Nord Archer","Nord Archers",tf_guarantee_all,0,0,fac_kingdom_4,
[itm_arrows,itm_sword_viking_2_small,itm_short_bow,itm_blue_tunic,itm_leather_boots],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["nord_veteran_multiplayer_coop_tier_1","Nord Huscarl","Nord Huscarls",tf_guarantee_all,0,0,fac_kingdom_4,
[itm_sword_viking_1,itm_one_handed_war_axe_a,itm_tab_shield_round_a,itm_blue_tunic,itm_leather_boots],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["nord_scout_multiplayer_coop_tier_1","Nord Scout","Nord Scouts",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_4,
[itm_javelin,itm_war_spear,itm_tab_shield_small_round_a,itm_blue_tunic,itm_leather_boots,itm_saddle_horse],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["rhodok_veteran_crossbowman_multiplayer_coop_tier_1","Rhodok Crossbowman","Rhodok Crossbowmen",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_crossbow,itm_bolts,itm_fighting_pick,itm_tab_shield_pavise_a,itm_tunic_with_green_cape,itm_ankle_boots],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["rhodok_sergeant_multiplayer_coop_tier_1","Rhodok Sergeant","Rhodok Sergeants",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_military_cleaver_b,itm_tab_shield_pavise_a,itm_darts,itm_green_tunic,itm_ankle_boots,itm_leather_cap],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["rhodok_horseman_multiplayer_coop_tier_1","Rhodok Horseman","Rhodok Horsemen",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_tab_shield_heater_cav_a, itm_light_lance, itm_green_tunic,itm_ankle_boots,itm_padded_coif,itm_saddle_horse],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["sarranid_archer_multiplayer_coop_tier_1","Sarranid Archer","Sarranid Archers",tf_guarantee_all,0,0,fac_kingdom_6,
[itm_arrows,itm_sarranid_mace_1,itm_short_bow,itm_sarranid_cloth_robe, itm_sarranid_boots_b],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["sarranid_footman_multiplayer_coop_tier_1","Sarranid Footman","Sarranid footman",tf_guarantee_all,0,0,fac_kingdom_6,
[itm_tab_shield_kite_a, itm_sarranid_axe_a,itm_sarranid_cloth_robe, itm_sarranid_boots_b],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["sarranid_mamluke_multiplayer_coop_tier_1","Sarranid Mamluke","Sarranid Mamluke",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_6,
[itm_lance,itm_tab_shield_small_round_a,itm_sarranid_cloth_robe, itm_sarranid_boots_b,itm_arabian_horse_a],
level(4)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["swadian_crossbowman_multiplayer_coop_tier_2","Swadian Crossbowman","Swadian Crossbowmen",tf_guarantee_all,0,0,fac_kingdom_1,
[itm_spiked_club,itm_crossbow,itm_bolts,itm_tab_shield_heater_b,itm_arming_cap,itm_red_gambeson,itm_ankle_boots],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["swadian_infantry_multiplayer_coop_tier_2","Swadian Infantry","Swadian Infantry",tf_guarantee_all,0,0,fac_kingdom_1,
[itm_sword_medieval_b,itm_tab_shield_heater_c,itm_spear,itm_mail_coif,itm_leather_gloves,itm_mail_with_tunic_red,itm_ankle_boots],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["swadian_man_at_arms_multiplayer_coop_tier_2","Swadian Man at Arms","Swadian Men at Arms",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_1,
[itm_lance,itm_sword_medieval_a,itm_tab_shield_heater_b,itm_helmet_with_neckguard,itm_leather_gloves,itm_haubergeon,itm_leather_boots,itm_warhorse],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["vaegir_archer_multiplayer_coop_tier_2","Vaegir Archer","Vaegir Archers",tf_guarantee_all,0,0,fac_kingdom_2,
[itm_barbed_arrows,itm_axe,itm_nomad_bow,itm_leather_vest,itm_nomad_boots,itm_vaegir_fur_helmet],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["vaegir_spearman_multiplayer_coop_tier_2","Vaegir Spearman","Vaegir spearman",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_ranged|tf_guarantee_shield,0,0,fac_kingdom_2,
[itm_javelin,itm_scimitar,itm_tab_shield_kite_b,itm_leather_jerkin,itm_nomad_boots,itm_vaegir_lamellar_helmet,itm_leather_gloves],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["vaegir_horseman_multiplayer_coop_tier_2","Vaegir Horseman","Vaegir Horsemen",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_2,
[itm_war_spear,itm_tab_shield_kite_cav_b,itm_javelin,itm_studded_leather_coat,itm_leather_gloves,itm_nomad_boots,itm_vaegir_lamellar_helmet,itm_hunter],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["khergit_veteran_horse_archer_multiplayer_coop_tier_2","Khergit Horse Archer","Khergit Horse Archers",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_3,
[itm_sword_khergit_2,itm_khergit_bow,itm_barbed_arrows,itm_steppe_armor,itm_leather_steppe_cap_a,itm_nomad_boots,itm_steppe_horse],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["khergit_infantry_multiplayer_coop_tier_2","Khergit Infantry","Khergit Infantries",tf_guarantee_all,0,0,fac_kingdom_3,
[itm_sword_khergit_2,itm_tab_shield_small_round_b,itm_javelin,itm_tribal_warrior_outfit,itm_nomad_boots,itm_leather_gloves],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["khergit_lancer_multiplayer_coop_tier_2","Khergit Lancer","Khergit Lancers",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_3,
[itm_war_spear,itm_tab_shield_small_round_b,itm_javelin,itm_tribal_warrior_outfit,itm_leather_steppe_cap_b,itm_nomad_boots,itm_courser],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["nord_archer_multiplayer_coop_tier_2","Nord Archer","Nord Archers",tf_guarantee_all,0,0,fac_kingdom_4,
[itm_arrows,itm_sword_viking_2,itm_long_bow,itm_leather_jerkin,itm_leather_boots,itm_nordic_archer_helmet],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["nord_veteran_multiplayer_coop_tier_2","Nord Huscarl","Nord Huscarls",tf_guarantee_all,0,0,fac_kingdom_4,
[itm_one_handed_war_axe_a,itm_tab_shield_round_b,itm_throwing_axes,itm_leather_jerkin,itm_leather_boots,itm_nordic_footman_helmet,itm_leather_gloves],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["nord_scout_multiplayer_coop_tier_2","Nord Scout","Nord Scouts",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_4,
[itm_javelin,itm_lance,itm_tab_shield_small_round_a,itm_leather_jerkin,itm_leather_boots,itm_leather_gloves,itm_nordic_footman_helmet,itm_saddle_horse],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["rhodok_veteran_crossbowman_multiplayer_coop_tier_2","Rhodok Crossbowman","Rhodok Crossbowmen",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_heavy_crossbow,itm_bolts,itm_club_with_spike_head,itm_tab_shield_pavise_b,itm_leather_armor,itm_leather_boots,itm_leather_gloves,itm_leather_cap],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["rhodok_sergeant_multiplayer_coop_tier_2","Rhodok Sergeant","Rhodok Sergeants",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_military_cleaver_b,itm_tab_shield_pavise_b,itm_war_darts,itm_padded_cloth,itm_leather_boots,itm_leather_gloves,itm_footman_helmet],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["rhodok_horseman_multiplayer_coop_tier_2","Rhodok Horseman","Rhodok Horsemen",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_tab_shield_heater_cav_b, itm_heavy_lance,itm_javelin,itm_padded_cloth,itm_leather_boots,itm_leather_gloves,itm_footman_helmet,itm_saddle_horse],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["sarranid_archer_multiplayer_coop_tier_2","Sarranid Archer","Sarranid Archers",tf_guarantee_all,0,0,fac_kingdom_6,
[itm_barbed_arrows,itm_sarranid_mace_1,itm_nomad_bow,itm_archers_vest,itm_desert_turban,itm_leather_gloves,itm_sarranid_boots_b],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["sarranid_footman_multiplayer_coop_tier_2","Sarranid Footman","Sarranid footman",tf_guarantee_all,0,0,fac_kingdom_6,
[itm_tab_shield_kite_b, itm_sarranid_axe_b,itm_javelin,itm_archers_vest,itm_sarranid_warrior_cap,itm_leather_gloves,itm_sarranid_boots_b],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["sarranid_mamluke_multiplayer_coop_tier_2","Sarranid Mamluke","Sarranid Mamluke",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_6,
[itm_heavy_lance,itm_tab_shield_small_round_b,itm_javelin,itm_archers_vest, itm_sarranid_warrior_cap,itm_leather_gloves,itm_sarranid_boots_b,itm_arabian_horse_a],
level(5)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["swadian_crossbowman_multiplayer_coop_tier_3","Swadian Crossbowman","Swadian Crossbowmen",tf_guarantee_all,0,0,fac_kingdom_1,
[itm_sword_medieval_b,itm_heavy_crossbow,itm_steel_bolts,itm_tab_shield_heater_c,itm_segmented_helmet,itm_leather_jerkin,itm_leather_boots],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["swadian_infantry_multiplayer_coop_tier_3","Swadian Infantry","Swadian Infantry",tf_guarantee_all,0,0,fac_kingdom_1,
[itm_bastard_sword_a,itm_awlpike,itm_tab_shield_heater_c,itm_bascinet,itm_mail_mittens,itm_mail_with_surcoat,itm_mail_chausses],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["swadian_man_at_arms_multiplayer_coop_tier_3","Swadian Man at Arms","Swadian Men at Arms",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_1,
[itm_heavy_lance,itm_bastard_sword_b,itm_tab_shield_heater_cav_a,itm_flat_topped_helmet,itm_mail_mittens,itm_mail_with_surcoat,itm_mail_chausses,itm_warhorse],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["vaegir_archer_multiplayer_coop_tier_3","Vaegir Archer","Vaegir Archers",tf_guarantee_all,0,0,fac_kingdom_2,
[itm_barbed_arrows,itm_scimitar_b,itm_strong_bow,itm_leather_jerkin,itm_splinted_leather_greaves,itm_vaegir_spiked_helmet,itm_leather_gloves],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["vaegir_spearman_multiplayer_coop_tier_3","Vaegir Spearman","Vaegir spearman",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_ranged|tf_guarantee_shield,0,0,fac_kingdom_2,
[itm_scimitar_b, itm_tab_shield_kite_b,itm_javelin,itm_lamellar_armor,itm_splinted_leather_greaves,itm_vaegir_lamellar_helmet,itm_leather_gloves],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["vaegir_horseman_multiplayer_coop_tier_3","Vaegir Horseman","Vaegir Horsemen",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_2,
[itm_heavy_lance,itm_tab_shield_kite_cav_b, itm_javelin,itm_lamellar_armor,itm_splinted_leather_greaves,itm_vaegir_lamellar_helmet,itm_hunter,itm_mail_mittens],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["khergit_veteran_horse_archer_multiplayer_coop_tier_3","Khergit Horse Archer","Khergit Horse Archers",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_3,
[itm_sword_khergit_3,itm_strong_bow,itm_khergit_arrows,itm_tribal_warrior_outfit,itm_leather_steppe_cap_c,itm_khergit_leather_boots,itm_steppe_horse],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["khergit_infantry_multiplayer_coop_tier_3","Khergit Infantry","Khergit Infantries",tf_guarantee_all,0,0,fac_kingdom_3,
[itm_hafted_blade_a,itm_javelin,itm_leather_steppe_cap_c,itm_lamellar_armor,itm_splinted_leather_greaves,itm_mail_mittens],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["khergit_lancer_multiplayer_coop_tier_3","Khergit Lancer","Khergit Lancers",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_3,
[itm_heavy_lance,itm_tab_shield_small_round_a,itm_lamellar_armor,itm_leather_steppe_cap_c,itm_splinted_leather_greaves,itm_courser],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["nord_archer_multiplayer_coop_tier_3","Nord Archer","Nord Archers",tf_guarantee_all,0,0,fac_kingdom_4,
[itm_barbed_arrows,itm_sword_viking_3,itm_long_bow,itm_leather_jerkin,itm_leather_boots,itm_nordic_veteran_archer_helmet,itm_leather_gloves],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["nord_veteran_multiplayer_coop_tier_3","Nord Huscarl","Nord Huscarls",tf_guarantee_all,0,0,fac_kingdom_4,
[itm_one_handed_war_axe_b,itm_tab_shield_round_d,itm_heavy_throwing_axes,itm_mail_shirt,itm_splinted_leather_greaves,itm_nordic_huscarl_helmet],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["nord_scout_multiplayer_coop_tier_3","Nord Scout","Nord Scouts",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_4,
[itm_throwing_spears,itm_heavy_lance,itm_tab_shield_small_round_b,itm_mail_shirt,itm_splinted_leather_greaves,itm_nordic_fighter_helmet,itm_saddle_horse],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["rhodok_veteran_crossbowman_multiplayer_coop_tier_3","Rhodok Crossbowman","Rhodok Crossbowmen",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_sniper_crossbow,itm_steel_bolts,itm_military_cleaver_c,itm_tab_shield_pavise_c,itm_padded_cloth,itm_leather_boots,itm_footman_helmet,itm_leather_gloves],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["rhodok_sergeant_multiplayer_coop_tier_3","Rhodok Sergeant","Rhodok Sergeants",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_tab_shield_pavise_c,itm_military_cleaver_c,itm_javelin,itm_ragged_outfit,itm_splinted_greaves,itm_kettle_hat,itm_mail_mittens],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["rhodok_horseman_multiplayer_coop_tier_3","Rhodok Horseman","Rhodok Horsemen",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_javelin,itm_tab_shield_heater_cav_b, itm_heavy_lance, itm_ragged_outfit,itm_splinted_greaves,itm_bascinet_2,itm_mail_mittens,itm_saddle_horse],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["sarranid_archer_multiplayer_coop_tier_3","Sarranid Archer","Sarranid Archers",tf_guarantee_all,0,0,fac_kingdom_6,
[itm_khergit_arrows,itm_sarranid_mace_1,itm_nomad_bow,itm_archers_vest,itm_sarranid_mail_coif,itm_leather_gloves,itm_sarranid_boots_c],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["sarranid_footman_multiplayer_coop_tier_3","Sarranid Footman","Sarranid footman",tf_guarantee_all,0,0,fac_kingdom_6,
[itm_jarid, itm_tab_shield_kite_c, itm_sarranid_axe_b,itm_sarranid_mail_shirt,itm_sarranid_mail_coif,itm_mail_mittens,itm_sarranid_boots_c],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["sarranid_mamluke_multiplayer_coop_tier_3","Sarranid Mamluke","Sarranid Mamluke",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_6,
[itm_heavy_lance,itm_tab_shield_small_round_b,itm_jarid,itm_sarranid_cavalry_robe,itm_sarranid_horseman_helmet,itm_mail_mittens,itm_sarranid_boots_c,itm_arabian_horse_a],
level(6)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["swadian_crossbowman_multiplayer_coop_tier_4","Swadian Crossbowman","Swadian Crossbowmen",tf_guarantee_all,0,0,fac_kingdom_1,
[itm_sword_medieval_b,itm_sniper_crossbow,itm_steel_bolts,itm_tab_shield_heater_c,itm_helmet_with_neckguard,itm_leather_gloves,itm_haubergeon,itm_mail_chausses],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["swadian_infantry_multiplayer_coop_tier_4","Swadian Infantry","Swadian Infantry",tf_guarantee_all,0,0,fac_kingdom_1,
[itm_bastard_sword_b,itm_awlpike_long,itm_tab_shield_heater_d,itm_guard_helmet,itm_gauntlets,itm_coat_of_plates,itm_iron_greaves],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["swadian_man_at_arms_multiplayer_coop_tier_4","Swadian Man at Arms","Swadian Men at Arms",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_1,
[itm_great_lance,itm_morningstar,itm_tab_shield_heater_cav_b,itm_great_helmet,itm_gauntlets,itm_coat_of_plates_red,itm_plate_boots,itm_warhorse],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["vaegir_archer_multiplayer_coop_tier_4","Vaegir Archer","Vaegir Archers",tf_guarantee_all,0,0,fac_kingdom_2,
[itm_barbed_arrows,itm_bardiche,itm_war_bow,itm_lamellar_vest,itm_splinted_leather_greaves,itm_vaegir_lamellar_helmet,itm_leather_gloves],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["vaegir_spearman_multiplayer_coop_tier_4","Vaegir Spearman","Vaegir spearman",tf_guarantee_ranged|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_helmet|tf_guarantee_ranged|tf_guarantee_shield,0,0,fac_kingdom_2,
[itm_bardiche,itm_javelin,itm_vaegir_elite_armor,itm_splinted_greaves,itm_vaegir_war_helmet,itm_mail_mittens],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["vaegir_horseman_multiplayer_coop_tier_4","Vaegir Horseman","Vaegir Horsemen",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_2,
[itm_heavy_lance,itm_tab_shield_kite_cav_b,itm_javelin,itm_vaegir_elite_armor,itm_splinted_greaves,itm_hunter,itm_vaegir_war_helmet,itm_scale_gauntlets],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["khergit_veteran_horse_archer_multiplayer_coop_tier_4","Khergit Horse Archer","Khergit Horse Archers",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_3,
[itm_hafted_blade_b,itm_strong_bow,itm_khergit_arrows,itm_lamellar_vest_khergit,itm_khergit_guard_helmet,itm_splinted_leather_greaves,itm_steppe_horse],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["khergit_infantry_multiplayer_coop_tier_4","Khergit Infantry","Khergit Infantries",tf_guarantee_all,0,0,fac_kingdom_3,
[itm_hafted_blade_b,itm_tab_shield_small_round_a,itm_jarid,itm_khergit_elite_armor,itm_khergit_guard_boots,itm_khergit_war_helmet,itm_lamellar_gauntlets],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["khergit_lancer_multiplayer_coop_tier_4","Khergit Lancer","Khergit Lancers",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_3,
[itm_great_lance,itm_tab_shield_small_round_c,itm_khergit_elite_armor,itm_khergit_war_helmet,itm_khergit_guard_boots,itm_lamellar_gauntlets,itm_courser],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["nord_archer_multiplayer_coop_tier_4","Nord Archer","Nord Archers",tf_guarantee_all,0,0,fac_kingdom_4,
[itm_khergit_arrows,itm_sword_viking_3,itm_long_bow,itm_byrnie,itm_splinted_leather_greaves,itm_nordic_footman_helmet,itm_leather_gloves],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["nord_veteran_multiplayer_coop_tier_4","Nord Huscarl","Nord Huscarls",tf_guarantee_all,0,0,fac_kingdom_4,
[itm_great_axe,itm_tab_shield_round_e,itm_heavy_throwing_axes,itm_banded_armor,itm_mail_boots,itm_nordic_warlord_helmet],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["nord_scout_multiplayer_coop_tier_4","Nord Scout","Nord Scouts",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_4,
[itm_throwing_spears,itm_great_lance,itm_tab_shield_small_round_c,itm_mail_hauberk,itm_splinted_leather_greaves,itm_nordic_huscarl_helmet,itm_saddle_horse],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["rhodok_veteran_crossbowman_multiplayer_coop_tier_4","Rhodok Crossbowman","Rhodok Crossbowmen",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_sniper_crossbow,itm_steel_bolts,itm_sledgehammer,itm_tab_shield_pavise_d,itm_mail_with_tunic_green,itm_kettle_hat,itm_splinted_greaves,itm_leather_gloves],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["rhodok_sergeant_multiplayer_coop_tier_4","Rhodok Sergeant","Rhodok Sergeants",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_two_handed_cleaver,itm_tab_shield_pavise_d,itm_javelin,itm_surcoat_over_mail,itm_iron_greaves,itm_gauntlets,itm_full_helm,],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["rhodok_horseman_multiplayer_coop_tier_4","Rhodok Horseman","Rhodok Horsemen",tf_guarantee_all,0,0,fac_kingdom_5,
[itm_javelin,itm_tab_shield_heater_cav_b, itm_great_lance, itm_surcoat_over_mail,itm_iron_greaves,itm_gauntlets,itm_full_helm,itm_saddle_horse],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["sarranid_archer_multiplayer_coop_tier_4","Sarranid Archer","Sarranid Archers",tf_guarantee_all,0,0,fac_kingdom_6,
[itm_khergit_arrows,itm_sarranid_two_handed_mace_1,itm_strong_bow,itm_sarranid_mail_shirt,itm_sarranid_mail_coif,itm_leather_gloves,itm_sarranid_boots_d],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["sarranid_footman_multiplayer_coop_tier_4","Sarranid Footman","Sarranid footman",tf_guarantee_all,0,0,fac_kingdom_6,
[itm_bamboo_spear, itm_tab_shield_kite_c, itm_arabian_sword_a,itm_sarranid_elite_armor,itm_sarranid_veiled_helmet,itm_scale_gauntlets, itm_sarranid_boots_d],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["sarranid_mamluke_multiplayer_coop_tier_4","Sarranid Mamluke","Sarranid Mamluke",tf_mounted|tf_guarantee_all,0,0,fac_kingdom_6,
[itm_arabian_sword_a,itm_lance,itm_tab_shield_small_round_c,itm_mamluke_mail,itm_sarranid_veiled_helmet,itm_scale_gauntlets, itm_sarranid_boots_d,itm_arabian_horse_a],
level(7)|str_20, wp(300), knows_power_draw_10|knows_power_throw_10|knows_riding_10, 0, 0],
["coop_faction_troop_templates_end","{!}multiplayer_end","{!}multiplayer_end", 0, 0, 0, fac_kingdom_5, [], 0, 0, 0, 0, 0],
# ---- Companion (NPC) heroes: tier 1 ----
["npc1_1","Borcha","Borcha",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_khergit_armor,itm_nomad_boots,itm_knife, itm_courser],
str_16|agi_17|int_6|cha_30|level(25),wpex(250,80,140,160,90,250),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_ironflesh_1|knows_power_strike_7|knows_pathfinding_3|knows_athletics_5|knows_tracking_1|knows_riding_6|knows_power_throw_7|knows_power_draw_5,
0x00000004bf086143259d061a9046e23500000000001db52c0000000000000000],
["npc2_1","Marnid","Marnid", tf_hero|tf_unmoveable_in_party_window, 0,reserved, fac_commoners,[itm_linen_tunic,itm_hide_boots,itm_club, itm_saddle_horse],
str_14|agi_17|int_6|cha_30|level(25),wpex(240,130,170,150,170,90),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_trade_2|knows_weapon_master_1|knows_ironflesh_1|knows_wound_treatment_1|knows_athletics_5|knows_first_aid_1|knows_leadership_1|knows_riding_4|knows_power_strike_7|knows_power_draw_3|knows_power_throw_3,
0x000000019d004001570b893712c8d28d00000000001dc8990000000000000000],
["npc3_1","Ymira","Ymira",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_dress,itm_woolen_hose,itm_knife, itm_hunter],
str_24|agi_13|int_6|cha_30|level(25),wpex(190,80,240,180,180,80),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_wound_treatment_1|knows_trade_1|knows_first_aid_3|knows_surgery_1|knows_athletics_6|knows_riding_8|knows_power_strike_5|knows_power_draw_3|knows_power_throw_3,
0x0000000083040001583b6db8dec5925b00000000001d80980000000000000000],
["npc4_1","Rolf","Rolf",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_leather_jerkin,itm_nomad_boots, itm_sword_medieval_a, itm_hunter],
str_20|agi_13|int_6|cha_30|level(25),wpex(210,230,200,90,100,95),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_weapon_master_2|knows_power_strike_9|knows_riding_8|knows_athletics_7|knows_power_throw_3|knows_first_aid_1|knows_surgery_1|knows_tactics_2|knows_leadership_2|knows_power_draw_2,
0x000000057f1074002c75c6a8a58ad72e00000000001e1a890000000000000000],
["npc5_1","Baheshtur","Baheshtur",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_nomad_vest,itm_nomad_boots, itm_sword_khergit_1, itm_steppe_horse],
str_18|agi_13|int_6|cha_30|level(25),wpex(160,80,130,250,50,230),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_riding_7|knows_horse_archery_9|knows_power_draw_8|knows_leadership_2|knows_weapon_master_1|knows_power_strike_5|knows_power_throw_8|knows_athletics_5,
0x000000088910318b5c6f972328324a6200000000001cd3310000000000000000],
["npc6_1","Firentis","Firentis",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_tabard,itm_nomad_boots, itm_sword_medieval_a, itm_sumpter_horse],
str_20|agi_19|int_6|cha_30|level(25),wpex(240,210,180,90,100,80),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_riding_7|knows_weapon_master_2|knows_athletics_8|knows_trainer_1|knows_leadership_1|knows_power_strike_7|knows_power_draw_2|knows_power_throw_3,
0x00000002050052036a1895d0748f3ca30000000000000f0b0000000000000000],
["npc7_1","Deshavi","Deshavi",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_ragged_outfit,itm_wrapping_boots, itm_hunting_bow, itm_arrows, itm_quarter_staff, itm_arabian_horse_b],
str_16|agi_13|int_6|cha_30|level(25),wpex(90,80,230,280,110,130),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_tracking_2|knows_athletics_8|knows_spotting_1|knows_pathfinding_1|knows_power_draw_10|knows_riding_4|knows_power_strike_6|knows_power_throw_5,
0x00000001fc08400533a15297634d44f400000000001e02db0000000000000000],
["npc8_1","Matheld","Matheld",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_tribal_warrior_outfit,itm_nomad_boots, itm_sword_viking_1, itm_courser],
str_18|agi_15|int_6|cha_30|level(25),wpex(190,250,80,120,80,250),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_weapon_master_3|knows_athletics_10|knows_leadership_3|knows_tactics_1|knows_riding_4|knows_power_strike_10|knows_power_draw_2|knows_power_throw_8,
0x00000005800c000637db8314e331e76e00000000001c46db0000000000000000],
["npc9_1","Alayen","Alayen",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_tabard,itm_nomad_boots, itm_sword_medieval_b_small, itm_courser],
str_22|agi_19|int_6|cha_30|level(25),wpex(80,230,130,220,70,160),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_weapon_master_1|knows_riding_4|knows_athletics_6|knows_leadership_1|knows_tactics_1|knows_power_strike_4|knows_power_draw_7|knows_power_throw_5,
0x000000030100300f499d5b391b6db8d300000000001dc2e10000000000000000],
["npc10_1","Bunduk","Bunduk",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_padded_leather,itm_nomad_boots, itm_crossbow, itm_bolts, itm_pickaxe, itm_saddle_horse],
str_24|agi_19|int_6|cha_30|level(25),wpex(170,80,80,160,290,150),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_weapon_master_3|knows_tactics_1|knows_leadership_1|knows_ironflesh_3|knows_trainer_2|knows_first_aid_2|knows_riding_4|knows_power_strike_5|knows_power_draw_5|knows_power_throw_5|knows_athletics_7,
0x0000000a3f081006572c91c71c8d46cb00000000001e468a0000000000000000],
["npc11_1","Katrin","Katrin",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_leather_apron, itm_falchion, itm_wrapping_boots, itm_sumpter_horse],
str_16|agi_17|int_6|cha_30|level(25),wpex(140,230,130,80,210,170),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_weapon_master_1|knows_first_aid_1|knows_wound_treatment_2|knows_ironflesh_3|knows_inventory_management_5|knows_riding_4|knows_power_strike_5|knows_power_draw_2|knows_power_throw_7|knows_athletics_5,
0x0000000d7f0400035915aa226b4d975200000000001ea49e0000000000000000],
["npc12_1","Jeremus","Jeremus",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_pilgrim_disguise,itm_nomad_boots, itm_staff, itm_sumpter_horse],
str_16|agi_17|int_6|cha_30|level(25),wpex(120,110,290,80,110,120), knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_ironflesh_1|knows_power_strike_7|knows_surgery_4|knows_wound_treatment_3|knows_first_aid_3|knows_riding_4|knows_power_draw_2|knows_power_throw_3|knows_athletics_7,
0x000000078000500e4f8ba62a9cd5d36d00000000001e36250000000000000000],
["npc13_1","Nizar","Nizar",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_nomad_robe,itm_nomad_boots, itm_scimitar, itm_courser],
str_14|agi_17|int_6|cha_30|level(25),wpex(250,80,140,210,110,140),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_riding_9|knows_leadership_2|knows_athletics_5|knows_ironflesh_2|knows_power_strike_6|knows_weapon_master_1|knows_power_draw_7|knows_power_throw_4,
0x00000004bf0475c85f4e9592de4e574c00000000001e369c0000000000000000],
["npc14_1","Lezalit","Lezalit",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_nobleman_outfit,itm_nomad_boots, itm_sword_medieval_b_small, itm_courser],
str_18|agi_19|int_6|cha_30|level(25),wpex(280,170,170,170,170,180),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_trainer_4|knows_weapon_master_3|knows_leadership_2|knows_power_strike_1|knows_riding_7|knows_power_strike_7|knows_power_draw_6|knows_power_throw_6|knows_athletics_8,
0x00000001a410259144d5d1d6eb55e96a00000000001db0db0000000000000000],
["npc15_1","Artimenner","Artimenner",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_rich_outfit,itm_nomad_boots, itm_sword_medieval_b_small, itm_hunter],
str_18|agi_13|int_6|cha_30|level(25),wpex(190,290,130,210,90,90),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_tactics_2|knows_engineer_4|knows_trade_3|knows_tracking_1|knows_spotting_1|knows_riding_6|knows_power_strike_7|knows_power_draw_7|knows_power_throw_3|knows_athletics_5,
0x0000000f2e1021862b4b9123594eab5300000000001d55360000000000000000],
["npc16_1","Klethi","Klethi",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_peasant_dress,itm_nomad_boots, itm_dagger, itm_throwing_knives, itm_saddle_horse],
str_14|agi_17|int_6|cha_30|level(25),wpex(260,10,100,160,30,300),knows_tracking_10|knows_engineer_10|knows_first_aid_10|knows_surgery_10|knows_wound_treatment_10|knows_tactics_10|knows_trainer_10|knows_looting_10|
knows_power_throw_10|knows_athletics_10|knows_power_strike_8|knows_riding_4|knows_power_draw_5,
0x00000000000c100739ce9c805d2f381300000000001cc7ad0000000000000000],
# ---- Companion (NPC) heroes: tier 2 ----
["npc1_2","Borcha","Borcha",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_leather_steppe_cap_c,itm_leather_gloves,itm_nomad_robe,itm_hide_boots,itm_sword_medieval_b_small, itm_courser],
str_16|agi_17|int_12|cha_7|level(14),wp(60),knows_tracker_npc|
knows_ironflesh_1|knows_power_strike_1|knows_pathfinding_3|knows_athletics_2|knows_tracking_1|knows_riding_2,
0x00000004bf086143259d061a9046e23500000000001db52c0000000000000000],
["npc2_2","Marnid","Marnid", tf_hero|tf_unmoveable_in_party_window, 0,reserved, fac_commoners,[itm_nasal_helmet,itm_padded_leather,itm_leather_boots,itm_mace_2,itm_tab_shield_small_round_a, itm_saddle_horse],
str_14|agi_17|int_11|cha_6|level(14),wp(40),knows_merchant_npc|
knows_trade_2|knows_weapon_master_1|knows_ironflesh_1|knows_wound_treatment_1|knows_athletics_2|knows_first_aid_1|knows_leadership_1,
0x000000019d004001570b893712c8d28d00000000001dc8990000000000000000],
["npc3_2","Ymira","Ymira",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_head_wrappings,itm_leather_jerkin,itm_wrapping_boots,itm_sword_medieval_b_small, itm_hunter],
str_24|agi_13|int_11|cha_6|level(14),wp(20),knows_merchant_npc|
knows_wound_treatment_1|knows_trade_1|knows_first_aid_3|knows_surgery_1|knows_athletics_1|knows_riding_1,
0x0000000083040001583b6db8dec5925b00000000001d80980000000000000000],
["npc4_2","Rolf","Rolf",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_kettle_hat,itm_leather_gloves,itm_studded_leather_coat,itm_leather_boots,itm_sword_medieval_c,itm_tab_shield_heater_c, itm_hunter],
str_20|agi_13|int_13|cha_10|level(27),wp(110),knows_warrior_npc|
knows_weapon_master_2|knows_power_strike_2|knows_riding_2|knows_athletics_2|knows_power_throw_2|knows_first_aid_1|knows_surgery_1|knows_tactics_2|knows_leadership_2,
0x000000057f1074002c75c6a8a58ad72e00000000001e1a890000000000000000],
["npc5_2","Baheshtur","Baheshtur",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sword_khergit_2, itm_tab_shield_small_round_b, itm_leather_steppe_cap_b, itm_tribal_warrior_outfit, itm_khergit_leather_boots, itm_steppe_horse],
str_18|agi_13|int_12|cha_7|level(23),wp(90),knows_warrior_npc|
knows_riding_2|knows_horse_archery_3|knows_power_draw_3|knows_leadership_2|knows_weapon_master_1,
0x000000088910318b5c6f972328324a6200000000001cd3310000000000000000],
["npc6_2","Firentis","Firentis",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_bastard_sword_a, itm_mail_coif, itm_mail_with_tunic_red, itm_ankle_boots, itm_sumpter_horse],
str_20|agi_19|int_10|cha_5|level(25),wp(105),knows_warrior_npc|
knows_riding_2|knows_weapon_master_2|knows_power_strike_2|knows_athletics_3|knows_trainer_1|knows_leadership_1,
0x00000002050052036a1895d0748f3ca30000000000000f0b0000000000000000],
["npc7_2","Deshavi","Deshavi",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_voulge, itm_short_bow, itm_barbed_arrows, itm_nordic_fighter_helmet, itm_leather_gloves, itm_studded_leather_coat, itm_leather_boots, itm_arabian_horse_b],
str_16|agi_13|int_10|cha_6|level(17),wp(80),knows_tracker_npc|
knows_tracking_2|knows_athletics_2|knows_spotting_1|knows_pathfinding_1|knows_power_draw_2,
0x00000001fc08400533a15297634d44f400000000001e02db0000000000000000],
["npc8_2","Matheld","Matheld",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sword_viking_2, itm_nordic_helmet, itm_byrnie, itm_leather_boots, itm_courser],
str_18|agi_15|int_9|cha_10|level(26),wp(90),knows_warrior_npc|
knows_weapon_master_3|knows_power_strike_2|knows_athletics_2|knows_leadership_3|knows_tactics_1,
0x00000005800c000637db8314e331e76e00000000001c46db0000000000000000],
["npc9_2","Alayen","Alayen",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sword_medieval_c, itm_vaegir_fur_cap, itm_leather_vest, itm_nomad_boots, itm_courser],
str_22|agi_19|int_7|cha_8|level(17),wp(100),knows_warrior_npc|
knows_weapon_master_1|knows_riding_1|knows_athletics_1|knows_leadership_1|knows_tactics_1|knows_power_strike_1,
0x000000030100300f499d5b391b6db8d300000000001dc2e10000000000000000],
["npc10_2","Bunduk","Bunduk",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_military_sickle_a, itm_heavy_crossbow, itm_bolts, itm_mail_coif, itm_leather_gloves, itm_aketon_green, itm_leather_boots, itm_saddle_horse],
str_24|agi_19|int_9|cha_11|level(27),wp(105),knows_warrior_npc|
knows_weapon_master_3|knows_tactics_1|knows_leadership_1|knows_ironflesh_3|knows_trainer_2|knows_first_aid_2,
0x0000000a3f081006572c91c71c8d46cb00000000001e468a0000000000000000],
["npc11_2","Katrin","Katrin",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sarranid_axe_a, itm_arming_cap, itm_leather_gloves, itm_padded_cloth, itm_ankle_boots, itm_sumpter_horse],
str_16|agi_17|int_10|cha_10|level(26),wp(70),knows_merchant_npc|
knows_weapon_master_1|knows_first_aid_1|knows_wound_treatment_2|knows_ironflesh_3|knows_inventory_management_5,
0x0000000d7f0400035915aa226b4d975200000000001ea49e0000000000000000],
["npc12_2","Jeremus","Jeremus",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_iron_staff, itm_padded_coif, itm_leather_gloves, itm_pilgrim_disguise, itm_leather_boots, itm_sumpter_horse],
str_16|agi_17|int_13|cha_7|level(20),wp(30), knows_merchant_npc|
knows_ironflesh_1|knows_power_strike_1|knows_surgery_4|knows_wound_treatment_3|knows_first_aid_3,
0x000000078000500e4f8ba62a9cd5d36d00000000001e36250000000000000000],
["npc13_2","Nizar","Nizar",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_scimitar, itm_tab_shield_small_round_b, itm_sarranid_warrior_cap, itm_sarranid_leather_armor, itm_sarranid_boots_b, itm_courser],
str_14|agi_17|int_12|cha_8|level(19),wp(80),knows_warrior_npc|
knows_riding_2|knows_leadership_2|knows_athletics_2|knows_ironflesh_2|knows_power_strike_1|knows_weapon_master_1,
0x00000004bf0475c85f4e9592de4e574c00000000001e369c0000000000000000],
["npc14_2","Lezalit","Lezalit",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sword_medieval_b, itm_tab_shield_heater_c, itm_mail_coif, itm_studded_leather_coat, itm_leather_boots, itm_courser],
str_18|agi_19|int_11|cha_8|level(23),wp(100),knows_warrior_npc|
knows_trainer_4|knows_weapon_master_3|knows_leadership_2|knows_power_strike_1,
0x00000001a410259144d5d1d6eb55e96a00000000001db0db0000000000000000],
["npc15_2","Artimenner","Artimenner",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_long_axe, itm_helmet_with_neckguard, itm_leather_gloves, itm_red_gambeson, itm_leather_boots, itm_hunter],
str_18|agi_13|int_12|cha_8|level(25),wp(80),knows_warrior_npc|
knows_tactics_2|knows_engineer_4|knows_trade_3|knows_tracking_1|knows_spotting_1,
0x0000000f2e1021862b4b9123594eab5300000000001d55360000000000000000],
["npc16_2","Klethi","Klethi",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sword_viking_2_small, itm_light_throwing_axes, itm_helmet_with_neckguard, itm_leather_gloves, itm_leather_jerkin, itm_ankle_boots, itm_saddle_horse],
str_14|agi_17|int_8|cha_7|level(17),wp(80),knows_tracker_npc|
knows_power_throw_3|knows_athletics_2|knows_power_strike_1,
0x00000000000c100739ce9c805d2f381300000000001cc7ad0000000000000000],
# ---- Companion (NPC) heroes: tier 3 ----
["npc1_3","Borcha","Borcha",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_khergit_war_helmet,itm_lamellar_gauntlets,itm_lamellar_vest_khergit,itm_khergit_leather_boots,itm_sword_medieval_c_small, itm_courser],
str_16|agi_17|int_12|cha_7|level(14),wp(60),knows_tracker_npc|
knows_ironflesh_1|knows_power_strike_1|knows_pathfinding_3|knows_athletics_2|knows_tracking_1|knows_riding_2,
0x00000004bf086143259d061a9046e23500000000001db52c0000000000000000],
["npc2_3","Marnid","Marnid", tf_hero|tf_unmoveable_in_party_window, 0,reserved, fac_commoners,[itm_nordic_veteran_archer_helmet,itm_leather_gloves,itm_byrnie,itm_leather_boots,itm_mace_3,itm_tab_shield_small_round_b, itm_saddle_horse],
str_14|agi_17|int_11|cha_6|level(14),wp(40),knows_merchant_npc|
knows_trade_2|knows_weapon_master_1|knows_ironflesh_1|knows_wound_treatment_1|knows_athletics_2|knows_first_aid_1|knows_leadership_1,
0x000000019d004001570b893712c8d28d00000000001dc8990000000000000000],
["npc3_3","Ymira","Ymira",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_skullcap,itm_leather_gloves,itm_mail_shirt,itm_wrapping_boots,itm_sword_medieval_c_small, itm_hunter],
str_24|agi_13|int_11|cha_6|level(14),wp(20),knows_merchant_npc|
knows_wound_treatment_1|knows_trade_1|knows_first_aid_3|knows_surgery_1|knows_athletics_1|knows_riding_1,
0x0000000083040001583b6db8dec5925b00000000001d80980000000000000000],
["npc4_3","Rolf","Rolf",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_bascinet_2,itm_leather_gloves,itm_surcoat_over_mail,itm_mail_chausses,itm_sword_medieval_c_long,itm_tab_shield_heater_c, itm_hunter],
str_20|agi_13|int_13|cha_10|level(27),wp(110),knows_warrior_npc|
knows_weapon_master_2|knows_power_strike_2|knows_riding_2|knows_athletics_2|knows_power_throw_2|knows_first_aid_1|knows_surgery_1|knows_tactics_2|knows_leadership_2,
0x000000057f1074002c75c6a8a58ad72e00000000001e1a890000000000000000],
["npc5_3","Baheshtur","Baheshtur",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_scimitar, itm_tab_shield_small_round_c, itm_khergit_cavalry_helmet, itm_leather_gloves, itm_lamellar_vest, itm_khergit_leather_boots, itm_steppe_horse],
str_18|agi_13|int_12|cha_7|level(23),wp(90),knows_warrior_npc|
knows_riding_2|knows_horse_archery_3|knows_power_draw_3|knows_leadership_2|knows_weapon_master_1,
0x000000088910318b5c6f972328324a6200000000001cd3310000000000000000],
["npc6_3","Firentis","Firentis",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_bastard_sword_b, itm_flat_topped_helmet, itm_mail_mittens, itm_haubergeon, itm_mail_chausses, itm_sumpter_horse],
str_20|agi_19|int_10|cha_5|level(25),wp(105),knows_warrior_npc|
knows_riding_2|knows_weapon_master_2|knows_power_strike_2|knows_athletics_3|knows_trainer_1|knows_leadership_1,
0x00000002050052036a1895d0748f3ca30000000000000f0b0000000000000000],
["npc7_3","Deshavi","Deshavi",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_long_bardiche, itm_strong_bow, itm_barbed_arrows, itm_nordic_helmet, itm_leather_gloves, itm_mail_hauberk, itm_splinted_leather_greaves, itm_arabian_horse_b],
str_16|agi_13|int_10|cha_6|level(17),wp(80),knows_tracker_npc|
knows_tracking_2|knows_athletics_2|knows_spotting_1|knows_pathfinding_1|knows_power_draw_2,
0x00000001fc08400533a15297634d44f400000000001e02db0000000000000000],
["npc8_3","Matheld","Matheld",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_battle_axe, itm_nordic_huscarl_helmet, itm_leather_gloves, itm_mail_hauberk, itm_mail_chausses, itm_courser],
str_18|agi_15|int_9|cha_10|level(26),wp(90),knows_warrior_npc|
knows_weapon_master_3|knows_power_strike_2|knows_athletics_2|knows_leadership_3|knows_tactics_1,
0x00000005800c000637db8314e331e76e00000000001c46db0000000000000000],
["npc9_3","Alayen","Alayen",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sword_medieval_c_long, itm_vaegir_lamellar_helmet, itm_leather_gloves, itm_lamellar_vest, itm_leather_boots, itm_courser],
str_22|agi_19|int_7|cha_8|level(17),wp(100),knows_warrior_npc|
knows_weapon_master_1|knows_riding_1|knows_athletics_1|knows_leadership_1|knows_tactics_1|knows_power_strike_1,
0x000000030100300f499d5b391b6db8d300000000001dc2e10000000000000000],
["npc10_3","Bunduk","Bunduk",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_military_pick, itm_heavy_crossbow, itm_steel_bolts, itm_kettle_hat, itm_leather_gloves, itm_mail_with_tunic_green, itm_leather_boots, itm_saddle_horse],
str_24|agi_19|int_9|cha_11|level(27),wp(105),knows_warrior_npc|
knows_weapon_master_3|knows_tactics_1|knows_leadership_1|knows_ironflesh_3|knows_trainer_2|knows_first_aid_2,
0x0000000a3f081006572c91c71c8d46cb00000000001e468a0000000000000000],
["npc11_3","Katrin","Katrin",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sarranid_axe_b, itm_arming_cap, itm_leather_gloves, itm_mail_with_surcoat, itm_mail_chausses, itm_sumpter_horse],
str_16|agi_17|int_10|cha_10|level(26),wp(70),knows_merchant_npc|
knows_weapon_master_1|knows_first_aid_1|knows_wound_treatment_2|knows_ironflesh_3|knows_inventory_management_5,
0x0000000d7f0400035915aa226b4d975200000000001ea49e0000000000000000],
["npc12_3","Jeremus","Jeremus",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_iron_staff, itm_mail_coif, itm_mail_mittens, itm_pilgrim_disguise, itm_mail_chausses, itm_sumpter_horse],
str_16|agi_17|int_13|cha_7|level(20),wp(30), knows_merchant_npc|
knows_ironflesh_1|knows_power_strike_1|knows_surgery_4|knows_wound_treatment_3|knows_first_aid_3,
0x000000078000500e4f8ba62a9cd5d36d00000000001e36250000000000000000],
["npc13_3","Nizar","Nizar",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_scimitar, itm_tab_shield_small_round_c, itm_sarranid_mail_coif, itm_arabian_armor_b, itm_sarranid_boots_c, itm_courser],
str_14|agi_17|int_12|cha_8|level(19),wp(80),knows_warrior_npc|
knows_riding_2|knows_leadership_2|knows_athletics_2|knows_ironflesh_2|knows_power_strike_1|knows_weapon_master_1,
0x00000004bf0475c85f4e9592de4e574c00000000001e369c0000000000000000],
["npc14_3","Lezalit","Lezalit",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sword_medieval_c, itm_tab_shield_heater_c, itm_bascinet_2, itm_leather_gloves, itm_surcoat_over_mail, itm_mail_chausses, itm_courser],
str_18|agi_19|int_11|cha_8|level(23),wp(100),knows_warrior_npc|
knows_trainer_4|knows_weapon_master_3|knows_leadership_2|knows_power_strike_1,
0x00000001a410259144d5d1d6eb55e96a00000000001db0db0000000000000000],
["npc15_3","Artimenner","Artimenner",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_long_axe_b, itm_guard_helmet, itm_mail_mittens, itm_haubergeon, itm_mail_chausses, itm_hunter],
str_18|agi_13|int_12|cha_8|level(25),wp(80),knows_warrior_npc|
knows_tactics_2|knows_engineer_4|knows_trade_3|knows_tracking_1|knows_spotting_1,
0x0000000f2e1021862b4b9123594eab5300000000001d55360000000000000000],
["npc16_3","Klethi","Klethi",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sword_viking_2_small, itm_throwing_axes, itm_vaegir_fur_helmet, itm_leather_gloves, itm_lamellar_vest, itm_leather_boots, itm_saddle_horse],
str_14|agi_17|int_8|cha_7|level(17),wp(80),knows_tracker_npc|
knows_power_throw_3|knows_athletics_2|knows_power_strike_1,
0x00000000000c100739ce9c805d2f381300000000001cc7ad0000000000000000],
#tier 4
["npc1_4","Borcha","Borcha",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_khergit_guard_helmet,itm_lamellar_gauntlets,itm_khergit_guard_armor,itm_khergit_guard_boots,itm_sword_viking_3_small, itm_courser],
str_16|agi_17|int_12|cha_7|level(14),wp(60),knows_tracker_npc|
knows_ironflesh_1|knows_power_strike_1|knows_pathfinding_3|knows_athletics_2|knows_tracking_1|knows_riding_2,
0x00000004bf086143259d061a9046e23500000000001db52c0000000000000000],
["npc2_4","Marnid","Marnid", tf_hero|tf_unmoveable_in_party_window, 0,reserved, fac_commoners,[itm_nordic_helmet,itm_mail_mittens,itm_mail_hauberk,itm_mail_chausses,itm_mace_4,itm_tab_shield_small_round_c, itm_saddle_horse],
str_14|agi_17|int_11|cha_6|level(14),wp(40),knows_merchant_npc|
knows_trade_2|knows_weapon_master_1|knows_ironflesh_1|knows_wound_treatment_1|knows_athletics_2|knows_first_aid_1|knows_leadership_1,
0x000000019d004001570b893712c8d28d00000000001dc8990000000000000000],
["npc3_4","Ymira","Ymira",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_guard_helmet,itm_gauntlets,itm_plate_armor,itm_plate_boots,itm_sword_viking_3_small, itm_hunter],
str_24|agi_13|int_11|cha_6|level(14),wp(20),knows_merchant_npc|
knows_wound_treatment_1|knows_trade_1|knows_first_aid_3|knows_surgery_1|knows_athletics_1|knows_riding_1,
0x0000000083040001583b6db8dec5925b00000000001d80980000000000000000],
["npc4_4","Rolf","Rolf",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_full_helm,itm_scale_gauntlets,itm_heraldic_mail_with_tabard,itm_iron_greaves,itm_sword_medieval_d_long,itm_tab_shield_heater_d, itm_hunter],
str_20|agi_13|int_13|cha_10|level(27),wp(110),knows_warrior_npc|
knows_weapon_master_2|knows_power_strike_2|knows_riding_2|knows_athletics_2|knows_power_throw_2|knows_first_aid_1|knows_surgery_1|knows_tactics_2|knows_leadership_2,
0x000000057f1074002c75c6a8a58ad72e00000000001e1a890000000000000000],
["npc5_4","Baheshtur","Baheshtur",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_scimitar_b, itm_tab_shield_small_round_c, itm_khergit_guard_helmet, itm_scale_gauntlets, itm_lamellar_armor, itm_iron_greaves, itm_steppe_horse],
str_18|agi_13|int_12|cha_7|level(23),wp(90),knows_warrior_npc|
knows_riding_2|knows_horse_archery_3|knows_power_draw_3|knows_leadership_2|knows_weapon_master_1,
0x000000088910318b5c6f972328324a6200000000001cd3310000000000000000],
["npc6_4","Firentis","Firentis",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sword_two_handed_b, itm_bascinet, itm_gauntlets, itm_cuir_bouilli, itm_plate_boots, itm_sumpter_horse],
str_20|agi_19|int_10|cha_5|level(25),wp(105),knows_warrior_npc|
knows_riding_2|knows_weapon_master_2|knows_power_strike_2|knows_athletics_3|knows_trainer_1|knows_leadership_1,
0x00000002050052036a1895d0748f3ca30000000000000f0b0000000000000000],
["npc7_4","Deshavi","Deshavi",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_great_long_bardiche, itm_war_bow, itm_khergit_arrows, itm_nordic_huscarl_helmet, itm_scale_gauntlets, itm_heraldic_mail_with_tabard, itm_iron_greaves, itm_arabian_horse_b],
str_16|agi_13|int_10|cha_6|level(17),wp(80),knows_tracker_npc|
knows_tracking_2|knows_athletics_2|knows_spotting_1|knows_pathfinding_1|knows_power_draw_2,
0x00000001fc08400533a15297634d44f400000000001e02db0000000000000000],
["npc8_4","Matheld","Matheld",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_great_axe, itm_nordic_warlord_helmet, itm_mail_mittens, itm_banded_armor, itm_mail_chausses, itm_courser],
str_18|agi_15|int_9|cha_10|level(26),wp(90),knows_warrior_npc|
knows_weapon_master_3|knows_power_strike_2|knows_athletics_2|knows_leadership_3|knows_tactics_1,
0x00000005800c000637db8314e331e76e00000000001c46db0000000000000000],
["npc9_4","Alayen","Alayen",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_bastard_sword_b, itm_vaegir_war_helmet, itm_lamellar_gauntlets, itm_banded_armor, itm_iron_greaves, itm_courser],
str_22|agi_19|int_7|cha_8|level(17),wp(100),knows_warrior_npc|
knows_weapon_master_1|knows_riding_1|knows_athletics_1|knows_leadership_1|knows_tactics_1|knows_power_strike_1,
0x000000030100300f499d5b391b6db8d300000000001dc2e10000000000000000],
["npc10_4","Bunduk","Bunduk",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_military_pick, itm_sniper_crossbow, itm_steel_bolts, itm_full_helm, itm_mail_mittens, itm_surcoat_over_mail, itm_splinted_leather_greaves, itm_saddle_horse],
str_24|agi_19|int_9|cha_11|level(27),wp(105),knows_warrior_npc|
knows_weapon_master_3|knows_tactics_1|knows_leadership_1|knows_ironflesh_3|knows_trainer_2|knows_first_aid_2,
0x0000000a3f081006572c91c71c8d46cb00000000001e468a0000000000000000],
["npc11_4","Katrin","Katrin",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sarranid_two_handed_axe_a, itm_great_helmet, itm_gauntlets, itm_brigandine_red, itm_plate_boots, itm_sumpter_horse],
str_16|agi_17|int_10|cha_10|level(26),wp(70),knows_merchant_npc|
knows_weapon_master_1|knows_first_aid_1|knows_wound_treatment_2|knows_ironflesh_3|knows_inventory_management_5,
0x0000000d7f0400035915aa226b4d975200000000001ea49e0000000000000000],
["npc12_4","Jeremus","Jeremus",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_iron_staff, itm_kettle_hat, itm_gauntlets, itm_surcoat_over_mail, itm_plate_boots, itm_sumpter_horse],
str_16|agi_17|int_13|cha_7|level(20),wp(30), knows_merchant_npc|
knows_ironflesh_1|knows_power_strike_1|knows_surgery_4|knows_wound_treatment_3|knows_first_aid_3,
0x000000078000500e4f8ba62a9cd5d36d00000000001e36250000000000000000],
["npc13_4","Nizar","Nizar",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_scimitar_b, itm_tab_shield_small_round_c, itm_sarranid_veiled_helmet, itm_scale_gauntlets, itm_mamluke_mail, itm_sarranid_boots_d, itm_courser],
str_14|agi_17|int_12|cha_8|level(19),wp(80),knows_warrior_npc|
knows_riding_2|knows_leadership_2|knows_athletics_2|knows_ironflesh_2|knows_power_strike_1|knows_weapon_master_1,
0x00000004bf0475c85f4e9592de4e574c00000000001e369c0000000000000000],
["npc14_4","Lezalit","Lezalit",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sword_medieval_d_long, itm_tab_shield_heater_d, itm_great_helmet, itm_gauntlets, itm_heraldic_mail_with_surcoat, itm_plate_boots, itm_courser],
str_18|agi_19|int_11|cha_8|level(23),wp(100),knows_warrior_npc|
knows_trainer_4|knows_weapon_master_3|knows_leadership_2|knows_power_strike_1,
0x00000001a410259144d5d1d6eb55e96a00000000001db0db0000000000000000],
["npc15_4","Artimenner","Artimenner",tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_long_axe_c, itm_full_helm, itm_scale_gauntlets, itm_heraldic_mail_with_surcoat, itm_iron_greaves, itm_hunter],
str_18|agi_13|int_12|cha_8|level(25),wp(80),knows_warrior_npc|
knows_tactics_2|knows_engineer_4|knows_trade_3|knows_tracking_1|knows_spotting_1,
0x0000000f2e1021862b4b9123594eab5300000000001d55360000000000000000],
["npc16_4","Klethi","Klethi",tf_female|tf_hero|tf_unmoveable_in_party_window, 0, reserved, fac_commoners,[itm_sword_viking_3_small, itm_heavy_throwing_axes, itm_vaegir_lamellar_helmet, itm_lamellar_gauntlets, itm_lamellar_armor, itm_khergit_guard_boots, itm_saddle_horse],
str_14|agi_17|int_8|cha_7|level(17),wp(80),knows_tracker_npc|
knows_power_throw_3|knows_athletics_2|knows_power_strike_1,
0x00000000000c100739ce9c805d2f381300000000001cc7ad0000000000000000],
["coop_companion_equipment_ui_0","{!}multiplayer_end","{!}multiplayer_end", 0, 0, 0, fac_kingdom_5, [], 0, 0, 0, 0, 0],
["coop_companion_equipment_ui_0_f","{!}multiplayer_end","{!}multiplayer_end", tf_female, 0, 0, fac_kingdom_5, [], 0, 0, 0, 0, 0],
["coop_companion_equipment_ui_1","{!}multiplayer_end","{!}multiplayer_end", 0, 0, 0, fac_kingdom_5, [], 0, 0, 0, 0, 0],
["coop_companion_equipment_ui_1_f","{!}multiplayer_end","{!}multiplayer_end", tf_female, 0, 0, fac_kingdom_5, [], 0, 0, 0, 0, 0],
["coop_companion_equipment_sets_end","{!}multiplayer_end","{!}multiplayer_end", 0, 0, 0, fac_kingdom_5, [], 0, 0, 0, 0, 0],
##diplomacy begin
#SB : fixed plural name (hero name), TODO actually use name/gender in hiring dialogues
["dplmc_chamberlain","Chamberlain Aubrey de Vere", "Aubrey de Vere",tf_hero|tf_male,0,0,fac_commoners,[itm_tabard, itm_leather_boots], def_attrib|level(10), wp(40),knows_inventory_management_10,0x0000000dfc0c238838e571c8d469c91b00000000001e39230000000000000000],
["dplmc_constable","Constable Miles de Gloucester","Miles de Gloucester",tf_hero|tf_male,0,0,fac_commoners,[itm_dplmc_coat_of_plates_red_constable, itm_leather_boots],
knight_attrib_4,wp_melee(200),knows_common|knows_shield_3|knows_ironflesh_3|knows_power_strike_4|knows_athletics_4,0x0000000b4b1015054b1b4d591cba28d300000000001e472b0000000000000000],
["dplmc_chancellor","Chancellor Herfast","Herfast",tf_hero|tf_male,0,0,fac_commoners,[itm_nobleman_outfit, itm_leather_boots],def_attrib|level(10), wp(40),knows_inventory_management_10, 0x00000009a20c21cf491bad28a28628d400000000001e371a0000000000000000],
["dplmc_messenger","Messenger","Messengers",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse|tf_guarantee_ranged,0,0,fac_neutral,
[itm_sword_medieval_a,itm_leather_jerkin,itm_leather_boots,itm_courser,itm_leather_gloves,itm_light_crossbow,itm_bolts],
def_attrib|agi_21|int_30|cha_21|level(25),wp(130),knows_common|knows_riding_7|knows_horse_archery_5|knows_leadership_7,man_face_young_1,man_face_old_2],
["dplmc_scout","Scout","Scouts",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse|tf_guarantee_ranged,0,0,fac_neutral,
[itm_sword_medieval_a,itm_leather_jerkin,itm_leather_boots,itm_courser,itm_leather_gloves,itm_light_crossbow,itm_bolts],
def_attrib|agi_21|int_30|cha_21|level(25),wp(130),knows_common|knows_riding_7|knows_horse_archery_5|knows_leadership_7,man_face_young_1,man_face_old_2],
# recruiter kit begin
["dplmc_recruiter","Recruiter","Recruiter",tf_mounted|tf_guarantee_boots|tf_guarantee_armor|tf_guarantee_gloves|tf_guarantee_horse|tf_guarantee_ranged,0,0,fac_neutral,
[itm_sword_medieval_a,itm_leather_jerkin,itm_leather_boots,itm_courser,itm_leather_gloves,itm_light_crossbow,itm_bolts],
def_attrib|agi_21|int_30|cha_21|level(25),wp(130),knows_common|knows_riding_7|knows_horse_archery_5|knows_leadership_7,swadian_face_young_1, swadian_face_old_2],
# recruiter kit end
##diplomacy end
]
#Troop upgrade declarations
# upgrade(troops, base, target) registers a single upgrade path from one troop
# id to another; upgrade2(troops, base, target_a, target_b) registers a
# branching path with two alternative targets.  Both helpers are defined
# elsewhere in the module system -- presumably they annotate the matching
# records in `troops`; TODO confirm against the module-system headers.
upgrade(troops,"farmer", "watchman")
upgrade(troops,"townsman","watchman")
upgrade2(troops,"watchman","caravan_guard","mercenary_crossbowman")
upgrade2(troops,"caravan_guard","mercenary_swordsman","mercenary_horseman")
upgrade(troops,"mercenary_swordsman","hired_blade")
upgrade(troops,"mercenary_horseman","mercenary_cavalry")
# Swadian tree
upgrade(troops,"swadian_recruit","swadian_militia")
upgrade2(troops,"swadian_militia","swadian_footman","swadian_skirmisher")
upgrade2(troops,"swadian_footman","swadian_man_at_arms","swadian_infantry")
upgrade(troops,"swadian_infantry","swadian_sergeant")
upgrade(troops,"swadian_skirmisher","swadian_crossbowman")
upgrade(troops,"swadian_crossbowman","swadian_sharpshooter")
upgrade(troops,"swadian_man_at_arms","swadian_knight")
# Vaegir tree
upgrade(troops,"vaegir_recruit","vaegir_footman")
upgrade2(troops,"vaegir_footman","vaegir_veteran","vaegir_skirmisher")
upgrade(troops,"vaegir_skirmisher","vaegir_archer")
upgrade(troops,"vaegir_archer","vaegir_marksman")
upgrade2(troops,"vaegir_veteran","vaegir_horseman","vaegir_infantry")
upgrade(troops,"vaegir_infantry","vaegir_guard")
upgrade(troops,"vaegir_horseman","vaegir_knight")
# Khergit tree
upgrade(troops,"khergit_tribesman","khergit_skirmisher")
upgrade(troops,"khergit_skirmisher","khergit_horseman")
upgrade2(troops,"khergit_horseman","khergit_lancer","khergit_horse_archer")
upgrade(troops,"khergit_horse_archer","khergit_veteran_horse_archer")
# Nord tree
upgrade2(troops,"nord_recruit","nord_footman","nord_huntsman")
upgrade(troops,"nord_footman","nord_trained_footman")
upgrade(troops,"nord_trained_footman","nord_warrior")
upgrade(troops,"nord_warrior","nord_veteran")
upgrade(troops,"nord_veteran","nord_champion")
upgrade(troops,"nord_huntsman","nord_archer")
upgrade(troops,"nord_archer","nord_veteran_archer")
# Rhodok tree
upgrade2(troops,"rhodok_tribesman","rhodok_spearman","rhodok_crossbowman")
upgrade(troops,"rhodok_spearman","rhodok_trained_spearman")
upgrade(troops,"rhodok_trained_spearman","rhodok_veteran_spearman")
upgrade(troops,"rhodok_veteran_spearman","rhodok_sergeant")
upgrade(troops,"rhodok_crossbowman","rhodok_trained_crossbowman")
upgrade(troops,"rhodok_trained_crossbowman","rhodok_veteran_crossbowman") #new 1.126
upgrade(troops,"rhodok_veteran_crossbowman","rhodok_sharpshooter")
# Sarranid tree
upgrade(troops,"sarranid_recruit","sarranid_footman")
upgrade2(troops,"sarranid_footman","sarranid_veteran_footman","sarranid_skirmisher")
upgrade2(troops,"sarranid_veteran_footman","sarranid_horseman","sarranid_infantry")
upgrade(troops,"sarranid_infantry","sarranid_guard")
upgrade(troops,"sarranid_skirmisher","sarranid_archer")
upgrade(troops,"sarranid_archer","sarranid_master_archer")
upgrade(troops,"sarranid_horseman","sarranid_mamluke")
upgrade2(troops,"looter","mountain_bandit", "forest_bandit")
#new tree connections: each bandit type feeds into a faction's recruit line
upgrade(troops,"mountain_bandit","rhodok_tribesman")
upgrade(troops,"forest_bandit","swadian_recruit")
upgrade(troops,"steppe_bandit","khergit_tribesman")
upgrade(troops,"taiga_bandit","vaegir_recruit")
upgrade(troops,"sea_raider","nord_recruit")
upgrade(troops,"desert_bandit","sarranid_recruit")
#new tree connections ended
upgrade2(troops,"bandit","brigand","mercenary_swordsman")
# Manhunter / slaver line
upgrade(troops,"manhunter","slave_driver")
#upgrade(troops,"forest_bandit","mercenary_crossbowman")
upgrade(troops,"slave_driver","slave_hunter")
upgrade(troops,"slave_hunter","slave_crusher")
upgrade(troops,"slave_crusher","slaver_chief")
# Female civilian line
upgrade(troops,"follower_woman","hunter_woman")
upgrade(troops,"hunter_woman","fighter_woman")
upgrade(troops,"fighter_woman","sword_sister")
upgrade(troops,"refugee","follower_woman")
upgrade(troops,"peasant_woman","follower_woman")
| 157.60378
| 515
| 0.816677
| 65,649
| 433,568
| 4.855687
| 0.025956
| 0.016658
| 0.033203
| 0.022304
| 0.849569
| 0.82952
| 0.793457
| 0.766619
| 0.737664
| 0.710036
| 0
| 0.111351
| 0.079314
| 433,568
| 2,750
| 516
| 157.661091
| 0.687214
| 0.083722
| 0
| 0.173225
| 0
| 0
| 0.125079
| 0.019459
| 0
| 0
| 0.079624
| 0.000364
| 0
| 1
| 0.001809
| false
| 0
| 0.004071
| 0
| 0.007689
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fbb9a3a9f69d94df8105ca7f039df6db3309cf1d
| 91,897
|
py
|
Python
|
TweakApi/apis/product_type_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
TweakApi/apis/product_type_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
TweakApi/apis/product_type_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
tweak-api
Tweak API to integrate with all the Tweak services. You can find out more about Tweak at <a href='https://www.tweak.com'>https://www.tweak.com</a>, #tweak.
OpenAPI spec version: 1.0.8-beta.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ProductTypeApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """
    Bind this API object to an ApiClient.

    :param api_client: explicit client to use; when omitted, the client
        memoised on the shared Configuration is used (and created on
        first need).
    """
    # NOTE: Configuration() is constructed unconditionally, exactly as the
    # generated code did, in case its constructor has side effects.
    config = Configuration()
    if not api_client:
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def product_types_change_stream_get(self, **kwargs):
    """
    Create a change stream.

    Synchronous by default; pass a ``callback`` function to make the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The generated code branched on 'callback', but both branches returned
    # the result of the same delegate call, so the split was redundant.
    return self.product_types_change_stream_get_with_http_info(**kwargs)
def product_types_change_stream_get_with_http_info(self, **kwargs):
    """
    Create a change stream.

    Synchronous by default; pass a ``callback`` function to make the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """
    # Validate keyword arguments against the endpoint's parameter list.
    recognised = ('options', 'callback', '_return_http_data_only')
    params = {}
    for arg, value in kwargs.items():
        if arg not in recognised:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_change_stream_get" % arg
            )
        params[arg] = value

    resource_path = '/ProductTypes/change-stream'.replace('{format}', 'json')

    query_params = {}
    if 'options' in params:
        query_params['options'] = params['options']

    # Content negotiation; an empty Accept header is omitted entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml', 'text/xml',
             'application/javascript', 'text/javascript']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/x-www-form-urlencoded',
             'application/xml', 'text/xml']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(
        resource_path, 'GET',
        {},                # no path placeholders on this endpoint
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='file',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def product_types_change_stream_post(self, **kwargs):
    """
    Create a change stream.

    Synchronous by default; pass a ``callback`` function to make the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The generated code branched on 'callback', but both branches returned
    # the result of the same delegate call, so the split was redundant.
    return self.product_types_change_stream_post_with_http_info(**kwargs)
def product_types_change_stream_post_with_http_info(self, **kwargs):
    """
    Create a change stream.

    Synchronous by default; pass a ``callback`` function to make the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
        If the method is called asynchronously,
        returns the request thread.
    """
    # Validate keyword arguments against the endpoint's parameter list.
    recognised = ('options', 'callback', '_return_http_data_only')
    params = {}
    for arg, value in kwargs.items():
        if arg not in recognised:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_change_stream_post" % arg
            )
        params[arg] = value

    resource_path = '/ProductTypes/change-stream'.replace('{format}', 'json')

    # POST variant: 'options' travels as a form field, not a query param.
    form_params = []
    if 'options' in params:
        form_params.append(('options', params['options']))

    # Content negotiation; an empty Accept header is omitted entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml', 'text/xml',
             'application/javascript', 'text/javascript']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/x-www-form-urlencoded',
             'application/xml', 'text/xml']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(
        resource_path, 'POST',
        {},                # no path placeholders on this endpoint
        {},                # no query parameters for the POST form
        header_params,
        body=None,
        post_params=form_params,
        files={},
        response_type='file',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def product_types_count_get(self, **kwargs):
    """
    Count instances of the model matched by where from the data source.

    Synchronous by default; pass a ``callback`` function to make the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The generated code branched on 'callback', but both branches returned
    # the result of the same delegate call, so the split was redundant.
    return self.product_types_count_get_with_http_info(**kwargs)
def product_types_count_get_with_http_info(self, **kwargs):
    """
    Count instances of the model matched by where from the data source.

    Synchronous by default; pass a ``callback`` function to make the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """
    # Validate keyword arguments against the endpoint's parameter list.
    recognised = ('where', 'callback', '_return_http_data_only')
    params = {}
    for arg, value in kwargs.items():
        if arg not in recognised:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_count_get" % arg
            )
        params[arg] = value

    resource_path = '/ProductTypes/count'.replace('{format}', 'json')

    query_params = {}
    if 'where' in params:
        query_params['where'] = params['where']

    # Content negotiation; an empty Accept header is omitted entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml', 'text/xml',
             'application/javascript', 'text/javascript']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/x-www-form-urlencoded',
             'application/xml', 'text/xml']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(
        resource_path, 'GET',
        {},                # no path placeholders on this endpoint
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='InlineResponse2001',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def product_types_find_one_get(self, **kwargs):
    """
    Find first instance of the model matched by filter from the data source.

    Synchronous by default; pass a ``callback`` function to make the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: ProductType
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The generated code branched on 'callback', but both branches returned
    # the result of the same delegate call, so the split was redundant.
    return self.product_types_find_one_get_with_http_info(**kwargs)
def product_types_find_one_get_with_http_info(self, **kwargs):
    """
    Find first instance of the model matched by filter from the data source.

    Synchronous by default; pass a ``callback`` function to make the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: ProductType
        If the method is called asynchronously,
        returns the request thread.
    """
    # Validate keyword arguments against the endpoint's parameter list.
    recognised = ('filter', 'callback', '_return_http_data_only')
    params = {}
    for arg, value in kwargs.items():
        if arg not in recognised:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_find_one_get" % arg
            )
        params[arg] = value

    resource_path = '/ProductTypes/findOne'.replace('{format}', 'json')

    query_params = {}
    if 'filter' in params:
        query_params['filter'] = params['filter']

    # Content negotiation; an empty Accept header is omitted entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml', 'text/xml',
             'application/javascript', 'text/javascript']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/x-www-form-urlencoded',
             'application/xml', 'text/xml']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(
        resource_path, 'GET',
        {},                # no path placeholders on this endpoint
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ProductType',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def product_types_get(self, **kwargs):
    """
    Find all instances of the model matched by filter from the data source.

    Synchronous by default; pass a ``callback`` function to make the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: list[ProductType]
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The generated code branched on 'callback', but both branches returned
    # the result of the same delegate call, so the split was redundant.
    return self.product_types_get_with_http_info(**kwargs)
def product_types_get_with_http_info(self, **kwargs):
    """
    Find all instances of the model matched by filter from the data source.

    Synchronous by default; pass a ``callback`` function to make the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: list[ProductType]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Validate keyword arguments against the endpoint's parameter list.
    recognised = ('filter', 'callback', '_return_http_data_only')
    params = {}
    for arg, value in kwargs.items():
        if arg not in recognised:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_get" % arg
            )
        params[arg] = value

    resource_path = '/ProductTypes'.replace('{format}', 'json')

    query_params = {}
    if 'filter' in params:
        query_params['filter'] = params['filter']

    # Content negotiation; an empty Accept header is omitted entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml', 'text/xml',
             'application/javascript', 'text/javascript']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/x-www-form-urlencoded',
             'application/xml', 'text/xml']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(
        resource_path, 'GET',
        {},                # no path placeholders on this endpoint
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[ProductType]',
        auth_settings=['access_token'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        collection_formats={})
def product_types_id_delete(self, id, **kwargs):
    """
    Delete a model instance by {{id}} from the data source.

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the callback is invoked with
    the response and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_delete(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the low-level variant for the deserialized body only (not the
    # full (data, status, headers) tuple).  With or without a callback,
    # its return value is exactly what this wrapper must hand back.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_delete_with_http_info(id, **kwargs)
def product_types_id_delete_with_http_info(self, id, **kwargs):
    """
    Delete a model instance by {{id}} from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_delete_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted for this endpoint, plus the internal control kwargs.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of local names (includes `self`, `id`, `kwargs`); used below
    # as a plain dict to merge validated keyword arguments into.
    params = locals()
    # Fail fast on any keyword argument this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_delete`")

    collection_formats = {}
    # Legacy swagger-codegen `{format}` placeholder; always resolved to JSON.
    resource_path = '/ProductTypes/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    # Delegate transport, serialization and deserialization to ApiClient.
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='object',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_exists_get(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the callback is invoked with
    the response and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_exists_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
        If the method is called asynchronously,
        returns the request thread.
    """
    # Request the deserialized body only; the low-level call's return
    # value is identical in both the sync and async (callback) cases.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_exists_get_with_http_info(id, **kwargs)
def product_types_id_exists_get_with_http_info(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_exists_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted for this endpoint, plus the internal control kwargs.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of local names; used as a plain dict below.
    params = locals()
    # Fail fast on any keyword argument this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_exists_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_exists_get`")

    collection_formats = {}
    # Legacy swagger-codegen `{format}` placeholder; always resolved to JSON.
    resource_path = '/ProductTypes/{id}/exists'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    # Delegate transport, serialization and deserialization to ApiClient.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2002',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_get(self, id, **kwargs):
    """
    Find a model instance by {{id}} from the data source.

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the callback is invoked with
    the response and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: ProductType
        If the method is called asynchronously,
        returns the request thread.
    """
    # Request the deserialized body only; the low-level call's return
    # value is identical in both the sync and async (callback) cases.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_get_with_http_info(id, **kwargs)
def product_types_id_get_with_http_info(self, id, **kwargs):
    """
    Find a model instance by {{id}} from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: ProductType
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted for this endpoint, plus the internal control kwargs.
    all_params = ['id', 'filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of local names; used as a plain dict below.
    params = locals()
    # Fail fast on any keyword argument this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_get`")

    collection_formats = {}
    # Legacy swagger-codegen `{format}` placeholder; always resolved to JSON.
    resource_path = '/ProductTypes/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}
    # Optional `filter` is passed through as a query-string parameter.
    if 'filter' in params:
        query_params['filter'] = params['filter']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    # Delegate transport, serialization and deserialization to ApiClient.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProductType',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_group_get(self, id, **kwargs):
    """
    Fetches belongsTo relation group.

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the callback is invoked with
    the response and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_group_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ProductType id (required)
    :param bool refresh:
    :return: ProductGroup
        If the method is called asynchronously,
        returns the request thread.
    """
    # Request the deserialized body only; the low-level call's return
    # value is identical in both the sync and async (callback) cases.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_group_get_with_http_info(id, **kwargs)
def product_types_id_group_get_with_http_info(self, id, **kwargs):
    """
    Fetches belongsTo relation group.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_group_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ProductType id (required)
    :param bool refresh:
    :return: ProductGroup
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted for this endpoint, plus the internal control kwargs.
    all_params = ['id', 'refresh']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of local names; used as a plain dict below.
    params = locals()
    # Fail fast on any keyword argument this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_group_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_group_get`")

    collection_formats = {}
    # Legacy swagger-codegen `{format}` placeholder; always resolved to JSON.
    resource_path = '/ProductTypes/{id}/group'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}
    # Optional `refresh` flag is passed through as a query-string parameter.
    if 'refresh' in params:
        query_params['refresh'] = params['refresh']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    # Delegate transport, serialization and deserialization to ApiClient.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProductGroup',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_head(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the callback is invoked with
    the response and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_head(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
        If the method is called asynchronously,
        returns the request thread.
    """
    # Request the deserialized body only; the low-level call's return
    # value is identical in both the sync and async (callback) cases.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_head_with_http_info(id, **kwargs)
def product_types_id_head_with_http_info(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_head_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted for this endpoint, plus the internal control kwargs.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of local names; used as a plain dict below.
    params = locals()
    # Fail fast on any keyword argument this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_head" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_head`")

    collection_formats = {}
    # Legacy swagger-codegen `{format}` placeholder; always resolved to JSON.
    resource_path = '/ProductTypes/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    # Delegate transport, serialization and deserialization to ApiClient.
    return self.api_client.call_api(resource_path, 'HEAD',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2002',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_patch(self, id, **kwargs):
    """
    Patch attributes for a model instance and persist it into the data source.

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the callback is invoked with
    the response and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_patch(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ProductType id (required)
    :param ProductType data: An object of model property name/value pairs
    :return: ProductType
        If the method is called asynchronously,
        returns the request thread.
    """
    # Request the deserialized body only; the low-level call's return
    # value is identical in both the sync and async (callback) cases.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_patch_with_http_info(id, **kwargs)
def product_types_id_patch_with_http_info(self, id, **kwargs):
    """
    Patch attributes for a model instance and persist it into the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_patch_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ProductType id (required)
    :param ProductType data: An object of model property name/value pairs
    :return: ProductType
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted for this endpoint, plus the internal control kwargs.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of local names; used as a plain dict below.
    params = locals()
    # Fail fast on any keyword argument this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_patch" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_patch`")

    collection_formats = {}
    # Legacy swagger-codegen `{format}` placeholder; always resolved to JSON.
    resource_path = '/ProductTypes/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # Optional `data` becomes the serialized request body.
    if 'data' in params:
        body_params = params['data']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    # Delegate transport, serialization and deserialization to ApiClient.
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProductType',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_put(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the callback is invoked with
    the response and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_put(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param ProductType data: Model instance data
    :return: ProductType
        If the method is called asynchronously,
        returns the request thread.
    """
    # Request the deserialized body only; the low-level call's return
    # value is identical in both the sync and async (callback) cases.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_put_with_http_info(id, **kwargs)
def product_types_id_put_with_http_info(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_put_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param ProductType data: Model instance data
    :return: ProductType
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted for this endpoint, plus the internal control kwargs.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of local names; used as a plain dict below.
    params = locals()
    # Fail fast on any keyword argument this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_put`")

    collection_formats = {}
    # Legacy swagger-codegen `{format}` placeholder; always resolved to JSON.
    resource_path = '/ProductTypes/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # Optional `data` becomes the serialized request body.
    if 'data' in params:
        body_params = params['data']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    # Delegate transport, serialization and deserialization to ApiClient.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProductType',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_replace_post(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the callback is invoked with
    the response and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_replace_post(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param ProductType data: Model instance data
    :return: ProductType
        If the method is called asynchronously,
        returns the request thread.
    """
    # Request the deserialized body only; the low-level call's return
    # value is identical in both the sync and async (callback) cases.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_replace_post_with_http_info(id, **kwargs)
def product_types_id_replace_post_with_http_info(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_replace_post_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param ProductType data: Model instance data
    :return: ProductType
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted for this endpoint, plus the internal control kwargs.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of local names; used as a plain dict below.
    params = locals()
    # Fail fast on any keyword argument this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_replace_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_replace_post`")

    collection_formats = {}
    # Legacy swagger-codegen `{format}` placeholder; always resolved to JSON.
    resource_path = '/ProductTypes/{id}/replace'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # Optional `data` becomes the serialized request body.
    if 'data' in params:
        body_params = params['data']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    # Delegate transport, serialization and deserialization to ApiClient.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProductType',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_sizes_count_get(self, id, **kwargs):
    """
    Counts sizes of ProductType.

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the callback is invoked with
    the response and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_sizes_count_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ProductType id (required)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """
    # Request the deserialized body only; the low-level call's return
    # value is identical in both the sync and async (callback) cases.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_sizes_count_get_with_http_info(id, **kwargs)
def product_types_id_sizes_count_get_with_http_info(self, id, **kwargs):
    """
    Counts sizes of ProductType.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_sizes_count_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ProductType id (required)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted for this endpoint, plus the internal control kwargs.
    all_params = ['id', 'where']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of local names; used as a plain dict below.
    params = locals()
    # Fail fast on any keyword argument this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_sizes_count_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_sizes_count_get`")

    collection_formats = {}
    # Legacy swagger-codegen `{format}` placeholder; always resolved to JSON.
    resource_path = '/ProductTypes/{id}/sizes/count'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}
    # Optional `where` criteria are passed through as a query-string parameter.
    if 'where' in params:
        query_params['where'] = params['where']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    # Delegate transport, serialization and deserialization to ApiClient.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2001',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_sizes_fk_delete(self, id, fk, **kwargs):
    """
    Delete a related item by id for sizes.

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the callback is invoked with
    the response and the request thread is returned instead.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_sizes_fk_delete(id, fk, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ProductType id (required)
    :param str fk: Foreign key for sizes (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Request the deserialized body only; the low-level call's return
    # value is identical in both the sync and async (callback) cases.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_sizes_fk_delete_with_http_info(id, fk, **kwargs)
def product_types_id_sizes_fk_delete_with_http_info(self, id, fk, **kwargs):
    """
    Delete a related item by id for sizes.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_sizes_fk_delete_with_http_info(id, fk, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ProductType id (required)
    :param str fk: Foreign key for sizes (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted for this endpoint, plus the internal control kwargs.
    all_params = ['id', 'fk']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of local names; used as a plain dict below.
    params = locals()
    # Fail fast on any keyword argument this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_sizes_fk_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_sizes_fk_delete`")
    # verify the required parameter 'fk' is set
    if ('fk' not in params) or (params['fk'] is None):
        raise ValueError("Missing the required parameter `fk` when calling `product_types_id_sizes_fk_delete`")

    collection_formats = {}
    # Legacy swagger-codegen `{format}` placeholder; always resolved to JSON.
    resource_path = '/ProductTypes/{id}/sizes/{fk}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'fk' in params:
        path_params['fk'] = params['fk']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    # Delegate transport, serialization and deserialization to ApiClient.
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_sizes_fk_get(self, id, fk, **kwargs):
    """
    Find a related item by id for sizes.

    Synchronous by default; supply a `callback` keyword argument to have
    the request performed asynchronously, in which case the request
    thread is returned and `callback` receives the response.

    :param str id: ProductType id (required)
    :param str fk: Foreign key for sizes (required)
    :param callback function: callback for asynchronous requests (optional)
    :return: ProductSize (or the request thread when asynchronous)
    """
    # Ask the *_with_http_info helper for just the deserialized payload
    # instead of the full (data, status, headers) tuple. The helper's
    # return value is handed back unchanged in both the sync and the
    # callback case, so a single return suffices.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_sizes_fk_get_with_http_info(id, fk, **kwargs)
def product_types_id_sizes_fk_get_with_http_info(self, id, fk, **kwargs):
    """
    Find a related item by id for sizes.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_sizes_fk_get_with_http_info(id, fk, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ProductType id (required)
    :param str fk: Foreign key for sizes (required)
    :return: ProductSize
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted as keyword arguments; anything else is a caller error.
    all_params = ['id', 'fk']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the current local names (self, id, fk, kwargs, all_params);
    # the extra keyword arguments are merged in below so every parameter is
    # validated and read through this one dict.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_sizes_fk_get" % key
            )
        params[key] = val
    # 'kwargs' itself is not a request parameter; drop it after the merge.
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_sizes_fk_get`")
    # verify the required parameter 'fk' is set
    if ('fk' not in params) or (params['fk'] is None):
        raise ValueError("Missing the required parameter `fk` when calling `product_types_id_sizes_fk_get`")
    collection_formats = {}
    resource_path = '/ProductTypes/{id}/sizes/{fk}'.replace('{format}', 'json')
    # Substituted into the {id}/{fk} placeholders of resource_path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'fk' in params:
        path_params['fk'] = params['fk']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept` - negotiated from the types this endpoint produces.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type` - negotiated from the types this endpoint consumes.
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Delegate the actual HTTP GET (and response deserialization into a
    # ProductSize) to the shared api_client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProductSize',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_sizes_fk_put(self, id, fk, **kwargs):
    """
    Update a related item by id for sizes.

    Synchronous by default; supply a `callback` keyword argument to have
    the request performed asynchronously, in which case the request
    thread is returned and `callback` receives the response.

    :param str id: ProductType id (required)
    :param str fk: Foreign key for sizes (required)
    :param ProductSize data: replacement item data (optional)
    :param callback function: callback for asynchronous requests (optional)
    :return: ProductSize (or the request thread when asynchronous)
    """
    # Request only the deserialized payload from the helper; its return
    # value (payload or request thread) is passed through unchanged.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_sizes_fk_put_with_http_info(id, fk, **kwargs)
def product_types_id_sizes_fk_put_with_http_info(self, id, fk, **kwargs):
    """
    Update a related item by id for sizes.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_sizes_fk_put_with_http_info(id, fk, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ProductType id (required)
    :param str fk: Foreign key for sizes (required)
    :param ProductSize data: replacement item data (optional)
    :return: ProductSize
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted as keyword arguments; anything else is a caller error.
    all_params = ['id', 'fk', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the current local names; extra keyword arguments are
    # merged in below so every parameter is read through this one dict.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_sizes_fk_put" % key
            )
        params[key] = val
    # 'kwargs' itself is not a request parameter; drop it after the merge.
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_sizes_fk_put`")
    # verify the required parameter 'fk' is set
    if ('fk' not in params) or (params['fk'] is None):
        raise ValueError("Missing the required parameter `fk` when calling `product_types_id_sizes_fk_put`")
    collection_formats = {}
    resource_path = '/ProductTypes/{id}/sizes/{fk}'.replace('{format}', 'json')
    # Substituted into the {id}/{fk} placeholders of resource_path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    if 'fk' in params:
        path_params['fk'] = params['fk']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Optional request body: the replacement ProductSize data.
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept` - negotiated from the types this endpoint produces.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type` - negotiated from the types this endpoint consumes.
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Delegate the actual HTTP PUT (and response deserialization into a
    # ProductSize) to the shared api_client.
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProductSize',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_sizes_get(self, id, **kwargs):
    """
    Queries sizes of ProductType.

    Synchronous by default; supply a `callback` keyword argument to have
    the request performed asynchronously, in which case the request
    thread is returned and `callback` receives the response.

    :param str id: ProductType id (required)
    :param str filter: query filter (optional)
    :param callback function: callback for asynchronous requests (optional)
    :return: list[ProductSize] (or the request thread when asynchronous)
    """
    # Request only the deserialized payload from the helper; its return
    # value (payload or request thread) is passed through unchanged.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_sizes_get_with_http_info(id, **kwargs)
def product_types_id_sizes_get_with_http_info(self, id, **kwargs):
    """
    Queries sizes of ProductType.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_sizes_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ProductType id (required)
    :param str filter: query filter (optional)
    :return: list[ProductSize]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted as keyword arguments; anything else is a caller error.
    all_params = ['id', 'filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the current local names; extra keyword arguments are
    # merged in below so every parameter is read through this one dict.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_sizes_get" % key
            )
        params[key] = val
    # 'kwargs' itself is not a request parameter; drop it after the merge.
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_sizes_get`")
    collection_formats = {}
    resource_path = '/ProductTypes/{id}/sizes'.replace('{format}', 'json')
    # Substituted into the {id} placeholder of resource_path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    # Optional 'filter' is sent as a query-string parameter.
    query_params = {}
    if 'filter' in params:
        query_params['filter'] = params['filter']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept` - negotiated from the types this endpoint produces.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type` - negotiated from the types this endpoint consumes.
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Delegate the actual HTTP GET (and response deserialization into a
    # list of ProductSize) to the shared api_client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[ProductSize]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_id_sizes_post(self, id, **kwargs):
    """
    Creates a new instance in sizes of this model.

    Synchronous by default; supply a `callback` keyword argument to have
    the request performed asynchronously, in which case the request
    thread is returned and `callback` receives the response.

    :param str id: ProductType id (required)
    :param ProductSize data: new item data (optional)
    :param callback function: callback for asynchronous requests (optional)
    :return: ProductSize (or the request thread when asynchronous)
    """
    # Request only the deserialized payload from the helper; its return
    # value (payload or request thread) is passed through unchanged.
    kwargs['_return_http_data_only'] = True
    return self.product_types_id_sizes_post_with_http_info(id, **kwargs)
def product_types_id_sizes_post_with_http_info(self, id, **kwargs):
    """
    Creates a new instance in sizes of this model.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_id_sizes_post_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ProductType id (required)
    :param ProductSize data: new item data (optional)
    :return: ProductSize
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted as keyword arguments; anything else is a caller error.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the current local names; extra keyword arguments are
    # merged in below so every parameter is read through this one dict.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_id_sizes_post" % key
            )
        params[key] = val
    # 'kwargs' itself is not a request parameter; drop it after the merge.
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `product_types_id_sizes_post`")
    collection_formats = {}
    resource_path = '/ProductTypes/{id}/sizes'.replace('{format}', 'json')
    # Substituted into the {id} placeholder of resource_path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Optional request body: the new ProductSize data.
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept` - negotiated from the types this endpoint produces.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type` - negotiated from the types this endpoint consumes.
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Delegate the actual HTTP POST (and response deserialization into a
    # ProductSize) to the shared api_client.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProductSize',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def product_types_post(self, **kwargs):
    """
    Create a new instance of the model and persist it into the data source.

    Synchronous by default; supply a `callback` keyword argument to have
    the request performed asynchronously, in which case the request
    thread is returned and `callback` receives the response.

    :param ProductType data: Model instance data (optional)
    :param callback function: callback for asynchronous requests (optional)
    :return: ProductType (or the request thread when asynchronous)
    """
    # Request only the deserialized payload from the helper; its return
    # value (payload or request thread) is passed through unchanged.
    kwargs['_return_http_data_only'] = True
    return self.product_types_post_with_http_info(**kwargs)
def product_types_post_with_http_info(self, **kwargs):
    """
    Create a new instance of the model and persist it into the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.product_types_post_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ProductType data: Model instance data
    :return: ProductType
        If the method is called asynchronously,
        returns the request thread.
    """
    # Names accepted as keyword arguments; anything else is a caller error.
    all_params = ['data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot of the current local names; extra keyword arguments are
    # merged in below so every parameter is read through this one dict.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method product_types_post" % key
            )
        params[key] = val
    # 'kwargs' itself is not a request parameter; drop it after the merge.
    del params['kwargs']
    # No required parameters for this endpoint - nothing to validate.
    collection_formats = {}
    resource_path = '/ProductTypes'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Optional request body: the new ProductType data.
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept` - negotiated from the types this endpoint produces.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type` - negotiated from the types this endpoint consumes.
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Delegate the actual HTTP POST (and response deserialization into a
    # ProductType) to the shared api_client.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ProductType',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
| 40.806838
| 165
| 0.561977
| 9,455
| 91,897
| 5.238075
| 0.028451
| 0.064613
| 0.032508
| 0.029076
| 0.975669
| 0.973166
| 0.971025
| 0.963958
| 0.959456
| 0.957679
| 0
| 0.000992
| 0.352775
| 91,897
| 2,251
| 166
| 40.824967
| 0.831686
| 0.317268
| 0
| 0.834915
| 0
| 0
| 0.178371
| 0.064203
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038899
| false
| 0
| 0.006641
| 0
| 0.103416
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fbd6a370ea58a345aa3c3436cbde654c8e8191fd
| 8,109
|
py
|
Python
|
tests/test_util.py
|
wiml/asn1crypto
|
98d23f5fcce5d3ed3296bac77aef487b6245807a
|
[
"MIT"
] | null | null | null |
tests/test_util.py
|
wiml/asn1crypto
|
98d23f5fcce5d3ed3296bac77aef487b6245807a
|
[
"MIT"
] | null | null | null |
tests/test_util.py
|
wiml/asn1crypto
|
98d23f5fcce5d3ed3296bac77aef487b6245807a
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import unittest
import sys
import os
from datetime import date, datetime, time
from asn1crypto import util
from .unittest_data import data_decorator
from ._unittest_compat import patch
# NOTE(review): presumably installs unittest compatibility shims for older
# Pythons - confirm against tests/_unittest_compat.
patch()

# Python 2/3 compatibility aliases used by the tests.
if sys.version_info < (3,):
    py2 = True
    byte_cls = str
    num_cls = long  # noqa
else:
    py2 = False
    byte_cls = bytes
    num_cls = int

# Paths resolved relative to this test module.
tests_root = os.path.dirname(__file__)
fixtures_dir = os.path.join(tests_root, 'fixtures')

# UTC tzinfo object taken from asn1crypto.util.
utc = util.timezone.utc
@data_decorator
class UtilTests(unittest.TestCase):
    # Tests for asn1crypto.util: the extended_date/extended_datetime types
    # (which accept year 0, as the constructor calls below show) and
    # iri_to_uri percent-encoding.

    def test_extended_date_strftime(self):
        # Year-0 dates must format through the usual strftime codes.
        self.assertEqual('0000-01-01', util.extended_date(0, 1, 1).strftime('%Y-%m-%d'))
        self.assertEqual('Sat Saturday Jan January', util.extended_date(0, 1, 1).strftime('%a %A %b %B'))
        # Feb 29 of year 0 is accepted (treated as a leap year here).
        self.assertEqual('Tue Tuesday Feb February 29', util.extended_date(0, 2, 29).strftime('%a %A %b %B %d'))
        # %c output differs on old Windows Pythons.
        if sys.platform == 'win32' and sys.version_info < (3, 5):
            self.assertEqual('01/01/00 00:00:00', util.extended_date(0, 1, 1).strftime('%c'))
        else:
            self.assertEqual('Sat Jan 1 00:00:00 0000', util.extended_date(0, 1, 1).strftime('%c'))
        self.assertEqual('01/01/00', util.extended_date(0, 1, 1).strftime('%x'))

    def test_extended_datetime_strftime(self):
        # Same formatting checks as above, for the datetime variant.
        self.assertEqual('0000-01-01 00:00:00', util.extended_datetime(0, 1, 1).strftime('%Y-%m-%d %H:%M:%S'))
        self.assertEqual('Sat Saturday Jan January', util.extended_datetime(0, 1, 1).strftime('%a %A %b %B'))
        self.assertEqual('Tue Tuesday Feb February 29', util.extended_datetime(0, 2, 29).strftime('%a %A %b %B %d'))
        # %c output differs on old Windows Pythons.
        if sys.platform == 'win32' and sys.version_info < (3, 5):
            self.assertEqual('01/01/00 00:00:00', util.extended_datetime(0, 1, 1).strftime('%c'))
        else:
            self.assertEqual('Sat Jan 1 00:00:00 0000', util.extended_datetime(0, 1, 1).strftime('%c'))
        self.assertEqual('01/01/00', util.extended_datetime(0, 1, 1).strftime('%x'))

    def test_extended_date_compare(self):
        # extended_date (year 0) sorts before any real datetime.date.
        self.assertTrue(util.extended_date(0, 1, 1) < date(1, 1, 1))
        self.assertTrue(util.extended_date(0, 1, 1) <= date(1, 1, 1))
        self.assertTrue(util.extended_date(0, 1, 1) != date(1, 1, 1))
        self.assertFalse(util.extended_date(0, 1, 1) == date(1, 1, 1))
        self.assertFalse(util.extended_date(0, 1, 1) >= date(1, 1, 1))
        self.assertFalse(util.extended_date(0, 1, 1) > date(1, 1, 1))
        # All six operators against an equal extended_date.
        self.assertFalse(util.extended_date(0, 1, 1) < util.extended_date(0, 1, 1))
        self.assertTrue(util.extended_date(0, 1, 1) <= util.extended_date(0, 1, 1))
        self.assertFalse(util.extended_date(0, 1, 1) != util.extended_date(0, 1, 1))
        self.assertTrue(util.extended_date(0, 1, 1) == util.extended_date(0, 1, 1))
        self.assertTrue(util.extended_date(0, 1, 1) >= util.extended_date(0, 1, 1))
        self.assertFalse(util.extended_date(0, 1, 1) > util.extended_date(0, 1, 1))
        # All six operators against a later extended_date.
        self.assertTrue(util.extended_date(0, 1, 1) < util.extended_date(0, 1, 2))
        self.assertTrue(util.extended_date(0, 1, 1) <= util.extended_date(0, 1, 2))
        self.assertTrue(util.extended_date(0, 1, 1) != util.extended_date(0, 1, 2))
        self.assertFalse(util.extended_date(0, 1, 1) == util.extended_date(0, 1, 2))
        self.assertFalse(util.extended_date(0, 1, 1) >= util.extended_date(0, 1, 2))
        self.assertFalse(util.extended_date(0, 1, 1) > util.extended_date(0, 1, 2))
        # All six operators against an earlier extended_date.
        self.assertFalse(util.extended_date(0, 1, 3) < util.extended_date(0, 1, 2))
        self.assertFalse(util.extended_date(0, 1, 3) <= util.extended_date(0, 1, 2))
        self.assertTrue(util.extended_date(0, 1, 3) != util.extended_date(0, 1, 2))
        self.assertFalse(util.extended_date(0, 1, 3) == util.extended_date(0, 1, 2))
        self.assertTrue(util.extended_date(0, 1, 3) >= util.extended_date(0, 1, 2))
        self.assertTrue(util.extended_date(0, 1, 3) > util.extended_date(0, 1, 2))

    def test_extended_datetime_compare(self):
        # extended_datetime (year 0) sorts before any real datetime.
        self.assertTrue(util.extended_datetime(0, 1, 1) < datetime(1, 1, 1))
        self.assertTrue(util.extended_datetime(0, 1, 1) <= datetime(1, 1, 1))
        self.assertTrue(util.extended_datetime(0, 1, 1) != datetime(1, 1, 1))
        self.assertFalse(util.extended_datetime(0, 1, 1) == datetime(1, 1, 1))
        self.assertFalse(util.extended_datetime(0, 1, 1) >= datetime(1, 1, 1))
        self.assertFalse(util.extended_datetime(0, 1, 1) > datetime(1, 1, 1))
        # All six operators against an equal extended_datetime.
        self.assertFalse(util.extended_datetime(0, 1, 1) < util.extended_datetime(0, 1, 1))
        self.assertTrue(util.extended_datetime(0, 1, 1) <= util.extended_datetime(0, 1, 1))
        self.assertFalse(util.extended_datetime(0, 1, 1) != util.extended_datetime(0, 1, 1))
        self.assertTrue(util.extended_datetime(0, 1, 1) == util.extended_datetime(0, 1, 1))
        self.assertTrue(util.extended_datetime(0, 1, 1) >= util.extended_datetime(0, 1, 1))
        self.assertFalse(util.extended_datetime(0, 1, 1) > util.extended_datetime(0, 1, 1))
        # All six operators against a later extended_datetime.
        self.assertTrue(util.extended_datetime(0, 1, 1) < util.extended_datetime(0, 1, 2))
        self.assertTrue(util.extended_datetime(0, 1, 1) <= util.extended_datetime(0, 1, 2))
        self.assertTrue(util.extended_datetime(0, 1, 1) != util.extended_datetime(0, 1, 2))
        self.assertFalse(util.extended_datetime(0, 1, 1) == util.extended_datetime(0, 1, 2))
        self.assertFalse(util.extended_datetime(0, 1, 1) >= util.extended_datetime(0, 1, 2))
        self.assertFalse(util.extended_datetime(0, 1, 1) > util.extended_datetime(0, 1, 2))
        # All six operators against an earlier extended_datetime.
        self.assertFalse(util.extended_datetime(0, 1, 3) < util.extended_datetime(0, 1, 2))
        self.assertFalse(util.extended_datetime(0, 1, 3) <= util.extended_datetime(0, 1, 2))
        self.assertTrue(util.extended_datetime(0, 1, 3) != util.extended_datetime(0, 1, 2))
        self.assertFalse(util.extended_datetime(0, 1, 3) == util.extended_datetime(0, 1, 2))
        self.assertTrue(util.extended_datetime(0, 1, 3) >= util.extended_datetime(0, 1, 2))
        self.assertTrue(util.extended_datetime(0, 1, 3) > util.extended_datetime(0, 1, 2))

    def test_extended_datetime_compare_tzinfo(self):
        # Mixing aware and naive values must raise, in either direction.
        with self.assertRaises(TypeError):
            self.assertTrue(util.extended_datetime(0, 1, 1, tzinfo=utc) < datetime(1, 1, 1))
        with self.assertRaises(TypeError):
            self.assertTrue(util.extended_datetime(0, 1, 1) < datetime(1, 1, 1, tzinfo=utc))

    def test_extended_datetime_date_time(self):
        # .date() yields an extended_date; .time() a plain datetime.time.
        self.assertEqual(util.extended_date(0, 1, 1), util.extended_datetime(0, 1, 1).date())
        self.assertEqual(util.extended_date(0, 2, 29), util.extended_datetime(0, 2, 29).date())
        self.assertEqual(time(0, 0, 0), util.extended_datetime(0, 1, 1).time())

    def test_iri_to_uri(self):
        # Spaces in LDAP IRIs are percent-encoded; the result is bytes.
        self.assertEqual(
            b'ldap://ldap.e-szigno.hu/CN=Microsec%20e-Szigno%20Root%20CA,OU=e-Szigno%20CA,'
            b'O=Microsec%20Ltd.,L=Budapest,C=HU?certificateRevocationList;binary',
            util.iri_to_uri(
                'ldap://ldap.e-szigno.hu/CN=Microsec e-Szigno Root CA,'
                'OU=e-Szigno CA,O=Microsec Ltd.,L=Budapest,C=HU?certificateRevocationList;binary'
            )
        )
        self.assertEqual(
            b'ldap://directory.d-trust.net/CN=D-TRUST%20Root%20Class%203%20CA%202%202009,'
            b'O=D-Trust%20GmbH,C=DE?certificaterevocationlist',
            util.iri_to_uri(
                'ldap://directory.d-trust.net/CN=D-TRUST Root Class 3 CA 2 2009,'
                'O=D-Trust GmbH,C=DE?certificaterevocationlist'
            )
        )
        self.assertEqual(
            b'ldap://directory.d-trust.net/CN=D-TRUST%20Root%20Class%203%20CA%202%20EV%202009,'
            b'O=D-Trust%20GmbH,C=DE?certificaterevocationlist',
            util.iri_to_uri(
                'ldap://directory.d-trust.net/CN=D-TRUST Root Class 3 CA 2 EV 2009,'
                'O=D-Trust GmbH,C=DE?certificaterevocationlist'
            )
        )
| 54.790541
| 116
| 0.647059
| 1,239
| 8,109
| 4.107345
| 0.103309
| 0.242877
| 0.037139
| 0.218707
| 0.861269
| 0.858911
| 0.824327
| 0.784437
| 0.717626
| 0.712517
| 0
| 0.07937
| 0.193612
| 8,109
| 147
| 117
| 55.163265
| 0.698884
| 0.00222
| 0
| 0.137097
| 0
| 0.048387
| 0.133037
| 0.077522
| 0
| 0
| 0
| 0
| 0.564516
| 1
| 0.056452
| false
| 0
| 0.064516
| 0
| 0.129032
| 0.008065
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.