| Column | Dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
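For orientation, here is a minimal sketch of how a record with this schema could be inspected, assuming one split has been exported to a local Parquet file (the file name `data.parquet` is hypothetical):

```python
# Minimal sketch: inspect one record of this schema, assuming a local
# Parquet export named "data.parquet" (hypothetical file name).
import pandas as pd

df = pd.read_parquet("data.parquet")

# Provenance metadata for the file on GitHub.
print(df.loc[0, ["hexsha", "size", "ext", "lang", "max_stars_repo_name"]])

# The raw source file lives in the `content` column.
print(df.loc[0, "content"][:200])

# All derived quality-signal columns share the `qsc_` prefix.
qsc_cols = [c for c in df.columns if c.startswith("qsc_")]
print(df.loc[0, qsc_cols])
```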
Example record 1 (a cookiecutter-templated package `__init__.py` from `Aiwizo/ml-workflow`):

| Field | Value |
|---|---|
| hexsha | 1d6eaa114d44b26662940c242b717f124a9a9555 |
| size | 276 |
| ext | py |
| lang | Python |
| max_stars_repo_path | template/{{cookiecutter.repository_name}}/{{cookiecutter.package_name}}/datastream/__init__.py |
| max_stars_repo_name | Aiwizo/ml-workflow |
| max_stars_repo_head_hexsha | 88e104fce571dd3b76914626a52f9001342c07cc |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | 4 |
| max_stars_repo_stars_event_min_datetime | 2020-09-23T15:39:24.000Z |
| max_stars_repo_stars_event_max_datetime | 2021-09-12T22:11:00.000Z |
| max_issues_repo_path | template/{{cookiecutter.repository_name}}/{{cookiecutter.package_name}}/datastream/__init__.py |
| max_issues_repo_name | Aiwizo/ml-workflow |
| max_issues_repo_head_hexsha | 88e104fce571dd3b76914626a52f9001342c07cc |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | 4 |
| max_issues_repo_issues_event_min_datetime | 2020-09-23T15:07:39.000Z |
| max_issues_repo_issues_event_max_datetime | 2020-10-30T10:26:24.000Z |
| max_forks_repo_path | template/{{cookiecutter.repository_name}}/{{cookiecutter.package_name}}/datastream/__init__.py |
| max_forks_repo_name | Aiwizo/ml-workflow |
| max_forks_repo_head_hexsha | 88e104fce571dd3b76914626a52f9001342c07cc |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

Its `content` cell (the `{{cookiecutter.*}}` placeholders are literal, since this file is a cookiecutter template):

```python
from {{cookiecutter.package_name}}.datastream.augmenter import augmenter
from {{cookiecutter.package_name}}.datastream.evaluate_datastreams import (
    evaluate_datastreams
)
from {{cookiecutter.package_name}}.datastream.gradient_datastream import (
    GradientDatastream
)
```
Its derived statistics, quality signals, and filter flags:

| Field | Value |
|---|---|
| avg_line_length | 34.5 |
| max_line_length | 75 |
| alphanum_fraction | 0.822464 |
| qsc_code_num_words_quality_signal | 27 |
| qsc_code_num_chars_quality_signal | 276 |
| qsc_code_mean_word_length_quality_signal | 8.185185 |
| qsc_code_frac_words_unique_quality_signal | 0.407407 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.217195 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.312217 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0.366516 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0.502262 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.086957 |
| qsc_code_size_file_byte_quality_signal | 276 |
| qsc_code_num_lines_quality_signal | 7 |
| qsc_code_num_chars_line_max_quality_signal | 76 |
| qsc_code_num_chars_line_mean_quality_signal | 39.428571 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.876984 |
| qsc_code_frac_chars_comments_quality_signal | 0 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0 |
| qsc_code_cate_autogen_quality_signal | 0 |
| qsc_code_frac_lines_long_string_quality_signal | 0 |
| qsc_code_frac_chars_string_length_quality_signal | 0 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0 |
| qsc_codepython_cate_ast_quality_signal | 0 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | null |
| qsc_codepython_cate_var_zero_quality_signal | null |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 0.428571 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | null |
| qsc_codepython_score_lines_no_logic_quality_signal | null |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 1 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 1 |
| qsc_code_frac_chars_top_3grams | 1 |
| qsc_code_frac_chars_top_4grams | 1 |
| qsc_code_frac_chars_dupe_5grams | 0 |
| qsc_code_frac_chars_dupe_6grams | 0 |
| qsc_code_frac_chars_dupe_7grams | 0 |
| qsc_code_frac_chars_dupe_8grams | 0 |
| qsc_code_frac_chars_dupe_9grams | 0 |
| qsc_code_frac_chars_dupe_10grams | 0 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 1 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 0 |
| qsc_code_cate_autogen | 0 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 1 |
| qsc_codepython_frac_lines_func_ratio | 0 |
| qsc_codepython_cate_var_zero | 0 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 1 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 0 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 7 |
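Several of the simpler signals in this record can be reproduced directly from the `content` string. A small sanity-check sketch (the exact formulas used by the original pipeline are assumptions inferred from the reported values):

```python
# Reproduce a few of record 1's statistics from its content string.
# The formulas are assumptions inferred from the reported values.
content = (
    "from {{cookiecutter.package_name}}.datastream.augmenter import augmenter\n"
    "from {{cookiecutter.package_name}}.datastream.evaluate_datastreams import (\n"
    "    evaluate_datastreams\n"
    ")\n"
    "from {{cookiecutter.package_name}}.datastream.gradient_datastream import (\n"
    "    GradientDatastream\n"
    ")\n"
)

assert len(content) == 276                               # size, qsc_code_size_file_byte_quality_signal
assert len(content.splitlines()) == 7                    # qsc_code_num_lines_quality_signal
assert max(len(l) for l in content.splitlines()) == 75   # max_line_length

# avg_line_length = 34.5 is consistent with dividing by the 8 "\n"-separated
# segments (including the empty one after the trailing newline): 276 / 8.
print(len(content) / len(content.split("\n")))           # 34.5

# Whitespace fraction: 24 of the 276 characters are spaces or newlines.
print(round(sum(c.isspace() for c in content) / len(content), 6))  # 0.086957
```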
Example record 2 (an auto-generated Pulumi SDK file from `pulumi/pulumi-alicloud`):

| Field | Value |
|---|---|
| hexsha | d51cd7a05fc2cad56cbb3e60a0ff4d0e1269c426 |
| size | 123822 |
| ext | py |
| lang | Python |
| max_stars_repo_path | sdk/python/pulumi_alicloud/dts/subscription_job.py |
| max_stars_repo_name | pulumi/pulumi-alicloud |
| max_stars_repo_head_hexsha | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 |
| max_stars_repo_licenses | ["ECL-2.0", "Apache-2.0"] |
| max_stars_count | 42 |
| max_stars_repo_stars_event_min_datetime | 2019-03-18T06:34:37.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-24T07:08:57.000Z |
| max_issues_repo_path | sdk/python/pulumi_alicloud/dts/subscription_job.py |
| max_issues_repo_name | pulumi/pulumi-alicloud |
| max_issues_repo_head_hexsha | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 |
| max_issues_repo_licenses | ["ECL-2.0", "Apache-2.0"] |
| max_issues_count | 152 |
| max_issues_repo_issues_event_min_datetime | 2019-04-15T21:03:44.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-29T18:00:57.000Z |
| max_forks_repo_path | sdk/python/pulumi_alicloud/dts/subscription_job.py |
| max_forks_repo_name | pulumi/pulumi-alicloud |
| max_forks_repo_head_hexsha | 9c34d84b4588a7c885c6bec1f03b5016e5a41683 |
| max_forks_repo_licenses | ["ECL-2.0", "Apache-2.0"] |
| max_forks_count | 3 |
| max_forks_repo_forks_event_min_datetime | 2020-08-26T17:30:07.000Z |
| max_forks_repo_forks_event_max_datetime | 2021-07-05T01:37:45.000Z |
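Record 2's `content` is a much larger file: a Pulumi SDK wrapper for the Alibaba Cloud DTS subscription job resource, generated by the Pulumi Terraform Bridge. For context, such a generated class is consumed like any other Pulumi resource; a minimal, hypothetical usage sketch (argument values are placeholders, and required argument combinations are described in the docstrings of the file itself):

```python
import pulumi_alicloud as alicloud

# Hypothetical usage of the generated resource wrapper shown below.
# Values are placeholders; see the docstrings for valid combinations.
job = alicloud.dts.SubscriptionJob(
    "example-subscription-job",
    payment_type="PayAsYouGo",
    dts_job_name="example",
    source_endpoint_engine_name="MySQL",
    source_endpoint_region="cn-hangzhou",
    subscription_instance_network_type="vpc",
)
```

The full `content` cell follows:

```python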
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['SubscriptionJobArgs', 'SubscriptionJob']
@pulumi.input_type
class SubscriptionJobArgs:
def __init__(__self__, *,
payment_type: pulumi.Input[str],
checkpoint: Optional[pulumi.Input[str]] = None,
compute_unit: Optional[pulumi.Input[int]] = None,
database_count: Optional[pulumi.Input[int]] = None,
db_list: Optional[pulumi.Input[str]] = None,
delay_notice: Optional[pulumi.Input[bool]] = None,
delay_phone: Optional[pulumi.Input[str]] = None,
delay_rule_time: Optional[pulumi.Input[str]] = None,
destination_endpoint_engine_name: Optional[pulumi.Input[str]] = None,
destination_region: Optional[pulumi.Input[str]] = None,
dts_instance_id: Optional[pulumi.Input[str]] = None,
dts_job_name: Optional[pulumi.Input[str]] = None,
error_notice: Optional[pulumi.Input[bool]] = None,
error_phone: Optional[pulumi.Input[str]] = None,
instance_class: Optional[pulumi.Input[str]] = None,
payment_duration: Optional[pulumi.Input[int]] = None,
payment_duration_unit: Optional[pulumi.Input[str]] = None,
reserve: Optional[pulumi.Input[str]] = None,
source_endpoint_database_name: Optional[pulumi.Input[str]] = None,
source_endpoint_engine_name: Optional[pulumi.Input[str]] = None,
source_endpoint_instance_id: Optional[pulumi.Input[str]] = None,
source_endpoint_instance_type: Optional[pulumi.Input[str]] = None,
source_endpoint_ip: Optional[pulumi.Input[str]] = None,
source_endpoint_oracle_sid: Optional[pulumi.Input[str]] = None,
source_endpoint_owner_id: Optional[pulumi.Input[str]] = None,
source_endpoint_password: Optional[pulumi.Input[str]] = None,
source_endpoint_port: Optional[pulumi.Input[str]] = None,
source_endpoint_region: Optional[pulumi.Input[str]] = None,
source_endpoint_role: Optional[pulumi.Input[str]] = None,
source_endpoint_user_name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
subscription_data_type_ddl: Optional[pulumi.Input[bool]] = None,
subscription_data_type_dml: Optional[pulumi.Input[bool]] = None,
subscription_instance_network_type: Optional[pulumi.Input[str]] = None,
subscription_instance_vpc_id: Optional[pulumi.Input[str]] = None,
subscription_instance_vswitch_id: Optional[pulumi.Input[str]] = None,
sync_architecture: Optional[pulumi.Input[str]] = None,
synchronization_direction: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None):
"""
The set of arguments for constructing a SubscriptionJob resource.
:param pulumi.Input[str] payment_type: The payment type of the resource. Valid values: `Subscription`, `PayAsYouGo`.
:param pulumi.Input[str] checkpoint: Subscription start time in Unix timestamp format.
:param pulumi.Input[int] compute_unit: [ETL specifications](https://help.aliyun.com/document_detail/212324.html). The unit is the computing unit ComputeUnit (CU), 1CU=1vCPU+4 GB memory. The value range is an integer greater than or equal to 2.
:param pulumi.Input[int] database_count: The number of private customized RDS instances under PolarDB-X. The default value is 1. This parameter needs to be passed only when `source_endpoint_engine_name` equals `drds`.
:param pulumi.Input[str] db_list: Subscription object, in the format of JSON strings. For detailed definitions, please refer to the description of migration, synchronization or subscription objects [document](https://help.aliyun.com/document_detail/209545.html).
:param pulumi.Input[bool] delay_notice: This parameter decides whether to monitor the delay status. Valid values: `true`, `false`.
        :param pulumi.Input[str] delay_phone: The mobile phone number of the contact to receive delay alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site, only mainland China numbers are supported, and up to 10 numbers can be passed in.
:param pulumi.Input[str] delay_rule_time: When `delay_notice` is set to `true`, this parameter must be passed in. The threshold for triggering the delay alarm. The unit is second and needs to be an integer. The threshold can be set according to business needs. It is recommended to set it above 10 seconds to avoid delay fluctuations caused by network and database load.
:param pulumi.Input[str] destination_endpoint_engine_name: The destination endpoint engine name. Valid values: `ADS`, `DB2`, `DRDS`, `DataHub`, `Greenplum`, `MSSQL`, `MySQL`, `PolarDB`, `PostgreSQL`, `Redis`, `Tablestore`, `as400`, `clickhouse`, `kafka`, `mongodb`, `odps`, `oracle`, `polardb_o`, `polardb_pg`, `tidb`.
:param pulumi.Input[str] destination_region: The destination region. List of [supported regions](https://help.aliyun.com/document_detail/141033.html).
:param pulumi.Input[str] dts_instance_id: The ID of subscription instance.
:param pulumi.Input[str] dts_job_name: The name of subscription task.
:param pulumi.Input[bool] error_notice: This parameter decides whether to monitor abnormal status. Valid values: `true`, `false`.
        :param pulumi.Input[str] error_phone: The mobile phone number of the contact to receive error alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site, only mainland China numbers are supported, and up to 10 numbers can be passed in.
:param pulumi.Input[str] instance_class: The instance class. Valid values: `large`, `medium`, `micro`, `small`, `xlarge`, `xxlarge`.
:param pulumi.Input[int] payment_duration: The duration of prepaid instance purchase. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
:param pulumi.Input[str] payment_duration_unit: The payment duration unit. Valid values: `Month`, `Year`. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
        :param pulumi.Input[str] reserve: DTS reserved parameters in the format of a JSON string. You can pass in this parameter to supplement source and target database information (such as the data storage format of the target Kafka database, or the instance ID of the Cloud Enterprise Network (CEN)). For more information, see the description of the [Reserve parameter](https://help.aliyun.com/document_detail/176470.html).
        :param pulumi.Input[str] source_endpoint_database_name: The name of the database to subscribe to.
        :param pulumi.Input[str] source_endpoint_engine_name: The source database type. Valid values: `MySQL`, `Oracle`.
        :param pulumi.Input[str] source_endpoint_instance_id: The ID of the source instance. This parameter is available and required only when the source database instance is RDS MySQL, PolarDB-X 1.0, or PolarDB MySQL.
:param pulumi.Input[str] source_endpoint_instance_type: The type of source instance. Valid values: `RDS`, `PolarDB`, `DRDS`, `LocalInstance`, `ECS`, `Express`, `CEN`, `dg`.
:param pulumi.Input[str] source_endpoint_ip: The IP of source endpoint.
:param pulumi.Input[str] source_endpoint_oracle_sid: The SID of Oracle Database. When the source database is self-built Oracle and the Oracle database is a non-RAC instance, this parameter is available and must be passed in.
:param pulumi.Input[str] source_endpoint_owner_id: The Alibaba Cloud account ID to which the source instance belongs. This parameter is only available when configuring data subscriptions across Alibaba Cloud accounts and must be passed in.
:param pulumi.Input[str] source_endpoint_password: The password of source database instance account.
:param pulumi.Input[str] source_endpoint_port: The port of source database.
:param pulumi.Input[str] source_endpoint_region: The region of source database.
        :param pulumi.Input[str] source_endpoint_role: The authorization role. When the Alibaba Cloud account that owns the source instance differs from the account used to configure the subscription task, this parameter must be passed in to specify the role that allows the configuring account to access the source instance information.
:param pulumi.Input[str] source_endpoint_user_name: The username of source database instance account.
        :param pulumi.Input[str] status: The status of the task. Valid values: `Normal`, `Abnormal`. When a task is created, it is in the `NotStarted` state. You can set this state to `Normal` to start the job, or to `Abnormal` to stop it. **Note: We treat the state `Starting` as `Normal`, and consider the two states to be consistent on the user side.**
:param pulumi.Input[bool] subscription_data_type_ddl: Whether to subscribe the DDL type of data. Valid values: `true`, `false`.
:param pulumi.Input[bool] subscription_data_type_dml: Whether to subscribe the DML type of data. Valid values: `true`, `false`.
        :param pulumi.Input[str] subscription_instance_network_type: The network type of the subscription task. Valid values: `classic` (classic network), `vpc` (Virtual Private Cloud).
:param pulumi.Input[str] subscription_instance_vpc_id: The ID of subscription vpc instance. When the value of `subscription_instance_network_type` is vpc, this parameter is available and must be passed in.
:param pulumi.Input[str] subscription_instance_vswitch_id: The ID of subscription VSwitch instance. When the value of `subscription_instance_network_type` is vpc, this parameter is available and must be passed in.
:param pulumi.Input[str] sync_architecture: The sync architecture. Valid values: `bidirectional`, `oneway`.
:param pulumi.Input[str] synchronization_direction: The synchronization direction. Valid values: `Forward`, `Reverse`. When the topology type of the data synchronization instance is bidirectional, it can be passed in to reverse to start the reverse synchronization link.
"""
pulumi.set(__self__, "payment_type", payment_type)
if checkpoint is not None:
pulumi.set(__self__, "checkpoint", checkpoint)
if compute_unit is not None:
pulumi.set(__self__, "compute_unit", compute_unit)
if database_count is not None:
pulumi.set(__self__, "database_count", database_count)
if db_list is not None:
pulumi.set(__self__, "db_list", db_list)
if delay_notice is not None:
pulumi.set(__self__, "delay_notice", delay_notice)
if delay_phone is not None:
pulumi.set(__self__, "delay_phone", delay_phone)
if delay_rule_time is not None:
pulumi.set(__self__, "delay_rule_time", delay_rule_time)
if destination_endpoint_engine_name is not None:
pulumi.set(__self__, "destination_endpoint_engine_name", destination_endpoint_engine_name)
if destination_region is not None:
pulumi.set(__self__, "destination_region", destination_region)
if dts_instance_id is not None:
pulumi.set(__self__, "dts_instance_id", dts_instance_id)
if dts_job_name is not None:
pulumi.set(__self__, "dts_job_name", dts_job_name)
if error_notice is not None:
pulumi.set(__self__, "error_notice", error_notice)
if error_phone is not None:
pulumi.set(__self__, "error_phone", error_phone)
if instance_class is not None:
pulumi.set(__self__, "instance_class", instance_class)
if payment_duration is not None:
pulumi.set(__self__, "payment_duration", payment_duration)
if payment_duration_unit is not None:
pulumi.set(__self__, "payment_duration_unit", payment_duration_unit)
if reserve is not None:
pulumi.set(__self__, "reserve", reserve)
if source_endpoint_database_name is not None:
pulumi.set(__self__, "source_endpoint_database_name", source_endpoint_database_name)
if source_endpoint_engine_name is not None:
pulumi.set(__self__, "source_endpoint_engine_name", source_endpoint_engine_name)
if source_endpoint_instance_id is not None:
pulumi.set(__self__, "source_endpoint_instance_id", source_endpoint_instance_id)
if source_endpoint_instance_type is not None:
pulumi.set(__self__, "source_endpoint_instance_type", source_endpoint_instance_type)
if source_endpoint_ip is not None:
pulumi.set(__self__, "source_endpoint_ip", source_endpoint_ip)
if source_endpoint_oracle_sid is not None:
pulumi.set(__self__, "source_endpoint_oracle_sid", source_endpoint_oracle_sid)
if source_endpoint_owner_id is not None:
pulumi.set(__self__, "source_endpoint_owner_id", source_endpoint_owner_id)
if source_endpoint_password is not None:
pulumi.set(__self__, "source_endpoint_password", source_endpoint_password)
if source_endpoint_port is not None:
pulumi.set(__self__, "source_endpoint_port", source_endpoint_port)
if source_endpoint_region is not None:
pulumi.set(__self__, "source_endpoint_region", source_endpoint_region)
if source_endpoint_role is not None:
pulumi.set(__self__, "source_endpoint_role", source_endpoint_role)
if source_endpoint_user_name is not None:
pulumi.set(__self__, "source_endpoint_user_name", source_endpoint_user_name)
if status is not None:
pulumi.set(__self__, "status", status)
if subscription_data_type_ddl is not None:
pulumi.set(__self__, "subscription_data_type_ddl", subscription_data_type_ddl)
if subscription_data_type_dml is not None:
pulumi.set(__self__, "subscription_data_type_dml", subscription_data_type_dml)
if subscription_instance_network_type is not None:
pulumi.set(__self__, "subscription_instance_network_type", subscription_instance_network_type)
if subscription_instance_vpc_id is not None:
pulumi.set(__self__, "subscription_instance_vpc_id", subscription_instance_vpc_id)
if subscription_instance_vswitch_id is not None:
pulumi.set(__self__, "subscription_instance_vswitch_id", subscription_instance_vswitch_id)
if sync_architecture is not None:
pulumi.set(__self__, "sync_architecture", sync_architecture)
if synchronization_direction is not None:
pulumi.set(__self__, "synchronization_direction", synchronization_direction)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="paymentType")
def payment_type(self) -> pulumi.Input[str]:
"""
The payment type of the resource. Valid values: `Subscription`, `PayAsYouGo`.
"""
return pulumi.get(self, "payment_type")
@payment_type.setter
def payment_type(self, value: pulumi.Input[str]):
pulumi.set(self, "payment_type", value)
@property
@pulumi.getter
def checkpoint(self) -> Optional[pulumi.Input[str]]:
"""
Subscription start time in Unix timestamp format.
"""
return pulumi.get(self, "checkpoint")
@checkpoint.setter
def checkpoint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "checkpoint", value)
@property
@pulumi.getter(name="computeUnit")
def compute_unit(self) -> Optional[pulumi.Input[int]]:
"""
[ETL specifications](https://help.aliyun.com/document_detail/212324.html). The unit is the computing unit ComputeUnit (CU), 1CU=1vCPU+4 GB memory. The value range is an integer greater than or equal to 2.
"""
return pulumi.get(self, "compute_unit")
@compute_unit.setter
def compute_unit(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "compute_unit", value)
@property
@pulumi.getter(name="databaseCount")
def database_count(self) -> Optional[pulumi.Input[int]]:
"""
The number of private customized RDS instances under PolarDB-X. The default value is 1. This parameter needs to be passed only when `source_endpoint_engine_name` equals `drds`.
"""
return pulumi.get(self, "database_count")
@database_count.setter
def database_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "database_count", value)
@property
@pulumi.getter(name="dbList")
def db_list(self) -> Optional[pulumi.Input[str]]:
"""
Subscription object, in the format of JSON strings. For detailed definitions, please refer to the description of migration, synchronization or subscription objects [document](https://help.aliyun.com/document_detail/209545.html).
"""
return pulumi.get(self, "db_list")
@db_list.setter
def db_list(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "db_list", value)
@property
@pulumi.getter(name="delayNotice")
def delay_notice(self) -> Optional[pulumi.Input[bool]]:
"""
This parameter decides whether to monitor the delay status. Valid values: `true`, `false`.
"""
return pulumi.get(self, "delay_notice")
@delay_notice.setter
def delay_notice(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "delay_notice", value)
@property
@pulumi.getter(name="delayPhone")
def delay_phone(self) -> Optional[pulumi.Input[str]]:
"""
        The mobile phone number of the contact to receive delay alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site, only mainland China numbers are supported, and up to 10 numbers can be passed in.
"""
return pulumi.get(self, "delay_phone")
@delay_phone.setter
def delay_phone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "delay_phone", value)
@property
@pulumi.getter(name="delayRuleTime")
def delay_rule_time(self) -> Optional[pulumi.Input[str]]:
"""
When `delay_notice` is set to `true`, this parameter must be passed in. The threshold for triggering the delay alarm. The unit is second and needs to be an integer. The threshold can be set according to business needs. It is recommended to set it above 10 seconds to avoid delay fluctuations caused by network and database load.
"""
return pulumi.get(self, "delay_rule_time")
@delay_rule_time.setter
def delay_rule_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "delay_rule_time", value)
@property
@pulumi.getter(name="destinationEndpointEngineName")
def destination_endpoint_engine_name(self) -> Optional[pulumi.Input[str]]:
"""
The destination endpoint engine name. Valid values: `ADS`, `DB2`, `DRDS`, `DataHub`, `Greenplum`, `MSSQL`, `MySQL`, `PolarDB`, `PostgreSQL`, `Redis`, `Tablestore`, `as400`, `clickhouse`, `kafka`, `mongodb`, `odps`, `oracle`, `polardb_o`, `polardb_pg`, `tidb`.
"""
return pulumi.get(self, "destination_endpoint_engine_name")
@destination_endpoint_engine_name.setter
def destination_endpoint_engine_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "destination_endpoint_engine_name", value)
@property
@pulumi.getter(name="destinationRegion")
def destination_region(self) -> Optional[pulumi.Input[str]]:
"""
The destination region. List of [supported regions](https://help.aliyun.com/document_detail/141033.html).
"""
return pulumi.get(self, "destination_region")
@destination_region.setter
def destination_region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "destination_region", value)
@property
@pulumi.getter(name="dtsInstanceId")
def dts_instance_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of subscription instance.
"""
return pulumi.get(self, "dts_instance_id")
@dts_instance_id.setter
def dts_instance_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "dts_instance_id", value)
@property
@pulumi.getter(name="dtsJobName")
def dts_job_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of subscription task.
"""
return pulumi.get(self, "dts_job_name")
@dts_job_name.setter
def dts_job_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "dts_job_name", value)
@property
@pulumi.getter(name="errorNotice")
def error_notice(self) -> Optional[pulumi.Input[bool]]:
"""
This parameter decides whether to monitor abnormal status. Valid values: `true`, `false`.
"""
return pulumi.get(self, "error_notice")
@error_notice.setter
def error_notice(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "error_notice", value)
@property
@pulumi.getter(name="errorPhone")
def error_phone(self) -> Optional[pulumi.Input[str]]:
"""
        The mobile phone number of the contact to receive error alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site, only mainland China numbers are supported, and up to 10 numbers can be passed in.
"""
return pulumi.get(self, "error_phone")
@error_phone.setter
def error_phone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "error_phone", value)
@property
@pulumi.getter(name="instanceClass")
def instance_class(self) -> Optional[pulumi.Input[str]]:
"""
The instance class. Valid values: `large`, `medium`, `micro`, `small`, `xlarge`, `xxlarge`.
"""
return pulumi.get(self, "instance_class")
@instance_class.setter
def instance_class(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_class", value)
@property
@pulumi.getter(name="paymentDuration")
def payment_duration(self) -> Optional[pulumi.Input[int]]:
"""
The duration of prepaid instance purchase. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
"""
return pulumi.get(self, "payment_duration")
@payment_duration.setter
def payment_duration(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "payment_duration", value)
@property
@pulumi.getter(name="paymentDurationUnit")
def payment_duration_unit(self) -> Optional[pulumi.Input[str]]:
"""
The payment duration unit. Valid values: `Month`, `Year`. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
"""
return pulumi.get(self, "payment_duration_unit")
@payment_duration_unit.setter
def payment_duration_unit(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "payment_duration_unit", value)
@property
@pulumi.getter
def reserve(self) -> Optional[pulumi.Input[str]]:
"""
        DTS reserved parameters in the format of a JSON string. You can pass in this parameter to supplement source and target database information (such as the data storage format of the target Kafka database, or the instance ID of the Cloud Enterprise Network (CEN)). For more information, see the description of the [Reserve parameter](https://help.aliyun.com/document_detail/176470.html).
"""
return pulumi.get(self, "reserve")
@reserve.setter
def reserve(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "reserve", value)
@property
@pulumi.getter(name="sourceEndpointDatabaseName")
def source_endpoint_database_name(self) -> Optional[pulumi.Input[str]]:
"""
        The name of the database to subscribe to.
"""
return pulumi.get(self, "source_endpoint_database_name")
@source_endpoint_database_name.setter
def source_endpoint_database_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_database_name", value)
@property
@pulumi.getter(name="sourceEndpointEngineName")
def source_endpoint_engine_name(self) -> Optional[pulumi.Input[str]]:
"""
        The source database type. Valid values: `MySQL`, `Oracle`.
"""
return pulumi.get(self, "source_endpoint_engine_name")
@source_endpoint_engine_name.setter
def source_endpoint_engine_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_engine_name", value)
@property
@pulumi.getter(name="sourceEndpointInstanceId")
def source_endpoint_instance_id(self) -> Optional[pulumi.Input[str]]:
"""
        The ID of the source instance. This parameter is available and required only when the source database instance is RDS MySQL, PolarDB-X 1.0, or PolarDB MySQL.
"""
return pulumi.get(self, "source_endpoint_instance_id")
@source_endpoint_instance_id.setter
def source_endpoint_instance_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_instance_id", value)
@property
@pulumi.getter(name="sourceEndpointInstanceType")
def source_endpoint_instance_type(self) -> Optional[pulumi.Input[str]]:
"""
The type of source instance. Valid values: `RDS`, `PolarDB`, `DRDS`, `LocalInstance`, `ECS`, `Express`, `CEN`, `dg`.
"""
return pulumi.get(self, "source_endpoint_instance_type")
@source_endpoint_instance_type.setter
def source_endpoint_instance_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_instance_type", value)
@property
@pulumi.getter(name="sourceEndpointIp")
def source_endpoint_ip(self) -> Optional[pulumi.Input[str]]:
"""
The IP of source endpoint.
"""
return pulumi.get(self, "source_endpoint_ip")
@source_endpoint_ip.setter
def source_endpoint_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_ip", value)
@property
@pulumi.getter(name="sourceEndpointOracleSid")
def source_endpoint_oracle_sid(self) -> Optional[pulumi.Input[str]]:
"""
The SID of Oracle Database. When the source database is self-built Oracle and the Oracle database is a non-RAC instance, this parameter is available and must be passed in.
"""
return pulumi.get(self, "source_endpoint_oracle_sid")
@source_endpoint_oracle_sid.setter
def source_endpoint_oracle_sid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_oracle_sid", value)
@property
@pulumi.getter(name="sourceEndpointOwnerId")
def source_endpoint_owner_id(self) -> Optional[pulumi.Input[str]]:
"""
The Alibaba Cloud account ID to which the source instance belongs. This parameter is only available when configuring data subscriptions across Alibaba Cloud accounts and must be passed in.
"""
return pulumi.get(self, "source_endpoint_owner_id")
@source_endpoint_owner_id.setter
def source_endpoint_owner_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_owner_id", value)
@property
@pulumi.getter(name="sourceEndpointPassword")
def source_endpoint_password(self) -> Optional[pulumi.Input[str]]:
"""
The password of source database instance account.
"""
return pulumi.get(self, "source_endpoint_password")
@source_endpoint_password.setter
def source_endpoint_password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_password", value)
@property
@pulumi.getter(name="sourceEndpointPort")
def source_endpoint_port(self) -> Optional[pulumi.Input[str]]:
"""
The port of source database.
"""
return pulumi.get(self, "source_endpoint_port")
@source_endpoint_port.setter
def source_endpoint_port(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_port", value)
@property
@pulumi.getter(name="sourceEndpointRegion")
def source_endpoint_region(self) -> Optional[pulumi.Input[str]]:
"""
The region of source database.
"""
return pulumi.get(self, "source_endpoint_region")
@source_endpoint_region.setter
def source_endpoint_region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_region", value)
@property
@pulumi.getter(name="sourceEndpointRole")
def source_endpoint_role(self) -> Optional[pulumi.Input[str]]:
"""
        The authorization role. When the Alibaba Cloud account that owns the source instance differs from the account used to configure the subscription task, this parameter must be passed in to specify the role that allows the configuring account to access the source instance information.
"""
return pulumi.get(self, "source_endpoint_role")
@source_endpoint_role.setter
def source_endpoint_role(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_role", value)
@property
@pulumi.getter(name="sourceEndpointUserName")
def source_endpoint_user_name(self) -> Optional[pulumi.Input[str]]:
"""
The username of source database instance account.
"""
return pulumi.get(self, "source_endpoint_user_name")
@source_endpoint_user_name.setter
def source_endpoint_user_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_user_name", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
"""
        The status of the task. Valid values: `Normal`, `Abnormal`. When a task is created, it is in the `NotStarted` state. You can set this state to `Normal` to start the job, or to `Abnormal` to stop it. **Note: We treat the state `Starting` as `Normal`, and consider the two states to be consistent on the user side.**
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
@property
@pulumi.getter(name="subscriptionDataTypeDdl")
def subscription_data_type_ddl(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to subscribe the DDL type of data. Valid values: `true`, `false`.
"""
return pulumi.get(self, "subscription_data_type_ddl")
@subscription_data_type_ddl.setter
def subscription_data_type_ddl(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "subscription_data_type_ddl", value)
@property
@pulumi.getter(name="subscriptionDataTypeDml")
def subscription_data_type_dml(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to subscribe the DML type of data. Valid values: `true`, `false`.
"""
return pulumi.get(self, "subscription_data_type_dml")
@subscription_data_type_dml.setter
def subscription_data_type_dml(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "subscription_data_type_dml", value)
@property
@pulumi.getter(name="subscriptionInstanceNetworkType")
def subscription_instance_network_type(self) -> Optional[pulumi.Input[str]]:
"""
        The network type of the subscription task. Valid values: `classic` (classic network), `vpc` (Virtual Private Cloud).
"""
return pulumi.get(self, "subscription_instance_network_type")
@subscription_instance_network_type.setter
def subscription_instance_network_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subscription_instance_network_type", value)
@property
@pulumi.getter(name="subscriptionInstanceVpcId")
def subscription_instance_vpc_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of subscription vpc instance. When the value of `subscription_instance_network_type` is vpc, this parameter is available and must be passed in.
"""
return pulumi.get(self, "subscription_instance_vpc_id")
@subscription_instance_vpc_id.setter
def subscription_instance_vpc_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subscription_instance_vpc_id", value)
@property
@pulumi.getter(name="subscriptionInstanceVswitchId")
def subscription_instance_vswitch_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of subscription VSwitch instance. When the value of `subscription_instance_network_type` is vpc, this parameter is available and must be passed in.
"""
return pulumi.get(self, "subscription_instance_vswitch_id")
@subscription_instance_vswitch_id.setter
def subscription_instance_vswitch_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subscription_instance_vswitch_id", value)
@property
@pulumi.getter(name="syncArchitecture")
def sync_architecture(self) -> Optional[pulumi.Input[str]]:
"""
The sync architecture. Valid values: `bidirectional`, `oneway`.
"""
return pulumi.get(self, "sync_architecture")
@sync_architecture.setter
def sync_architecture(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sync_architecture", value)
@property
@pulumi.getter(name="synchronizationDirection")
def synchronization_direction(self) -> Optional[pulumi.Input[str]]:
"""
The synchronization direction. Valid values: `Forward`, `Reverse`. When the topology type of the data synchronization instance is bidirectional, it can be passed in to reverse to start the reverse synchronization link.
"""
return pulumi.get(self, "synchronization_direction")
@synchronization_direction.setter
def synchronization_direction(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "synchronization_direction", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class _SubscriptionJobState:
def __init__(__self__, *,
checkpoint: Optional[pulumi.Input[str]] = None,
compute_unit: Optional[pulumi.Input[int]] = None,
database_count: Optional[pulumi.Input[int]] = None,
db_list: Optional[pulumi.Input[str]] = None,
delay_notice: Optional[pulumi.Input[bool]] = None,
delay_phone: Optional[pulumi.Input[str]] = None,
delay_rule_time: Optional[pulumi.Input[str]] = None,
destination_endpoint_engine_name: Optional[pulumi.Input[str]] = None,
destination_region: Optional[pulumi.Input[str]] = None,
dts_instance_id: Optional[pulumi.Input[str]] = None,
dts_job_name: Optional[pulumi.Input[str]] = None,
error_notice: Optional[pulumi.Input[bool]] = None,
error_phone: Optional[pulumi.Input[str]] = None,
instance_class: Optional[pulumi.Input[str]] = None,
payment_duration: Optional[pulumi.Input[int]] = None,
payment_duration_unit: Optional[pulumi.Input[str]] = None,
payment_type: Optional[pulumi.Input[str]] = None,
reserve: Optional[pulumi.Input[str]] = None,
source_endpoint_database_name: Optional[pulumi.Input[str]] = None,
source_endpoint_engine_name: Optional[pulumi.Input[str]] = None,
source_endpoint_instance_id: Optional[pulumi.Input[str]] = None,
source_endpoint_instance_type: Optional[pulumi.Input[str]] = None,
source_endpoint_ip: Optional[pulumi.Input[str]] = None,
source_endpoint_oracle_sid: Optional[pulumi.Input[str]] = None,
source_endpoint_owner_id: Optional[pulumi.Input[str]] = None,
source_endpoint_password: Optional[pulumi.Input[str]] = None,
source_endpoint_port: Optional[pulumi.Input[str]] = None,
source_endpoint_region: Optional[pulumi.Input[str]] = None,
source_endpoint_role: Optional[pulumi.Input[str]] = None,
source_endpoint_user_name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
subscription_data_type_ddl: Optional[pulumi.Input[bool]] = None,
subscription_data_type_dml: Optional[pulumi.Input[bool]] = None,
subscription_instance_network_type: Optional[pulumi.Input[str]] = None,
subscription_instance_vpc_id: Optional[pulumi.Input[str]] = None,
subscription_instance_vswitch_id: Optional[pulumi.Input[str]] = None,
sync_architecture: Optional[pulumi.Input[str]] = None,
synchronization_direction: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None):
"""
Input properties used for looking up and filtering SubscriptionJob resources.
:param pulumi.Input[str] checkpoint: Subscription start time in Unix timestamp format.
:param pulumi.Input[int] compute_unit: [ETL specifications](https://help.aliyun.com/document_detail/212324.html). The unit is the computing unit ComputeUnit (CU), 1CU=1vCPU+4 GB memory. The value range is an integer greater than or equal to 2.
:param pulumi.Input[int] database_count: The number of private customized RDS instances under PolarDB-X. The default value is 1. This parameter needs to be passed only when `source_endpoint_engine_name` equals `drds`.
:param pulumi.Input[str] db_list: Subscription object, in the format of JSON strings. For detailed definitions, please refer to the description of migration, synchronization or subscription objects [document](https://help.aliyun.com/document_detail/209545.html).
:param pulumi.Input[bool] delay_notice: This parameter decides whether to monitor the delay status. Valid values: `true`, `false`.
        :param pulumi.Input[str] delay_phone: The mobile phone number of the contact to receive delay alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site, only mainland China numbers are supported, and up to 10 numbers can be passed in.
:param pulumi.Input[str] delay_rule_time: When `delay_notice` is set to `true`, this parameter must be passed in. The threshold for triggering the delay alarm. The unit is second and needs to be an integer. The threshold can be set according to business needs. It is recommended to set it above 10 seconds to avoid delay fluctuations caused by network and database load.
:param pulumi.Input[str] destination_endpoint_engine_name: The destination endpoint engine name. Valid values: `ADS`, `DB2`, `DRDS`, `DataHub`, `Greenplum`, `MSSQL`, `MySQL`, `PolarDB`, `PostgreSQL`, `Redis`, `Tablestore`, `as400`, `clickhouse`, `kafka`, `mongodb`, `odps`, `oracle`, `polardb_o`, `polardb_pg`, `tidb`.
:param pulumi.Input[str] destination_region: The destination region. List of [supported regions](https://help.aliyun.com/document_detail/141033.html).
:param pulumi.Input[str] dts_instance_id: The ID of subscription instance.
:param pulumi.Input[str] dts_job_name: The name of subscription task.
:param pulumi.Input[bool] error_notice: This parameter decides whether to monitor abnormal status. Valid values: `true`, `false`.
        :param pulumi.Input[str] error_phone: The mobile phone number of the contact to receive error alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site, only mainland China numbers are supported, and up to 10 numbers can be passed in.
:param pulumi.Input[str] instance_class: The instance class. Valid values: `large`, `medium`, `micro`, `small`, `xlarge`, `xxlarge`.
:param pulumi.Input[int] payment_duration: The duration of prepaid instance purchase. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
:param pulumi.Input[str] payment_duration_unit: The payment duration unit. Valid values: `Month`, `Year`. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
:param pulumi.Input[str] payment_type: The payment type of the resource. Valid values: `Subscription`, `PayAsYouGo`.
        :param pulumi.Input[str] reserve: DTS reserved parameters in the format of a JSON string. You can pass in this parameter to supplement source and target database information (such as the data storage format of the target Kafka database, or the instance ID of the Cloud Enterprise Network (CEN)). For more information, see the description of the [Reserve parameter](https://help.aliyun.com/document_detail/176470.html).
        :param pulumi.Input[str] source_endpoint_database_name: The name of the database to subscribe to.
        :param pulumi.Input[str] source_endpoint_engine_name: The source database type. Valid values: `MySQL`, `Oracle`.
        :param pulumi.Input[str] source_endpoint_instance_id: The ID of the source instance. This parameter is available and required only when the source database instance is RDS MySQL, PolarDB-X 1.0, or PolarDB MySQL.
:param pulumi.Input[str] source_endpoint_instance_type: The type of source instance. Valid values: `RDS`, `PolarDB`, `DRDS`, `LocalInstance`, `ECS`, `Express`, `CEN`, `dg`.
:param pulumi.Input[str] source_endpoint_ip: The IP of source endpoint.
:param pulumi.Input[str] source_endpoint_oracle_sid: The SID of Oracle Database. When the source database is self-built Oracle and the Oracle database is a non-RAC instance, this parameter is available and must be passed in.
:param pulumi.Input[str] source_endpoint_owner_id: The Alibaba Cloud account ID to which the source instance belongs. This parameter is only available when configuring data subscriptions across Alibaba Cloud accounts and must be passed in.
:param pulumi.Input[str] source_endpoint_password: The password of source database instance account.
:param pulumi.Input[str] source_endpoint_port: The port of source database.
:param pulumi.Input[str] source_endpoint_region: The region of source database.
        :param pulumi.Input[str] source_endpoint_role: The authorization role. When the Alibaba Cloud account that owns the source instance differs from the account used to configure the subscription task, this parameter must be passed in to specify the role that allows the configuring account to access the source instance information.
:param pulumi.Input[str] source_endpoint_user_name: The username of source database instance account.
        :param pulumi.Input[str] status: The status of the task. Valid values: `Normal`, `Abnormal`. When a task is created, it is in the `NotStarted` state. You can set this state to `Normal` to start the job, or to `Abnormal` to stop it. **Note: We treat the state `Starting` as `Normal`, and consider the two states to be consistent on the user side.**
:param pulumi.Input[bool] subscription_data_type_ddl: Whether to subscribe the DDL type of data. Valid values: `true`, `false`.
:param pulumi.Input[bool] subscription_data_type_dml: Whether to subscribe the DML type of data. Valid values: `true`, `false`.
        :param pulumi.Input[str] subscription_instance_network_type: The network type of the subscription task. Valid values: `classic` (classic network), `vpc` (Virtual Private Cloud).
:param pulumi.Input[str] subscription_instance_vpc_id: The ID of subscription vpc instance. When the value of `subscription_instance_network_type` is vpc, this parameter is available and must be passed in.
:param pulumi.Input[str] subscription_instance_vswitch_id: The ID of subscription VSwitch instance. When the value of `subscription_instance_network_type` is vpc, this parameter is available and must be passed in.
:param pulumi.Input[str] sync_architecture: The sync architecture. Valid values: `bidirectional`, `oneway`.
:param pulumi.Input[str] synchronization_direction: The synchronization direction. Valid values: `Forward`, `Reverse`. When the topology type of the data synchronization instance is bidirectional, it can be passed in to reverse to start the reverse synchronization link.
"""
if checkpoint is not None:
pulumi.set(__self__, "checkpoint", checkpoint)
if compute_unit is not None:
pulumi.set(__self__, "compute_unit", compute_unit)
if database_count is not None:
pulumi.set(__self__, "database_count", database_count)
if db_list is not None:
pulumi.set(__self__, "db_list", db_list)
if delay_notice is not None:
pulumi.set(__self__, "delay_notice", delay_notice)
if delay_phone is not None:
pulumi.set(__self__, "delay_phone", delay_phone)
if delay_rule_time is not None:
pulumi.set(__self__, "delay_rule_time", delay_rule_time)
if destination_endpoint_engine_name is not None:
pulumi.set(__self__, "destination_endpoint_engine_name", destination_endpoint_engine_name)
if destination_region is not None:
pulumi.set(__self__, "destination_region", destination_region)
if dts_instance_id is not None:
pulumi.set(__self__, "dts_instance_id", dts_instance_id)
if dts_job_name is not None:
pulumi.set(__self__, "dts_job_name", dts_job_name)
if error_notice is not None:
pulumi.set(__self__, "error_notice", error_notice)
if error_phone is not None:
pulumi.set(__self__, "error_phone", error_phone)
if instance_class is not None:
pulumi.set(__self__, "instance_class", instance_class)
if payment_duration is not None:
pulumi.set(__self__, "payment_duration", payment_duration)
if payment_duration_unit is not None:
pulumi.set(__self__, "payment_duration_unit", payment_duration_unit)
if payment_type is not None:
pulumi.set(__self__, "payment_type", payment_type)
if reserve is not None:
pulumi.set(__self__, "reserve", reserve)
if source_endpoint_database_name is not None:
pulumi.set(__self__, "source_endpoint_database_name", source_endpoint_database_name)
if source_endpoint_engine_name is not None:
pulumi.set(__self__, "source_endpoint_engine_name", source_endpoint_engine_name)
if source_endpoint_instance_id is not None:
pulumi.set(__self__, "source_endpoint_instance_id", source_endpoint_instance_id)
if source_endpoint_instance_type is not None:
pulumi.set(__self__, "source_endpoint_instance_type", source_endpoint_instance_type)
if source_endpoint_ip is not None:
pulumi.set(__self__, "source_endpoint_ip", source_endpoint_ip)
if source_endpoint_oracle_sid is not None:
pulumi.set(__self__, "source_endpoint_oracle_sid", source_endpoint_oracle_sid)
if source_endpoint_owner_id is not None:
pulumi.set(__self__, "source_endpoint_owner_id", source_endpoint_owner_id)
if source_endpoint_password is not None:
pulumi.set(__self__, "source_endpoint_password", source_endpoint_password)
if source_endpoint_port is not None:
pulumi.set(__self__, "source_endpoint_port", source_endpoint_port)
if source_endpoint_region is not None:
pulumi.set(__self__, "source_endpoint_region", source_endpoint_region)
if source_endpoint_role is not None:
pulumi.set(__self__, "source_endpoint_role", source_endpoint_role)
if source_endpoint_user_name is not None:
pulumi.set(__self__, "source_endpoint_user_name", source_endpoint_user_name)
if status is not None:
pulumi.set(__self__, "status", status)
if subscription_data_type_ddl is not None:
pulumi.set(__self__, "subscription_data_type_ddl", subscription_data_type_ddl)
if subscription_data_type_dml is not None:
pulumi.set(__self__, "subscription_data_type_dml", subscription_data_type_dml)
if subscription_instance_network_type is not None:
pulumi.set(__self__, "subscription_instance_network_type", subscription_instance_network_type)
if subscription_instance_vpc_id is not None:
pulumi.set(__self__, "subscription_instance_vpc_id", subscription_instance_vpc_id)
if subscription_instance_vswitch_id is not None:
pulumi.set(__self__, "subscription_instance_vswitch_id", subscription_instance_vswitch_id)
if sync_architecture is not None:
pulumi.set(__self__, "sync_architecture", sync_architecture)
if synchronization_direction is not None:
pulumi.set(__self__, "synchronization_direction", synchronization_direction)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def checkpoint(self) -> Optional[pulumi.Input[str]]:
"""
Subscription start time in Unix timestamp format.
"""
return pulumi.get(self, "checkpoint")
@checkpoint.setter
def checkpoint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "checkpoint", value)
@property
@pulumi.getter(name="computeUnit")
def compute_unit(self) -> Optional[pulumi.Input[int]]:
"""
[ETL specifications](https://help.aliyun.com/document_detail/212324.html). The unit is the computing unit ComputeUnit (CU), 1CU=1vCPU+4 GB memory. The value range is an integer greater than or equal to 2.
"""
return pulumi.get(self, "compute_unit")
@compute_unit.setter
def compute_unit(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "compute_unit", value)
@property
@pulumi.getter(name="databaseCount")
def database_count(self) -> Optional[pulumi.Input[int]]:
"""
The number of private customized RDS instances under PolarDB-X. The default value is 1. This parameter needs to be passed only when `source_endpoint_engine_name` equals `drds`.
"""
return pulumi.get(self, "database_count")
@database_count.setter
def database_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "database_count", value)
@property
@pulumi.getter(name="dbList")
def db_list(self) -> Optional[pulumi.Input[str]]:
"""
Subscription object, in the format of JSON strings. For detailed definitions, please refer to the description of migration, synchronization or subscription objects [document](https://help.aliyun.com/document_detail/209545.html).
"""
return pulumi.get(self, "db_list")
@db_list.setter
def db_list(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "db_list", value)
@property
@pulumi.getter(name="delayNotice")
def delay_notice(self) -> Optional[pulumi.Input[bool]]:
"""
This parameter decides whether to monitor the delay status. Valid values: `true`, `false`.
"""
return pulumi.get(self, "delay_notice")
@delay_notice.setter
def delay_notice(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "delay_notice", value)
@property
@pulumi.getter(name="delayPhone")
def delay_phone(self) -> Optional[pulumi.Input[str]]:
"""
        The mobile phone number of the contact to receive delay alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site, only mainland China numbers are supported, and up to 10 numbers can be passed in.
"""
return pulumi.get(self, "delay_phone")
@delay_phone.setter
def delay_phone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "delay_phone", value)
@property
@pulumi.getter(name="delayRuleTime")
def delay_rule_time(self) -> Optional[pulumi.Input[str]]:
"""
When `delay_notice` is set to `true`, this parameter must be passed in. The threshold for triggering the delay alarm. The unit is second and needs to be an integer. The threshold can be set according to business needs. It is recommended to set it above 10 seconds to avoid delay fluctuations caused by network and database load.
"""
return pulumi.get(self, "delay_rule_time")
@delay_rule_time.setter
def delay_rule_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "delay_rule_time", value)
@property
@pulumi.getter(name="destinationEndpointEngineName")
def destination_endpoint_engine_name(self) -> Optional[pulumi.Input[str]]:
"""
The destination endpoint engine name. Valid values: `ADS`, `DB2`, `DRDS`, `DataHub`, `Greenplum`, `MSSQL`, `MySQL`, `PolarDB`, `PostgreSQL`, `Redis`, `Tablestore`, `as400`, `clickhouse`, `kafka`, `mongodb`, `odps`, `oracle`, `polardb_o`, `polardb_pg`, `tidb`.
"""
return pulumi.get(self, "destination_endpoint_engine_name")
@destination_endpoint_engine_name.setter
def destination_endpoint_engine_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "destination_endpoint_engine_name", value)
@property
@pulumi.getter(name="destinationRegion")
def destination_region(self) -> Optional[pulumi.Input[str]]:
"""
The destination region. List of [supported regions](https://help.aliyun.com/document_detail/141033.html).
"""
return pulumi.get(self, "destination_region")
@destination_region.setter
def destination_region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "destination_region", value)
@property
@pulumi.getter(name="dtsInstanceId")
def dts_instance_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of subscription instance.
"""
return pulumi.get(self, "dts_instance_id")
@dts_instance_id.setter
def dts_instance_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "dts_instance_id", value)
@property
@pulumi.getter(name="dtsJobName")
def dts_job_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of subscription task.
"""
return pulumi.get(self, "dts_job_name")
@dts_job_name.setter
def dts_job_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "dts_job_name", value)
@property
@pulumi.getter(name="errorNotice")
def error_notice(self) -> Optional[pulumi.Input[bool]]:
"""
This parameter decides whether to monitor abnormal status. Valid values: `true`, `false`.
"""
return pulumi.get(self, "error_notice")
@error_notice.setter
def error_notice(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "error_notice", value)
@property
@pulumi.getter(name="errorPhone")
def error_phone(self) -> Optional[pulumi.Input[str]]:
"""
The mobile phone number of the contact to receive error alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site; only mainland China mobile numbers are accepted, and up to 10 numbers can be passed in.
"""
return pulumi.get(self, "error_phone")
@error_phone.setter
def error_phone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "error_phone", value)
@property
@pulumi.getter(name="instanceClass")
def instance_class(self) -> Optional[pulumi.Input[str]]:
"""
The instance class. Valid values: `large`, `medium`, `micro`, `small`, `xlarge`, `xxlarge`.
"""
return pulumi.get(self, "instance_class")
@instance_class.setter
def instance_class(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "instance_class", value)
@property
@pulumi.getter(name="paymentDuration")
def payment_duration(self) -> Optional[pulumi.Input[int]]:
"""
The duration of prepaid instance purchase. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
"""
return pulumi.get(self, "payment_duration")
@payment_duration.setter
def payment_duration(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "payment_duration", value)
@property
@pulumi.getter(name="paymentDurationUnit")
def payment_duration_unit(self) -> Optional[pulumi.Input[str]]:
"""
The payment duration unit. Valid values: `Month`, `Year`. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
"""
return pulumi.get(self, "payment_duration_unit")
@payment_duration_unit.setter
def payment_duration_unit(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "payment_duration_unit", value)
@property
@pulumi.getter(name="paymentType")
def payment_type(self) -> Optional[pulumi.Input[str]]:
"""
The payment type of the resource. Valid values: `Subscription`, `PayAsYouGo`.
"""
return pulumi.get(self, "payment_type")
@payment_type.setter
def payment_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "payment_type", value)
@property
@pulumi.getter
def reserve(self) -> Optional[pulumi.Input[str]]:
"""
DTS reserved parameters, in JSON string format. You can pass this parameter to supplement source and target database information (for example, the data storage format of a target Kafka database, or the instance ID of a Cloud Enterprise Network (CEN) instance). For more information, see the description of the [Reserve parameter](https://help.aliyun.com/document_detail/176470.html).
"""
return pulumi.get(self, "reserve")
@reserve.setter
def reserve(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "reserve", value)
@property
@pulumi.getter(name="sourceEndpointDatabaseName")
def source_endpoint_database_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the database to subscribe to.
"""
return pulumi.get(self, "source_endpoint_database_name")
@source_endpoint_database_name.setter
def source_endpoint_database_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_database_name", value)
@property
@pulumi.getter(name="sourceEndpointEngineName")
def source_endpoint_engine_name(self) -> Optional[pulumi.Input[str]]:
"""
The source database engine type. Valid values: `MySQL`, `Oracle`.
"""
return pulumi.get(self, "source_endpoint_engine_name")
@source_endpoint_engine_name.setter
def source_endpoint_engine_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_engine_name", value)
@property
@pulumi.getter(name="sourceEndpointInstanceId")
def source_endpoint_instance_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the source instance. Available and required only when the source database instance is RDS MySQL, PolarDB-X 1.0, or PolarDB MySQL.
"""
return pulumi.get(self, "source_endpoint_instance_id")
@source_endpoint_instance_id.setter
def source_endpoint_instance_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_instance_id", value)
@property
@pulumi.getter(name="sourceEndpointInstanceType")
def source_endpoint_instance_type(self) -> Optional[pulumi.Input[str]]:
"""
The type of source instance. Valid values: `RDS`, `PolarDB`, `DRDS`, `LocalInstance`, `ECS`, `Express`, `CEN`, `dg`.
"""
return pulumi.get(self, "source_endpoint_instance_type")
@source_endpoint_instance_type.setter
def source_endpoint_instance_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_instance_type", value)
@property
@pulumi.getter(name="sourceEndpointIp")
def source_endpoint_ip(self) -> Optional[pulumi.Input[str]]:
"""
The IP of source endpoint.
"""
return pulumi.get(self, "source_endpoint_ip")
@source_endpoint_ip.setter
def source_endpoint_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_ip", value)
@property
@pulumi.getter(name="sourceEndpointOracleSid")
def source_endpoint_oracle_sid(self) -> Optional[pulumi.Input[str]]:
"""
The SID of Oracle Database. When the source database is self-built Oracle and the Oracle database is a non-RAC instance, this parameter is available and must be passed in.
"""
return pulumi.get(self, "source_endpoint_oracle_sid")
@source_endpoint_oracle_sid.setter
def source_endpoint_oracle_sid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_oracle_sid", value)
@property
@pulumi.getter(name="sourceEndpointOwnerId")
def source_endpoint_owner_id(self) -> Optional[pulumi.Input[str]]:
"""
The Alibaba Cloud account ID to which the source instance belongs. This parameter is only available when configuring data subscriptions across Alibaba Cloud accounts and must be passed in.
"""
return pulumi.get(self, "source_endpoint_owner_id")
@source_endpoint_owner_id.setter
def source_endpoint_owner_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_owner_id", value)
@property
@pulumi.getter(name="sourceEndpointPassword")
def source_endpoint_password(self) -> Optional[pulumi.Input[str]]:
"""
The password of source database instance account.
"""
return pulumi.get(self, "source_endpoint_password")
@source_endpoint_password.setter
def source_endpoint_password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_password", value)
@property
@pulumi.getter(name="sourceEndpointPort")
def source_endpoint_port(self) -> Optional[pulumi.Input[str]]:
"""
The port of source database.
"""
return pulumi.get(self, "source_endpoint_port")
@source_endpoint_port.setter
def source_endpoint_port(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_port", value)
@property
@pulumi.getter(name="sourceEndpointRegion")
def source_endpoint_region(self) -> Optional[pulumi.Input[str]]:
"""
The region of source database.
"""
return pulumi.get(self, "source_endpoint_region")
@source_endpoint_region.setter
def source_endpoint_region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_region", value)
@property
@pulumi.getter(name="sourceEndpointRole")
def source_endpoint_role(self) -> Optional[pulumi.Input[str]]:
"""
The name of the authorization role. Required when the Alibaba Cloud account that owns the source instance differs from the account that configures the subscription task; the role grants the configuring account access to the source instance information.
"""
return pulumi.get(self, "source_endpoint_role")
@source_endpoint_role.setter
def source_endpoint_role(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_role", value)
@property
@pulumi.getter(name="sourceEndpointUserName")
def source_endpoint_user_name(self) -> Optional[pulumi.Input[str]]:
"""
The username of source database instance account.
"""
return pulumi.get(self, "source_endpoint_user_name")
@source_endpoint_user_name.setter
def source_endpoint_user_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source_endpoint_user_name", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
"""
The status of the task. Valid values: `Normal`, `Abnormal`. When a task is created, it is in the `NotStarted` state. Set the status to `Normal` to start the job, or to `Abnormal` to stop it. **Note: The `Starting` state is treated as `Normal`; the two states are considered equivalent on the user side.**
"""
return pulumi.get(self, "status")
@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "status", value)
@property
@pulumi.getter(name="subscriptionDataTypeDdl")
def subscription_data_type_ddl(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to subscribe the DDL type of data. Valid values: `true`, `false`.
"""
return pulumi.get(self, "subscription_data_type_ddl")
@subscription_data_type_ddl.setter
def subscription_data_type_ddl(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "subscription_data_type_ddl", value)
@property
@pulumi.getter(name="subscriptionDataTypeDml")
def subscription_data_type_dml(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to subscribe the DML type of data. Valid values: `true`, `false`.
"""
return pulumi.get(self, "subscription_data_type_dml")
@subscription_data_type_dml.setter
def subscription_data_type_dml(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "subscription_data_type_dml", value)
@property
@pulumi.getter(name="subscriptionInstanceNetworkType")
def subscription_instance_network_type(self) -> Optional[pulumi.Input[str]]:
"""
The network type of the subscription task. Valid values: `classic` (classic network), `vpc` (Virtual Private Cloud).
"""
return pulumi.get(self, "subscription_instance_network_type")
@subscription_instance_network_type.setter
def subscription_instance_network_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subscription_instance_network_type", value)
@property
@pulumi.getter(name="subscriptionInstanceVpcId")
def subscription_instance_vpc_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of subscription vpc instance. When the value of `subscription_instance_network_type` is vpc, this parameter is available and must be passed in.
"""
return pulumi.get(self, "subscription_instance_vpc_id")
@subscription_instance_vpc_id.setter
def subscription_instance_vpc_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subscription_instance_vpc_id", value)
@property
@pulumi.getter(name="subscriptionInstanceVswitchId")
def subscription_instance_vswitch_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of subscription VSwitch instance. When the value of `subscription_instance_network_type` is vpc, this parameter is available and must be passed in.
"""
return pulumi.get(self, "subscription_instance_vswitch_id")
@subscription_instance_vswitch_id.setter
def subscription_instance_vswitch_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subscription_instance_vswitch_id", value)
@property
@pulumi.getter(name="syncArchitecture")
def sync_architecture(self) -> Optional[pulumi.Input[str]]:
"""
The sync architecture. Valid values: `bidirectional`, `oneway`.
"""
return pulumi.get(self, "sync_architecture")
@sync_architecture.setter
def sync_architecture(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sync_architecture", value)
@property
@pulumi.getter(name="synchronizationDirection")
def synchronization_direction(self) -> Optional[pulumi.Input[str]]:
"""
The synchronization direction. Valid values: `Forward`, `Reverse`. When the topology type of the data synchronization instance is bidirectional, `Reverse` can be passed in to start the reverse synchronization link.
"""
return pulumi.get(self, "synchronization_direction")
@synchronization_direction.setter
def synchronization_direction(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "synchronization_direction", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
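"""
A mapping of tags to assign to the resource.
"""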
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "tags", value)
class SubscriptionJob(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
checkpoint: Optional[pulumi.Input[str]] = None,
compute_unit: Optional[pulumi.Input[int]] = None,
database_count: Optional[pulumi.Input[int]] = None,
db_list: Optional[pulumi.Input[str]] = None,
delay_notice: Optional[pulumi.Input[bool]] = None,
delay_phone: Optional[pulumi.Input[str]] = None,
delay_rule_time: Optional[pulumi.Input[str]] = None,
destination_endpoint_engine_name: Optional[pulumi.Input[str]] = None,
destination_region: Optional[pulumi.Input[str]] = None,
dts_instance_id: Optional[pulumi.Input[str]] = None,
dts_job_name: Optional[pulumi.Input[str]] = None,
error_notice: Optional[pulumi.Input[bool]] = None,
error_phone: Optional[pulumi.Input[str]] = None,
instance_class: Optional[pulumi.Input[str]] = None,
payment_duration: Optional[pulumi.Input[int]] = None,
payment_duration_unit: Optional[pulumi.Input[str]] = None,
payment_type: Optional[pulumi.Input[str]] = None,
reserve: Optional[pulumi.Input[str]] = None,
source_endpoint_database_name: Optional[pulumi.Input[str]] = None,
source_endpoint_engine_name: Optional[pulumi.Input[str]] = None,
source_endpoint_instance_id: Optional[pulumi.Input[str]] = None,
source_endpoint_instance_type: Optional[pulumi.Input[str]] = None,
source_endpoint_ip: Optional[pulumi.Input[str]] = None,
source_endpoint_oracle_sid: Optional[pulumi.Input[str]] = None,
source_endpoint_owner_id: Optional[pulumi.Input[str]] = None,
source_endpoint_password: Optional[pulumi.Input[str]] = None,
source_endpoint_port: Optional[pulumi.Input[str]] = None,
source_endpoint_region: Optional[pulumi.Input[str]] = None,
source_endpoint_role: Optional[pulumi.Input[str]] = None,
source_endpoint_user_name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
subscription_data_type_ddl: Optional[pulumi.Input[bool]] = None,
subscription_data_type_dml: Optional[pulumi.Input[bool]] = None,
subscription_instance_network_type: Optional[pulumi.Input[str]] = None,
subscription_instance_vpc_id: Optional[pulumi.Input[str]] = None,
subscription_instance_vswitch_id: Optional[pulumi.Input[str]] = None,
sync_architecture: Optional[pulumi.Input[str]] = None,
synchronization_direction: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
__props__=None):
"""
Provides a DTS Subscription Job resource.
For information about DTS Subscription Job and how to use it, see [What is Subscription Job](https://help.aliyun.com/document_detail/254791.html).
> **NOTE:** Available in v1.138.0+.
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
config = pulumi.Config()
name = config.get("name")
if name is None:
name = "dtsSubscriptionJob"
creation = config.get("creation")
if creation is None:
creation = "Rds"
default_zones = alicloud.get_zones(available_resource_creation=creation)
default_network = alicloud.vpc.Network("defaultNetwork",
vpc_name=name,
cidr_block="172.16.0.0/16")
default_switch = alicloud.vpc.Switch("defaultSwitch",
vpc_id=default_network.id,
cidr_block="172.16.0.0/24",
zone_id=default_zones.zones[0].id,
vswitch_name=name)
instance = alicloud.rds.Instance("instance",
engine="MySQL",
engine_version="5.6",
instance_type="rds.mysql.s1.small",
instance_storage=10,
vswitch_id=default_switch.id,
instance_name=name)
db = []
for i in range(0, 2):
    db.append(alicloud.rds.Database(f"db-{i}",
        instance_id=instance.id,
        description="from terraform"))
account = alicloud.rds.Account("account",
instance_id=instance.id,
password="Test12345",
description="from terraform")
privilege = alicloud.rds.AccountPrivilege("privilege",
instance_id=instance.id,
account_name=account.name,
privilege="ReadWrite",
db_names=[__item.name for __item in db])
default1_networks = alicloud.vpc.get_networks(name_regex="default-NODELETING")
default1_switches = alicloud.vpc.get_switches(vpc_id=default1_networks.ids[0])
default_subscription_job = alicloud.dts.SubscriptionJob("defaultSubscriptionJob",
dts_job_name=name,
payment_type="PostPaid",
source_endpoint_engine_name="MySQL",
source_endpoint_region="cn-hangzhou",
source_endpoint_instance_type="RDS",
source_endpoint_instance_id=instance.id,
source_endpoint_database_name="tfaccountpri_0",
source_endpoint_user_name="tftestprivilege",
source_endpoint_password="Test12345",
db_list=" {\"dtstestdata\": {\"name\": \"tfaccountpri_0\", \"all\": true}}\n",
subscription_instance_network_type="vpc",
subscription_instance_vpc_id=default1_networks.ids[0],
subscription_instance_vswitch_id=default1_switches.ids[0],
status="Normal")
```
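A variant with delay and error monitoring enabled might look like the following sketch (the threshold and phone numbers are illustrative placeholders, reusing the resources defined above):
```python
monitored_job = alicloud.dts.SubscriptionJob("monitoredSubscriptionJob",
    dts_job_name=name,
    payment_type="PayAsYouGo",
    source_endpoint_engine_name="MySQL",
    source_endpoint_region="cn-hangzhou",
    source_endpoint_instance_type="RDS",
    source_endpoint_instance_id=instance.id,
    source_endpoint_database_name="tfaccountpri_0",
    source_endpoint_user_name="tftestprivilege",
    source_endpoint_password="Test12345",
    db_list="{\"dtstestdata\": {\"name\": \"tfaccountpri_0\", \"all\": true}}",
    subscription_instance_network_type="vpc",
    subscription_instance_vpc_id=default1_networks.ids[0],
    subscription_instance_vswitch_id=default1_switches.ids[0],
    delay_notice=True,
    delay_rule_time="30",
    delay_phone="13812345678",
    error_notice=True,
    error_phone="13812345678",
    status="Normal")
```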
## Import
DTS Subscription Job can be imported using the id, e.g.
```sh
$ pulumi import alicloud:dts/subscriptionJob:SubscriptionJob example <id>
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] checkpoint: Subscription start time in Unix timestamp format.
:param pulumi.Input[int] compute_unit: [ETL specifications](https://help.aliyun.com/document_detail/212324.html). The unit is the computing unit ComputeUnit (CU), 1CU=1vCPU+4 GB memory. The value range is an integer greater than or equal to 2.
:param pulumi.Input[int] database_count: The number of private customized RDS instances under PolarDB-X. The default value is 1. This parameter needs to be passed only when `source_endpoint_engine_name` equals `drds`.
:param pulumi.Input[str] db_list: Subscription object, in the format of JSON strings. For detailed definitions, please refer to the description of migration, synchronization or subscription objects [document](https://help.aliyun.com/document_detail/209545.html).
:param pulumi.Input[bool] delay_notice: This parameter decides whether to monitor the delay status. Valid values: `true`, `false`.
:param pulumi.Input[str] delay_phone: The mobile phone number of the contact to receive delay alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site; only mainland China mobile numbers are accepted, and up to 10 numbers can be passed in.
:param pulumi.Input[str] delay_rule_time: The threshold for triggering the delay alarm, in seconds (must be an integer). Required when `delay_notice` is set to `true`. Set the threshold according to business needs; a value above 10 seconds is recommended to avoid false alarms caused by network and database load fluctuations.
:param pulumi.Input[str] destination_endpoint_engine_name: The destination endpoint engine name. Valid values: `ADS`, `DB2`, `DRDS`, `DataHub`, `Greenplum`, `MSSQL`, `MySQL`, `PolarDB`, `PostgreSQL`, `Redis`, `Tablestore`, `as400`, `clickhouse`, `kafka`, `mongodb`, `odps`, `oracle`, `polardb_o`, `polardb_pg`, `tidb`.
:param pulumi.Input[str] destination_region: The destination region. List of [supported regions](https://help.aliyun.com/document_detail/141033.html).
:param pulumi.Input[str] dts_instance_id: The ID of subscription instance.
:param pulumi.Input[str] dts_job_name: The name of subscription task.
:param pulumi.Input[bool] error_notice: This parameter decides whether to monitor abnormal status. Valid values: `true`, `false`.
:param pulumi.Input[str] error_phone: The mobile phone number of the contact to receive error alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site; only mainland China mobile numbers are accepted, and up to 10 numbers can be passed in.
:param pulumi.Input[str] instance_class: The instance class. Valid values: `large`, `medium`, `micro`, `small`, `xlarge`, `xxlarge`.
:param pulumi.Input[int] payment_duration: The duration of prepaid instance purchase. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
:param pulumi.Input[str] payment_duration_unit: The payment duration unit. Valid values: `Month`, `Year`. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
:param pulumi.Input[str] payment_type: The payment type of the resource. Valid values: `Subscription`, `PayAsYouGo`.
:param pulumi.Input[str] reserve: DTS reserved parameters, in JSON string format. You can pass this parameter to supplement source and target database information (for example, the data storage format of a target Kafka database, or the instance ID of a Cloud Enterprise Network (CEN) instance). For more information, see the description of the [Reserve parameter](https://help.aliyun.com/document_detail/176470.html).
:param pulumi.Input[str] source_endpoint_database_name: The name of the database to subscribe to.
:param pulumi.Input[str] source_endpoint_engine_name: The source database engine type. Valid values: `MySQL`, `Oracle`.
:param pulumi.Input[str] source_endpoint_instance_id: The ID of the source instance. Available and required only when the source database instance is RDS MySQL, PolarDB-X 1.0, or PolarDB MySQL.
:param pulumi.Input[str] source_endpoint_instance_type: The type of source instance. Valid values: `RDS`, `PolarDB`, `DRDS`, `LocalInstance`, `ECS`, `Express`, `CEN`, `dg`.
:param pulumi.Input[str] source_endpoint_ip: The IP of source endpoint.
:param pulumi.Input[str] source_endpoint_oracle_sid: The SID of Oracle Database. When the source database is self-built Oracle and the Oracle database is a non-RAC instance, this parameter is available and must be passed in.
:param pulumi.Input[str] source_endpoint_owner_id: The Alibaba Cloud account ID to which the source instance belongs. This parameter is only available when configuring data subscriptions across Alibaba Cloud accounts and must be passed in.
:param pulumi.Input[str] source_endpoint_password: The password of source database instance account.
:param pulumi.Input[str] source_endpoint_port: The port of source database.
:param pulumi.Input[str] source_endpoint_region: The region of source database.
:param pulumi.Input[str] source_endpoint_role: The name of the authorization role. Required when the Alibaba Cloud account that owns the source instance differs from the account that configures the subscription task; the role grants the configuring account access to the source instance information.
:param pulumi.Input[str] source_endpoint_user_name: The username of source database instance account.
:param pulumi.Input[str] status: The status of the task. Valid values: `Normal`, `Abnormal`. When a task is created, it is in the `NotStarted` state. Set the status to `Normal` to start the job, or to `Abnormal` to stop it. **Note: The `Starting` state is treated as `Normal`; the two states are considered equivalent on the user side.**
:param pulumi.Input[bool] subscription_data_type_ddl: Whether to subscribe the DDL type of data. Valid values: `true`, `false`.
:param pulumi.Input[bool] subscription_data_type_dml: Whether to subscribe the DML type of data. Valid values: `true`, `false`.
:param pulumi.Input[str] subscription_instance_network_type: The network type of the subscription task. Valid values: `classic` (classic network), `vpc` (Virtual Private Cloud).
:param pulumi.Input[str] subscription_instance_vpc_id: The ID of subscription vpc instance. When the value of `subscription_instance_network_type` is vpc, this parameter is available and must be passed in.
:param pulumi.Input[str] subscription_instance_vswitch_id: The ID of subscription VSwitch instance. When the value of `subscription_instance_network_type` is vpc, this parameter is available and must be passed in.
:param pulumi.Input[str] sync_architecture: The sync architecture. Valid values: `bidirectional`, `oneway`.
:param pulumi.Input[str] synchronization_direction: The synchronization direction. Valid values: `Forward`, `Reverse`. When the topology type of the data synchronization instance is bidirectional, `Reverse` can be passed in to start the reverse synchronization link.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: SubscriptionJobArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a DTS Subscription Job resource.
For information about DTS Subscription Job and how to use it, see [What is Subscription Job](https://help.aliyun.com/document_detail/254791.html).
> **NOTE:** Available in v1.138.0+.
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
config = pulumi.Config()
name = config.get("name")
if name is None:
name = "dtsSubscriptionJob"
creation = config.get("creation")
if creation is None:
creation = "Rds"
default_zones = alicloud.get_zones(available_resource_creation=creation)
default_network = alicloud.vpc.Network("defaultNetwork",
vpc_name=name,
cidr_block="172.16.0.0/16")
default_switch = alicloud.vpc.Switch("defaultSwitch",
vpc_id=default_network.id,
cidr_block="172.16.0.0/24",
zone_id=default_zones.zones[0].id,
vswitch_name=name)
instance = alicloud.rds.Instance("instance",
engine="MySQL",
engine_version="5.6",
instance_type="rds.mysql.s1.small",
instance_storage=10,
vswitch_id=default_switch.id,
instance_name=name)
db = []
for i in range(0, 2):
    db.append(alicloud.rds.Database(f"db-{i}",
        instance_id=instance.id,
        description="from terraform"))
account = alicloud.rds.Account("account",
instance_id=instance.id,
password="Test12345",
description="from terraform")
privilege = alicloud.rds.AccountPrivilege("privilege",
instance_id=instance.id,
account_name=account.name,
privilege="ReadWrite",
db_names=[__item.name for __item in db])
default1_networks = alicloud.vpc.get_networks(name_regex="default-NODELETING")
default1_switches = alicloud.vpc.get_switches(vpc_id=default1_networks.ids[0])
default_subscription_job = alicloud.dts.SubscriptionJob("defaultSubscriptionJob",
dts_job_name=name,
payment_type="PostPaid",
source_endpoint_engine_name="MySQL",
source_endpoint_region="cn-hangzhou",
source_endpoint_instance_type="RDS",
source_endpoint_instance_id=instance.id,
source_endpoint_database_name="tfaccountpri_0",
source_endpoint_user_name="tftestprivilege",
source_endpoint_password="Test12345",
db_list=" {\"dtstestdata\": {\"name\": \"tfaccountpri_0\", \"all\": true}}\n",
subscription_instance_network_type="vpc",
subscription_instance_vpc_id=default1_networks.ids[0],
subscription_instance_vswitch_id=default1_switches.ids[0],
status="Normal")
```
## Import
DTS Subscription Job can be imported using the id, e.g.
```sh
$ pulumi import alicloud:dts/subscriptionJob:SubscriptionJob example <id>
```
:param str resource_name: The name of the resource.
:param SubscriptionJobArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(SubscriptionJobArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
checkpoint: Optional[pulumi.Input[str]] = None,
compute_unit: Optional[pulumi.Input[int]] = None,
database_count: Optional[pulumi.Input[int]] = None,
db_list: Optional[pulumi.Input[str]] = None,
delay_notice: Optional[pulumi.Input[bool]] = None,
delay_phone: Optional[pulumi.Input[str]] = None,
delay_rule_time: Optional[pulumi.Input[str]] = None,
destination_endpoint_engine_name: Optional[pulumi.Input[str]] = None,
destination_region: Optional[pulumi.Input[str]] = None,
dts_instance_id: Optional[pulumi.Input[str]] = None,
dts_job_name: Optional[pulumi.Input[str]] = None,
error_notice: Optional[pulumi.Input[bool]] = None,
error_phone: Optional[pulumi.Input[str]] = None,
instance_class: Optional[pulumi.Input[str]] = None,
payment_duration: Optional[pulumi.Input[int]] = None,
payment_duration_unit: Optional[pulumi.Input[str]] = None,
payment_type: Optional[pulumi.Input[str]] = None,
reserve: Optional[pulumi.Input[str]] = None,
source_endpoint_database_name: Optional[pulumi.Input[str]] = None,
source_endpoint_engine_name: Optional[pulumi.Input[str]] = None,
source_endpoint_instance_id: Optional[pulumi.Input[str]] = None,
source_endpoint_instance_type: Optional[pulumi.Input[str]] = None,
source_endpoint_ip: Optional[pulumi.Input[str]] = None,
source_endpoint_oracle_sid: Optional[pulumi.Input[str]] = None,
source_endpoint_owner_id: Optional[pulumi.Input[str]] = None,
source_endpoint_password: Optional[pulumi.Input[str]] = None,
source_endpoint_port: Optional[pulumi.Input[str]] = None,
source_endpoint_region: Optional[pulumi.Input[str]] = None,
source_endpoint_role: Optional[pulumi.Input[str]] = None,
source_endpoint_user_name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
subscription_data_type_ddl: Optional[pulumi.Input[bool]] = None,
subscription_data_type_dml: Optional[pulumi.Input[bool]] = None,
subscription_instance_network_type: Optional[pulumi.Input[str]] = None,
subscription_instance_vpc_id: Optional[pulumi.Input[str]] = None,
subscription_instance_vswitch_id: Optional[pulumi.Input[str]] = None,
sync_architecture: Optional[pulumi.Input[str]] = None,
synchronization_direction: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = SubscriptionJobArgs.__new__(SubscriptionJobArgs)
__props__.__dict__["checkpoint"] = checkpoint
__props__.__dict__["compute_unit"] = compute_unit
__props__.__dict__["database_count"] = database_count
__props__.__dict__["db_list"] = db_list
__props__.__dict__["delay_notice"] = delay_notice
__props__.__dict__["delay_phone"] = delay_phone
__props__.__dict__["delay_rule_time"] = delay_rule_time
__props__.__dict__["destination_endpoint_engine_name"] = destination_endpoint_engine_name
__props__.__dict__["destination_region"] = destination_region
__props__.__dict__["dts_instance_id"] = dts_instance_id
__props__.__dict__["dts_job_name"] = dts_job_name
__props__.__dict__["error_notice"] = error_notice
__props__.__dict__["error_phone"] = error_phone
__props__.__dict__["instance_class"] = instance_class
__props__.__dict__["payment_duration"] = payment_duration
__props__.__dict__["payment_duration_unit"] = payment_duration_unit
if payment_type is None and not opts.urn:
raise TypeError("Missing required property 'payment_type'")
__props__.__dict__["payment_type"] = payment_type
__props__.__dict__["reserve"] = reserve
__props__.__dict__["source_endpoint_database_name"] = source_endpoint_database_name
__props__.__dict__["source_endpoint_engine_name"] = source_endpoint_engine_name
__props__.__dict__["source_endpoint_instance_id"] = source_endpoint_instance_id
__props__.__dict__["source_endpoint_instance_type"] = source_endpoint_instance_type
__props__.__dict__["source_endpoint_ip"] = source_endpoint_ip
__props__.__dict__["source_endpoint_oracle_sid"] = source_endpoint_oracle_sid
__props__.__dict__["source_endpoint_owner_id"] = source_endpoint_owner_id
__props__.__dict__["source_endpoint_password"] = source_endpoint_password
__props__.__dict__["source_endpoint_port"] = source_endpoint_port
__props__.__dict__["source_endpoint_region"] = source_endpoint_region
__props__.__dict__["source_endpoint_role"] = source_endpoint_role
__props__.__dict__["source_endpoint_user_name"] = source_endpoint_user_name
__props__.__dict__["status"] = status
__props__.__dict__["subscription_data_type_ddl"] = subscription_data_type_ddl
__props__.__dict__["subscription_data_type_dml"] = subscription_data_type_dml
__props__.__dict__["subscription_instance_network_type"] = subscription_instance_network_type
__props__.__dict__["subscription_instance_vpc_id"] = subscription_instance_vpc_id
__props__.__dict__["subscription_instance_vswitch_id"] = subscription_instance_vswitch_id
__props__.__dict__["sync_architecture"] = sync_architecture
__props__.__dict__["synchronization_direction"] = synchronization_direction
__props__.__dict__["tags"] = tags
super(SubscriptionJob, __self__).__init__(
'alicloud:dts/subscriptionJob:SubscriptionJob',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
checkpoint: Optional[pulumi.Input[str]] = None,
compute_unit: Optional[pulumi.Input[int]] = None,
database_count: Optional[pulumi.Input[int]] = None,
db_list: Optional[pulumi.Input[str]] = None,
delay_notice: Optional[pulumi.Input[bool]] = None,
delay_phone: Optional[pulumi.Input[str]] = None,
delay_rule_time: Optional[pulumi.Input[str]] = None,
destination_endpoint_engine_name: Optional[pulumi.Input[str]] = None,
destination_region: Optional[pulumi.Input[str]] = None,
dts_instance_id: Optional[pulumi.Input[str]] = None,
dts_job_name: Optional[pulumi.Input[str]] = None,
error_notice: Optional[pulumi.Input[bool]] = None,
error_phone: Optional[pulumi.Input[str]] = None,
instance_class: Optional[pulumi.Input[str]] = None,
payment_duration: Optional[pulumi.Input[int]] = None,
payment_duration_unit: Optional[pulumi.Input[str]] = None,
payment_type: Optional[pulumi.Input[str]] = None,
reserve: Optional[pulumi.Input[str]] = None,
source_endpoint_database_name: Optional[pulumi.Input[str]] = None,
source_endpoint_engine_name: Optional[pulumi.Input[str]] = None,
source_endpoint_instance_id: Optional[pulumi.Input[str]] = None,
source_endpoint_instance_type: Optional[pulumi.Input[str]] = None,
source_endpoint_ip: Optional[pulumi.Input[str]] = None,
source_endpoint_oracle_sid: Optional[pulumi.Input[str]] = None,
source_endpoint_owner_id: Optional[pulumi.Input[str]] = None,
source_endpoint_password: Optional[pulumi.Input[str]] = None,
source_endpoint_port: Optional[pulumi.Input[str]] = None,
source_endpoint_region: Optional[pulumi.Input[str]] = None,
source_endpoint_role: Optional[pulumi.Input[str]] = None,
source_endpoint_user_name: Optional[pulumi.Input[str]] = None,
status: Optional[pulumi.Input[str]] = None,
subscription_data_type_ddl: Optional[pulumi.Input[bool]] = None,
subscription_data_type_dml: Optional[pulumi.Input[bool]] = None,
subscription_instance_network_type: Optional[pulumi.Input[str]] = None,
subscription_instance_vpc_id: Optional[pulumi.Input[str]] = None,
subscription_instance_vswitch_id: Optional[pulumi.Input[str]] = None,
sync_architecture: Optional[pulumi.Input[str]] = None,
synchronization_direction: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None) -> 'SubscriptionJob':
"""
Get an existing SubscriptionJob resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] checkpoint: Subscription start time in Unix timestamp format.
:param pulumi.Input[int] compute_unit: [ETL specifications](https://help.aliyun.com/document_detail/212324.html). The unit is the computing unit ComputeUnit (CU), 1CU=1vCPU+4 GB memory. The value range is an integer greater than or equal to 2.
:param pulumi.Input[int] database_count: The number of private customized RDS instances under PolarDB-X. The default value is 1. This parameter needs to be passed only when `source_endpoint_engine_name` equals `drds`.
:param pulumi.Input[str] db_list: Subscription object, in the format of JSON strings. For detailed definitions, please refer to the description of migration, synchronization or subscription objects [document](https://help.aliyun.com/document_detail/209545.html).
:param pulumi.Input[bool] delay_notice: This parameter decides whether to monitor the delay status. Valid values: `true`, `false`.
:param pulumi.Input[str] delay_phone: The mobile phone number of the contact to receive delay alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site; only mainland China mobile numbers are accepted, and up to 10 numbers can be passed in.
:param pulumi.Input[str] delay_rule_time: The threshold for triggering the delay alarm, in seconds (must be an integer). Required when `delay_notice` is set to `true`. Set the threshold according to business needs; a value above 10 seconds is recommended to avoid false alarms caused by network and database load fluctuations.
:param pulumi.Input[str] destination_endpoint_engine_name: The destination endpoint engine name. Valid values: `ADS`, `DB2`, `DRDS`, `DataHub`, `Greenplum`, `MSSQL`, `MySQL`, `PolarDB`, `PostgreSQL`, `Redis`, `Tablestore`, `as400`, `clickhouse`, `kafka`, `mongodb`, `odps`, `oracle`, `polardb_o`, `polardb_pg`, `tidb`.
:param pulumi.Input[str] destination_region: The destination region. List of [supported regions](https://help.aliyun.com/document_detail/141033.html).
:param pulumi.Input[str] dts_instance_id: The ID of subscription instance.
:param pulumi.Input[str] dts_job_name: The name of subscription task.
:param pulumi.Input[bool] error_notice: This parameter decides whether to monitor abnormal status. Valid values: `true`, `false`.
:param pulumi.Input[str] error_phone: The mobile phone number of the contact to receive error alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site; only mainland China mobile numbers are accepted, and up to 10 numbers can be passed in.
:param pulumi.Input[str] instance_class: The instance class. Valid values: `large`, `medium`, `micro`, `small`, `xlarge`, `xxlarge`.
:param pulumi.Input[int] payment_duration: The duration of prepaid instance purchase. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
:param pulumi.Input[str] payment_duration_unit: The payment duration unit. Valid values: `Month`, `Year`. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
:param pulumi.Input[str] payment_type: The payment type of the resource. Valid values: `Subscription`, `PayAsYouGo`.
:param pulumi.Input[str] reserve: DTS reserved parameters, in JSON string format. You can pass this parameter to supplement source and target database information (for example, the data storage format of a target Kafka database, or the instance ID of a Cloud Enterprise Network (CEN) instance). For more information, see the description of the [Reserve parameter](https://help.aliyun.com/document_detail/176470.html).
:param pulumi.Input[str] source_endpoint_database_name: The name of the database to subscribe to.
:param pulumi.Input[str] source_endpoint_engine_name: The source database engine type. Valid values: `MySQL`, `Oracle`.
:param pulumi.Input[str] source_endpoint_instance_id: The ID of the source instance. Available and required only when the source database instance is RDS MySQL, PolarDB-X 1.0, or PolarDB MySQL.
:param pulumi.Input[str] source_endpoint_instance_type: The type of source instance. Valid values: `RDS`, `PolarDB`, `DRDS`, `LocalInstance`, `ECS`, `Express`, `CEN`, `dg`.
:param pulumi.Input[str] source_endpoint_ip: The IP of source endpoint.
:param pulumi.Input[str] source_endpoint_oracle_sid: The SID of Oracle Database. When the source database is self-built Oracle and the Oracle database is a non-RAC instance, this parameter is available and must be passed in.
:param pulumi.Input[str] source_endpoint_owner_id: The Alibaba Cloud account ID to which the source instance belongs. This parameter is only available when configuring data subscriptions across Alibaba Cloud accounts and must be passed in.
:param pulumi.Input[str] source_endpoint_password: The password of source database instance account.
:param pulumi.Input[str] source_endpoint_port: The port of source database.
:param pulumi.Input[str] source_endpoint_region: The region of source database.
:param pulumi.Input[str] source_endpoint_role: The name of the authorization role. Required when the Alibaba Cloud account that owns the source instance differs from the account that configures the subscription task; the role grants the configuring account access to the source instance information.
:param pulumi.Input[str] source_endpoint_user_name: The username of source database instance account.
:param pulumi.Input[str] status: The status of the task. Valid values: `Normal`, `Abnormal`. When a task is created, it is in the `NotStarted` state. Set the status to `Normal` to start the job, or to `Abnormal` to stop it. **Note: The `Starting` state is treated as `Normal`; the two states are considered equivalent on the user side.**
:param pulumi.Input[bool] subscription_data_type_ddl: Whether to subscribe the DDL type of data. Valid values: `true`, `false`.
:param pulumi.Input[bool] subscription_data_type_dml: Whether to subscribe the DML type of data. Valid values: `true`, `false`.
:param pulumi.Input[str] subscription_instance_network_type: The network type of the subscription task. Valid values: `classic` (classic network), `vpc` (Virtual Private Cloud).
:param pulumi.Input[str] subscription_instance_vpc_id: The ID of subscription vpc instance. When the value of `subscription_instance_network_type` is vpc, this parameter is available and must be passed in.
:param pulumi.Input[str] subscription_instance_vswitch_id: The ID of subscription VSwitch instance. When the value of `subscription_instance_network_type` is vpc, this parameter is available and must be passed in.
:param pulumi.Input[str] sync_architecture: The sync architecture. Valid values: `bidirectional`, `oneway`.
:param pulumi.Input[str] synchronization_direction: The synchronization direction. Valid values: `Forward`, `Reverse`. When the topology type of the data synchronization instance is bidirectional, `Reverse` can be passed in to start the reverse synchronization link.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _SubscriptionJobState.__new__(_SubscriptionJobState)
__props__.__dict__["checkpoint"] = checkpoint
__props__.__dict__["compute_unit"] = compute_unit
__props__.__dict__["database_count"] = database_count
__props__.__dict__["db_list"] = db_list
__props__.__dict__["delay_notice"] = delay_notice
__props__.__dict__["delay_phone"] = delay_phone
__props__.__dict__["delay_rule_time"] = delay_rule_time
__props__.__dict__["destination_endpoint_engine_name"] = destination_endpoint_engine_name
__props__.__dict__["destination_region"] = destination_region
__props__.__dict__["dts_instance_id"] = dts_instance_id
__props__.__dict__["dts_job_name"] = dts_job_name
__props__.__dict__["error_notice"] = error_notice
__props__.__dict__["error_phone"] = error_phone
__props__.__dict__["instance_class"] = instance_class
__props__.__dict__["payment_duration"] = payment_duration
__props__.__dict__["payment_duration_unit"] = payment_duration_unit
__props__.__dict__["payment_type"] = payment_type
__props__.__dict__["reserve"] = reserve
__props__.__dict__["source_endpoint_database_name"] = source_endpoint_database_name
__props__.__dict__["source_endpoint_engine_name"] = source_endpoint_engine_name
__props__.__dict__["source_endpoint_instance_id"] = source_endpoint_instance_id
__props__.__dict__["source_endpoint_instance_type"] = source_endpoint_instance_type
__props__.__dict__["source_endpoint_ip"] = source_endpoint_ip
__props__.__dict__["source_endpoint_oracle_sid"] = source_endpoint_oracle_sid
__props__.__dict__["source_endpoint_owner_id"] = source_endpoint_owner_id
__props__.__dict__["source_endpoint_password"] = source_endpoint_password
__props__.__dict__["source_endpoint_port"] = source_endpoint_port
__props__.__dict__["source_endpoint_region"] = source_endpoint_region
__props__.__dict__["source_endpoint_role"] = source_endpoint_role
__props__.__dict__["source_endpoint_user_name"] = source_endpoint_user_name
__props__.__dict__["status"] = status
__props__.__dict__["subscription_data_type_ddl"] = subscription_data_type_ddl
__props__.__dict__["subscription_data_type_dml"] = subscription_data_type_dml
__props__.__dict__["subscription_instance_network_type"] = subscription_instance_network_type
__props__.__dict__["subscription_instance_vpc_id"] = subscription_instance_vpc_id
__props__.__dict__["subscription_instance_vswitch_id"] = subscription_instance_vswitch_id
__props__.__dict__["sync_architecture"] = sync_architecture
__props__.__dict__["synchronization_direction"] = synchronization_direction
__props__.__dict__["tags"] = tags
return SubscriptionJob(resource_name, opts=opts, __props__=__props__)
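    # Illustrative use of `get`: adopt an existing subscription job into a
    # program by its provider ID (the ID below is a placeholder):
    #
    #     existing = SubscriptionJob.get("existing-job", id="dts-xxxxxxxx")
    #     pulumi.export("jobStatus", existing.status)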
@property
@pulumi.getter
def checkpoint(self) -> pulumi.Output[str]:
"""
Subscription start time in Unix timestamp format.
"""
return pulumi.get(self, "checkpoint")
@property
@pulumi.getter(name="computeUnit")
def compute_unit(self) -> pulumi.Output[Optional[int]]:
"""
[ETL specifications](https://help.aliyun.com/document_detail/212324.html). The unit is the computing unit ComputeUnit (CU), 1CU=1vCPU+4 GB memory. The value range is an integer greater than or equal to 2.
"""
return pulumi.get(self, "compute_unit")
@property
@pulumi.getter(name="databaseCount")
def database_count(self) -> pulumi.Output[Optional[int]]:
"""
The number of private customized RDS instances under PolarDB-X. The default value is 1. This parameter needs to be passed only when `source_endpoint_engine_name` equals `drds`.
"""
return pulumi.get(self, "database_count")
@property
@pulumi.getter(name="dbList")
def db_list(self) -> pulumi.Output[Optional[str]]:
"""
Subscription object, in the format of JSON strings. For detailed definitions, please refer to the description of migration, synchronization or subscription objects [document](https://help.aliyun.com/document_detail/209545.html).
"""
return pulumi.get(self, "db_list")
@property
@pulumi.getter(name="delayNotice")
def delay_notice(self) -> pulumi.Output[Optional[bool]]:
"""
This parameter decides whether to monitor the delay status. Valid values: `true`, `false`.
"""
return pulumi.get(self, "delay_notice")
@property
@pulumi.getter(name="delayPhone")
def delay_phone(self) -> pulumi.Output[Optional[str]]:
"""
The mobile phone number of the contact to receive delay alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site; only mainland China mobile numbers are accepted, and up to 10 numbers can be passed in.
"""
return pulumi.get(self, "delay_phone")
@property
@pulumi.getter(name="delayRuleTime")
def delay_rule_time(self) -> pulumi.Output[Optional[str]]:
"""
The threshold for triggering the delay alarm, in seconds (must be an integer). Required when `delay_notice` is set to `true`. Set the threshold according to business needs; a value above 10 seconds is recommended to avoid false alarms caused by network and database load fluctuations.
"""
return pulumi.get(self, "delay_rule_time")
@property
@pulumi.getter(name="destinationEndpointEngineName")
def destination_endpoint_engine_name(self) -> pulumi.Output[Optional[str]]:
"""
The destination endpoint engine name. Valid values: `ADS`, `DB2`, `DRDS`, `DataHub`, `Greenplum`, `MSSQL`, `MySQL`, `PolarDB`, `PostgreSQL`, `Redis`, `Tablestore`, `as400`, `clickhouse`, `kafka`, `mongodb`, `odps`, `oracle`, `polardb_o`, `polardb_pg`, `tidb`.
"""
return pulumi.get(self, "destination_endpoint_engine_name")
@property
@pulumi.getter(name="destinationRegion")
def destination_region(self) -> pulumi.Output[Optional[str]]:
"""
The destination region. List of [supported regions](https://help.aliyun.com/document_detail/141033.html).
"""
return pulumi.get(self, "destination_region")
@property
@pulumi.getter(name="dtsInstanceId")
def dts_instance_id(self) -> pulumi.Output[str]:
"""
The ID of subscription instance.
"""
return pulumi.get(self, "dts_instance_id")
@property
@pulumi.getter(name="dtsJobName")
def dts_job_name(self) -> pulumi.Output[Optional[str]]:
"""
The name of subscription task.
"""
return pulumi.get(self, "dts_job_name")
@property
@pulumi.getter(name="errorNotice")
def error_notice(self) -> pulumi.Output[Optional[bool]]:
"""
This parameter decides whether to monitor abnormal status. Valid values: `true`, `false`.
"""
return pulumi.get(self, "error_notice")
@property
@pulumi.getter(name="errorPhone")
def error_phone(self) -> pulumi.Output[Optional[str]]:
"""
The mobile phone number of the contact to receive error alarms. Separate multiple numbers with commas (`,`). This parameter is currently supported only on the China site; only mainland China mobile numbers are accepted, and up to 10 numbers can be passed in.
"""
return pulumi.get(self, "error_phone")
@property
@pulumi.getter(name="instanceClass")
def instance_class(self) -> pulumi.Output[Optional[str]]:
"""
The instance class. Valid values: `large`, `medium`, `micro`, `small`, `xlarge`, `xxlarge`.
"""
return pulumi.get(self, "instance_class")
@property
@pulumi.getter(name="paymentDuration")
def payment_duration(self) -> pulumi.Output[Optional[int]]:
"""
The duration of prepaid instance purchase. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
"""
return pulumi.get(self, "payment_duration")
@property
@pulumi.getter(name="paymentDurationUnit")
def payment_duration_unit(self) -> pulumi.Output[Optional[str]]:
"""
The payment duration unit. Valid values: `Month`, `Year`. When `payment_type` is `Subscription`, this parameter is valid and must be passed in.
"""
return pulumi.get(self, "payment_duration_unit")
@property
@pulumi.getter(name="paymentType")
def payment_type(self) -> pulumi.Output[str]:
"""
The payment type of the resource. Valid values: `Subscription`, `PayAsYouGo`.
"""
return pulumi.get(self, "payment_type")
@property
@pulumi.getter
def reserve(self) -> pulumi.Output[Optional[str]]:
"""
DTS reserved parameters, in JSON string format. You can pass this parameter to supplement source and target database information (for example, the data storage format of a target Kafka database, or the instance ID of a Cloud Enterprise Network (CEN) instance). For more information, see the description of the [Reserve parameter](https://help.aliyun.com/document_detail/176470.html).
"""
return pulumi.get(self, "reserve")
@property
@pulumi.getter(name="sourceEndpointDatabaseName")
def source_endpoint_database_name(self) -> pulumi.Output[Optional[str]]:
"""
The name of the database to subscribe to.
"""
return pulumi.get(self, "source_endpoint_database_name")
@property
@pulumi.getter(name="sourceEndpointEngineName")
def source_endpoint_engine_name(self) -> pulumi.Output[Optional[str]]:
"""
The source database engine type. Valid values: `MySQL`, `Oracle`.
"""
return pulumi.get(self, "source_endpoint_engine_name")
@property
@pulumi.getter(name="sourceEndpointInstanceId")
def source_endpoint_instance_id(self) -> pulumi.Output[Optional[str]]:
"""
The ID of the source instance. Available and required only when the source database instance is RDS MySQL, PolarDB-X 1.0, or PolarDB MySQL.
"""
return pulumi.get(self, "source_endpoint_instance_id")
@property
@pulumi.getter(name="sourceEndpointInstanceType")
def source_endpoint_instance_type(self) -> pulumi.Output[Optional[str]]:
"""
The type of source instance. Valid values: `RDS`, `PolarDB`, `DRDS`, `LocalInstance`, `ECS`, `Express`, `CEN`, `dg`.
"""
return pulumi.get(self, "source_endpoint_instance_type")
@property
@pulumi.getter(name="sourceEndpointIp")
def source_endpoint_ip(self) -> pulumi.Output[Optional[str]]:
"""
The IP of source endpoint.
"""
return pulumi.get(self, "source_endpoint_ip")
@property
@pulumi.getter(name="sourceEndpointOracleSid")
def source_endpoint_oracle_sid(self) -> pulumi.Output[Optional[str]]:
"""
The SID of Oracle Database. When the source database is self-built Oracle and the Oracle database is a non-RAC instance, this parameter is available and must be passed in.
"""
return pulumi.get(self, "source_endpoint_oracle_sid")
@property
@pulumi.getter(name="sourceEndpointOwnerId")
def source_endpoint_owner_id(self) -> pulumi.Output[Optional[str]]:
"""
The Alibaba Cloud account ID to which the source instance belongs. This parameter is only available when configuring data subscriptions across Alibaba Cloud accounts and must be passed in.
"""
return pulumi.get(self, "source_endpoint_owner_id")
@property
@pulumi.getter(name="sourceEndpointPassword")
def source_endpoint_password(self) -> pulumi.Output[Optional[str]]:
"""
The password of source database instance account.
"""
return pulumi.get(self, "source_endpoint_password")
@property
@pulumi.getter(name="sourceEndpointPort")
def source_endpoint_port(self) -> pulumi.Output[Optional[str]]:
"""
The port of source database.
"""
return pulumi.get(self, "source_endpoint_port")
@property
@pulumi.getter(name="sourceEndpointRegion")
def source_endpoint_region(self) -> pulumi.Output[Optional[str]]:
"""
The region of source database.
"""
return pulumi.get(self, "source_endpoint_region")
@property
@pulumi.getter(name="sourceEndpointRole")
def source_endpoint_role(self) -> pulumi.Output[Optional[str]]:
"""
The name of the authorization role. Required when the Alibaba Cloud account that owns the source instance differs from the account that configures the subscription task; the role grants the configuring account access to the source instance information.
"""
return pulumi.get(self, "source_endpoint_role")
@property
@pulumi.getter(name="sourceEndpointUserName")
def source_endpoint_user_name(self) -> pulumi.Output[Optional[str]]:
"""
The username of source database instance account.
"""
return pulumi.get(self, "source_endpoint_user_name")
@property
@pulumi.getter
def status(self) -> pulumi.Output[str]:
"""
The status of the task. Valid values: `Normal`, `Abnormal`. When a task is created, it is in the `NotStarted` state. Set the status to `Normal` to start the job, or to `Abnormal` to stop it. **Note: The `Starting` state is treated as `Normal`; the two states are considered equivalent on the user side.**
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="subscriptionDataTypeDdl")
def subscription_data_type_ddl(self) -> pulumi.Output[bool]:
"""
Whether to subscribe the DDL type of data. Valid values: `true`, `false`.
"""
return pulumi.get(self, "subscription_data_type_ddl")
@property
@pulumi.getter(name="subscriptionDataTypeDml")
def subscription_data_type_dml(self) -> pulumi.Output[bool]:
"""
Whether to subscribe to DML statements. Valid values: `true`, `false`.
"""
return pulumi.get(self, "subscription_data_type_dml")
@property
@pulumi.getter(name="subscriptionInstanceNetworkType")
def subscription_instance_network_type(self) -> pulumi.Output[Optional[str]]:
"""
The network type of the subscription task. Valid values: `classic` (classic network) and `vpc` (Virtual Private Cloud).
"""
return pulumi.get(self, "subscription_instance_network_type")
@property
@pulumi.getter(name="subscriptionInstanceVpcId")
def subscription_instance_vpc_id(self) -> pulumi.Output[Optional[str]]:
"""
The ID of the subscription VPC instance. This parameter is available and required when `subscription_instance_network_type` is set to `vpc`.
"""
return pulumi.get(self, "subscription_instance_vpc_id")
@property
@pulumi.getter(name="subscriptionInstanceVswitchId")
def subscription_instance_vswitch_id(self) -> pulumi.Output[Optional[str]]:
"""
The ID of the subscription VSwitch instance. This parameter is available and required when `subscription_instance_network_type` is set to `vpc`.
"""
return pulumi.get(self, "subscription_instance_vswitch_id")
@property
@pulumi.getter(name="syncArchitecture")
def sync_architecture(self) -> pulumi.Output[Optional[str]]:
"""
The sync architecture. Valid values: `bidirectional`, `oneway`.
"""
return pulumi.get(self, "sync_architecture")
@property
@pulumi.getter(name="synchronizationDirection")
def synchronization_direction(self) -> pulumi.Output[Optional[str]]:
"""
The synchronization direction. Valid values: `Forward`, `Reverse`. When the topology of the data synchronization instance is bidirectional, `Reverse` can be passed in to start the reverse synchronization link.
"""
return pulumi.get(self, "synchronization_direction")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
"""
A mapping of tags to assign to the resource.
"""
return pulumi.get(self, "tags")
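# Example usage (illustrative sketch only): the snippet below shows how the
# outputs documented above line up with the resource's inputs when the job is
# declared. It assumes the pulumi_alicloud provider exposes this resource as
# alicloud.dts.SubscriptionJob and that an RDS MySQL source is used; all IDs
# and credentials are placeholders, and arguments not documented above are omitted.
#
# import pulumi_alicloud as alicloud
#
# example = alicloud.dts.SubscriptionJob(
#     "example",
#     source_endpoint_instance_type="RDS",             # one of the valid values listed above
#     source_endpoint_instance_id="rm-xxxxxxxx",       # required for RDS MySQL sources
#     source_endpoint_region="cn-hangzhou",
#     source_endpoint_user_name="db_user",             # placeholder credentials
#     source_endpoint_password="db_password",
#     subscription_instance_network_type="vpc",
#     subscription_instance_vpc_id="vpc-xxxxxxxx",     # required when network type is vpc
#     subscription_instance_vswitch_id="vsw-xxxxxxxx", # required when network type is vpc
#     subscription_data_type_dml=True,
#     subscription_data_type_ddl=False,
#     status="Normal",                                 # start the job on creation
# )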
| 59.702025 | 443 | 0.695934 | 15,440 | 123,822 | 5.352461 | 0.02908 | 0.067484 | 0.068609 | 0.073474 | 0.978667 | 0.975557 | 0.970536 | 0.968188 | 0.965998 | 0.957032 | 0 | 0.004054 | 0.207217 | 123,822 | 2,073 | 444 | 59.730825 | 0.837822 | 0.432266 | 0 | 0.926829 | 1 | 0 | 0.13326 | 0.07914 | 0 | 0 | 0 | 0 | 0 | 1 | 0.169891 | false | 0.022708 | 0.004205 | 0.002523 | 0.275862 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
| d541e7e2a938fc22d61a06c7ab2513234f31bda8 | 5,774 | py | Python | tests/test_service.py | Suremaker/consul-deployment-agent | 466c36d3fcb9f8bfa144299dde7cb94f4341907b | ["Apache-2.0"] | 6 | 2016-10-10T09:26:07.000Z | 2018-09-20T08:59:42.000Z | tests/test_service.py | Suremaker/consul-deployment-agent | 466c36d3fcb9f8bfa144299dde7cb94f4341907b | ["Apache-2.0"] | 11 | 2016-10-10T12:11:07.000Z | 2018-05-09T22:11:02.000Z | tests/test_service.py | Suremaker/consul-deployment-agent | 466c36d3fcb9f8bfa144299dde7cb94f4341907b | ["Apache-2.0"] | 16 | 2016-09-28T16:00:58.000Z | 2019-02-25T16:52:12.000Z |
# Copyright (c) Trainline Limited, 2016-2017. All rights reserved. See LICENSE.txt in the project root for license information.
import unittest
import copy
from agent.service import Service
class TestService(unittest.TestCase):
def setUp(self):
self.service_definition = {
'Address':'127.0.0.1',
'ID':'Service-blue',
'Name':'Service',
'Ports': {'blue':12345, 'green':67890},
'Tags':['version:1.0.0', 'deployment_id:12345', 'slice:blue']
}
def test_service_instantiation_from_catalog(self):
service = Service(self.service_definition)
self.assertEqual(service.address, '127.0.0.1')
self.assertEqual(service.deployment_id, '12345')
self.assertEqual(service.id, 'Service-blue')
self.assertEqual(service.installation.get('timeout'), 3600)
self.assertEqual(service.installation.get('package_bucket'), None)
self.assertEqual(service.installation.get('package_key'), None)
self.assertEqual(service.name, 'Service-blue')
self.assertEqual(service.slice, 'blue')
self.assertEqual(service.version, '1.0.0')
self.assertEqual(service.portsConfig['blue'], 12345)
self.assertEqual(service.portsConfig['green'], 67890)
def test_service_coerces_ports_as_ints(self):
svc_copy = copy.deepcopy(self.service_definition)
svc_copy['Ports']['blue'] = "87654"
svc_copy['Ports']['green'] = "32109"
service = Service(svc_copy)
self.assertEqual(service.address, '127.0.0.1')
self.assertEqual(service.deployment_id, '12345')
self.assertEqual(service.id, 'Service-blue')
self.assertEqual(service.installation.get('timeout'), 3600)
self.assertEqual(service.installation.get('package_bucket'), None)
self.assertEqual(service.installation.get('package_key'), None)
self.assertEqual(service.name, 'Service-blue')
self.assertEqual(service.slice, 'blue')
self.assertEqual(service.version, '1.0.0')
self.assertEqual(service.portsConfig['blue'], 87654)
self.assertEqual(service.portsConfig['green'], 32109)
def test_service_instantiation_from_server_role(self):
definition = {
'Address':'127.0.0.1',
'ID':'Service-blue',
'Name':'Service',
'Ports': {'blue':12345, 'green':67890},
'Tags':['version:1.0.0']
}
installation_info = {
'InstallationTimeout':60,
'PackageBucket':'some-bucket',
'PackageKey':'some-key'
}
service = Service(definition, installation_info)
self.assertEqual(service.address, '127.0.0.1')
self.assertEqual(service.deployment_id, None)
self.assertEqual(service.id, 'Service-blue')
self.assertEqual(service.installation.get('timeout'), 3600)
self.assertEqual(service.installation.get('package_bucket'), 'some-bucket')
self.assertEqual(service.installation.get('package_key'), 'some-key')
self.assertEqual(service.name, 'Service-blue')
self.assertEqual(service.port, 0)
self.assertEqual(service.slice, None)
self.assertEqual(service.version, '1.0.0')
def test_service_instantiation_from_server_role_with_slice(self):
definition = {
'Address':'127.0.0.1',
'ID':'Service-green',
'Name':'Service',
'Ports': {'blue':12345, 'green':67890},
'Tags':['version:1.0.0', 'deployment_id:12345', 'slice:green']
}
installation_info = {
'InstallationTimeout':60,
'PackageBucket':'some-bucket',
'PackageKey':'some-key'
}
service = Service(definition, installation_info)
self.assertEqual(service.address, '127.0.0.1')
self.assertEqual(service.deployment_id, '12345')
self.assertEqual(service.id, 'Service-green')
self.assertEqual(service.installation.get('timeout'), 3600)
self.assertEqual(service.installation.get('package_bucket'), 'some-bucket')
self.assertEqual(service.installation.get('package_key'), 'some-key')
self.assertEqual(service.name, 'Service-green')
self.assertEqual(service.slice, 'green')
self.assertEqual(service.version, '1.0.0')
def test_service_instantiation_failure(self):
definition = {
'Address':None,
'ID':None,
'Name':None,
'Tags':[]
}
with self.assertRaises(ValueError) as cm:
Service(definition)
error = cm.exception
self.assertEqual(str(error), 'Service address must be specified.')
definition['Address'] = '127.0.0.1'
with self.assertRaises(ValueError) as cm:
Service(definition)
error = cm.exception
self.assertEqual(str(error), 'Service ID must be specified.')
def test_extract_tag_with_prefix_found(self):
service = Service(self.service_definition)
self.assertEqual(service._extract_tag_with_prefix('version:'), '1.0.0')
def test_extract_tag_with_prefix_not_found(self):
service = Service(self.service_definition)
# Assumes _extract_tag_with_prefix returns None for a prefix absent from the tags.
self.assertEqual(service._extract_tag_with_prefix('nonexistent:'), None)
def test_new_tag(self):
service = Service(self.service_definition)
service.tag('prefix:', 'value')
self.assertTrue('prefix:value' in service.tags)
def test_overwrite_tag(self):
service = Service(self.service_definition)
service.tag('prefix:', 'value1')
service.tag('prefix:', 'value2')
self.assertEqual(1, len([tag for tag in service.tags if tag.startswith('prefix:')]))
| 43.089552 | 127 | 0.636301 | 637 | 5,774 | 5.646782 | 0.156986 | 0.191827 | 0.262997 | 0.113428 | 0.800667 | 0.762024 | 0.737837 | 0.722547 | 0.722547 | 0.693356 | 0 | 0.043294 | 0.223935 | 5,774 | 133 | 128 | 43.413534 | 0.759429 | 0.021649 | 0 | 0.554622 | 0 | 0 | 0.173189 | 0 | 0 | 0 | 0 | 0 | 0.411765 | 1 | 0.084034 | false | 0 | 0.02521 | 0 | 0.117647 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
| d57a78a6b92f96cf9fe9ab254397592550679db8 | 39,594 | py | Python | hessian/hessian.py | ravisankaradepu/layerwise_and_subspace_analysis | 7011f47d94913cc7860f846b0fe6442f313de6ae | ["MIT"] | null | null | null | hessian/hessian.py | ravisankaradepu/layerwise_and_subspace_analysis | 7011f47d94913cc7860f846b0fe6442f313de6ae | ["MIT"] | null | null | null | hessian/hessian.py | ravisankaradepu/layerwise_and_subspace_analysis | 7011f47d94913cc7860f846b0fe6442f313de6ae | ["MIT"] | null | null | null |
import sys
import torch
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
from numpy import linalg as LA
from torch.autograd import Variable
class FullHessian:
def __init__(self,
crit=None,
loader=None,
device=None,
model=None,
num_classes=None,
hessian_type=None,
double=False,
spectrum_margin=None,
init_poly_deg=None,
poly_deg=None,
poly_points=None,
SSI_iters=None,
class_list=None,
vecs=None,
vals=None,
):
self.crit = crit
self.loader = loader
self.device = device
self.model = model
self.num_classes = num_classes
self.hessian_type = hessian_type
self.double = double
self.spectrum_margin = spectrum_margin
self.init_poly_deg = init_poly_deg
self.poly_deg = poly_deg
self.poly_points = poly_points
self.SSI_iters = SSI_iters
self.class_list = class_list
# Avoid shared mutable default arguments: my_device() below mutates the
# lists in place, which would leak state across instances with `vecs=[]`.
self.vecs = vecs if vecs is not None else []
self.vals = vals if vals is not None else []
for i in range(len(self.vecs)):
self.vecs[i] = self.my_device(self.vecs[i])
f = getattr(nn, self.crit)
self.criterion = f(reduction='sum')
# computes matrix vector multiplication
# where the matrix is either the Hessian, G or H
def Hv(self, v):
Hg = self.my_zero()
counter = 0
for iter, batch in enumerate(self.loader):
input, target = batch[0], batch[1]
input = input.to(self.device)
target = target.to(self.device)
input = Variable(input)
target = Variable(target)
if self.double:
input = input.double()
f = self.model(input)
loss = self.criterion(f, target)
if self.hessian_type == 'G':
z = torch.randn(f.shape)
if self.double:
z = z.double()
z = z.to(self.device)
z = Variable(z, requires_grad=True)
# z^T (d f / d theta)
zT_df_dtheta = torch.autograd.grad(f,
self.model.parameters(),
z,
create_graph=True)
# v^T (z^T (d f / d theta)) / dz
# (d f / d theta) v
df_dtheta_v = torch.autograd.grad(zT_df_dtheta,
z,
v)
dloss_df = torch.autograd.grad(loss,
f,
create_graph=True)
d2loss_df2_df_dtheta_v = torch.autograd.grad(dloss_df,
f,
grad_outputs=df_dtheta_v)
Hg_ = torch.autograd.grad(f,
self.model.parameters(),
grad_outputs=d2loss_df2_df_dtheta_v)
elif self.hessian_type == 'H':
dloss_df = torch.autograd.grad(loss,
f)
df_dtheta = torch.autograd.grad(f,
self.model.parameters(),
grad_outputs=dloss_df,
create_graph=True)
df_dtheta[-1].requires_grad = True
Hg_ = torch.autograd.grad(df_dtheta,
self.model.parameters(),
v,
allow_unused=True)
zr = torch.zeros(df_dtheta[-1].shape)
zr = zr.to(self.device)
Hg_ = Hg_[:-1] + (zr,)
elif self.hessian_type == 'Hessian':
grad = torch.autograd.grad(loss,
self.model.parameters(),
create_graph=True)
Hg_ = torch.autograd.grad(grad,
self.model.parameters(),
v)
else:
raise Exception('Wrong hessian type!')
Hg = self.my_sum(Hg,Hg_)
counter += input.shape[0]
return self.my_div_const(Hg, counter)
# computes matrix vector multiplication
# where the matrix is (H - sum_i val_i vec_i vec_i^T)
# {val_i}_i and {vec_i}_i are given as input to the class and are usually
# equal to the top C eigenvalues and eigenvectors
def mat_vec(self, v):
Av = self.Hv(v)
for eigvec, eigval in zip(self.vecs, self.vals):
coeff = eigval * self.my_inner(eigvec, v)
Av = self.my_sub(Av, self.my_mult_const(eigvec, coeff))
return Av
# compute matrix matrix multiplication by iterating the previous function
def mat_mat(self, V):
AV = []
for v in V:
AV.append(self.mat_vec(v))
return AV
# generate a random vector of size #params
def my_randn(self):
v_0_l = []
for param in self.model.parameters():
Z = torch.randn(param.shape)
if self.double:
Z = Z.double()
Z = Z.to(self.device)
v_0_l.append(Z)
return v_0_l
# the following functions perform basic operations over lists of parameters
def my_zero(self):
return [0 for x in self.my_randn()]
def my_sub(self, X, Y):
return [x-y for x,y in zip(X,Y)]
def my_sum(self, X, Y):
return [x+y for x,y in zip(X,Y)]
def my_inner(self, X, Y):
return sum([torch.dot(x.view(-1), y.view(-1)) for x,y in zip(X,Y)])
def my_mult(self, X, Y):
return [x*y for x,y in zip(X,Y)]
def my_norm(self, X):
return torch.sqrt(self.my_inner(X,X))
def my_mult_const(self, X, c):
return [x*c for x in X]
def my_div_const(self, X, c):
return [x/c for x in X]
def my_len(self):
X = self.my_randn()
return sum([x.view(-1).shape[0] for x in X])
def my_data(self, X):
return [x.data for x in X]
def my_cpu(self, X):
return [x.cpu() for x in X]
def my_device(self, X):
return [x.to(self.device) for x in X]
# compute the minimal and maximal eigenvalue of the linear operator mat_vec
# this is needed for approximating the spectrum using Lanczos
def compute_lb_ub(self):
ritzVal, S, alp, bet = self.Lanczos(self.init_poly_deg)
theta_1 = ritzVal[0]
theta_k = ritzVal[-1]
s_1 = float(bet[-1]) * float(S[-1,0])
s_k = float(bet[-1]) * float(S[-1,-1])
t1 = abs(s_1)
tk = abs(s_k)
lb = theta_1 - t1
ub = theta_k + tk
return lb, ub
# approximate the spectrum of the linear operator mat_vec
def LanczosLoop(self, denormalize=False):
print('Lanczos Method')
lb, ub = self.compute_lb_ub()
print('Estimated spectrum range:')
print('[{}\t{}]'.format(lb, ub))
margin = self.spectrum_margin*(ub - lb)
lb -= margin
ub += margin
print('Spectrum range after adding margin:')
print('[{}\t{}]'.format(lb, ub))
self.c = (lb + ub)/2
self.d = (ub - lb)/2
M = self.poly_deg
LB = -1
UB = 1
H = (UB - LB) / (M - 1)
kappa = 1.25
sigma = H / np.sqrt(8 * np.log(kappa))
sigma2 = 2 * sigma**2
tol = 1e-08
width = sigma * np.sqrt(-2.0 * np.log(tol))
aa = LB
bb = UB
xdos = np.linspace(aa, bb, self.poly_points)
y = np.zeros(self.poly_points)
ritzVal, S, _, _ = self.Lanczos(self.poly_deg)
ritzVal = (ritzVal - self.c) / self.d
gamma2 = S[0,]**2
diff = np.expand_dims(ritzVal,-1) - np.expand_dims(xdos,0)
eigval_idx, pts_idx = np.where(np.abs(diff) < width)
vals = gamma2[eigval_idx] \
* np.exp(-((xdos[pts_idx] - ritzVal[eigval_idx])**2) \
/ sigma2)
np.add.at(y, pts_idx, vals)
scaling = 1.0 / np.sqrt(sigma2 * np.pi)
y = y*scaling
if denormalize:
xdos = xdos*self.d + self.c
y = y/self.d
return xdos, y
# M iterations of Lanczos on the linear operator mat_vec
def Lanczos(self, M):
v = self.my_randn()
v = self.my_div_const(v, self.my_norm(v))
alp = torch.zeros(M)
bet = torch.zeros(M)
if self.double:
alp = alp.double()
bet = bet.double()
alp = alp.to(self.device)
bet = bet.to(self.device)
v_prev = None
for j in range(M):
print('Iteration: [{}/{}]'.format(j+1, M))
sys.stdout.flush()
v_next = self.mat_vec(v)
if j:
v_next = self.my_sub(v_next, self.my_mult_const(v_prev,bet[j-1]))
alp[j] = self.my_inner(v_next, v)
v_next = self.my_sub(v_next, self.my_mult_const(v, alp[j]))
bet[j] = self.my_norm(v_next)
v_next = self.my_div_const(v_next, bet[j])
v_prev = v
v = v_next
B = np.diag(alp.cpu().numpy()) + np.diag(bet.cpu().numpy()[:-1], k=1) + np.diag(bet.cpu().numpy()[:-1], k=-1)
ritz_val, S = np.linalg.eigh(B)
return ritz_val, S, alp, bet
# compute top-C eigenvalues and eigenvectors using subspace iteration
def SubspaceIteration(self):
print('Subspace Iteration')
n = int(self.num_classes)
V = []
for _ in range(n):
V.append(self.my_randn())
Q, _ = self.QR(V, n)
for iter in range(self.SSI_iters):
print('Iteration: [{}/{}]'.format(iter+1, self.SSI_iters))
sys.stdout.flush()
V = self.mat_mat(Q)
eigvals = [self.my_norm(w) for w in V]
Q, _ = self.QR(V, n)
eigval_density = np.ones(len(eigvals)) * 1/len(eigvals)
return V, eigvals, eigval_density
# QR decomposition, which is needed for subspace iteration
def QR(self, A, n):
Q = []
R = torch.zeros(n,n)
if self.double:
R = R.double()
R = R.to(self.device)
for j in range(n):
v = A[j]
for i in range(j):
R[i,j] = self.my_inner(Q[i], A[j])
v = self.my_sub(v, self.my_mult_const(Q[i], R[i,j]))
R[j,j] = self.my_norm(v)
Q.append(self.my_div_const(v, R[j,j]))
return Q, R
# compute delta_{c,c'}
def compute_delta_c_cp(self):
print("Computing delta_{c,c'}")
if self.hessian_type != 'G':
raise Exception('Works only for G!')
if self.crit != 'CrossEntropyLoss':
raise Exception('Works only for cross entropy loss!')
if self.class_list is not None:
class_list = self.class_list
else:
class_list = [i for i in range(self.num_classes)]
means = []
counters = []
for c in class_list:
means.append([])
counters.append([])
for cp in class_list:
means[-1].append(None)
counters[-1].append(0)
for idx, batch in enumerate(self.loader, 1):
print('Iteration: [{}/{}]'.format(idx, len(self.loader)))
sys.stdout.flush()
input, target = batch[0], batch[1]
input = input.to(self.device)
target = target.to(self.device)
input = Variable(input)
target = Variable(target)
f = self.model(input)
prob = F.softmax(f,dim=1)
for idx_c, c in enumerate(class_list):
idxs = (target == c).nonzero()
if len(idxs) == 0:
continue
fc = f[idxs.squeeze(-1),]
probc = prob[idxs.squeeze(-1),]
for idx_cp, cp in enumerate(class_list):
# compute delta_{i,c,c'}
w = -probc
w[:,cp] = w[:,cp] + 1
w = w * torch.sqrt(probc[:,[cp]])
J = torch.autograd.grad(fc,
self.model.parameters(),
grad_outputs=w,
retain_graph=True)
J = self.my_cpu(self.my_data(J))
if means[idx_c][idx_cp] is None:
means[idx_c][idx_cp] = self.my_zero()
means[idx_c][idx_cp] = self.my_sum(means[idx_c][idx_cp], J)
counters[idx_c][idx_cp] += fc.shape[0]
for idx_c in range(len(class_list)):
for idx_cp in range(len(class_list)):
means[idx_c][idx_cp] = [x/counters[idx_c][idx_cp] for x in means[idx_c][idx_cp]]
return means
# compute G decomposition
def compute_G_decomp(self, mu_ccp_only=False, mu_only=False, plot_only=False):
# compute delta_{c,c'}
mu_ccp = self.compute_delta_c_cp()
C = len(mu_ccp)
mu_ccp_flat = []
for c in range(C):
for c_ in range(C):
mu_ccp_flat.append(mu_ccp[c][c_])
if mu_ccp_only:
return {'mu_ccp' : mu_ccp}
# compute delta_c
print("Computing delta_c")
mu = []
for c in range(C):
s = self.my_zero()
for c_ in range(C):
if c != c_:
s = self.my_sum(s, mu_ccp[c][c_])
avg = self.my_div_const(s, C-1)
mu.append(avg)
if mu_only:
return {'mu' : mu}
# compute distances between {delta_c}_c and {delta_{c,c'}}_{c,c'}
# (a total of C+C**2 elements)
# these distances will later be passed to t-SNE
print("Computing distances for t-SNE plot")
V = []
labels = []
for c in range(C):
V.append(mu[c])
labels.append([c])
for c in range(C):
for c_ in range(C):
V.append(mu_ccp[c][c_])
labels.append([c, c_])
N = C+C**2
dist = np.zeros([N, N])
for c in range(N):
print('Iteration: [{}/{}]'.format(c+1, N))
for c_ in range(N):
dist[c,c_] = self.my_norm(self.my_sub(V[c], V[c_]))**2
if plot_only:
return {'dist' : dist,
'labels' : labels}
# delta_{c,c}
mu_cc = []
for c in range(C):
mu_cc.append(mu_ccp[c][c])
# compute G0
print("Computing G0")
mu_cc_T_mu_cc = np.zeros([C, C])
for c in range(C):
for c_ in range(C):
mu_cc_T_mu_cc[c,c_] = self.my_inner(mu_cc[c], mu_cc[c_]) / C
G0_eigval, _ = LA.eig(mu_cc_T_mu_cc)
G0_eigval = sorted(G0_eigval, reverse=True)
# compute G1
print("Computing G1")
muTmu = np.zeros([C, C])
for c in range(C):
for c_ in range(C):
muTmu[c,c_] = self.my_inner(mu[c], mu[c_]) * (C-1) / C
G1_eigval, _ = LA.eig(muTmu)
G1_eigval = sorted(G1_eigval, reverse=True)
# compute G1+2
print("Computing G1+2")
mu_ccp_T_mu_ccp = np.zeros([C**2, C**2])
for c in range(C**2):
for c_ in range(C**2):
mu_ccp_T_mu_ccp[c,c_] = self.my_inner(mu_ccp_flat[c], mu_ccp_flat[c_]) / C
G12_eigval, _ = LA.eig(mu_ccp_T_mu_ccp)
G12_eigval = sorted(G12_eigval, reverse=True)
# compute G_2
print("Computing G2")
nu = []
for c in range(C):
nu.append([])
for c_ in range(C):
nu[c].append(self.my_sub(mu_ccp[c][c_], mu[c]))
nu_flat = []
for c in range(C):
for c_ in range(C):
if c != c_:
nu_flat.append(nu[c][c_])
gram_nu_flat = np.zeros([C*(C-1), C*(C-1)])
for c in range(C*(C-1)):
for c_ in range(C*(C-1)):
gram_nu_flat[c,c_] = self.my_inner(nu_flat[c], nu_flat[c_]) / C
G2_eigval, _ = LA.eig(gram_nu_flat)
G2_eigval = sorted(G2_eigval, reverse=True)
# density is 1/(number of eigenvalues)
G0_eigval_density = np.ones(len(G0_eigval)) * 1/len(G0_eigval)
G1_eigval_density = np.ones(len(G1_eigval)) * 1/len(G1_eigval)
G12_eigval_density = np.ones(len(G12_eigval)) * 1/len(G12_eigval)
G2_eigval_density = np.ones(len(G2_eigval)) * 1/len(G2_eigval)
res = {'mu_ccp' : mu_ccp,
'mu_ccp_flat' : mu_ccp_flat,
'mu' : mu,
'nu' : nu,
'nu_flat' : nu_flat,
'G0_eigval' : G0_eigval,
'G0_eigval_density' : G0_eigval_density,
'G1_eigval' : G1_eigval,
'G1_eigval_density' : G1_eigval_density,
'G2_eigval' : G2_eigval,
'G2_eigval_density' : G2_eigval_density,
'G12_eigval' : G12_eigval,
'G12_eigval_density' : G12_eigval_density,
'dist' : dist,
'labels' : labels,
}
return res
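# A minimal, self-contained sketch of the double-backward trick that Hv()
# above relies on (Pearlmutter-style Hessian-vector products): differentiate
# the loss once with create_graph=True, then differentiate the resulting
# gradient against a probe vector v, so H v is formed without ever
# materializing H. The tiny model and random data are illustrative
# stand-ins added here, not part of the original module.
def _demo_hessian_vector_product():
    model = nn.Linear(4, 3)
    x = torch.randn(8, 4)
    y = torch.randint(0, 3, (8,))
    loss = nn.CrossEntropyLoss(reduction='sum')(model(x), y)
    params = list(model.parameters())
    v = [torch.randn_like(p) for p in params]
    # First pass keeps the graph of the gradient itself.
    grads = torch.autograd.grad(loss, params, create_graph=True)
    # Second pass contracts that graph with v, yielding the list-of-tensors H v.
    return torch.autograd.grad(grads, params, grad_outputs=v)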
class LayerHessian:
def __init__(self,
crit=None,
loader=None,
device=None,
model=None,
layer_name=None,
num_classes=None,
hessian_type=None,
double=False,
spectrum_margin=None,
init_poly_deg=None,
poly_deg=None,
poly_points=None,
SSI_iters=None,
class_list=None,
vecs=None,
vals=None,
):
self.crit = crit
self.loader = loader
self.device = device
self.model = model
self.num_classes = num_classes
self.hessian_type = hessian_type
self.double = double
self.spectrum_margin = spectrum_margin
self.init_poly_deg = init_poly_deg
self.poly_deg = poly_deg
self.poly_points = poly_points
self.SSI_iters = SSI_iters
self.class_list = class_list
# Avoid shared mutable default arguments (see FullHessian.__init__ above).
self.vecs = vecs if vecs is not None else []
self.vals = vals if vals is not None else []
for i in range(len(self.vecs)):
self.vecs[i] = self.my_device(self.vecs[i])
f = getattr(nn, self.crit)
self.criterion = f(reduction='sum')
self.layer_name = layer_name
self.layer = self.get_layer(self.layer_name)
def get_layer(self, layer_name):
for name, params in self.model.named_parameters():
if name == layer_name:
return params
raise Exception('Layer not found: {}'.format(layer_name))
# computes matrix vector multiplication
# where the matrix is either the Hessian, G or H
def Hv(self, v):
Hg = self.my_zero()
counter = 0
for iter, batch in enumerate(self.loader):
input, target = batch[0], batch[1]
input = input.to(self.device)
target = target.to(self.device)
input = Variable(input)
target = Variable(target)
if self.double:
input = input.double()
f = self.model(input)
loss = self.criterion(f, target)
if self.hessian_type == 'G':
z = torch.randn(f.shape)
if self.double:
z = z.double()
z = z.to(self.device)
z = Variable(z, requires_grad=True)
# z^T (d f / d theta)
zT_df_dtheta = torch.autograd.grad(f,
self.layer,
z,
create_graph=True)
# v^T (z^T (d f / d theta)) / dz
# (d f / d theta) v
df_dtheta_v = torch.autograd.grad(zT_df_dtheta,
z,
v)
dloss_df = torch.autograd.grad(loss,
f,
create_graph=True)
d2loss_df2_df_dtheta_v = torch.autograd.grad(dloss_df,
f,
grad_outputs=df_dtheta_v)
Hg_ = torch.autograd.grad(f,
self.layer,
grad_outputs=d2loss_df2_df_dtheta_v)
elif self.hessian_type == 'H':
dloss_df = torch.autograd.grad(loss,
f)
df_dtheta = torch.autograd.grad(f,
self.layer,
grad_outputs=dloss_df,
create_graph=True)
# df_dtheta[-1].requires_grad = True
Hg_ = torch.autograd.grad(df_dtheta,
self.layer,
v,
allow_unused=True)
# zr = torch.zeros(df_dtheta[-1].shape)
# zr = zr.to(self.device)
# Hg_ = Hg_[:-1] + (zr,)
elif self.hessian_type == 'Hessian':
grad = torch.autograd.grad(loss,
self.layer,
create_graph=True)
Hg_ = torch.autograd.grad(grad,
self.layer,
v)
else:
raise Exception('Wrong hessian type!')
Hg = self.my_sum(Hg,Hg_)
counter += input.shape[0]
return self.my_div_const(Hg, counter)
# computes matrix vector multiplication
# where the matrix is (H - sum_i val_i vec_i vec_i^T)
# {val_i}_i and {vec_i}_i are given as input to the class and are usually
# equal to the top C eigenvalues and eigenvectors
def mat_vec(self, v):
Av = self.Hv(v)
for eigvec, eigval in zip(self.vecs, self.vals):
coeff = eigval * self.my_inner(eigvec, v)
Av = self.my_sub(Av, self.my_mult_const(eigvec, coeff))
return Av
# compute matrix matrix multiplication by iterating the previous function
def mat_mat(self, V):
AV = []
for v in V:
AV.append(self.mat_vec(v))
return AV
# generate a random vector of size #params
def my_randn(self):
# v_0_l = []
# for param in self.model.parameters():
# Z = torch.randn(param.shape)
# if self.double:
# Z = Z.double()
# Z = Z.to(self.device)
# v_0_l.append(Z)
# return v_0_l
return [torch.randn_like(self.layer, device=self.device)]
# the following functions perform basic operations over lists of parameters
def my_zero(self):
return [0 for x in self.my_randn()]
def my_sub(self, X, Y):
return [x-y for x,y in zip(X,Y)]
def my_sum(self, X, Y):
return [x+y for x,y in zip(X,Y)]
def my_inner(self, X, Y):
return sum([torch.dot(x.view(-1), y.view(-1)) for x,y in zip(X,Y)])
def my_mult(self, X, Y):
return [x*y for x,y in zip(X,Y)]
def my_norm(self, X):
return torch.sqrt(self.my_inner(X,X))
def my_mult_const(self, X, c):
return [x*c for x in X]
def my_div_const(self, X, c):
return [x/c for x in X]
def my_len(self):
X = self.my_randn()
return sum([x.view(-1).shape[0] for x in X])
def my_data(self, X):
return [x.data for x in X]
def my_cpu(self, X):
return [x.cpu() for x in X]
def my_device(self, X):
return [x.to(self.device) for x in X]
# compute the minimal and maximal eigenvalue of the linear operator mat_vec
# this is needed for approximating the spectrum using Lanczos
def compute_lb_ub(self):
ritzVal, S, alp, bet = self.Lanczos(self.init_poly_deg)
theta_1 = ritzVal[0]
theta_k = ritzVal[-1]
s_1 = float(bet[-1]) * float(S[-1,0])
s_k = float(bet[-1]) * float(S[-1,-1])
t1 = abs(s_1)
tk = abs(s_k)
lb = theta_1 - t1
ub = theta_k + tk
return lb, ub
# approximate the spectrum of the linear operator mat_vec
def LanczosLoop(self, denormalize=False):
print('Lanczos Method')
lb, ub = self.compute_lb_ub()
print('Estimated spectrum range:')
print('[{}\t{}]'.format(lb, ub))
margin = self.spectrum_margin*(ub - lb)
lb -= margin
ub += margin
print('Spectrum range after adding margin:')
print('[{}\t{}]'.format(lb, ub))
self.c = (lb + ub)/2
self.d = (ub - lb)/2
M = self.poly_deg
LB = -1
UB = 1
H = (UB - LB) / (M - 1)
kappa = 1.25
sigma = H / np.sqrt(8 * np.log(kappa))
sigma2 = 2 * sigma**2
tol = 1e-08
width = sigma * np.sqrt(-2.0 * np.log(tol))
aa = LB
bb = UB
xdos = np.linspace(aa, bb, self.poly_points)
y = np.zeros(self.poly_points)
ritzVal, S, _, _ = self.Lanczos(self.poly_deg)
ritzVal = (ritzVal - self.c) / self.d
gamma2 = S[0,]**2
diff = np.expand_dims(ritzVal,-1) - np.expand_dims(xdos,0)
eigval_idx, pts_idx = np.where(np.abs(diff) < width)
vals = gamma2[eigval_idx] \
* np.exp(-((xdos[pts_idx] - ritzVal[eigval_idx])**2) \
/ sigma2)
np.add.at(y, pts_idx, vals)
scaling = 1.0 / np.sqrt(sigma2 * np.pi)
y = y*scaling
if denormalize:
xdos = xdos*self.d + self.c
y = y/self.d
return xdos, y
# M iterations of Lanczos on the linear operator mat_vec
def Lanczos(self, M):
v = self.my_randn()
v = self.my_div_const(v, self.my_norm(v))
alp = torch.zeros(M)
bet = torch.zeros(M)
if self.double:
alp = alp.double()
bet = bet.double()
alp = alp.to(self.device)
bet = bet.to(self.device)
v_prev = None
for j in range(M):
print('Iteration: [{}/{}]'.format(j+1, M))
sys.stdout.flush()
v_next = self.mat_vec(v)
if j:
v_next = self.my_sub(v_next, self.my_mult_const(v_prev,bet[j-1]))
alp[j] = self.my_inner(v_next, v)
v_next = self.my_sub(v_next, self.my_mult_const(v, alp[j]))
bet[j] = self.my_norm(v_next)
v_next = self.my_div_const(v_next, bet[j])
v_prev = v
v = v_next
B = np.diag(alp.cpu().numpy()) + np.diag(bet.cpu().numpy()[:-1], k=1) + np.diag(bet.cpu().numpy()[:-1], k=-1)
ritz_val, S = np.linalg.eigh(B)
return ritz_val, S, alp, bet
# compute top-C eigenvalues and eigenvectors using subspace iteration
def SubspaceIteration(self):
print('Subspace Iteration')
n = int(self.num_classes)
V = []
for _ in range(n):
V.append(self.my_randn())
Q, _ = self.QR(V, n)
for iter in range(self.SSI_iters):
print('Iteration: [{}/{}]'.format(iter+1, self.SSI_iters))
sys.stdout.flush()
V = self.mat_mat(Q)
eigvals = [self.my_norm(w) for w in V]
Q, _ = self.QR(V, n)
eigval_density = np.ones(len(eigvals)) * 1/len(eigvals)
return Q, eigvals, eigval_density
# QR decomposition, which is needed for subspace iteration
def QR(self, A, n):
Q = []
R = torch.zeros(n,n)
if self.double:
R = R.double()
R = R.to(self.device)
for j in range(n):
v = A[j]
for i in range(j):
R[i,j] = self.my_inner(Q[i], A[j])
v = self.my_sub(v, self.my_mult_const(Q[i], R[i,j]))
R[j,j] = self.my_norm(v)
Q.append(self.my_div_const(v, R[j,j]))
return Q, R
# compute delta_{c,c'}
def compute_delta_c_cp(self):
print("Computing delta_{c,c'}")
if self.hessian_type != 'G':
raise Exception('Works only for G!')
if self.crit != 'CrossEntropyLoss':
raise Exception('Works only for cross entropy loss!')
if self.class_list is not None:
class_list = self.class_list
else:
class_list = [i for i in range(self.num_classes)]
means = []
counters = []
for c in class_list:
means.append([])
counters.append([])
for cp in class_list:
means[-1].append(None)
counters[-1].append(0)
for idx, batch in enumerate(self.loader, 1):
print('Iteration: [{}/{}]'.format(idx, len(self.loader)))
sys.stdout.flush()
input, target = batch[0], batch[1]
input = input.to(self.device)
target = target.to(self.device)
input = Variable(input)
target = Variable(target)
f = self.model(input)
prob = F.softmax(f,dim=1)
for idx_c, c in enumerate(class_list):
idxs = (target == c).nonzero()
if len(idxs) == 0:
continue
fc = f[idxs.squeeze(-1),]
probc = prob[idxs.squeeze(-1),]
for idx_cp, cp in enumerate(class_list):
# compute delta_{i,c,c'}
w = -probc
w[:,cp] = w[:,cp] + 1
w = w * torch.sqrt(probc[:,[cp]])
J = torch.autograd.grad(fc,
self.layer,
grad_outputs=w,
retain_graph=True)
J = self.my_cpu(self.my_data(J))
if means[idx_c][idx_cp] is None:
means[idx_c][idx_cp] = self.my_zero()
means[idx_c][idx_cp] = self.my_sum(means[idx_c][idx_cp], J)
counters[idx_c][idx_cp] += fc.shape[0]
for idx_c in range(len(class_list)):
for idx_cp in range(len(class_list)):
means[idx_c][idx_cp] = [x/counters[idx_c][idx_cp] for x in means[idx_c][idx_cp]]
return means
# compute G decomposition
def compute_G_decomp(self, mu_ccp_only=False, mu_only=False, plot_only=False):
# compute delta_{c,c'}
mu_ccp = self.compute_delta_c_cp()
C = len(mu_ccp)
mu_ccp_flat = []
for c in range(C):
for c_ in range(C):
mu_ccp_flat.append(mu_ccp[c][c_])
if mu_ccp_only:
return {'mu_ccp' : mu_ccp}
# compute delta_c
print("Computing delta_c")
mu = []
for c in range(C):
s = self.my_zero()
for c_ in range(C):
if c != c_:
s = self.my_sum(s, mu_ccp[c][c_])
avg = self.my_div_const(s, C-1)
mu.append(avg)
if mu_only:
return {'mu' : mu}
# compute distances between {delta_c}_c and {delta_{c,c'}}_{c,c'}
# (a total of C+C**2 elements)
# these distances will later be passed to t-SNE
print("Computing distances for t-SNE plot")
V = []
labels = []
for c in range(C):
V.append(mu[c])
labels.append([c])
for c in range(C):
for c_ in range(C):
V.append(mu_ccp[c][c_])
labels.append([c, c_])
N = C+C**2
dist = np.zeros([N, N])
for c in range(N):
print('Iteration: [{}/{}]'.format(c+1, N))
for c_ in range(N):
dist[c,c_] = self.my_norm(self.my_sub(V[c], V[c_]))**2
if plot_only:
return {'dist' : dist,
'labels' : labels}
# delta_{c,c}
mu_cc = []
for c in range(C):
mu_cc.append(mu_ccp[c][c])
# compute G0
print("Computing G0")
mu_cc_T_mu_cc = np.zeros([C, C])
for c in range(C):
for c_ in range(C):
mu_cc_T_mu_cc[c,c_] = self.my_inner(mu_cc[c], mu_cc[c_]) / C
G0_eigval, _ = LA.eig(mu_cc_T_mu_cc)
G0_eigval = sorted(G0_eigval, reverse=True)
# compute G1
print("Computing G1")
muTmu = np.zeros([C, C])
for c in range(C):
for c_ in range(C):
muTmu[c,c_] = self.my_inner(mu[c], mu[c_]) * (C-1) / C
G1_eigval, _ = LA.eig(muTmu)
G1_eigval = sorted(G1_eigval, reverse=True)
# compute G1+2
print("Computing G1+2")
mu_ccp_T_mu_ccp = np.zeros([C**2, C**2])
for c in range(C**2):
for c_ in range(C**2):
mu_ccp_T_mu_ccp[c,c_] = self.my_inner(mu_ccp_flat[c], mu_ccp_flat[c_]) / C
G12_eigval, _ = LA.eig(mu_ccp_T_mu_ccp)
G12_eigval = sorted(G12_eigval, reverse=True)
# compute G_2
print("Computing G2")
nu = []
for c in range(C):
nu.append([])
for c_ in range(C):
nu[c].append(self.my_sub(mu_ccp[c][c_], mu[c]))
nu_flat = []
for c in range(C):
for c_ in range(C):
if c != c_:
nu_flat.append(nu[c][c_])
gram_nu_flat = np.zeros([C*(C-1), C*(C-1)])
for c in range(C*(C-1)):
for c_ in range(C*(C-1)):
gram_nu_flat[c,c_] = self.my_inner(nu_flat[c], nu_flat[c_]) / C
G2_eigval, _ = LA.eig(gram_nu_flat)
G2_eigval = sorted(G2_eigval, reverse=True)
# density is 1/(number of eigenvalues)
G0_eigval_density = np.ones(len(G0_eigval)) * 1/len(G0_eigval)
G1_eigval_density = np.ones(len(G1_eigval)) * 1/len(G1_eigval)
G12_eigval_density = np.ones(len(G12_eigval)) * 1/len(G12_eigval)
G2_eigval_density = np.ones(len(G2_eigval)) * 1/len(G2_eigval)
res = {'mu_ccp' : mu_ccp,
'mu_ccp_flat' : mu_ccp_flat,
'mu' : mu,
'nu' : nu,
'nu_flat' : nu_flat,
'G0_eigval' : G0_eigval,
'G0_eigval_density' : G0_eigval_density,
'G1_eigval' : G1_eigval,
'G1_eigval_density' : G1_eigval_density,
'G2_eigval' : G2_eigval,
'G2_eigval_density' : G2_eigval_density,
'G12_eigval' : G12_eigval,
'G12_eigval_density' : G12_eigval_density,
'dist' : dist,
'labels' : labels,
}
return res
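# Illustrative usage sketch for the classes above, on a synthetic problem.
# The constructor arguments mirror those read in __init__; the specific
# values (model size, polynomial degrees, margin) are arbitrary choices for
# this demo, not recommendations from the original authors.
def _demo_spectrum_estimate():
    from torch.utils.data import DataLoader, TensorDataset
    model = nn.Linear(10, 3)
    data = TensorDataset(torch.randn(64, 10), torch.randint(0, 3, (64,)))
    hess = FullHessian(crit='CrossEntropyLoss',
                       loader=DataLoader(data, batch_size=32),
                       device='cpu',
                       model=model,
                       num_classes=3,
                       hessian_type='Hessian',
                       spectrum_margin=0.05,
                       init_poly_deg=8,
                       poly_deg=16,
                       poly_points=128)
    # Returns a grid of points and the estimated spectral density on it.
    xdos, density = hess.LanczosLoop(denormalize=True)
    return xdos, density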
| 33.160804 | 117 | 0.436177 | 4,730 | 39,594 | 3.470402 | 0.063002 | 0.029973 | 0.016814 | 0.029485 | 0.973561 | 0.970088 | 0.970088 | 0.970088 | 0.969175 | 0.961316 | 0 | 0.014914 | 0.458074 | 39,594 | 1,193 | 118 | 33.1886 | 0.750105 | 0.071299 | 0 | 0.959158 | 0 | 0 | 0.03126 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.060644 | false | 0 | 0.008663 | 0.028465 | 0.137376 | 0.042079 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
| 6376fd14c62bdc0480206a147d5f50ab77103dfd | 85 | py | Python | pbrl/algorithms/ppg/__init__.py | jjccero/rliccd | 748bf92a1d9e401172a0d9c435ea75a8e37c6538 | ["MIT"] | 11 | 2021-08-28T09:38:01.000Z | 2021-09-18T05:15:23.000Z | pbrl/algorithms/ppg/__init__.py | jjccero/rl | 45d1a464ec661278372fce2c1d972d02457e21f6 | ["MIT"] | null | null | null | pbrl/algorithms/ppg/__init__.py | jjccero/rl | 45d1a464ec661278372fce2c1d972d02457e21f6 | ["MIT"] | 1 | 2021-10-12T12:43:58.000Z | 2021-10-12T12:43:58.000Z |
from pbrl.algorithms.ppg.net import AuxActor
from pbrl.algorithms.ppg.ppg import PPG
| 28.333333 | 44 | 0.835294 | 14 | 85 | 5.071429 | 0.5 | 0.225352 | 0.507042 | 0.591549 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.094118 | 85 | 2 | 45 | 42.5 | 0.922078 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
| 63790b4470e7172020f60b6721dceb08b7af54f6 | 1,811 | py | Python | numpy/array_api/_statistical_functions.py | yashasvimisra2798/numpy | b892ed2c7fa27b2e0d73c12d12ace4b4d4e12897 | ["BSD-3-Clause"] | 2 | 2021-08-25T11:22:49.000Z | 2021-08-28T05:35:46.000Z | numpy/array_api/_statistical_functions.py | yashasvimisra2798/numpy | b892ed2c7fa27b2e0d73c12d12ace4b4d4e12897 | ["BSD-3-Clause"] | 68 | 2021-08-30T05:08:25.000Z | 2022-03-28T05:11:42.000Z | numpy/array_api/_statistical_functions.py | yashasvimisra2798/numpy | b892ed2c7fa27b2e0d73c12d12ace4b4d4e12897 | ["BSD-3-Clause"] | null | null | null |
from __future__ import annotations
from ._array_object import Array
from typing import Optional, Tuple, Union
import numpy as np
def max(
x: Array,
/,
*,
axis: Optional[Union[int, Tuple[int, ...]]] = None,
keepdims: bool = False,
) -> Array:
return Array._new(np.max(x._array, axis=axis, keepdims=keepdims))
def mean(
x: Array,
/,
*,
axis: Optional[Union[int, Tuple[int, ...]]] = None,
keepdims: bool = False,
) -> Array:
return Array._new(np.mean(x._array, axis=axis, keepdims=keepdims))
def min(
x: Array,
/,
*,
axis: Optional[Union[int, Tuple[int, ...]]] = None,
keepdims: bool = False,
) -> Array:
return Array._new(np.min(x._array, axis=axis, keepdims=keepdims))
def prod(
x: Array,
/,
*,
axis: Optional[Union[int, Tuple[int, ...]]] = None,
keepdims: bool = False,
) -> Array:
return Array._new(np.prod(x._array, axis=axis, keepdims=keepdims))
def std(
x: Array,
/,
*,
axis: Optional[Union[int, Tuple[int, ...]]] = None,
correction: Union[int, float] = 0.0,
keepdims: bool = False,
) -> Array:
# Note: the keyword argument correction is different here
return Array._new(np.std(x._array, axis=axis, ddof=correction, keepdims=keepdims))
def sum(
x: Array,
/,
*,
axis: Optional[Union[int, Tuple[int, ...]]] = None,
keepdims: bool = False,
) -> Array:
return Array._new(np.sum(x._array, axis=axis, keepdims=keepdims))
def var(
x: Array,
/,
*,
axis: Optional[Union[int, Tuple[int, ...]]] = None,
correction: Union[int, float] = 0.0,
keepdims: bool = False,
) -> Array:
# Note: the keyword argument correction is different here
return Array._new(np.var(x._array, axis=axis, ddof=correction, keepdims=keepdims))
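# Quick illustration of the `correction` notes above: the array API standard
# spells NumPy's `ddof` as `correction`, so `correction=1` requests the sample
# (Bessel-corrected) statistic. This assumes the sibling `asarray`
# constructor exported by the same numpy.array_api package.
#
# >>> from numpy.array_api import asarray
# >>> x = asarray([1.0, 2.0, 3.0, 4.0])
# >>> float(std(x))                # population std (correction=0)
# 1.118033988749895
# >>> float(std(x, correction=1))  # sample std, same as np.std(..., ddof=1)
# 1.2909944487358056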
| 22.085366 | 86 | 0.609056 | 233 | 1,811 | 4.648069 | 0.175966 | 0.077562 | 0.129271 | 0.116343 | 0.857802 | 0.857802 | 0.857802 | 0.705448 | 0.624192 | 0.624192 | 0 | 0.002855 | 0.226394 | 1,811 | 81 | 87 | 22.358025 | 0.770164 | 0.061292 | 0 | 0.709677 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.112903 | true | 0 | 0.064516 | 0.112903 | 0.290323 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 8 |
| 8981b74ed9826cb2311b32790d17e6e1b67ffa64 | 9,952 | py | Python | tests/unit/controllers/platform/test_delete.py | senstb/aws-elastic-beanstalk-cli | ef27ae50e8be34ccbe29bc6dc421323bddc3f485 | ["Apache-2.0"] | null | null | null | tests/unit/controllers/platform/test_delete.py | senstb/aws-elastic-beanstalk-cli | ef27ae50e8be34ccbe29bc6dc421323bddc3f485 | ["Apache-2.0"] | null | null | null | tests/unit/controllers/platform/test_delete.py | senstb/aws-elastic-beanstalk-cli | ef27ae50e8be34ccbe29bc6dc421323bddc3f485 | ["Apache-2.0"] | null | null | null |
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import shutil
import mock
import unittest
from ebcli.core.ebcore import EB
from ebcli.core.ebpcore import EBP
from ebcli.core import fileoperations
from ebcli.objects.platform import PlatformVersion
class TestDelete(unittest.TestCase):
platform = PlatformVersion(
'arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.5'
)
def setUp(self):
self.root_dir = os.getcwd()
if not os.path.exists('testDir'):
os.mkdir('testDir')
os.chdir('testDir')
fileoperations.create_config_file(
'my-application',
'us-west-2',
self.platform.name,
workspace_type='Platform'
)
def tearDown(self):
os.chdir(self.root_dir)
shutil.rmtree('testDir')
class TestEBPlatform(TestDelete):
@mock.patch('ebcli.controllers.platform.delete.platformops.delete_platform_version')
def test_delete__single_platform_version(
self,
delete_platform_version_mock
):
app = EB(argv=['platform', 'delete', '1.1.1', '--force'])
app.setup()
app.run()
delete_platform_version_mock.assert_called_once_with(
'1.1.1',
True
)
@mock.patch('ebcli.controllers.platform.delete.platformops.delete_platform_version')
def test_delete__version_not_specified(
self,
delete_platform_version_mock
):
app = EB(argv=['platform', 'delete', '--force'])
app.setup()
app.run()
delete_platform_version_mock.assert_not_called()
@mock.patch('ebcli.controllers.platform.delete.platformops.list_custom_platform_versions')
@mock.patch('ebcli.controllers.platform.delete.platformops.delete_platform_version')
@mock.patch('ebcli.controllers.platform.delete.io.validate_action')
def test_delete__all_failed_versions(
self,
validate_action_mock,
delete_platform_version_mock,
list_custom_platform_versions_mock
):
list_custom_platform_versions_mock.return_value = [
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.1',
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.0'
]
validate_action_mock.side_effect = None
app = EB(argv=['platform', 'delete', '--cleanup', '--all-platforms'])
app.setup()
app.run()
self.assertEqual(2, delete_platform_version_mock.call_count)
self.assertEqual(1, validate_action_mock.call_count)
delete_platform_version_mock.assert_has_calls(
[
mock.call(
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.0',
force=True
),
mock.call(
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.1',
force=True
)
]
)
@mock.patch('ebcli.controllers.platform.delete.platformops.list_custom_platform_versions')
@mock.patch('ebcli.controllers.platform.delete.platformops.delete_platform_version')
@mock.patch('ebcli.controllers.platform.delete.io.validate_action')
def test_delete__all_failed_versions__force(
self,
validate_action_mock,
delete_platform_version_mock,
list_custom_platform_versions_mock
):
list_custom_platform_versions_mock.return_value = [
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.1',
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.0'
]
validate_action_mock.side_effect = None
app = EB(argv=['platform', 'delete', '--cleanup', '--all-platforms', '--force'])
app.setup()
app.run()
self.assertEqual(2, delete_platform_version_mock.call_count)
validate_action_mock.assert_not_called()
delete_platform_version_mock.assert_has_calls(
[
mock.call(
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.0',
force=True
),
mock.call(
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.1',
force=True
)
]
)
class TestEBP(TestDelete):
@mock.patch('ebcli.controllers.platform.delete.platformops.delete_platform_version')
def test_delete__single_platform_version(
self,
delete_platform_version_mock
):
app = EBP(argv=['delete', '1.1.1', '--force'])
app.setup()
app.run()
delete_platform_version_mock.assert_called_once_with(
'1.1.1',
True
)
@mock.patch('ebcli.controllers.platform.delete.platformops.delete_platform_version')
def test_delete__version_not_specified(
self,
delete_platform_version_mock
):
app = EBP(argv=['delete', '--force'])
app.setup()
app.run()
delete_platform_version_mock.assert_not_called()
@mock.patch('ebcli.controllers.platform.delete.platformops.list_custom_platform_versions')
@mock.patch('ebcli.controllers.platform.delete.platformops.delete_platform_version')
@mock.patch('ebcli.controllers.platform.delete.io.validate_action')
def test_delete__all_failed_versions(
self,
validate_action_mock,
delete_platform_version_mock,
list_custom_platform_versions_mock
):
list_custom_platform_versions_mock.return_value = [
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.1',
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.0'
]
validate_action_mock.side_effect = None
app = EBP(argv=['delete', '--cleanup', '--all-platforms'])
app.setup()
app.run()
self.assertEqual(2, delete_platform_version_mock.call_count)
self.assertEqual(1, validate_action_mock.call_count)
@mock.patch('ebcli.controllers.platform.delete.platformops.list_custom_platform_versions')
@mock.patch('ebcli.controllers.platform.delete.platformops.delete_platform_version')
@mock.patch('ebcli.controllers.platform.delete.io.validate_action')
def test_delete__all_failed_versions__force(
self,
validate_action_mock,
delete_platform_version_mock,
list_custom_platform_versions_mock
):
list_custom_platform_versions_mock.return_value = [
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.1',
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.0'
]
validate_action_mock.side_effect = None
app = EBP(argv=['delete', '--cleanup', '--all-platforms', '--force'])
app.setup()
app.run()
self.assertEqual(2, delete_platform_version_mock.call_count)
validate_action_mock.assert_not_called()
delete_platform_version_mock.assert_has_calls(
[
mock.call(
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.0',
force=True
),
mock.call(
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.1',
force=True
)
]
)
@mock.patch('ebcli.controllers.platform.delete.platformops.list_custom_platform_versions')
@mock.patch('ebcli.controllers.platform.delete.platformops.delete_platform_version')
@mock.patch('ebcli.controllers.platform.delete.io.validate_action')
# Same scenario driven through the top-level `eb platform` entry point;
# distinct name so it does not shadow the EBP-based variant defined above.
def test_delete__all_failed_versions__force_via_eb_platform(
self,
validate_action_mock,
delete_platform_version_mock,
list_custom_platform_versions_mock
):
list_custom_platform_versions_mock.return_value = [
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.1',
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.0'
]
validate_action_mock.side_effect = None
app = EB(argv=['platform', 'delete', '--cleanup', '--all-platforms', '--force'])
app.setup()
app.run()
self.assertEqual(2, delete_platform_version_mock.call_count)
validate_action_mock.assert_not_called()
delete_platform_version_mock.assert_has_calls(
[
mock.call(
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.0',
force=True
),
mock.call(
'arn:aws:elasticbeanstalk:us-west-2:123123123123:platform/ibnlempnsr-custom-platform/1.0.1',
force=True
)
]
)
| 38.573643 | 112 | 0.64791 | 1,127 | 9,952 | 5.480923 | 0.133097 | 0.080136 | 0.105391 | 0.109276 | 0.846042 | 0.846042 | 0.841347 | 0.841347 | 0.841347 | 0.83908 | 0 | 0.043142 | 0.245378 | 9,952 | 258 | 113 | 38.573643 | 0.779361 | 0.053859 | 0 | 0.7277 | 0 | 0.089202 | 0.349638 | 0.309017 | 0 | 0 | 0 | 0 | 0.084507 | 1 | 0.051643 | false | 0 | 0.037559 | 0 | 0.107981 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
| 98674e193db111c337e4c86abdc1a8296db0205a | 9,647 | py | Python | app/apps/product/migrations/0001_initial.py | brsrtc/mini-erp-docker | f5c37c71384c76e029a26e89f4771a59ed02f925 | ["MIT"] | 1 | 2021-01-18T07:11:31.000Z | 2021-01-18T07:11:31.000Z | app/apps/product/migrations/0001_initial.py | brsrtc/mini-erp-docker | f5c37c71384c76e029a26e89f4771a59ed02f925 | ["MIT"] | null | null | null | app/apps/product/migrations/0001_initial.py | brsrtc/mini-erp-docker | f5c37c71384c76e029a26e89f4771a59ed02f925 | ["MIT"] | null | null | null |
# Generated by Django 3.1.3 on 2020-12-05 17:27
import django.core.validators
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import core.cache
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ProductType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True,
serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True,
verbose_name='Created At')),
('updated_at', models.DateTimeField(auto_now=True, null=True,
verbose_name='Updated At')),
('is_active',
models.BooleanField(default=True, verbose_name='Is Active')),
('is_deleted',
models.BooleanField(default=False, verbose_name='Is Deleted')),
('deleted_at', models.DateTimeField(blank=True, null=True,
verbose_name='Deleted At')),
('data', models.JSONField(blank=True, default=dict, null=True)),
('name',
models.CharField(max_length=128, verbose_name='Ürün Tipi')),
('created_by', models.ForeignKey(blank=True, null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name='product_producttype_created_by',
to=settings.AUTH_USER_MODEL,
verbose_name='Created By')),
('deleted_by', models.ForeignKey(blank=True, null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name='product_producttype_deleted_by',
to=settings.AUTH_USER_MODEL,
verbose_name='Deleted By')),
('updated_by', models.ForeignKey(blank=True, null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name='product_producttype_updated_by',
to=settings.AUTH_USER_MODEL,
verbose_name='Updated By')),
],
options={
'ordering': ['id'],
'abstract': False,
},
bases=(models.Model, core.cache.BaseCache),
),
migrations.CreateModel(
name='ProductSize',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True,
serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True,
verbose_name='Created At')),
('updated_at', models.DateTimeField(auto_now=True, null=True,
verbose_name='Updated At')),
('is_active',
models.BooleanField(default=True, verbose_name='Is Active')),
('is_deleted',
models.BooleanField(default=False, verbose_name='Is Deleted')),
('deleted_at', models.DateTimeField(blank=True, null=True,
verbose_name='Deleted At')),
('data', models.JSONField(blank=True, default=dict, null=True)),
('olcu', models.CharField(max_length=128, verbose_name='Ölçü')),
('bobin', models.FloatField(default=0, verbose_name='Bobin')),
('kesim', models.FloatField(default=0, verbose_name='Kesim')),
('sarim', models.FloatField(default=0, verbose_name='Sarım')),
('metre', models.FloatField(default=0, verbose_name='Metre')),
('sarim_adet_mt', models.CharField(max_length=128,
verbose_name='Sarım Adet/Mt')),
('koli_adeti', models.CharField(max_length=128,
verbose_name='Koli Adeti')),
('created_by', models.ForeignKey(blank=True, null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name='product_productsize_created_by',
to=settings.AUTH_USER_MODEL,
verbose_name='Created By')),
('deleted_by', models.ForeignKey(blank=True, null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name='product_productsize_deleted_by',
to=settings.AUTH_USER_MODEL,
verbose_name='Deleted By')),
('updated_by', models.ForeignKey(blank=True, null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name='product_productsize_updated_by',
to=settings.AUTH_USER_MODEL,
verbose_name='Updated By')),
],
options={
'ordering': ['id'],
'abstract': False,
},
bases=(models.Model, core.cache.BaseCache),
),
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True,
serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True,
verbose_name='Created At')),
('updated_at', models.DateTimeField(auto_now=True, null=True,
verbose_name='Updated At')),
('is_active',
models.BooleanField(default=True, verbose_name='Is Active')),
('is_deleted',
models.BooleanField(default=False, verbose_name='Is Deleted')),
('deleted_at', models.DateTimeField(blank=True, null=True,
verbose_name='Deleted At')),
('data', models.JSONField(blank=True, default=dict, null=True)),
('name',
models.CharField(max_length=256, verbose_name='Ürün Adı')),
('description',
models.CharField(blank=True, max_length=512, null=True,
verbose_name='Stok Açıklaması')),
('package_unit', models.FloatField(default=1, validators=[
django.core.validators.MinValueValidator(0.0)],
verbose_name='Ambalaj Birimi')),
('buying_price',
models.FloatField(default=0, verbose_name='Alış Fiyatı')),
('vat_rate', models.FloatField(default=1, verbose_name='KDV')),
('selling_price',
models.FloatField(default=0, verbose_name='Satış Fiyatı')),
('created_by', models.ForeignKey(blank=True, null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name='product_product_created_by',
to=settings.AUTH_USER_MODEL,
verbose_name='Created By')),
('deleted_by', models.ForeignKey(blank=True, null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name='product_product_deleted_by',
to=settings.AUTH_USER_MODEL,
verbose_name='Deleted By')),
('product_type', models.ForeignKey(blank=True, null=True,
on_delete=django.db.models.deletion.PROTECT,
to='product.producttype',
verbose_name='Cinsi')),
('updated_by', models.ForeignKey(blank=True, null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name='product_product_updated_by',
to=settings.AUTH_USER_MODEL,
verbose_name='Updated By')),
],
options={
'ordering': ['id'],
'abstract': False,
},
bases=(models.Model, core.cache.BaseCache),
),
]
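# For orientation, a rough reconstruction of the Product model implied by the
# operations above (field subset only; inferred from this migration rather
# than copied from the app's actual models.py, and the shared audit fields
# are assumed to live on an abstract base class):
#
# class Product(models.Model):
#     name = models.CharField(max_length=256, verbose_name='Ürün Adı')
#     description = models.CharField(blank=True, max_length=512, null=True,
#                                    verbose_name='Stok Açıklaması')
#     package_unit = models.FloatField(
#         default=1,
#         validators=[django.core.validators.MinValueValidator(0.0)],
#         verbose_name='Ambalaj Birimi')
#     buying_price = models.FloatField(default=0, verbose_name='Alış Fiyatı')
#     vat_rate = models.FloatField(default=1, verbose_name='KDV')
#     selling_price = models.FloatField(default=0, verbose_name='Satış Fiyatı')
#     product_type = models.ForeignKey('product.ProductType', blank=True, null=True,
#                                      on_delete=django.db.models.deletion.PROTECT,
#                                      verbose_name='Cinsi')
#
#     class Meta:
#         ordering = ['id']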
| 59.549383 | 95 | 0.453923 | 775 | 9,647 | 5.44129 | 0.147097 | 0.109557 | 0.04553 | 0.052407 | 0.823097 | 0.823097 | 0.789898 | 0.758596 | 0.758596 | 0.758596 | 0 | 0.008058 | 0.446875 | 9,647 | 161 | 96 | 59.919255 | 0.782234 | 0.004665 | 0 | 0.675325 | 1 | 0 | 0.115417 | 0.026875 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.032468 | 0 | 0.058442 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
| 98a365a11a329d5618407ff9a664f8995a2cf8e2 | 118 | py | Python | accessify/spotify/__init__.py | jscholes/accessify-prototype | 9cee5e6fff6621a6eb4dc1cf6e353669cd02d33b | ["Apache-2.0"] | null | null | null | accessify/spotify/__init__.py | jscholes/accessify-prototype | 9cee5e6fff6621a6eb4dc1cf6e353669cd02d33b | ["Apache-2.0"] | null | null | null | accessify/spotify/__init__.py | jscholes/accessify-prototype | 9cee5e6fff6621a6eb4dc1cf6e353669cd02d33b | ["Apache-2.0"] | null | null | null |
from accessify.spotify import eventmanager
from accessify.spotify import remote
from accessify.spotify import webapi
| 23.6 | 42 | 0.864407 | 15 | 118 | 6.8 | 0.466667 | 0.382353 | 0.588235 | 0.764706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.110169 | 118 | 4 | 43 | 29.5 | 0.971429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
| 7f6502bbe1e7b8eba70ef0ee61606f540d255deb | 341,671 | py | Python | boto3_type_annotations_with_docs/boto3_type_annotations/codedeploy/client.py | cowboygneox/boto3_type_annotations | 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | ["MIT"] | 119 | 2018-12-01T18:20:57.000Z | 2022-02-02T10:31:29.000Z | boto3_type_annotations_with_docs/boto3_type_annotations/codedeploy/client.py | cowboygneox/boto3_type_annotations | 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | ["MIT"] | 15 | 2018-11-16T00:16:44.000Z | 2021-11-13T03:44:18.000Z | boto3_type_annotations_with_docs/boto3_type_annotations/codedeploy/client.py | cowboygneox/boto3_type_annotations | 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | ["MIT"] | 11 | 2019-05-06T05:26:51.000Z | 2021-09-28T15:27:59.000Z |
from typing import Optional
from botocore.client import BaseClient
from typing import Dict
from botocore.paginate import Paginator
from botocore.waiter import Waiter
from typing import Union
from typing import List
class Client(BaseClient):
def add_tags_to_on_premises_instances(self, tags: List, instanceNames: List):
"""
Adds tags to on-premises instances.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/AddTagsToOnPremisesInstances>`_
**Request Syntax**
::
response = client.add_tags_to_on_premises_instances(
tags=[
{
'Key': 'string',
'Value': 'string'
},
],
instanceNames=[
'string',
]
)
:type tags: list
:param tags: **[REQUIRED]**
The tag key-value pairs to add to the on-premises instances.
Keys and values are both required. Keys cannot be null or empty strings. Value-only tags are not allowed.
- *(dict) --*
Information about a tag.
- **Key** *(string) --*
The tag\'s key.
- **Value** *(string) --*
The tag\'s value.
:type instanceNames: list
:param instanceNames: **[REQUIRED]**
The names of the on-premises instances to which to add tags.
- *(string) --*
:returns: None
"""
pass
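# Illustrative call against the real service for the stub documented above
# (assumes standard boto3 client construction; region, tag values, and
# instance names are placeholders):
#
# import boto3
# client = boto3.client('codedeploy', region_name='us-east-1')
# client.add_tags_to_on_premises_instances(
#     tags=[{'Key': 'env', 'Value': 'staging'}],
#     instanceNames=['on-prem-instance-1'],
# )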
def batch_get_application_revisions(self, applicationName: str, revisions: List) -> Dict:
"""
Gets information about one or more application revisions.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/BatchGetApplicationRevisions>`_
**Request Syntax**
::
response = client.batch_get_application_revisions(
applicationName='string',
revisions=[
{
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
},
]
)
**Response Syntax**
::
{
'applicationName': 'string',
'errorMessage': 'string',
'revisions': [
{
'revisionLocation': {
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
},
'genericRevisionInfo': {
'description': 'string',
'deploymentGroups': [
'string',
],
'firstUsedTime': datetime(2015, 1, 1),
'lastUsedTime': datetime(2015, 1, 1),
'registerTime': datetime(2015, 1, 1)
}
},
]
}
**Response Structure**
- *(dict) --*
Represents the output of a BatchGetApplicationRevisions operation.
- **applicationName** *(string) --*
The name of the application that corresponds to the revisions.
- **errorMessage** *(string) --*
Information about errors that might have occurred during the API call.
- **revisions** *(list) --*
Additional information about the revisions, including the type and location.
- *(dict) --*
Information about an application revision.
- **revisionLocation** *(dict) --*
Information about the location and type of an application revision.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall`` , during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **genericRevisionInfo** *(dict) --*
Information about an application revision, including usage details and associated deployment groups.
- **description** *(string) --*
A comment about the revision.
- **deploymentGroups** *(list) --*
The deployment groups for which this is the current target revision.
- *(string) --*
- **firstUsedTime** *(datetime) --*
When the revision was first used by AWS CodeDeploy.
- **lastUsedTime** *(datetime) --*
When the revision was last used by AWS CodeDeploy.
- **registerTime** *(datetime) --*
When the revision was registered with AWS CodeDeploy.
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of an AWS CodeDeploy application about which to get revision information.
:type revisions: list
:param revisions: **[REQUIRED]**
Information to get about the application revisions, including type and location.
- *(dict) --*
Information about the location of an application revision.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall`` , during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
:rtype: dict
:returns:
"""
pass
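# Editor's sketch: fetching details for a single S3-hosted revision of a
# hypothetical application; the application name, bucket, and key below are
# placeholders, while the parameter shapes follow the request syntax above.
import boto3
codedeploy = boto3.client('codedeploy')
revisions_resp = codedeploy.batch_get_application_revisions(
    applicationName='MyApp',  # hypothetical application name
    revisions=[{
        'revisionType': 'S3',
        's3Location': {'bucket': 'my-bucket', 'key': 'MyApp.zip', 'bundleType': 'zip'},
    }],
)
for revision in revisions_resp['revisions']:
    print(revision['revisionLocation']['revisionType'])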
def batch_get_applications(self, applicationNames: List) -> Dict:
"""
Gets information about one or more applications.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/BatchGetApplications>`_
**Request Syntax**
::
response = client.batch_get_applications(
applicationNames=[
'string',
]
)
**Response Syntax**
::
{
'applicationsInfo': [
{
'applicationId': 'string',
'applicationName': 'string',
'createTime': datetime(2015, 1, 1),
'linkedToGitHub': True|False,
'gitHubAccountName': 'string',
'computePlatform': 'Server'|'Lambda'|'ECS'
},
]
}
**Response Structure**
- *(dict) --*
Represents the output of a BatchGetApplications operation.
- **applicationsInfo** *(list) --*
Information about the applications.
- *(dict) --*
Information about an application.
- **applicationId** *(string) --*
The application ID.
- **applicationName** *(string) --*
The application name.
- **createTime** *(datetime) --*
The time at which the application was created.
- **linkedToGitHub** *(boolean) --*
True if the user has authenticated with GitHub for the specified application. Otherwise, false.
- **gitHubAccountName** *(string) --*
The name for a connection to a GitHub account.
- **computePlatform** *(string) --*
The destination platform type for deployment of the application (``Lambda`` or ``Server`` ).
:type applicationNames: list
:param applicationNames: **[REQUIRED]**
A list of application names separated by spaces.
- *(string) --*
:rtype: dict
:returns:
"""
pass
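# Editor's sketch: listing basic details for a hypothetical application name,
# reading fields named in the response structure above.
import boto3
codedeploy = boto3.client('codedeploy')
apps_resp = codedeploy.batch_get_applications(applicationNames=['MyApp'])
for app in apps_resp['applicationsInfo']:
    print(app['applicationName'], app.get('computePlatform'))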
def batch_get_deployment_groups(self, applicationName: str, deploymentGroupNames: List) -> Dict:
"""
Gets information about one or more deployment groups.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/BatchGetDeploymentGroups>`_
**Request Syntax**
::
response = client.batch_get_deployment_groups(
applicationName='string',
deploymentGroupNames=[
'string',
]
)
**Response Syntax**
::
{
'deploymentGroupsInfo': [
{
'applicationName': 'string',
'deploymentGroupId': 'string',
'deploymentGroupName': 'string',
'deploymentConfigName': 'string',
'ec2TagFilters': [
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
'onPremisesInstanceTagFilters': [
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
'autoScalingGroups': [
{
'name': 'string',
'hook': 'string'
},
],
'serviceRoleArn': 'string',
'targetRevision': {
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
},
'triggerConfigurations': [
{
'triggerName': 'string',
'triggerTargetArn': 'string',
'triggerEvents': [
'DeploymentStart'|'DeploymentSuccess'|'DeploymentFailure'|'DeploymentStop'|'DeploymentRollback'|'DeploymentReady'|'InstanceStart'|'InstanceSuccess'|'InstanceFailure'|'InstanceReady',
]
},
],
'alarmConfiguration': {
'enabled': True|False,
'ignorePollAlarmFailure': True|False,
'alarms': [
{
'name': 'string'
},
]
},
'autoRollbackConfiguration': {
'enabled': True|False,
'events': [
'DEPLOYMENT_FAILURE'|'DEPLOYMENT_STOP_ON_ALARM'|'DEPLOYMENT_STOP_ON_REQUEST',
]
},
'deploymentStyle': {
'deploymentType': 'IN_PLACE'|'BLUE_GREEN',
'deploymentOption': 'WITH_TRAFFIC_CONTROL'|'WITHOUT_TRAFFIC_CONTROL'
},
'blueGreenDeploymentConfiguration': {
'terminateBlueInstancesOnDeploymentSuccess': {
'action': 'TERMINATE'|'KEEP_ALIVE',
'terminationWaitTimeInMinutes': 123
},
'deploymentReadyOption': {
'actionOnTimeout': 'CONTINUE_DEPLOYMENT'|'STOP_DEPLOYMENT',
'waitTimeInMinutes': 123
},
'greenFleetProvisioningOption': {
'action': 'DISCOVER_EXISTING'|'COPY_AUTO_SCALING_GROUP'
}
},
'loadBalancerInfo': {
'elbInfoList': [
{
'name': 'string'
},
],
'targetGroupInfoList': [
{
'name': 'string'
},
],
'targetGroupPairInfoList': [
{
'targetGroups': [
{
'name': 'string'
},
],
'prodTrafficRoute': {
'listenerArns': [
'string',
]
},
'testTrafficRoute': {
'listenerArns': [
'string',
]
}
},
]
},
'lastSuccessfulDeployment': {
'deploymentId': 'string',
'status': 'Created'|'Queued'|'InProgress'|'Succeeded'|'Failed'|'Stopped'|'Ready',
'endTime': datetime(2015, 1, 1),
'createTime': datetime(2015, 1, 1)
},
'lastAttemptedDeployment': {
'deploymentId': 'string',
'status': 'Created'|'Queued'|'InProgress'|'Succeeded'|'Failed'|'Stopped'|'Ready',
'endTime': datetime(2015, 1, 1),
'createTime': datetime(2015, 1, 1)
},
'ec2TagSet': {
'ec2TagSetList': [
[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
]
},
'onPremisesTagSet': {
'onPremisesTagSetList': [
[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
]
},
'computePlatform': 'Server'|'Lambda'|'ECS',
'ecsServices': [
{
'serviceName': 'string',
'clusterName': 'string'
},
]
},
],
'errorMessage': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a BatchGetDeploymentGroups operation.
- **deploymentGroupsInfo** *(list) --*
Information about the deployment groups.
- *(dict) --*
Information about a deployment group.
- **applicationName** *(string) --*
The application name.
- **deploymentGroupId** *(string) --*
The deployment group ID.
- **deploymentGroupName** *(string) --*
The deployment group name.
- **deploymentConfigName** *(string) --*
The deployment configuration name.
- **ec2TagFilters** *(list) --*
The Amazon EC2 tags on which to filter. The deployment group includes EC2 instances with any of the specified tags.
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **onPremisesInstanceTagFilters** *(list) --*
The on-premises instance tags on which to filter. The deployment group includes on-premises instances with any of the specified tags.
- *(dict) --*
Information about an on-premises instance tag filter.
- **Key** *(string) --*
The on-premises instance tag filter key.
- **Value** *(string) --*
The on-premises instance tag filter value.
- **Type** *(string) --*
The on-premises instance tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **autoScalingGroups** *(list) --*
A list of associated Auto Scaling groups.
- *(dict) --*
Information about an Auto Scaling group.
- **name** *(string) --*
The Auto Scaling group name.
- **hook** *(string) --*
An Auto Scaling lifecycle event hook name.
- **serviceRoleArn** *(string) --*
A service role ARN.
- **targetRevision** *(dict) --*
Information about the deployment group's target revision, including type and location.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall`` , during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **triggerConfigurations** *(list) --*
Information about triggers associated with the deployment group.
- *(dict) --*
Information about notification triggers for the deployment group.
- **triggerName** *(string) --*
The name of the notification trigger.
- **triggerTargetArn** *(string) --*
The ARN of the Amazon Simple Notification Service topic through which notifications about deployment or instance events are sent.
- **triggerEvents** *(list) --*
The event type or types for which notifications are triggered.
- *(string) --*
- **alarmConfiguration** *(dict) --*
A list of alarms associated with the deployment group.
- **enabled** *(boolean) --*
Indicates whether the alarm configuration is enabled.
- **ignorePollAlarmFailure** *(boolean) --*
Indicates whether a deployment should continue if information about the current state of alarms cannot be retrieved from Amazon CloudWatch. The default value is false.
* true: The deployment proceeds even if alarm status information can't be retrieved from Amazon CloudWatch.
* false: The deployment stops if alarm status information can't be retrieved from Amazon CloudWatch.
- **alarms** *(list) --*
A list of alarms configured for the deployment group. A maximum of 10 alarms can be added to a deployment group.
- *(dict) --*
Information about an alarm.
- **name** *(string) --*
The name of the alarm. Maximum length is 255 characters. Each alarm name can be used only once in a list of alarms.
- **autoRollbackConfiguration** *(dict) --*
Information about the automatic rollback configuration associated with the deployment group.
- **enabled** *(boolean) --*
Indicates whether a defined automatic rollback configuration is currently enabled.
- **events** *(list) --*
The event type or types that trigger a rollback.
- *(string) --*
- **deploymentStyle** *(dict) --*
Information about the type of deployment, either in-place or blue/green, you want to run and whether to route deployment traffic behind a load balancer.
- **deploymentType** *(string) --*
Indicates whether to run an in-place deployment or a blue/green deployment.
- **deploymentOption** *(string) --*
Indicates whether to route deployment traffic behind a load balancer.
- **blueGreenDeploymentConfiguration** *(dict) --*
Information about blue/green deployment options for a deployment group.
- **terminateBlueInstancesOnDeploymentSuccess** *(dict) --*
Information about whether to terminate instances in the original fleet during a blue/green deployment.
- **action** *(string) --*
The action to take on instances in the original environment after a successful blue/green deployment.
* TERMINATE: Instances are terminated after a specified wait time.
* KEEP_ALIVE: Instances are left running after they are deregistered from the load balancer and removed from the deployment group.
- **terminationWaitTimeInMinutes** *(integer) --*
The number of minutes to wait after a successful blue/green deployment before terminating instances from the original environment. The maximum setting is 2880 minutes (2 days).
- **deploymentReadyOption** *(dict) --*
Information about the action to take when newly provisioned instances are ready to receive traffic in a blue/green deployment.
- **actionOnTimeout** *(string) --*
Information about when to reroute traffic from an original environment to a replacement environment in a blue/green deployment.
* CONTINUE_DEPLOYMENT: Register new instances with the load balancer immediately after the new application revision is installed on the instances in the replacement environment.
* STOP_DEPLOYMENT: Do not register new instances with a load balancer unless traffic rerouting is started using ContinueDeployment . If traffic rerouting is not started before the end of the specified wait period, the deployment status is changed to Stopped.
- **waitTimeInMinutes** *(integer) --*
The number of minutes to wait before the status of a blue/green deployment is changed to Stopped if rerouting is not started manually. Applies only to the STOP_DEPLOYMENT option for actionOnTimeout.
- **greenFleetProvisioningOption** *(dict) --*
Information about how instances are provisioned for a replacement environment in a blue/green deployment.
- **action** *(string) --*
The method used to add instances to a replacement environment.
* DISCOVER_EXISTING: Use instances that already exist or will be created manually.
* COPY_AUTO_SCALING_GROUP: Use settings from a specified Auto Scaling group to define and create instances in a new Auto Scaling group.
- **loadBalancerInfo** *(dict) --*
Information about the load balancer to use in a deployment.
- **elbInfoList** *(list) --*
An array that contains information about the load balancer to use for load balancing in a deployment. In Elastic Load Balancing, load balancers are used with Classic Load Balancers.
.. note::
Adding more than one load balancer to the array is not supported.
- *(dict) --*
Information about a load balancer in Elastic Load Balancing to use in a deployment. Instances are registered directly with a load balancer, and traffic is routed to the load balancer.
- **name** *(string) --*
For blue/green deployments, the name of the load balancer that is used to route traffic from original instances to replacement instances in a blue/green deployment. For in-place deployments, the name of the load balancer that instances are deregistered from so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **targetGroupInfoList** *(list) --*
An array that contains information about the target group to use for load balancing in a deployment. In Elastic Load Balancing, target groups are used with Application Load Balancers.
.. note::
Adding more than one target group to the array is not supported.
- *(dict) --*
Information about a target group in Elastic Load Balancing to use in a deployment. Instances are registered as targets in a target group, and traffic is routed to the target group.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **targetGroupPairInfoList** *(list) --*
The target group pair information. This is an array of ``TargetGroupPairInfo`` objects with a maximum size of one.
- *(dict) --*
Information about two target groups and how traffic is routed during an Amazon ECS deployment. An optional test traffic route can be specified.
- **targetGroups** *(list) --*
One pair of target groups. One is associated with the original task set. The second is associated with the task set that serves traffic after the deployment is complete.
- *(dict) --*
Information about a target group in Elastic Load Balancing to use in a deployment. Instances are registered as targets in a target group, and traffic is routed to the target group.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **prodTrafficRoute** *(dict) --*
The path used by a load balancer to route production traffic when an Amazon ECS deployment is complete.
- **listenerArns** *(list) --*
The ARN of one listener. The listener identifies the route between a target group and a load balancer. This is an array of strings with a maximum size of one.
- *(string) --*
- **testTrafficRoute** *(dict) --*
An optional path used by a load balancer to route test traffic after an Amazon ECS deployment. Validation can occur while test traffic is served during a deployment.
- **listenerArns** *(list) --*
The ARN of one listener. The listener identifies the route between a target group and a load balancer. This is an array of strings with a maximum size of one.
- *(string) --*
- **lastSuccessfulDeployment** *(dict) --*
Information about the most recent successful deployment to the deployment group.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **status** *(string) --*
The status of the most recent deployment.
- **endTime** *(datetime) --*
A timestamp that indicates when the most recent deployment to the deployment group was complete.
- **createTime** *(datetime) --*
A timestamp that indicates when the most recent deployment to the deployment group started.
- **lastAttemptedDeployment** *(dict) --*
Information about the most recent attempted deployment to the deployment group.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **status** *(string) --*
The status of the most recent deployment.
- **endTime** *(datetime) --*
A timestamp that indicates when the most recent deployment to the deployment group was complete.
- **createTime** *(datetime) --*
A timestamp that indicates when the most recent deployment to the deployment group started.
- **ec2TagSet** *(dict) --*
Information about groups of tags applied to an EC2 instance. The deployment group includes only EC2 instances identified by all of the tag groups. Cannot be used in the same call as ec2TagFilters.
- **ec2TagSetList** *(list) --*
A list that contains other lists of EC2 instance tag groups. For an instance to be included in the deployment group, it must be identified by all of the tag groups in the list.
- *(list) --*
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **onPremisesTagSet** *(dict) --*
Information about groups of tags applied to an on-premises instance. The deployment group includes only on-premises instances identified by all the tag groups. Cannot be used in the same call as onPremisesInstanceTagFilters.
- **onPremisesTagSetList** *(list) --*
A list that contains other lists of on-premises instance tag groups. For an instance to be included in the deployment group, it must be identified by all of the tag groups in the list.
- *(list) --*
- *(dict) --*
Information about an on-premises instance tag filter.
- **Key** *(string) --*
The on-premises instance tag filter key.
- **Value** *(string) --*
The on-premises instance tag filter value.
- **Type** *(string) --*
The on-premises instance tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **computePlatform** *(string) --*
The destination platform type for the deployment group (``Lambda`` or ``Server`` ).
- **ecsServices** *(list) --*
The target Amazon ECS services in the deployment group. This applies only to deployment groups that use the Amazon ECS compute platform. A target Amazon ECS service is specified as an Amazon ECS cluster and service name pair using the format ``<clustername>:<servicename>`` .
- *(dict) --*
Contains the service and cluster names used to identify an Amazon ECS deployment's target.
- **serviceName** *(string) --*
The name of the target Amazon ECS service.
- **clusterName** *(string) --*
The name of the cluster that the Amazon ECS service is associated with.
- **errorMessage** *(string) --*
Information about errors that might have occurred during the API call.
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of an AWS CodeDeploy application associated with the applicable IAM user or AWS account.
:type deploymentGroupNames: list
:param deploymentGroupNames: **[REQUIRED]**
The names of the deployment groups.
- *(string) --*
:rtype: dict
:returns:
"""
pass
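# Editor's sketch: inspecting one deployment group of a hypothetical
# application; both names are placeholders, and optional response fields are
# read defensively since they may be absent.
import boto3
codedeploy = boto3.client('codedeploy')
groups_resp = codedeploy.batch_get_deployment_groups(
    applicationName='MyApp',  # hypothetical application name
    deploymentGroupNames=['MyApp-Staging'],  # hypothetical group name
)
for group in groups_resp['deploymentGroupsInfo']:
    print(group['deploymentGroupName'], group.get('computePlatform'))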
def batch_get_deployment_instances(self, deploymentId: str, instanceIds: List) -> Dict:
"""
.. note::
This method works, but is deprecated. Use ``BatchGetDeploymentTargets`` instead.
Returns an array of instances associated with a deployment. This method works with EC2/On-premises and AWS Lambda compute platforms. The newer ``BatchGetDeploymentTargets`` works with all compute platforms.
.. danger::
This operation is deprecated and may not function as expected. It should not be used going forward and is kept only for backward compatibility.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/BatchGetDeploymentInstances>`_
**Request Syntax**
::
response = client.batch_get_deployment_instances(
deploymentId='string',
instanceIds=[
'string',
]
)
**Response Syntax**
::
{
'instancesSummary': [
{
'deploymentId': 'string',
'instanceId': 'string',
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'|'Ready',
'lastUpdatedAt': datetime(2015, 1, 1),
'lifecycleEvents': [
{
'lifecycleEventName': 'string',
'diagnostics': {
'errorCode': 'Success'|'ScriptMissing'|'ScriptNotExecutable'|'ScriptTimedOut'|'ScriptFailed'|'UnknownError',
'scriptName': 'string',
'message': 'string',
'logTail': 'string'
},
'startTime': datetime(2015, 1, 1),
'endTime': datetime(2015, 1, 1),
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'
},
],
'instanceType': 'Blue'|'Green'
},
],
'errorMessage': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a BatchGetDeploymentInstances operation.
- **instancesSummary** *(list) --*
Information about the instance.
- *(dict) --*
Information about an instance in a deployment.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **instanceId** *(string) --*
The instance ID.
- **status** *(string) --*
The deployment status for this instance:
* Pending: The deployment is pending for this instance.
* In Progress: The deployment is in progress for this instance.
* Succeeded: The deployment has succeeded for this instance.
* Failed: The deployment has failed for this instance.
* Skipped: The deployment has been skipped for this instance.
* Unknown: The deployment status is unknown for this instance.
- **lastUpdatedAt** *(datetime) --*
A timestamp that indicates when the instance information was last updated.
- **lifecycleEvents** *(list) --*
A list of lifecycle events for this instance.
- *(dict) --*
Information about a deployment lifecycle event.
- **lifecycleEventName** *(string) --*
The deployment lifecycle event name, such as ApplicationStop, BeforeInstall, AfterInstall, ApplicationStart, or ValidateService.
- **diagnostics** *(dict) --*
Diagnostic information about the deployment lifecycle event.
- **errorCode** *(string) --*
The associated error code:
* Success: The specified script ran.
* ScriptMissing: The specified script was not found in the specified location.
* ScriptNotExecutable: The specified script is not a recognized executable file type.
* ScriptTimedOut: The specified script did not finish running in the specified time period.
* ScriptFailed: The specified script failed to run as expected.
* UnknownError: The specified script did not run for an unknown reason.
- **scriptName** *(string) --*
The name of the script.
- **message** *(string) --*
The message associated with the error.
- **logTail** *(string) --*
The last portion of the diagnostic log.
If available, AWS CodeDeploy returns up to the last 4 KB of the diagnostic log.
- **startTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event started.
- **endTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event ended.
- **status** *(string) --*
The deployment lifecycle event status:
* Pending: The deployment lifecycle event is pending.
* InProgress: The deployment lifecycle event is in progress.
* Succeeded: The deployment lifecycle event ran successfully.
* Failed: The deployment lifecycle event has failed.
* Skipped: The deployment lifecycle event has been skipped.
* Unknown: The deployment lifecycle event is unknown.
- **instanceType** *(string) --*
Information about which environment an instance belongs to in a blue/green deployment.
* BLUE: The instance is part of the original environment.
* GREEN: The instance is part of the replacement environment.
- **errorMessage** *(string) --*
Information about errors that might have occurred during the API call.
:type deploymentId: string
:param deploymentId: **[REQUIRED]**
The unique ID of a deployment.
:type instanceIds: list
:param instanceIds: **[REQUIRED]**
The unique IDs of instances used in the deployment.
- *(string) --*
:rtype: dict
:returns:
"""
pass
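# Editor's sketch: the deprecated per-instance lookup; per the docstring
# above, new code should prefer batch_get_deployment_targets. Both IDs below
# are illustrative placeholders.
import boto3
codedeploy = boto3.client('codedeploy')
instances_resp = codedeploy.batch_get_deployment_instances(
    deploymentId='d-EXAMPLE123',  # hypothetical deployment ID
    instanceIds=['i-0123456789abcdef0'],  # hypothetical EC2 instance ID
)
for summary in instances_resp['instancesSummary']:
    print(summary['instanceId'], summary['status'])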
def batch_get_deployment_targets(self, deploymentId: str = None, targetIds: List = None) -> Dict:
"""
Returns an array of targets associated with a deployment. This method works with all compute types and should be used instead of the deprecated ``BatchGetDeploymentInstances`` .
The type of targets returned depends on the deployment's compute platform:
* **EC2/On-premises** : Information about EC2 instance targets.
* **AWS Lambda** : Information about Lambda function targets.
* **Amazon ECS** : Information about Amazon ECS service targets.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/BatchGetDeploymentTargets>`_
**Request Syntax**
::
response = client.batch_get_deployment_targets(
deploymentId='string',
targetIds=[
'string',
]
)
**Response Syntax**
::
{
'deploymentTargets': [
{
'deploymentTargetType': 'InstanceTarget'|'LambdaTarget'|'ECSTarget',
'instanceTarget': {
'deploymentId': 'string',
'targetId': 'string',
'targetArn': 'string',
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'|'Ready',
'lastUpdatedAt': datetime(2015, 1, 1),
'lifecycleEvents': [
{
'lifecycleEventName': 'string',
'diagnostics': {
'errorCode': 'Success'|'ScriptMissing'|'ScriptNotExecutable'|'ScriptTimedOut'|'ScriptFailed'|'UnknownError',
'scriptName': 'string',
'message': 'string',
'logTail': 'string'
},
'startTime': datetime(2015, 1, 1),
'endTime': datetime(2015, 1, 1),
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'
},
],
'instanceLabel': 'Blue'|'Green'
},
'lambdaTarget': {
'deploymentId': 'string',
'targetId': 'string',
'targetArn': 'string',
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'|'Ready',
'lastUpdatedAt': datetime(2015, 1, 1),
'lifecycleEvents': [
{
'lifecycleEventName': 'string',
'diagnostics': {
'errorCode': 'Success'|'ScriptMissing'|'ScriptNotExecutable'|'ScriptTimedOut'|'ScriptFailed'|'UnknownError',
'scriptName': 'string',
'message': 'string',
'logTail': 'string'
},
'startTime': datetime(2015, 1, 1),
'endTime': datetime(2015, 1, 1),
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'
},
]
},
'ecsTarget': {
'deploymentId': 'string',
'targetId': 'string',
'targetArn': 'string',
'lastUpdatedAt': datetime(2015, 1, 1),
'lifecycleEvents': [
{
'lifecycleEventName': 'string',
'diagnostics': {
'errorCode': 'Success'|'ScriptMissing'|'ScriptNotExecutable'|'ScriptTimedOut'|'ScriptFailed'|'UnknownError',
'scriptName': 'string',
'message': 'string',
'logTail': 'string'
},
'startTime': datetime(2015, 1, 1),
'endTime': datetime(2015, 1, 1),
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'
},
],
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'|'Ready',
'taskSetsInfo': [
{
'identifer': 'string',
'desiredCount': 123,
'pendingCount': 123,
'runningCount': 123,
'status': 'string',
'trafficWeight': 123.0,
'targetGroup': {
'name': 'string'
},
'taskSetLabel': 'Blue'|'Green'
},
]
}
},
]
}
**Response Structure**
- *(dict) --*
- **deploymentTargets** *(list) --*
A list of target objects for a deployment. Each target object contains details about the target, such as its status and lifecycle events. The type of the target objects depends on the deployment's compute platform.
* **EC2/On-premises** : Each target object is an EC2 or on-premises instance.
* **AWS Lambda** : The target object is a specific version of an AWS Lambda function.
* **Amazon ECS** : The target object is an Amazon ECS service.
- *(dict) --*
Information about the deployment target.
- **deploymentTargetType** *(string) --*
The deployment type that is specific to the deployment's compute platform.
- **instanceTarget** *(dict) --*
Information about the target for a deployment that uses the EC2/On-premises compute platform.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **targetId** *(string) --*
The unique ID of a deployment target that has a type of ``instanceTarget`` .
- **targetArn** *(string) --*
The ARN of the target.
- **status** *(string) --*
The status of an EC2/On-premises deployment's target instance.
- **lastUpdatedAt** *(datetime) --*
The date and time when the target instance was updated by a deployment.
- **lifecycleEvents** *(list) --*
The lifecycle events of the deployment to this target instance.
- *(dict) --*
Information about a deployment lifecycle event.
- **lifecycleEventName** *(string) --*
The deployment lifecycle event name, such as ApplicationStop, BeforeInstall, AfterInstall, ApplicationStart, or ValidateService.
- **diagnostics** *(dict) --*
Diagnostic information about the deployment lifecycle event.
- **errorCode** *(string) --*
The associated error code:
* Success: The specified script ran.
* ScriptMissing: The specified script was not found in the specified location.
* ScriptNotExecutable: The specified script is not a recognized executable file type.
* ScriptTimedOut: The specified script did not finish running in the specified time period.
* ScriptFailed: The specified script failed to run as expected.
* UnknownError: The specified script did not run for an unknown reason.
- **scriptName** *(string) --*
The name of the script.
- **message** *(string) --*
The message associated with the error.
- **logTail** *(string) --*
The last portion of the diagnostic log.
If available, AWS CodeDeploy returns up to the last 4 KB of the diagnostic log.
- **startTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event started.
- **endTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event ended.
- **status** *(string) --*
The deployment lifecycle event status:
* Pending: The deployment lifecycle event is pending.
* InProgress: The deployment lifecycle event is in progress.
* Succeeded: The deployment lifecycle event ran successfully.
* Failed: The deployment lifecycle event has failed.
* Skipped: The deployment lifecycle event has been skipped.
* Unknown: The deployment lifecycle event is unknown.
- **instanceLabel** *(string) --*
A label that identifies whether the instance is an original target (``BLUE`` ) or a replacement target (``GREEN`` ).
- **lambdaTarget** *(dict) --*
Information about the target for a deployment that uses the AWS Lambda compute platform.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **targetId** *(string) --*
The unique ID of a deployment target that has a type of ``lambdaTarget`` .
- **targetArn** *(string) --*
The ARN of the target.
- **status** *(string) --*
The status of an AWS Lambda deployment's target Lambda function.
- **lastUpdatedAt** *(datetime) --*
The date and time when the target Lambda function was updated by a deployment.
- **lifecycleEvents** *(list) --*
The lifecycle events of the deployment to this target Lambda function.
- *(dict) --*
Information about a deployment lifecycle event.
- **lifecycleEventName** *(string) --*
The deployment lifecycle event name, such as ApplicationStop, BeforeInstall, AfterInstall, ApplicationStart, or ValidateService.
- **diagnostics** *(dict) --*
Diagnostic information about the deployment lifecycle event.
- **errorCode** *(string) --*
The associated error code:
* Success: The specified script ran.
* ScriptMissing: The specified script was not found in the specified location.
* ScriptNotExecutable: The specified script is not a recognized executable file type.
* ScriptTimedOut: The specified script did not finish running in the specified time period.
* ScriptFailed: The specified script failed to run as expected.
* UnknownError: The specified script did not run for an unknown reason.
- **scriptName** *(string) --*
The name of the script.
- **message** *(string) --*
The message associated with the error.
- **logTail** *(string) --*
The last portion of the diagnostic log.
If available, AWS CodeDeploy returns up to the last 4 KB of the diagnostic log.
- **startTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event started.
- **endTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event ended.
- **status** *(string) --*
The deployment lifecycle event status:
* Pending: The deployment lifecycle event is pending.
* InProgress: The deployment lifecycle event is in progress.
* Succeeded: The deployment lifecycle event ran successfully.
* Failed: The deployment lifecycle event has failed.
* Skipped: The deployment lifecycle event has been skipped.
* Unknown: The deployment lifecycle event is unknown.
- **ecsTarget** *(dict) --*
Information about the target for a deployment that uses the Amazon ECS compute platform.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **targetId** *(string) --*
The unique ID of a deployment target that has a type of ``ecsTarget`` .
- **targetArn** *(string) --*
The ARN of the target.
- **lastUpdatedAt** *(datetime) --*
The date and time when the target Amazon ECS application was updated by a deployment.
- **lifecycleEvents** *(list) --*
The lifecycle events of the deployment to this target Amazon ECS application.
- *(dict) --*
Information about a deployment lifecycle event.
- **lifecycleEventName** *(string) --*
The deployment lifecycle event name, such as ApplicationStop, BeforeInstall, AfterInstall, ApplicationStart, or ValidateService.
- **diagnostics** *(dict) --*
Diagnostic information about the deployment lifecycle event.
- **errorCode** *(string) --*
The associated error code:
* Success: The specified script ran.
* ScriptMissing: The specified script was not found in the specified location.
* ScriptNotExecutable: The specified script is not a recognized executable file type.
* ScriptTimedOut: The specified script did not finish running in the specified time period.
* ScriptFailed: The specified script failed to run as expected.
* UnknownError: The specified script did not run for an unknown reason.
- **scriptName** *(string) --*
The name of the script.
- **message** *(string) --*
The message associated with the error.
- **logTail** *(string) --*
The last portion of the diagnostic log.
If available, AWS CodeDeploy returns up to the last 4 KB of the diagnostic log.
- **startTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event started.
- **endTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event ended.
- **status** *(string) --*
The deployment lifecycle event status:
* Pending: The deployment lifecycle event is pending.
* InProgress: The deployment lifecycle event is in progress.
* Succeeded: The deployment lifecycle event ran successfully.
* Failed: The deployment lifecycle event has failed.
* Skipped: The deployment lifecycle event has been skipped.
* Unknown: The deployment lifecycle event is unknown.
- **status** *(string) --*
The status of an Amazon ECS deployment's target ECS application.
- **taskSetsInfo** *(list) --*
The ``ECSTaskSet`` objects associated with the ECS target.
- *(dict) --*
Information about a set of Amazon ECS tasks in an AWS CodeDeploy deployment. An Amazon ECS task set includes details such as the desired number of tasks, how many tasks are running, and whether the task set serves production traffic. An AWS CodeDeploy application that uses the Amazon ECS compute platform deploys a containerized application in an Amazon ECS service as a task set.
- **identifer** *(string) --*
A unique ID of an ``ECSTaskSet`` .
- **desiredCount** *(integer) --*
The number of tasks in a task set. During a deployment that uses the Amazon ECS compute type, CodeDeploy instructs Amazon ECS to create a new task set and uses this value to determine how many tasks to create. After the updated task set is created, CodeDeploy shifts traffic to the new task set.
- **pendingCount** *(integer) --*
The number of tasks in the task set that are in the ``PENDING`` status during an Amazon ECS deployment. A task in the ``PENDING`` state is preparing to enter the ``RUNNING`` state. A task set enters the ``PENDING`` status when it launches for the first time, or when it is restarted after being in the ``STOPPED`` state.
- **runningCount** *(integer) --*
The number of tasks in the task set that are in the ``RUNNING`` status during an Amazon ECS deployment. A task in the ``RUNNING`` state is running and ready for use.
- **status** *(string) --*
The status of the task set. There are three valid task set statuses:
* ``PRIMARY`` : Indicates the task set is serving production traffic.
* ``ACTIVE`` : Indicates the task set is not serving production traffic.
* ``DRAINING`` : Indicates the tasks in the task set are being stopped and their corresponding targets are being deregistered from their target group.
- **trafficWeight** *(float) --*
The percentage of traffic served by this task set.
- **targetGroup** *(dict) --*
The target group associated with the task set. The target group is used by AWS CodeDeploy to manage traffic to a task set.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **taskSetLabel** *(string) --*
A label that identifies whether the ECS task set is an original target (``BLUE`` ) or a replacement target (``GREEN`` ).
:type deploymentId: string
:param deploymentId:
The unique ID of a deployment.
:type targetIds: list
:param targetIds:
The unique IDs of the deployment targets. The compute platform of the deployment determines the type of the targets and their formats.
* For deployments that use the EC2/On-premises compute platform, the target IDs are EC2 or on-premises instance IDs, and their target type is ``instanceTarget`` .
* For deployments that use the AWS Lambda compute platform, the target IDs are the names of Lambda functions, and their target type is ``lambdaTarget`` .
* For deployments that use the Amazon ECS compute platform, the target IDs are pairs of Amazon ECS clusters and services specified using the format ``<clustername>:<servicename>`` . Their target type is ``ecsTarget`` .
- *(string) --*
:rtype: dict
:returns:
"""
pass
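# Editor's sketch: the recommended replacement for the deprecated call above,
# usable with any compute platform. The deployment ID and the
# instanceTarget-style target ID are illustrative placeholders.
import boto3
codedeploy = boto3.client('codedeploy')
targets_resp = codedeploy.batch_get_deployment_targets(
    deploymentId='d-EXAMPLE123',  # hypothetical deployment ID
    targetIds=['i-0123456789abcdef0'],  # hypothetical EC2 target ID
)
for target in targets_resp['deploymentTargets']:
    print(target['deploymentTargetType'])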
def batch_get_deployments(self, deploymentIds: List) -> Dict:
"""
Gets information about one or more deployments.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/BatchGetDeployments>`_
**Request Syntax**
::
response = client.batch_get_deployments(
deploymentIds=[
'string',
]
)
**Response Syntax**
::
{
'deploymentsInfo': [
{
'applicationName': 'string',
'deploymentGroupName': 'string',
'deploymentConfigName': 'string',
'deploymentId': 'string',
'previousRevision': {
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
},
'revision': {
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
},
'status': 'Created'|'Queued'|'InProgress'|'Succeeded'|'Failed'|'Stopped'|'Ready',
'errorInformation': {
'code': 'DEPLOYMENT_GROUP_MISSING'|'APPLICATION_MISSING'|'REVISION_MISSING'|'IAM_ROLE_MISSING'|'IAM_ROLE_PERMISSIONS'|'NO_EC2_SUBSCRIPTION'|'OVER_MAX_INSTANCES'|'NO_INSTANCES'|'TIMEOUT'|'HEALTH_CONSTRAINTS_INVALID'|'HEALTH_CONSTRAINTS'|'INTERNAL_ERROR'|'THROTTLED'|'ALARM_ACTIVE'|'AGENT_ISSUE'|'AUTO_SCALING_IAM_ROLE_PERMISSIONS'|'AUTO_SCALING_CONFIGURATION'|'MANUAL_STOP'|'MISSING_BLUE_GREEN_DEPLOYMENT_CONFIGURATION'|'MISSING_ELB_INFORMATION'|'MISSING_GITHUB_TOKEN'|'ELASTIC_LOAD_BALANCING_INVALID'|'ELB_INVALID_INSTANCE'|'INVALID_LAMBDA_CONFIGURATION'|'INVALID_LAMBDA_FUNCTION'|'HOOK_EXECUTION_FAILURE'|'AUTOSCALING_VALIDATION_ERROR'|'INVALID_ECS_SERVICE'|'ECS_UPDATE_ERROR'|'INVALID_REVISION',
'message': 'string'
},
'createTime': datetime(2015, 1, 1),
'startTime': datetime(2015, 1, 1),
'completeTime': datetime(2015, 1, 1),
'deploymentOverview': {
'Pending': 123,
'InProgress': 123,
'Succeeded': 123,
'Failed': 123,
'Skipped': 123,
'Ready': 123
},
'description': 'string',
'creator': 'user'|'autoscaling'|'codeDeployRollback',
'ignoreApplicationStopFailures': True|False,
'autoRollbackConfiguration': {
'enabled': True|False,
'events': [
'DEPLOYMENT_FAILURE'|'DEPLOYMENT_STOP_ON_ALARM'|'DEPLOYMENT_STOP_ON_REQUEST',
]
},
'updateOutdatedInstancesOnly': True|False,
'rollbackInfo': {
'rollbackDeploymentId': 'string',
'rollbackTriggeringDeploymentId': 'string',
'rollbackMessage': 'string'
},
'deploymentStyle': {
'deploymentType': 'IN_PLACE'|'BLUE_GREEN',
'deploymentOption': 'WITH_TRAFFIC_CONTROL'|'WITHOUT_TRAFFIC_CONTROL'
},
'targetInstances': {
'tagFilters': [
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
'autoScalingGroups': [
'string',
],
'ec2TagSet': {
'ec2TagSetList': [
[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
]
}
},
'instanceTerminationWaitTimeStarted': True|False,
'blueGreenDeploymentConfiguration': {
'terminateBlueInstancesOnDeploymentSuccess': {
'action': 'TERMINATE'|'KEEP_ALIVE',
'terminationWaitTimeInMinutes': 123
},
'deploymentReadyOption': {
'actionOnTimeout': 'CONTINUE_DEPLOYMENT'|'STOP_DEPLOYMENT',
'waitTimeInMinutes': 123
},
'greenFleetProvisioningOption': {
'action': 'DISCOVER_EXISTING'|'COPY_AUTO_SCALING_GROUP'
}
},
'loadBalancerInfo': {
'elbInfoList': [
{
'name': 'string'
},
],
'targetGroupInfoList': [
{
'name': 'string'
},
],
'targetGroupPairInfoList': [
{
'targetGroups': [
{
'name': 'string'
},
],
'prodTrafficRoute': {
'listenerArns': [
'string',
]
},
'testTrafficRoute': {
'listenerArns': [
'string',
]
}
},
]
},
'additionalDeploymentStatusInfo': 'string',
'fileExistsBehavior': 'DISALLOW'|'OVERWRITE'|'RETAIN',
'deploymentStatusMessages': [
'string',
],
'computePlatform': 'Server'|'Lambda'|'ECS'
},
]
}
**Response Structure**
- *(dict) --*
Represents the output of a BatchGetDeployments operation.
- **deploymentsInfo** *(list) --*
Information about the deployments.
- *(dict) --*
Information about a deployment.
- **applicationName** *(string) --*
The application name.
- **deploymentGroupName** *(string) --*
The deployment group name.
- **deploymentConfigName** *(string) --*
The deployment configuration name.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **previousRevision** *(dict) --*
Information about the application revision that was deployed to the deployment group before the most recent successful deployment.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall``, during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **revision** *(dict) --*
Information about the location of stored application artifacts and the service from which to retrieve them.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
* AppSpecContent: An ``AppSpecContent`` object that contains the contents of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML stored as a RawString.
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall``, during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **status** *(string) --*
The current state of the deployment as a whole.
- **errorInformation** *(dict) --*
Information about any error associated with this deployment.
- **code** *(string) --*
For more information, see `Error Codes for AWS CodeDeploy <https://docs.aws.amazon.com/codedeploy/latest/userguide/error-codes.html>`__ in the `AWS CodeDeploy User Guide <https://docs.aws.amazon.com/codedeploy/latest/userguide>`__.
The error code:
* APPLICATION_MISSING: The application was missing. This error code is most likely raised if the application is deleted after the deployment is created, but before it is started.
* DEPLOYMENT_GROUP_MISSING: The deployment group was missing. This error code is most likely raised if the deployment group is deleted after the deployment is created, but before it is started.
* HEALTH_CONSTRAINTS: The deployment failed on too many instances to be successfully deployed within the instance health constraints specified.
* HEALTH_CONSTRAINTS_INVALID: The revision cannot be successfully deployed within the instance health constraints specified.
* IAM_ROLE_MISSING: The service role cannot be accessed.
* IAM_ROLE_PERMISSIONS: The service role does not have the correct permissions.
* INTERNAL_ERROR: There was an internal error.
* NO_EC2_SUBSCRIPTION: The calling account is not subscribed to Amazon EC2.
* NO_INSTANCES: No instances were specified, or no instances can be found.
* OVER_MAX_INSTANCES: The maximum number of instances was exceeded.
* THROTTLED: The operation was throttled because the calling account exceeded the throttling limits of one or more AWS services.
* TIMEOUT: The deployment has timed out.
* REVISION_MISSING: The revision ID was missing. This error code is most likely raised if the revision is deleted after the deployment is created, but before it is started.
- **message** *(string) --*
An accompanying error message.
- **createTime** *(datetime) --*
A timestamp that indicates when the deployment was created.
- **startTime** *(datetime) --*
A timestamp that indicates when the deployment was deployed to the deployment group.
In some cases, the reported value of the start time might be later than the complete time. This is due to differences in the clock settings of backend servers that participate in the deployment process.
- **completeTime** *(datetime) --*
A timestamp that indicates when the deployment was complete.
- **deploymentOverview** *(dict) --*
A summary of the deployment status of the instances in the deployment.
- **Pending** *(integer) --*
The number of instances in the deployment in a pending state.
- **InProgress** *(integer) --*
The number of instances in which the deployment is in progress.
- **Succeeded** *(integer) --*
The number of instances in the deployment to which revisions have been successfully deployed.
- **Failed** *(integer) --*
The number of instances in the deployment in a failed state.
- **Skipped** *(integer) --*
The number of instances in the deployment in a skipped state.
- **Ready** *(integer) --*
The number of instances in a replacement environment ready to receive traffic in a blue/green deployment.
- **description** *(string) --*
A comment about the deployment.
- **creator** *(string) --*
The means by which the deployment was created:
* user: A user created the deployment.
* autoscaling: Amazon EC2 Auto Scaling created the deployment.
* codeDeployRollback: A rollback process created the deployment.
- **ignoreApplicationStopFailures** *(boolean) --*
If true, then if an ApplicationStop, BeforeBlockTraffic, or AfterBlockTraffic deployment lifecycle event fails on an instance, the deployment continues to the next deployment lifecycle event. For example, if ApplicationStop fails, the deployment continues with DownloadBundle. If BeforeBlockTraffic fails, the deployment continues with BlockTraffic. If AfterBlockTraffic fails, the deployment continues with ApplicationStop.
If false or not specified, then if a lifecycle event fails during a deployment to an instance, that deployment fails. If deployment to that instance is part of an overall deployment and the number of healthy hosts is not less than the minimum number of healthy hosts, then a deployment to the next instance is attempted.
During a deployment, the AWS CodeDeploy agent runs the scripts specified for ApplicationStop, BeforeBlockTraffic, and AfterBlockTraffic in the AppSpec file from the previous successful deployment. (All other scripts are run from the AppSpec file in the current deployment.) If one of these scripts contains an error and does not run successfully, the deployment can fail.
If the cause of the failure is a script from the last successful deployment that will never run successfully, create a new deployment and use ``ignoreApplicationStopFailures`` to specify that the ApplicationStop, BeforeBlockTraffic, and AfterBlockTraffic failures should be ignored.
- **autoRollbackConfiguration** *(dict) --*
Information about the automatic rollback configuration associated with the deployment.
- **enabled** *(boolean) --*
Indicates whether a defined automatic rollback configuration is currently enabled.
- **events** *(list) --*
The event type or types that trigger a rollback.
- *(string) --*
- **updateOutdatedInstancesOnly** *(boolean) --*
Indicates whether only instances that are not running the latest application revision are to be deployed to.
- **rollbackInfo** *(dict) --*
Information about a deployment rollback.
- **rollbackDeploymentId** *(string) --*
The ID of the deployment rollback.
- **rollbackTriggeringDeploymentId** *(string) --*
The deployment ID of the deployment that was underway and triggered a rollback deployment because it failed or was stopped.
- **rollbackMessage** *(string) --*
Information that describes the status of a deployment rollback (for example, whether the deployment can't be rolled back, is in progress, failed, or succeeded).
- **deploymentStyle** *(dict) --*
Information about the type of deployment, either in-place or blue/green, you want to run and whether to route deployment traffic behind a load balancer.
- **deploymentType** *(string) --*
Indicates whether to run an in-place deployment or a blue/green deployment.
- **deploymentOption** *(string) --*
Indicates whether to route deployment traffic behind a load balancer.
- **targetInstances** *(dict) --*
Information about the instances that belong to the replacement environment in a blue/green deployment.
- **tagFilters** *(list) --*
The tag filter key, type, and value used to identify Amazon EC2 instances in a replacement environment for a blue/green deployment. Cannot be used in the same call as ec2TagSet.
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **autoScalingGroups** *(list) --*
The names of one or more Auto Scaling groups to identify a replacement environment for a blue/green deployment.
- *(string) --*
- **ec2TagSet** *(dict) --*
Information about the groups of EC2 instance tags that an instance must be identified by in order for it to be included in the replacement environment for a blue/green deployment. Cannot be used in the same call as tagFilters.
- **ec2TagSetList** *(list) --*
A list that contains other lists of EC2 instance tag groups. For an instance to be included in the deployment group, it must be identified by all of the tag groups in the list.
- *(list) --*
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **instanceTerminationWaitTimeStarted** *(boolean) --*
Indicates whether the wait period set for the termination of instances in the original environment has started. Status is 'false' if the KEEP_ALIVE option is specified. Otherwise, 'true' as soon as the termination wait period starts.
- **blueGreenDeploymentConfiguration** *(dict) --*
Information about blue/green deployment options for this deployment.
- **terminateBlueInstancesOnDeploymentSuccess** *(dict) --*
Information about whether to terminate instances in the original fleet during a blue/green deployment.
- **action** *(string) --*
The action to take on instances in the original environment after a successful blue/green deployment.
* TERMINATE: Instances are terminated after a specified wait time.
* KEEP_ALIVE: Instances are left running after they are deregistered from the load balancer and removed from the deployment group.
- **terminationWaitTimeInMinutes** *(integer) --*
The number of minutes to wait after a successful blue/green deployment before terminating instances from the original environment. The maximum setting is 2880 minutes (2 days).
- **deploymentReadyOption** *(dict) --*
Information about the action to take when newly provisioned instances are ready to receive traffic in a blue/green deployment.
- **actionOnTimeout** *(string) --*
Information about when to reroute traffic from an original environment to a replacement environment in a blue/green deployment.
* CONTINUE_DEPLOYMENT: Register new instances with the load balancer immediately after the new application revision is installed on the instances in the replacement environment.
* STOP_DEPLOYMENT: Do not register new instances with a load balancer unless traffic rerouting is started using ``ContinueDeployment``. If traffic rerouting is not started before the end of the specified wait period, the deployment status is changed to Stopped.
- **waitTimeInMinutes** *(integer) --*
The number of minutes to wait before the status of a blue/green deployment is changed to Stopped if rerouting is not started manually. Applies only to the STOP_DEPLOYMENT option for ``actionOnTimeout``.
- **greenFleetProvisioningOption** *(dict) --*
Information about how instances are provisioned for a replacement environment in a blue/green deployment.
- **action** *(string) --*
The method used to add instances to a replacement environment.
* DISCOVER_EXISTING: Use instances that already exist or will be created manually.
* COPY_AUTO_SCALING_GROUP: Use settings from a specified Auto Scaling group to define and create instances in a new Auto Scaling group.
- **loadBalancerInfo** *(dict) --*
Information about the load balancer used in the deployment.
- **elbInfoList** *(list) --*
An array that contains information about the load balancer to use for load balancing in a deployment. In Elastic Load Balancing, load balancers are used with Classic Load Balancers.
.. note::
Adding more than one load balancer to the array is not supported.
- *(dict) --*
Information about a load balancer in Elastic Load Balancing to use in a deployment. Instances are registered directly with a load balancer, and traffic is routed to the load balancer.
- **name** *(string) --*
For blue/green deployments, the name of the load balancer that is used to route traffic from original instances to replacement instances in a blue/green deployment. For in-place deployments, the name of the load balancer that instances are deregistered from so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **targetGroupInfoList** *(list) --*
An array that contains information about the target group to use for load balancing in a deployment. In Elastic Load Balancing, target groups are used with Application Load Balancers.
.. note::
Adding more than one target group to the array is not supported.
- *(dict) --*
Information about a target group in Elastic Load Balancing to use in a deployment. Instances are registered as targets in a target group, and traffic is routed to the target group.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **targetGroupPairInfoList** *(list) --*
The target group pair information. This is an array of ``TargetGroupPairInfo`` objects with a maximum size of one.
- *(dict) --*
Information about two target groups and how traffic is routed during an Amazon ECS deployment. An optional test traffic route can be specified.
- **targetGroups** *(list) --*
One pair of target groups. One is associated with the original task set. The second is associated with the task set that serves traffic after the deployment is complete.
- *(dict) --*
Information about a target group in Elastic Load Balancing to use in a deployment. Instances are registered as targets in a target group, and traffic is routed to the target group.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **prodTrafficRoute** *(dict) --*
The path used by a load balancer to route production traffic when an Amazon ECS deployment is complete.
- **listenerArns** *(list) --*
The ARN of one listener. The listener identifies the route between a target group and a load balancer. This is an array of strings with a maximum size of one.
- *(string) --*
- **testTrafficRoute** *(dict) --*
An optional path used by a load balancer to route test traffic after an Amazon ECS deployment. Validation can occur while test traffic is served during a deployment.
- **listenerArns** *(list) --*
The ARN of one listener. The listener identifies the route between a target group and a load balancer. This is an array of strings with a maximum size of one.
- *(string) --*
- **additionalDeploymentStatusInfo** *(string) --*
Provides information about the results of a deployment, such as whether instances in the original environment in a blue/green deployment were not terminated.
- **fileExistsBehavior** *(string) --*
Information about how AWS CodeDeploy handles files that already exist in a deployment target location but weren't part of the previous successful deployment.
* DISALLOW: The deployment fails. This is also the default behavior if no option is specified.
* OVERWRITE: The version of the file from the application revision currently being deployed replaces the version already on the instance.
* RETAIN: The version of the file already on the instance is kept and used as part of the new deployment.
- **deploymentStatusMessages** *(list) --*
Messages that contain information about the status of a deployment.
- *(string) --*
- **computePlatform** *(string) --*
The destination platform type for the deployment (``Lambda``, ``Server``, or ``ECS``).
:type deploymentIds: list
:param deploymentIds: **[REQUIRED]**
A list of deployment IDs.
- *(string) --*
:rtype: dict
:returns:
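**Example**
A minimal usage sketch; the deployment IDs below are hypothetical placeholders.
::
    import boto3
    client = boto3.client('codedeploy')
    response = client.batch_get_deployments(
        deploymentIds=['d-EXAMPLE111', 'd-EXAMPLE222']
    )
    for info in response['deploymentsInfo']:
        print(info['deploymentId'], info['status'])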
"""
pass
def batch_get_on_premises_instances(self, instanceNames: List) -> Dict:
"""
Gets information about one or more on-premises instances.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/BatchGetOnPremisesInstances>`_
**Request Syntax**
::
response = client.batch_get_on_premises_instances(
instanceNames=[
'string',
]
)
**Response Syntax**
::
{
'instanceInfos': [
{
'instanceName': 'string',
'iamSessionArn': 'string',
'iamUserArn': 'string',
'instanceArn': 'string',
'registerTime': datetime(2015, 1, 1),
'deregisterTime': datetime(2015, 1, 1),
'tags': [
{
'Key': 'string',
'Value': 'string'
},
]
},
]
}
**Response Structure**
- *(dict) --*
Represents the output of a BatchGetOnPremisesInstances operation.
- **instanceInfos** *(list) --*
Information about the on-premises instances.
- *(dict) --*
Information about an on-premises instance.
- **instanceName** *(string) --*
The name of the on-premises instance.
- **iamSessionArn** *(string) --*
The ARN of the IAM session associated with the on-premises instance.
- **iamUserArn** *(string) --*
The IAM user ARN associated with the on-premises instance.
- **instanceArn** *(string) --*
The ARN of the on-premises instance.
- **registerTime** *(datetime) --*
The time at which the on-premises instance was registered.
- **deregisterTime** *(datetime) --*
If the on-premises instance was deregistered, the time at which the on-premises instance was deregistered.
- **tags** *(list) --*
The tags currently associated with the on-premises instance.
- *(dict) --*
Information about a tag.
- **Key** *(string) --*
The tag's key.
- **Value** *(string) --*
The tag's value.
:type instanceNames: list
:param instanceNames: **[REQUIRED]**
The names of the on-premises instances about which to get information.
- *(string) --*
:rtype: dict
:returns:
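**Example**
A short sketch that collects each instance's tags from the response; the instance names are hypothetical, and ``client`` is a CodeDeploy client created with ``boto3.client('codedeploy')``.
::
    response = client.batch_get_on_premises_instances(
        instanceNames=['onprem-host-1', 'onprem-host-2']
    )
    for info in response['instanceInfos']:
        tags = {t['Key']: t['Value'] for t in info['tags']}
        print(info['instanceName'], tags)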
"""
pass
def can_paginate(self, operation_name: str = None):
"""
Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is ``create_foo``, and you\'d normally invoke the
operation as ``client.create_foo(**kwargs)``, if the
``create_foo`` operation can be paginated, you can use the
call ``client.get_paginator(\"create_foo\")``.
:return: ``True`` if the operation can be paginated,
``False`` otherwise.
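**Example**
A sketch of the usual pattern: check whether an operation supports pagination, then use a paginator instead of handling tokens manually. ``list_deployments`` is one CodeDeploy operation that can be paginated.
::
    if client.can_paginate('list_deployments'):
        paginator = client.get_paginator('list_deployments')
        for page in paginator.paginate():
            print(page['deployments'])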
"""
pass
def continue_deployment(self, deploymentId: str = None, deploymentWaitType: str = None):
"""
For a blue/green deployment, starts the process of rerouting traffic from instances in the original environment to instances in the replacement environment without waiting for a specified wait time to elapse. (Traffic rerouting, which is achieved by registering instances in the replacement environment with the load balancer, can start as soon as all instances have a status of Ready.)
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/ContinueDeployment>`_
**Request Syntax**
::
response = client.continue_deployment(
deploymentId='string',
deploymentWaitType='READY_WAIT'|'TERMINATION_WAIT'
)
:type deploymentId: string
:param deploymentId:
The unique ID of a blue/green deployment for which you want to start rerouting traffic to the replacement environment.
:type deploymentWaitType: string
:param deploymentWaitType:
The status of the deployment\'s waiting period. READY_WAIT indicates the deployment is ready to start shifting traffic. TERMINATION_WAIT indicates the traffic is shifted, but the original target is not terminated.
:returns: None
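**Example**
A minimal sketch that starts rerouting traffic for a blue/green deployment whose instances are Ready; the deployment ID is a placeholder.
::
    client.continue_deployment(
        deploymentId='d-EXAMPLE111',
        deploymentWaitType='READY_WAIT'
    )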
"""
pass
def create_application(self, applicationName: str, computePlatform: str = None) -> Dict:
"""
Creates an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/CreateApplication>`_
**Request Syntax**
::
response = client.create_application(
applicationName='string',
computePlatform='Server'|'Lambda'|'ECS'
)
**Response Syntax**
::
{
'applicationId': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a CreateApplication operation.
- **applicationId** *(string) --*
A unique application ID.
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of the application. This name must be unique within the applicable IAM user or AWS account.
:type computePlatform: string
:param computePlatform:
The destination platform type for the deployment (``Lambda``, ``Server``, or ``ECS``).
:rtype: dict
:returns:
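**Example**
A minimal sketch; the application name is a placeholder and must be unique within the account.
::
    response = client.create_application(
        applicationName='my-sample-app',
        computePlatform='Server'
    )
    print(response['applicationId'])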
"""
pass
def create_deployment(self, applicationName: str, deploymentGroupName: str = None, revision: Dict = None, deploymentConfigName: str = None, description: str = None, ignoreApplicationStopFailures: bool = None, targetInstances: Dict = None, autoRollbackConfiguration: Dict = None, updateOutdatedInstancesOnly: bool = None, fileExistsBehavior: str = None) -> Dict:
"""
Deploys an application revision through the specified deployment group.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/CreateDeployment>`_
**Request Syntax**
::
response = client.create_deployment(
applicationName='string',
deploymentGroupName='string',
revision={
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
},
deploymentConfigName='string',
description='string',
ignoreApplicationStopFailures=True|False,
targetInstances={
'tagFilters': [
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
'autoScalingGroups': [
'string',
],
'ec2TagSet': {
'ec2TagSetList': [
[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
]
}
},
autoRollbackConfiguration={
'enabled': True|False,
'events': [
'DEPLOYMENT_FAILURE'|'DEPLOYMENT_STOP_ON_ALARM'|'DEPLOYMENT_STOP_ON_REQUEST',
]
},
updateOutdatedInstancesOnly=True|False,
fileExistsBehavior='DISALLOW'|'OVERWRITE'|'RETAIN'
)
**Response Syntax**
::
{
'deploymentId': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a CreateDeployment operation.
- **deploymentId** *(string) --*
The unique ID of a deployment.
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of an AWS CodeDeploy application associated with the IAM user or AWS account.
:type deploymentGroupName: string
:param deploymentGroupName:
The name of the deployment group.
:type revision: dict
:param revision:
The type and location of the revision to deploy.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
* AppSpecContent: An ``AppSpecContent`` object that contains the contents of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML stored as a RawString.
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall``, during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
:type deploymentConfigName: string
:param deploymentConfigName:
The name of a deployment configuration associated with the IAM user or AWS account.
If not specified, the value configured in the deployment group is used as the default. If the deployment group does not have a deployment configuration associated with it, CodeDeployDefault.OneAtATime is used by default.
:type description: string
:param description:
A comment about the deployment.
:type ignoreApplicationStopFailures: boolean
:param ignoreApplicationStopFailures:
If true, then if an ApplicationStop, BeforeBlockTraffic, or AfterBlockTraffic deployment lifecycle event fails on an instance, the deployment continues to the next deployment lifecycle event. For example, if ApplicationStop fails, the deployment continues with DownloadBundle. If BeforeBlockTraffic fails, the deployment continues with BlockTraffic. If AfterBlockTraffic fails, the deployment continues with ApplicationStop.
If false or not specified, then if a lifecycle event fails during a deployment to an instance, that deployment fails. If deployment to that instance is part of an overall deployment and the number of healthy hosts is not less than the minimum number of healthy hosts, then a deployment to the next instance is attempted.
During a deployment, the AWS CodeDeploy agent runs the scripts specified for ApplicationStop, BeforeBlockTraffic, and AfterBlockTraffic in the AppSpec file from the previous successful deployment. (All other scripts are run from the AppSpec file in the current deployment.) If one of these scripts contains an error and does not run successfully, the deployment can fail.
If the cause of the failure is a script from the last successful deployment that will never run successfully, create a new deployment and use ``ignoreApplicationStopFailures`` to specify that the ApplicationStop, BeforeBlockTraffic, and AfterBlockTraffic failures should be ignored.
:type targetInstances: dict
:param targetInstances:
Information about the instances that belong to the replacement environment in a blue/green deployment.
- **tagFilters** *(list) --*
The tag filter key, type, and value used to identify Amazon EC2 instances in a replacement environment for a blue/green deployment. Cannot be used in the same call as ec2TagSet.
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **autoScalingGroups** *(list) --*
The names of one or more Auto Scaling groups to identify a replacement environment for a blue/green deployment.
- *(string) --*
- **ec2TagSet** *(dict) --*
Information about the groups of EC2 instance tags that an instance must be identified by in order for it to be included in the replacement environment for a blue/green deployment. Cannot be used in the same call as tagFilters.
- **ec2TagSetList** *(list) --*
A list that contains other lists of EC2 instance tag groups. For an instance to be included in the deployment group, it must be identified by all of the tag groups in the list.
- *(list) --*
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
:type autoRollbackConfiguration: dict
:param autoRollbackConfiguration:
Configuration information for an automatic rollback that is added when a deployment is created.
- **enabled** *(boolean) --*
Indicates whether a defined automatic rollback configuration is currently enabled.
- **events** *(list) --*
The event type or types that trigger a rollback.
- *(string) --*
:type updateOutdatedInstancesOnly: boolean
:param updateOutdatedInstancesOnly:
Indicates whether to deploy to all instances or only to instances that are not running the latest application revision.
:type fileExistsBehavior: string
:param fileExistsBehavior:
Information about how AWS CodeDeploy handles files that already exist in a deployment target location but weren\'t part of the previous successful deployment.
The fileExistsBehavior parameter takes any of the following values:
* DISALLOW: The deployment fails. This is also the default behavior if no option is specified.
* OVERWRITE: The version of the file from the application revision currently being deployed replaces the version already on the instance.
* RETAIN: The version of the file already on the instance is kept and used as part of the new deployment.
:rtype: dict
:returns:
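**Example**
A sketch of deploying a zip bundle stored in Amazon S3; the application, deployment group, bucket, and key names are hypothetical placeholders.
::
    response = client.create_deployment(
        applicationName='my-sample-app',
        deploymentGroupName='my-deployment-group',
        revision={
            'revisionType': 'S3',
            's3Location': {
                'bucket': 'my-artifact-bucket',
                'key': 'my-sample-app.zip',
                'bundleType': 'zip'
            }
        },
        description='Deploy latest build',
        fileExistsBehavior='OVERWRITE'
    )
    print(response['deploymentId'])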
"""
pass
def create_deployment_config(self, deploymentConfigName: str, minimumHealthyHosts: Dict = None, trafficRoutingConfig: Dict = None, computePlatform: str = None) -> Dict:
"""
Creates a deployment configuration.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/CreateDeploymentConfig>`_
**Request Syntax**
::
response = client.create_deployment_config(
deploymentConfigName='string',
minimumHealthyHosts={
'value': 123,
'type': 'HOST_COUNT'|'FLEET_PERCENT'
},
trafficRoutingConfig={
'type': 'TimeBasedCanary'|'TimeBasedLinear'|'AllAtOnce',
'timeBasedCanary': {
'canaryPercentage': 123,
'canaryInterval': 123
},
'timeBasedLinear': {
'linearPercentage': 123,
'linearInterval': 123
}
},
computePlatform='Server'|'Lambda'|'ECS'
)
**Response Syntax**
::
{
'deploymentConfigId': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a CreateDeploymentConfig operation.
- **deploymentConfigId** *(string) --*
A unique deployment configuration ID.
:type deploymentConfigName: string
:param deploymentConfigName: **[REQUIRED]**
The name of the deployment configuration to create.
:type minimumHealthyHosts: dict
:param minimumHealthyHosts:
The minimum number of healthy instances that should be available at any time during the deployment. There are two parameters expected in the input: type and value.
The type parameter takes either of the following values:
* HOST_COUNT: The value parameter represents the minimum number of healthy instances as an absolute value.
* FLEET_PERCENT: The value parameter represents the minimum number of healthy instances as a percentage of the total number of instances in the deployment. If you specify FLEET_PERCENT, at the start of the deployment, AWS CodeDeploy converts the percentage to the equivalent number of instances and rounds up fractional instances.
The value parameter takes an integer.
For example, to set a minimum of 95% healthy instances, specify a type of FLEET_PERCENT and a value of 95.
- **value** *(integer) --*
The minimum healthy instance value.
- **type** *(string) --*
The minimum healthy instance type:
* HOST_COUNT: The minimum number of healthy instances as an absolute value.
* FLEET_PERCENT: The minimum number of healthy instances as a percentage of the total number of instances in the deployment.
In an example of nine instances, if a HOST_COUNT of six is specified, deploy to up to three instances at a time. The deployment is successful if six or more instances are deployed to successfully. Otherwise, the deployment fails. If a FLEET_PERCENT of 40 is specified, deploy to up to five instances at a time. The deployment is successful if four or more instances are deployed to successfully. Otherwise, the deployment fails.
.. note::
In a call to the get deployment configuration operation, CodeDeployDefault.OneAtATime returns a minimum healthy instance type of MOST_CONCURRENCY and a value of 1. This means a deployment to only one instance at a time. (You cannot set the type to MOST_CONCURRENCY, only to HOST_COUNT or FLEET_PERCENT.) In addition, with CodeDeployDefault.OneAtATime, AWS CodeDeploy attempts to ensure that all instances but one are kept in a healthy state during the deployment. Although this allows one instance at a time to be taken offline for a new deployment, it also means that if the deployment to the last instance fails, the overall deployment is still successful.
For more information, see `AWS CodeDeploy Instance Health <https://docs.aws.amazon.com/codedeploy/latest/userguide/instances-health.html>`__ in the *AWS CodeDeploy User Guide* .
:type trafficRoutingConfig: dict
:param trafficRoutingConfig:
The configuration that specifies how the deployment traffic is routed.
- **type** *(string) --*
The type of traffic shifting (``TimeBasedCanary`` or ``TimeBasedLinear``) used by a deployment configuration.
- **timeBasedCanary** *(dict) --*
A configuration that shifts traffic from one version of a Lambda function to another in two increments. The original and target Lambda function versions are specified in the deployment\'s AppSpec file.
- **canaryPercentage** *(integer) --*
The percentage of traffic to shift in the first increment of a ``TimeBasedCanary`` deployment.
- **canaryInterval** *(integer) --*
The number of minutes between the first and second traffic shifts of a ``TimeBasedCanary`` deployment.
- **timeBasedLinear** *(dict) --*
A configuration that shifts traffic from one version of a Lambda function to another in equal increments, with an equal number of minutes between each increment. The original and target Lambda function versions are specified in the deployment\'s AppSpec file.
- **linearPercentage** *(integer) --*
The percentage of traffic that is shifted at the start of each increment of a ``TimeBasedLinear`` deployment.
- **linearInterval** *(integer) --*
The number of minutes between each incremental traffic shift of a ``TimeBasedLinear`` deployment.
:type computePlatform: string
:param computePlatform:
The destination platform type for the deployment (``Lambda``, ``Server``, or ``ECS``).
:rtype: dict
:returns:
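**Example**
A sketch that defines a custom server configuration requiring at least 75 percent of instances to remain healthy; the configuration name is a placeholder.
::
    response = client.create_deployment_config(
        deploymentConfigName='MyCustom.ThreeQuartersHealthy',
        minimumHealthyHosts={
            'type': 'FLEET_PERCENT',
            'value': 75
        },
        computePlatform='Server'
    )
    print(response['deploymentConfigId'])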
"""
pass
def create_deployment_group(self, applicationName: str, deploymentGroupName: str, serviceRoleArn: str, deploymentConfigName: str = None, ec2TagFilters: List = None, onPremisesInstanceTagFilters: List = None, autoScalingGroups: List = None, triggerConfigurations: List = None, alarmConfiguration: Dict = None, autoRollbackConfiguration: Dict = None, deploymentStyle: Dict = None, blueGreenDeploymentConfiguration: Dict = None, loadBalancerInfo: Dict = None, ec2TagSet: Dict = None, ecsServices: List = None, onPremisesTagSet: Dict = None) -> Dict:
"""
Creates a deployment group to which application revisions are deployed.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/CreateDeploymentGroup>`_
**Request Syntax**
::
response = client.create_deployment_group(
applicationName='string',
deploymentGroupName='string',
deploymentConfigName='string',
ec2TagFilters=[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
onPremisesInstanceTagFilters=[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
autoScalingGroups=[
'string',
],
serviceRoleArn='string',
triggerConfigurations=[
{
'triggerName': 'string',
'triggerTargetArn': 'string',
'triggerEvents': [
'DeploymentStart'|'DeploymentSuccess'|'DeploymentFailure'|'DeploymentStop'|'DeploymentRollback'|'DeploymentReady'|'InstanceStart'|'InstanceSuccess'|'InstanceFailure'|'InstanceReady',
]
},
],
alarmConfiguration={
'enabled': True|False,
'ignorePollAlarmFailure': True|False,
'alarms': [
{
'name': 'string'
},
]
},
autoRollbackConfiguration={
'enabled': True|False,
'events': [
'DEPLOYMENT_FAILURE'|'DEPLOYMENT_STOP_ON_ALARM'|'DEPLOYMENT_STOP_ON_REQUEST',
]
},
deploymentStyle={
'deploymentType': 'IN_PLACE'|'BLUE_GREEN',
'deploymentOption': 'WITH_TRAFFIC_CONTROL'|'WITHOUT_TRAFFIC_CONTROL'
},
blueGreenDeploymentConfiguration={
'terminateBlueInstancesOnDeploymentSuccess': {
'action': 'TERMINATE'|'KEEP_ALIVE',
'terminationWaitTimeInMinutes': 123
},
'deploymentReadyOption': {
'actionOnTimeout': 'CONTINUE_DEPLOYMENT'|'STOP_DEPLOYMENT',
'waitTimeInMinutes': 123
},
'greenFleetProvisioningOption': {
'action': 'DISCOVER_EXISTING'|'COPY_AUTO_SCALING_GROUP'
}
},
loadBalancerInfo={
'elbInfoList': [
{
'name': 'string'
},
],
'targetGroupInfoList': [
{
'name': 'string'
},
],
'targetGroupPairInfoList': [
{
'targetGroups': [
{
'name': 'string'
},
],
'prodTrafficRoute': {
'listenerArns': [
'string',
]
},
'testTrafficRoute': {
'listenerArns': [
'string',
]
}
},
]
},
ec2TagSet={
'ec2TagSetList': [
[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
]
},
ecsServices=[
{
'serviceName': 'string',
'clusterName': 'string'
},
],
onPremisesTagSet={
'onPremisesTagSetList': [
[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
]
}
)
**Response Syntax**
::
{
'deploymentGroupId': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a CreateDeploymentGroup operation.
- **deploymentGroupId** *(string) --*
A unique deployment group ID.
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of an AWS CodeDeploy application associated with the IAM user or AWS account.
:type deploymentGroupName: string
:param deploymentGroupName: **[REQUIRED]**
The name of a new deployment group for the specified application.
:type deploymentConfigName: string
:param deploymentConfigName:
If specified, the deployment configuration name can be either one of the predefined configurations provided with AWS CodeDeploy or a custom deployment configuration that you create by calling the create deployment configuration operation.
CodeDeployDefault.OneAtATime is the default deployment configuration. It is used if a configuration isn\'t specified for the deployment or deployment group.
For more information about the predefined deployment configurations in AWS CodeDeploy, see `Working with Deployment Configurations in AWS CodeDeploy <https://docs.aws.amazon.com/codedeploy/latest/userguide/deployment-configurations.html>`__ in the AWS CodeDeploy User Guide.
:type ec2TagFilters: list
:param ec2TagFilters:
The Amazon EC2 tags on which to filter. The deployment group includes EC2 instances with any of the specified tags. Cannot be used in the same call as ec2TagSet.
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
:type onPremisesInstanceTagFilters: list
:param onPremisesInstanceTagFilters:
The on-premises instance tags on which to filter. The deployment group includes on-premises instances with any of the specified tags. Cannot be used in the same call as OnPremisesTagSet.
- *(dict) --*
Information about an on-premises instance tag filter.
- **Key** *(string) --*
The on-premises instance tag filter key.
- **Value** *(string) --*
The on-premises instance tag filter value.
- **Type** *(string) --*
The on-premises instance tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
:type autoScalingGroups: list
:param autoScalingGroups:
A list of associated Amazon EC2 Auto Scaling groups.
- *(string) --*
:type serviceRoleArn: string
:param serviceRoleArn: **[REQUIRED]**
A service role ARN that allows AWS CodeDeploy to act on the user\'s behalf when interacting with AWS services.
:type triggerConfigurations: list
:param triggerConfigurations:
Information about triggers to create when the deployment group is created. For examples, see `Create a Trigger for an AWS CodeDeploy Event <https://docs.aws.amazon.com/codedeploy/latest/userguide/how-to-notify-sns.html>`__ in the AWS CodeDeploy User Guide.
- *(dict) --*
Information about notification triggers for the deployment group.
- **triggerName** *(string) --*
The name of the notification trigger.
- **triggerTargetArn** *(string) --*
The ARN of the Amazon Simple Notification Service topic through which notifications about deployment or instance events are sent.
- **triggerEvents** *(list) --*
The event type or types for which notifications are triggered.
- *(string) --*
:type alarmConfiguration: dict
:param alarmConfiguration:
Information to add about Amazon CloudWatch alarms when the deployment group is created.
- **enabled** *(boolean) --*
Indicates whether the alarm configuration is enabled.
- **ignorePollAlarmFailure** *(boolean) --*
Indicates whether a deployment should continue if information about the current state of alarms cannot be retrieved from Amazon CloudWatch. The default value is false.
* true: The deployment proceeds even if alarm status information can\'t be retrieved from Amazon CloudWatch.
* false: The deployment stops if alarm status information can\'t be retrieved from Amazon CloudWatch.
- **alarms** *(list) --*
A list of alarms configured for the deployment group. A maximum of 10 alarms can be added to a deployment group.
- *(dict) --*
Information about an alarm.
- **name** *(string) --*
The name of the alarm. Maximum length is 255 characters. Each alarm name can be used only once in a list of alarms.
:type autoRollbackConfiguration: dict
:param autoRollbackConfiguration:
Configuration information for an automatic rollback that is added when a deployment group is created.
- **enabled** *(boolean) --*
Indicates whether a defined automatic rollback configuration is currently enabled.
- **events** *(list) --*
The event type or types that trigger a rollback.
- *(string) --*
:type deploymentStyle: dict
:param deploymentStyle:
Information about the type of deployment, in-place or blue/green, that you want to run and whether to route deployment traffic behind a load balancer.
- **deploymentType** *(string) --*
Indicates whether to run an in-place deployment or a blue/green deployment.
- **deploymentOption** *(string) --*
Indicates whether to route deployment traffic behind a load balancer.
:type blueGreenDeploymentConfiguration: dict
:param blueGreenDeploymentConfiguration:
Information about blue/green deployment options for a deployment group.
- **terminateBlueInstancesOnDeploymentSuccess** *(dict) --*
Information about whether to terminate instances in the original fleet during a blue/green deployment.
- **action** *(string) --*
The action to take on instances in the original environment after a successful blue/green deployment.
* TERMINATE: Instances are terminated after a specified wait time.
* KEEP_ALIVE: Instances are left running after they are deregistered from the load balancer and removed from the deployment group.
- **terminationWaitTimeInMinutes** *(integer) --*
The number of minutes to wait after a successful blue/green deployment before terminating instances from the original environment. The maximum setting is 2880 minutes (2 days).
- **deploymentReadyOption** *(dict) --*
Information about the action to take when newly provisioned instances are ready to receive traffic in a blue/green deployment.
- **actionOnTimeout** *(string) --*
Information about when to reroute traffic from an original environment to a replacement environment in a blue/green deployment.
* CONTINUE_DEPLOYMENT: Register new instances with the load balancer immediately after the new application revision is installed on the instances in the replacement environment.
* STOP_DEPLOYMENT: Do not register new instances with a load balancer unless traffic rerouting is started using ``ContinueDeployment``. If traffic rerouting is not started before the end of the specified wait period, the deployment status is changed to Stopped.
- **waitTimeInMinutes** *(integer) --*
The number of minutes to wait before the status of a blue/green deployment is changed to Stopped if rerouting is not started manually. Applies only to the STOP_DEPLOYMENT option for ``actionOnTimeout``.
- **greenFleetProvisioningOption** *(dict) --*
Information about how instances are provisioned for a replacement environment in a blue/green deployment.
- **action** *(string) --*
The method used to add instances to a replacement environment.
* DISCOVER_EXISTING: Use instances that already exist or will be created manually.
* COPY_AUTO_SCALING_GROUP: Use settings from a specified Auto Scaling group to define and create instances in a new Auto Scaling group.
:type loadBalancerInfo: dict
:param loadBalancerInfo:
Information about the load balancer used in a deployment.
- **elbInfoList** *(list) --*
An array that contains information about the load balancer to use for load balancing in a deployment. In Elastic Load Balancing, load balancers are used with Classic Load Balancers.
.. note::
Adding more than one load balancer to the array is not supported.
- *(dict) --*
Information about a load balancer in Elastic Load Balancing to use in a deployment. Instances are registered directly with a load balancer, and traffic is routed to the load balancer.
- **name** *(string) --*
For blue/green deployments, the name of the load balancer that is used to route traffic from original instances to replacement instances in a blue/green deployment. For in-place deployments, the name of the load balancer that instances are deregistered from so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **targetGroupInfoList** *(list) --*
An array that contains information about the target group to use for load balancing in a deployment. In Elastic Load Balancing, target groups are used with Application Load Balancers.
.. note::
Adding more than one target group to the array is not supported.
- *(dict) --*
Information about a target group in Elastic Load Balancing to use in a deployment. Instances are registered as targets in a target group, and traffic is routed to the target group.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **targetGroupPairInfoList** *(list) --*
The target group pair information. This is an array of ``TargetGroupPairInfo`` objects with a maximum size of one.
- *(dict) --*
Information about two target groups and how traffic is routed during an Amazon ECS deployment. An optional test traffic route can be specified.
- **targetGroups** *(list) --*
One pair of target groups. One is associated with the original task set. The second is associated with the task set that serves traffic after the deployment is complete.
- *(dict) --*
Information about a target group in Elastic Load Balancing to use in a deployment. Instances are registered as targets in a target group, and traffic is routed to the target group.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **prodTrafficRoute** *(dict) --*
The path used by a load balancer to route production traffic when an Amazon ECS deployment is complete.
- **listenerArns** *(list) --*
The ARN of one listener. The listener identifies the route between a target group and a load balancer. This is an array of strings with a maximum size of one.
- *(string) --*
- **testTrafficRoute** *(dict) --*
An optional path used by a load balancer to route test traffic after an Amazon ECS deployment. Validation can occur while test traffic is served during a deployment.
- **listenerArns** *(list) --*
The ARN of one listener. The listener identifies the route between a target group and a load balancer. This is an array of strings with a maximum size of one.
- *(string) --*
:type ec2TagSet: dict
:param ec2TagSet:
Information about groups of tags applied to EC2 instances. The deployment group includes only EC2 instances identified by all the tag groups. Cannot be used in the same call as ec2TagFilters.
- **ec2TagSetList** *(list) --*
A list that contains other lists of EC2 instance tag groups. For an instance to be included in the deployment group, it must be identified by all of the tag groups in the list.
- *(list) --*
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
:type ecsServices: list
:param ecsServices:
The target Amazon ECS services in the deployment group. This applies only to deployment groups that use the Amazon ECS compute platform. A target Amazon ECS service is specified as an Amazon ECS cluster and service name pair using the format ``<clustername>:<servicename>`` .
- *(dict) --*
Contains the service and cluster names used to identify an Amazon ECS deployment\'s target.
- **serviceName** *(string) --*
The name of the target Amazon ECS service.
- **clusterName** *(string) --*
The name of the cluster that the Amazon ECS service is associated with.
:type onPremisesTagSet: dict
:param onPremisesTagSet:
Information about groups of tags applied to on-premises instances. The deployment group includes only on-premises instances identified by all of the tag groups. Cannot be used in the same call as onPremisesInstanceTagFilters.
- **onPremisesTagSetList** *(list) --*
A list that contains other lists of on-premises instance tag groups. For an instance to be included in the deployment group, it must be identified by all of the tag groups in the list.
- *(list) --*
- *(dict) --*
Information about an on-premises instance tag filter.
- **Key** *(string) --*
The on-premises instance tag filter key.
- **Value** *(string) --*
The on-premises instance tag filter value.
- **Type** *(string) --*
The on-premises instance tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
:rtype: dict
:returns:
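**Example**
A sketch of a simple in-place deployment group that selects EC2 instances by tag; the names and the service role ARN are hypothetical placeholders.
::
    response = client.create_deployment_group(
        applicationName='my-sample-app',
        deploymentGroupName='my-deployment-group',
        serviceRoleArn='arn:aws:iam::123456789012:role/CodeDeployServiceRole',
        deploymentConfigName='CodeDeployDefault.OneAtATime',
        ec2TagFilters=[
            {
                'Key': 'Environment',
                'Value': 'staging',
                'Type': 'KEY_AND_VALUE'
            },
        ]
    )
    print(response['deploymentGroupId'])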
"""
pass
def delete_application(self, applicationName: str):
"""
Deletes an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/DeleteApplication>`_
**Request Syntax**
::
response = client.delete_application(
applicationName='string'
)
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of an AWS CodeDeploy application associated with the IAM user or AWS account.
:returns: None
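**Example**
A one-line sketch; the application name is a placeholder.
::
    client.delete_application(applicationName='my-sample-app')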
"""
pass
def delete_deployment_config(self, deploymentConfigName: str):
"""
Deletes a deployment configuration.
.. note::
A deployment configuration cannot be deleted if it is currently in use. Predefined configurations cannot be deleted.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/DeleteDeploymentConfig>`_
**Request Syntax**
::
response = client.delete_deployment_config(
deploymentConfigName='string'
)
:type deploymentConfigName: string
:param deploymentConfigName: **[REQUIRED]**
The name of a deployment configuration associated with the IAM user or AWS account.
:returns: None
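**Example**
A one-line sketch; only custom configurations, such as this placeholder name, can be deleted.
::
    client.delete_deployment_config(deploymentConfigName='MyCustom.ThreeQuartersHealthy')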
"""
pass
def delete_deployment_group(self, applicationName: str, deploymentGroupName: str) -> Dict:
"""
Deletes a deployment group.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/DeleteDeploymentGroup>`_
**Request Syntax**
::
response = client.delete_deployment_group(
applicationName='string',
deploymentGroupName='string'
)
**Response Syntax**
::
{
'hooksNotCleanedUp': [
{
'name': 'string',
'hook': 'string'
},
]
}
**Response Structure**
- *(dict) --*
Represents the output of a DeleteDeploymentGroup operation.
- **hooksNotCleanedUp** *(list) --*
If the output contains no data, and the corresponding deployment group contained at least one Auto Scaling group, AWS CodeDeploy successfully removed all corresponding Auto Scaling lifecycle event hooks from the Amazon EC2 instances in the Auto Scaling group. If the output contains data, AWS CodeDeploy could not remove some Auto Scaling lifecycle event hooks from the Amazon EC2 instances in the Auto Scaling group.
- *(dict) --*
Information about an Auto Scaling group.
- **name** *(string) --*
The Auto Scaling group name.
- **hook** *(string) --*
An Auto Scaling lifecycle event hook name.
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of an AWS CodeDeploy application associated with the IAM user or AWS account.
:type deploymentGroupName: string
:param deploymentGroupName: **[REQUIRED]**
The name of a deployment group for the specified application.
:rtype: dict
:returns:
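**Example**
An illustrative sketch with placeholder names; the ``hooksNotCleanedUp`` list in the response identifies any Auto Scaling lifecycle event hooks that could not be removed::
    import boto3

    client = boto3.client('codedeploy')
    response = client.delete_deployment_group(
        applicationName='MyDemoApp',
        deploymentGroupName='MyDemoFleet'
    )
    # Report any lifecycle hooks CodeDeploy could not clean up
    for hook in response.get('hooksNotCleanedUp', []):
        print(hook['name'], hook['hook'])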
"""
pass
def delete_git_hub_account_token(self, tokenName: str = None) -> Dict:
"""
Deletes a GitHub account connection.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/DeleteGitHubAccountToken>`_
**Request Syntax**
::
response = client.delete_git_hub_account_token(
tokenName='string'
)
**Response Syntax**
::
{
'tokenName': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a DeleteGitHubAccountToken operation.
- **tokenName** *(string) --*
The name of the GitHub account connection that was deleted.
:type tokenName: string
:param tokenName:
The name of the GitHub account connection to delete.
:rtype: dict
:returns:
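**Example**
A minimal sketch; ``my-github-connection`` is a placeholder connection name::
    import boto3

    client = boto3.client('codedeploy')
    response = client.delete_git_hub_account_token(tokenName='my-github-connection')
    # Echo the name of the connection that was deleted
    print(response['tokenName'])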
"""
pass
def deregister_on_premises_instance(self, instanceName: str):
"""
Deregisters an on-premises instance.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/DeregisterOnPremisesInstance>`_
**Request Syntax**
::
response = client.deregister_on_premises_instance(
instanceName='string'
)
:type instanceName: string
:param instanceName: **[REQUIRED]**
The name of the on-premises instance to deregister.
:returns: None
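**Example**
A minimal sketch; the instance name is a placeholder for a previously registered on-premises instance::
    import boto3

    client = boto3.client('codedeploy')
    client.deregister_on_premises_instance(instanceName='AssetTag12010298EX')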
"""
pass
def generate_presigned_url(self, ClientMethod: str = None, Params: Dict = None, ExpiresIn: int = None, HttpMethod: str = None):
"""
Generate a presigned url given a client, its method, and arguments
:type ClientMethod: string
:param ClientMethod: The client method to presign for
:type Params: dict
:param Params: The parameters normally passed to
``ClientMethod``.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned url is valid
for. By default it expires in an hour (3600 seconds).
:type HttpMethod: string
:param HttpMethod: The http method to use on the generated url. By
default, the http method is whatever is used in the method's model.
:returns: The presigned url
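**Example**
This helper is generic across boto3 clients; the sketch below simply illustrates the call shape with a placeholder application name and a 15-minute expiry::
    import boto3

    client = boto3.client('codedeploy')
    url = client.generate_presigned_url(
        ClientMethod='get_application',
        Params={'applicationName': 'MyDemoApp'},
        ExpiresIn=900
    )
    print(url)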
"""
pass
def get_application(self, applicationName: str) -> Dict:
"""
Gets information about an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/GetApplication>`_
**Request Syntax**
::
response = client.get_application(
applicationName='string'
)
**Response Syntax**
::
{
'application': {
'applicationId': 'string',
'applicationName': 'string',
'createTime': datetime(2015, 1, 1),
'linkedToGitHub': True|False,
'gitHubAccountName': 'string',
'computePlatform': 'Server'|'Lambda'|'ECS'
}
}
**Response Structure**
- *(dict) --*
Represents the output of a GetApplication operation.
- **application** *(dict) --*
Information about the application.
- **applicationId** *(string) --*
The application ID.
- **applicationName** *(string) --*
The application name.
- **createTime** *(datetime) --*
The time at which the application was created.
- **linkedToGitHub** *(boolean) --*
True if the user has authenticated with GitHub for the specified application. Otherwise, false.
- **gitHubAccountName** *(string) --*
The name for a connection to a GitHub account.
- **computePlatform** *(string) --*
The destination platform type for deployment of the application (``Lambda`` , ``Server`` , or ``ECS`` ).
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of an AWS CodeDeploy application associated with the IAM user or AWS account.
:rtype: dict
:returns:
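**Example**
A minimal sketch with a placeholder application name::
    import boto3

    client = boto3.client('codedeploy')
    response = client.get_application(applicationName='MyDemoApp')
    app = response['application']
    print(app['applicationName'], app['computePlatform'])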
"""
pass
def get_application_revision(self, applicationName: str, revision: Dict) -> Dict:
"""
Gets information about an application revision.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/GetApplicationRevision>`_
**Request Syntax**
::
response = client.get_application_revision(
applicationName='string',
revision={
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
}
)
**Response Syntax**
::
{
'applicationName': 'string',
'revision': {
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
},
'revisionInfo': {
'description': 'string',
'deploymentGroups': [
'string',
],
'firstUsedTime': datetime(2015, 1, 1),
'lastUsedTime': datetime(2015, 1, 1),
'registerTime': datetime(2015, 1, 1)
}
}
**Response Structure**
- *(dict) --*
Represents the output of a GetApplicationRevision operation.
- **applicationName** *(string) --*
The name of the application that corresponds to the revision.
- **revision** *(dict) --*
Additional information about the revision, including type and location.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
* AppSpecContent: An ``AppSpecContent`` object that contains the contents of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall`` , during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **revisionInfo** *(dict) --*
General information about the revision.
- **description** *(string) --*
A comment about the revision.
- **deploymentGroups** *(list) --*
The deployment groups for which this is the current target revision.
- *(string) --*
- **firstUsedTime** *(datetime) --*
When the revision was first used by AWS CodeDeploy.
- **lastUsedTime** *(datetime) --*
When the revision was last used by AWS CodeDeploy.
- **registerTime** *(datetime) --*
When the revision was registered with AWS CodeDeploy.
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of the application that corresponds to the revision.
:type revision: dict
:param revision: **[REQUIRED]**
Information about the application revision to get, including type and location.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
* AppSpecContent: An ``AppSpecContent`` object that contains the contents of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall`` , during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
:rtype: dict
:returns:
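**Example**
An illustrative sketch for an S3-hosted revision; the bucket, key, and application names are placeholders::
    import boto3

    client = boto3.client('codedeploy')
    response = client.get_application_revision(
        applicationName='MyDemoApp',
        revision={
            'revisionType': 'S3',
            's3Location': {
                'bucket': 'my-codedeploy-bucket',
                'key': 'MyDemoApp/app.zip',
                'bundleType': 'zip'
            }
        }
    )
    # General metadata about the revision, if it has been registered or used
    print(response['revisionInfo'])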
"""
pass
def get_deployment(self, deploymentId: str) -> Dict:
"""
Gets information about a deployment.
.. note::
The ``content`` property of the ``appSpecContent`` object in the returned revision is always null. Use ``GetApplicationRevision`` and the ``sha256`` property of the returned ``appSpecContent`` object to get the content of the deployment's AppSpec file.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/GetDeployment>`_
**Request Syntax**
::
response = client.get_deployment(
deploymentId='string'
)
**Response Syntax**
::
{
'deploymentInfo': {
'applicationName': 'string',
'deploymentGroupName': 'string',
'deploymentConfigName': 'string',
'deploymentId': 'string',
'previousRevision': {
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
},
'revision': {
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
},
'status': 'Created'|'Queued'|'InProgress'|'Succeeded'|'Failed'|'Stopped'|'Ready',
'errorInformation': {
'code': 'DEPLOYMENT_GROUP_MISSING'|'APPLICATION_MISSING'|'REVISION_MISSING'|'IAM_ROLE_MISSING'|'IAM_ROLE_PERMISSIONS'|'NO_EC2_SUBSCRIPTION'|'OVER_MAX_INSTANCES'|'NO_INSTANCES'|'TIMEOUT'|'HEALTH_CONSTRAINTS_INVALID'|'HEALTH_CONSTRAINTS'|'INTERNAL_ERROR'|'THROTTLED'|'ALARM_ACTIVE'|'AGENT_ISSUE'|'AUTO_SCALING_IAM_ROLE_PERMISSIONS'|'AUTO_SCALING_CONFIGURATION'|'MANUAL_STOP'|'MISSING_BLUE_GREEN_DEPLOYMENT_CONFIGURATION'|'MISSING_ELB_INFORMATION'|'MISSING_GITHUB_TOKEN'|'ELASTIC_LOAD_BALANCING_INVALID'|'ELB_INVALID_INSTANCE'|'INVALID_LAMBDA_CONFIGURATION'|'INVALID_LAMBDA_FUNCTION'|'HOOK_EXECUTION_FAILURE'|'AUTOSCALING_VALIDATION_ERROR'|'INVALID_ECS_SERVICE'|'ECS_UPDATE_ERROR'|'INVALID_REVISION',
'message': 'string'
},
'createTime': datetime(2015, 1, 1),
'startTime': datetime(2015, 1, 1),
'completeTime': datetime(2015, 1, 1),
'deploymentOverview': {
'Pending': 123,
'InProgress': 123,
'Succeeded': 123,
'Failed': 123,
'Skipped': 123,
'Ready': 123
},
'description': 'string',
'creator': 'user'|'autoscaling'|'codeDeployRollback',
'ignoreApplicationStopFailures': True|False,
'autoRollbackConfiguration': {
'enabled': True|False,
'events': [
'DEPLOYMENT_FAILURE'|'DEPLOYMENT_STOP_ON_ALARM'|'DEPLOYMENT_STOP_ON_REQUEST',
]
},
'updateOutdatedInstancesOnly': True|False,
'rollbackInfo': {
'rollbackDeploymentId': 'string',
'rollbackTriggeringDeploymentId': 'string',
'rollbackMessage': 'string'
},
'deploymentStyle': {
'deploymentType': 'IN_PLACE'|'BLUE_GREEN',
'deploymentOption': 'WITH_TRAFFIC_CONTROL'|'WITHOUT_TRAFFIC_CONTROL'
},
'targetInstances': {
'tagFilters': [
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
'autoScalingGroups': [
'string',
],
'ec2TagSet': {
'ec2TagSetList': [
[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
]
}
},
'instanceTerminationWaitTimeStarted': True|False,
'blueGreenDeploymentConfiguration': {
'terminateBlueInstancesOnDeploymentSuccess': {
'action': 'TERMINATE'|'KEEP_ALIVE',
'terminationWaitTimeInMinutes': 123
},
'deploymentReadyOption': {
'actionOnTimeout': 'CONTINUE_DEPLOYMENT'|'STOP_DEPLOYMENT',
'waitTimeInMinutes': 123
},
'greenFleetProvisioningOption': {
'action': 'DISCOVER_EXISTING'|'COPY_AUTO_SCALING_GROUP'
}
},
'loadBalancerInfo': {
'elbInfoList': [
{
'name': 'string'
},
],
'targetGroupInfoList': [
{
'name': 'string'
},
],
'targetGroupPairInfoList': [
{
'targetGroups': [
{
'name': 'string'
},
],
'prodTrafficRoute': {
'listenerArns': [
'string',
]
},
'testTrafficRoute': {
'listenerArns': [
'string',
]
}
},
]
},
'additionalDeploymentStatusInfo': 'string',
'fileExistsBehavior': 'DISALLOW'|'OVERWRITE'|'RETAIN',
'deploymentStatusMessages': [
'string',
],
'computePlatform': 'Server'|'Lambda'|'ECS'
}
}
**Response Structure**
- *(dict) --*
Represents the output of a GetDeployment operation.
- **deploymentInfo** *(dict) --*
Information about the deployment.
- **applicationName** *(string) --*
The application name.
- **deploymentGroupName** *(string) --*
The deployment group name.
- **deploymentConfigName** *(string) --*
The deployment configuration name.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **previousRevision** *(dict) --*
Information about the application revision that was deployed to the deployment group before the most recent successful deployment.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
* AppSpecContent: An ``AppSpecContent`` object that contains the contents of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall`` , during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **revision** *(dict) --*
Information about the location of stored application artifacts and the service from which to retrieve them.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
* AppSpecContent: An ``AppSpecContent`` object that contains the contents of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall`` , during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **status** *(string) --*
The current state of the deployment as a whole.
- **errorInformation** *(dict) --*
Information about any error associated with this deployment.
- **code** *(string) --*
For more information, see `Error Codes for AWS CodeDeploy <https://docs.aws.amazon.com/codedeploy/latest/userguide/error-codes.html>`__ in the `AWS CodeDeploy User Guide <https://docs.aws.amazon.com/codedeploy/latest/userguide>`__ .
The error code:
* APPLICATION_MISSING: The application was missing. This error code is most likely raised if the application is deleted after the deployment is created, but before it is started.
* DEPLOYMENT_GROUP_MISSING: The deployment group was missing. This error code is most likely raised if the deployment group is deleted after the deployment is created, but before it is started.
* HEALTH_CONSTRAINTS: The deployment failed on too many instances to be successfully deployed within the instance health constraints specified.
* HEALTH_CONSTRAINTS_INVALID: The revision cannot be successfully deployed within the instance health constraints specified.
* IAM_ROLE_MISSING: The service role cannot be accessed.
* IAM_ROLE_PERMISSIONS: The service role does not have the correct permissions.
* INTERNAL_ERROR: There was an internal error.
* NO_EC2_SUBSCRIPTION: The calling account is not subscribed to Amazon EC2.
* NO_INSTANCES: No instances were specified, or no instances can be found.
* OVER_MAX_INSTANCES: The maximum number of instances was exceeded.
* THROTTLED: The operation was throttled because the calling account exceeded the throttling limits of one or more AWS services.
* TIMEOUT: The deployment has timed out.
* REVISION_MISSING: The revision ID was missing. This error code is most likely raised if the revision is deleted after the deployment is created, but before it is started.
- **message** *(string) --*
An accompanying error message.
- **createTime** *(datetime) --*
A timestamp that indicates when the deployment was created.
- **startTime** *(datetime) --*
A timestamp that indicates when the deployment was deployed to the deployment group.
In some cases, the reported value of the start time might be later than the complete time. This is due to differences in the clock settings of backend servers that participate in the deployment process.
- **completeTime** *(datetime) --*
A timestamp that indicates when the deployment was complete.
- **deploymentOverview** *(dict) --*
A summary of the deployment status of the instances in the deployment.
- **Pending** *(integer) --*
The number of instances in the deployment in a pending state.
- **InProgress** *(integer) --*
The number of instances in which the deployment is in progress.
- **Succeeded** *(integer) --*
The number of instances in the deployment to which revisions have been successfully deployed.
- **Failed** *(integer) --*
The number of instances in the deployment in a failed state.
- **Skipped** *(integer) --*
The number of instances in the deployment in a skipped state.
- **Ready** *(integer) --*
The number of instances in a replacement environment ready to receive traffic in a blue/green deployment.
- **description** *(string) --*
A comment about the deployment.
- **creator** *(string) --*
The means by which the deployment was created:
* user: A user created the deployment.
* autoscaling: Amazon EC2 Auto Scaling created the deployment.
* codeDeployRollback: A rollback process created the deployment.
- **ignoreApplicationStopFailures** *(boolean) --*
If true, then if an ApplicationStop, BeforeBlockTraffic, or AfterBlockTraffic deployment lifecycle event to an instance fails, then the deployment continues to the next deployment lifecycle event. For example, if ApplicationStop fails, the deployment continues with DownloadBundle. If BeforeBlockTraffic fails, the deployment continues with BlockTraffic. If AfterBlockTraffic fails, the deployment continues with ApplicationStop.
If false or not specified, then if a lifecycle event fails during a deployment to an instance, that deployment fails. If deployment to that instance is part of an overall deployment and the number of healthy hosts is not less than the minimum number of healthy hosts, then a deployment to the next instance is attempted.
During a deployment, the AWS CodeDeploy agent runs the scripts specified for ApplicationStop, BeforeBlockTraffic, and AfterBlockTraffic in the AppSpec file from the previous successful deployment. (All other scripts are run from the AppSpec file in the current deployment.) If one of these scripts contains an error and does not run successfully, the deployment can fail.
If the cause of the failure is a script from the last successful deployment that will never run successfully, create a new deployment and use ``ignoreApplicationStopFailures`` to specify that the ApplicationStop, BeforeBlockTraffic, and AfterBlockTraffic failures should be ignored.
- **autoRollbackConfiguration** *(dict) --*
Information about the automatic rollback configuration associated with the deployment.
- **enabled** *(boolean) --*
Indicates whether a defined automatic rollback configuration is currently enabled.
- **events** *(list) --*
The event type or types that trigger a rollback.
- *(string) --*
- **updateOutdatedInstancesOnly** *(boolean) --*
Indicates whether only instances that are not running the latest application revision are to be deployed to.
- **rollbackInfo** *(dict) --*
Information about a deployment rollback.
- **rollbackDeploymentId** *(string) --*
The ID of the deployment rollback.
- **rollbackTriggeringDeploymentId** *(string) --*
The deployment ID of the deployment that was underway and triggered a rollback deployment because it failed or was stopped.
- **rollbackMessage** *(string) --*
Information that describes the status of a deployment rollback (for example, whether the deployment can't be rolled back, is in progress, failed, or succeeded).
- **deploymentStyle** *(dict) --*
Information about the type of deployment, either in-place or blue/green, you want to run and whether to route deployment traffic behind a load balancer.
- **deploymentType** *(string) --*
Indicates whether to run an in-place deployment or a blue/green deployment.
- **deploymentOption** *(string) --*
Indicates whether to route deployment traffic behind a load balancer.
- **targetInstances** *(dict) --*
Information about the instances that belong to the replacement environment in a blue/green deployment.
- **tagFilters** *(list) --*
The tag filter key, type, and value used to identify Amazon EC2 instances in a replacement environment for a blue/green deployment. Cannot be used in the same call as ec2TagSet.
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **autoScalingGroups** *(list) --*
The names of one or more Auto Scaling groups to identify a replacement environment for a blue/green deployment.
- *(string) --*
- **ec2TagSet** *(dict) --*
Information about the groups of EC2 instance tags that an instance must be identified by in order for it to be included in the replacement environment for a blue/green deployment. Cannot be used in the same call as tagFilters.
- **ec2TagSetList** *(list) --*
A list that contains other lists of EC2 instance tag groups. For an instance to be included in the deployment group, it must be identified by all of the tag groups in the list.
- *(list) --*
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **instanceTerminationWaitTimeStarted** *(boolean) --*
Indicates whether the wait period set for the termination of instances in the original environment has started. Status is 'false' if the KEEP_ALIVE option is specified. Otherwise, 'true' as soon as the termination wait period starts.
- **blueGreenDeploymentConfiguration** *(dict) --*
Information about blue/green deployment options for this deployment.
- **terminateBlueInstancesOnDeploymentSuccess** *(dict) --*
Information about whether to terminate instances in the original fleet during a blue/green deployment.
- **action** *(string) --*
The action to take on instances in the original environment after a successful blue/green deployment.
* TERMINATE: Instances are terminated after a specified wait time.
* KEEP_ALIVE: Instances are left running after they are deregistered from the load balancer and removed from the deployment group.
- **terminationWaitTimeInMinutes** *(integer) --*
The number of minutes to wait after a successful blue/green deployment before terminating instances from the original environment. The maximum setting is 2880 minutes (2 days).
- **deploymentReadyOption** *(dict) --*
Information about the action to take when newly provisioned instances are ready to receive traffic in a blue/green deployment.
- **actionOnTimeout** *(string) --*
Information about when to reroute traffic from an original environment to a replacement environment in a blue/green deployment.
* CONTINUE_DEPLOYMENT: Register new instances with the load balancer immediately after the new application revision is installed on the instances in the replacement environment.
* STOP_DEPLOYMENT: Do not register new instances with a load balancer unless traffic rerouting is started using ContinueDeployment . If traffic rerouting is not started before the end of the specified wait period, the deployment status is changed to Stopped.
- **waitTimeInMinutes** *(integer) --*
The number of minutes to wait before the status of a blue/green deployment is changed to Stopped if rerouting is not started manually. Applies only to the STOP_DEPLOYMENT option for actionOnTimeout.
- **greenFleetProvisioningOption** *(dict) --*
Information about how instances are provisioned for a replacement environment in a blue/green deployment.
- **action** *(string) --*
The method used to add instances to a replacement environment.
* DISCOVER_EXISTING: Use instances that already exist or will be created manually.
* COPY_AUTO_SCALING_GROUP: Use settings from a specified Auto Scaling group to define and create instances in a new Auto Scaling group.
- **loadBalancerInfo** *(dict) --*
Information about the load balancer used in the deployment.
- **elbInfoList** *(list) --*
An array that contains information about the load balancer to use for load balancing in a deployment. In Elastic Load Balancing, load balancers are used with Classic Load Balancers.
.. note::
Adding more than one load balancer to the array is not supported.
- *(dict) --*
Information about a load balancer in Elastic Load Balancing to use in a deployment. Instances are registered directly with a load balancer, and traffic is routed to the load balancer.
- **name** *(string) --*
For blue/green deployments, the name of the load balancer that is used to route traffic from original instances to replacement instances in a blue/green deployment. For in-place deployments, the name of the load balancer that instances are deregistered from so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **targetGroupInfoList** *(list) --*
An array that contains information about the target group to use for load balancing in a deployment. In Elastic Load Balancing, target groups are used with Application Load Balancers.
.. note::
Adding more than one target group to the array is not supported.
- *(dict) --*
Information about a target group in Elastic Load Balancing to use in a deployment. Instances are registered as targets in a target group, and traffic is routed to the target group.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **targetGroupPairInfoList** *(list) --*
The target group pair information. This is an array of ``TargetGroupPairInfo`` objects with a maximum size of one.
- *(dict) --*
Information about two target groups and how traffic is routed during an Amazon ECS deployment. An optional test traffic route can be specified.
- **targetGroups** *(list) --*
One pair of target groups. One is associated with the original task set. The second is associated with the task set that serves traffic after the deployment is complete.
- *(dict) --*
Information about a target group in Elastic Load Balancing to use in a deployment. Instances are registered as targets in a target group, and traffic is routed to the target group.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **prodTrafficRoute** *(dict) --*
The path used by a load balancer to route production traffic when an Amazon ECS deployment is complete.
- **listenerArns** *(list) --*
The ARN of one listener. The listener identifies the route between a target group and a load balancer. This is an array of strings with a maximum size of one.
- *(string) --*
- **testTrafficRoute** *(dict) --*
An optional path used by a load balancer to route test traffic after an Amazon ECS deployment. Validation can occur while test traffic is served during a deployment.
- **listenerArns** *(list) --*
The ARN of one listener. The listener identifies the route between a target group and a load balancer. This is an array of strings with a maximum size of one.
- *(string) --*
- **additionalDeploymentStatusInfo** *(string) --*
Provides information about the results of a deployment, such as whether instances in the original environment in a blue/green deployment were not terminated.
- **fileExistsBehavior** *(string) --*
Information about how AWS CodeDeploy handles files that already exist in a deployment target location but weren't part of the previous successful deployment.
* DISALLOW: The deployment fails. This is also the default behavior if no option is specified.
* OVERWRITE: The version of the file from the application revision currently being deployed replaces the version already on the instance.
* RETAIN: The version of the file already on the instance is kept and used as part of the new deployment.
- **deploymentStatusMessages** *(list) --*
Messages that contain information about the status of a deployment.
- *(string) --*
- **computePlatform** *(string) --*
The destination platform type for the deployment (``Lambda`` , ``Server`` , or ``ECS`` ).
:type deploymentId: string
:param deploymentId: **[REQUIRED]**
The unique ID of a deployment associated with the IAM user or AWS account.
:rtype: dict
:returns:
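**Example**
A minimal sketch; ``d-EXAMPLE1111`` is a placeholder deployment ID::
    import boto3

    client = boto3.client('codedeploy')
    response = client.get_deployment(deploymentId='d-EXAMPLE1111')
    info = response['deploymentInfo']
    # Overall deployment state plus a per-instance status summary
    print(info['status'], info.get('deploymentOverview'))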
"""
pass
def get_deployment_config(self, deploymentConfigName: str) -> Dict:
"""
Gets information about a deployment configuration.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/GetDeploymentConfig>`_
**Request Syntax**
::
response = client.get_deployment_config(
deploymentConfigName='string'
)
**Response Syntax**
::
{
'deploymentConfigInfo': {
'deploymentConfigId': 'string',
'deploymentConfigName': 'string',
'minimumHealthyHosts': {
'value': 123,
'type': 'HOST_COUNT'|'FLEET_PERCENT'
},
'createTime': datetime(2015, 1, 1),
'computePlatform': 'Server'|'Lambda'|'ECS',
'trafficRoutingConfig': {
'type': 'TimeBasedCanary'|'TimeBasedLinear'|'AllAtOnce',
'timeBasedCanary': {
'canaryPercentage': 123,
'canaryInterval': 123
},
'timeBasedLinear': {
'linearPercentage': 123,
'linearInterval': 123
}
}
}
}
**Response Structure**
- *(dict) --*
Represents the output of a GetDeploymentConfig operation.
- **deploymentConfigInfo** *(dict) --*
Information about the deployment configuration.
- **deploymentConfigId** *(string) --*
The deployment configuration ID.
- **deploymentConfigName** *(string) --*
The deployment configuration name.
- **minimumHealthyHosts** *(dict) --*
Information about the number or percentage of minimum healthy instances.
- **value** *(integer) --*
The minimum healthy instance value.
- **type** *(string) --*
The minimum healthy instance type:
* HOST_COUNT: The minimum number of healthy instances as an absolute value.
* FLEET_PERCENT: The minimum number of healthy instances as a percentage of the total number of instances in the deployment.
In an example of nine instances, if a HOST_COUNT of six is specified, deploy to up to three instances at a time. The deployment is successful if six or more instances are deployed to successfully. Otherwise, the deployment fails. If a FLEET_PERCENT of 40 is specified, deploy to up to five instances at a time. The deployment is successful if four or more instances are deployed to successfully. Otherwise, the deployment fails.
.. note::
In a call to the get deployment configuration operation, CodeDeployDefault.OneAtATime returns a minimum healthy instance type of MOST_CONCURRENCY and a value of 1. This means a deployment to only one instance at a time. (You cannot set the type to MOST_CONCURRENCY, only to HOST_COUNT or FLEET_PERCENT.) In addition, with CodeDeployDefault.OneAtATime, AWS CodeDeploy attempts to ensure that all instances but one are kept in a healthy state during the deployment. Although this allows one instance at a time to be taken offline for a new deployment, it also means that if the deployment to the last instance fails, the overall deployment is still successful.
For more information, see `AWS CodeDeploy Instance Health <https://docs.aws.amazon.com/codedeploy/latest/userguide/instances-health.html>`__ in the *AWS CodeDeploy User Guide* .
- **createTime** *(datetime) --*
The time at which the deployment configuration was created.
- **computePlatform** *(string) --*
The destination platform type for the deployment (``Lambda`` , ``Server`` , or ``ECS`` ).
- **trafficRoutingConfig** *(dict) --*
The configuration that specifies how the deployment traffic is routed. Only deployments with a Lambda compute platform can specify this.
- **type** *(string) --*
The type of traffic shifting (``TimeBasedCanary`` or ``TimeBasedLinear`` ) used by a deployment configuration .
- **timeBasedCanary** *(dict) --*
A configuration that shifts traffic from one version of a Lambda function to another in two increments. The original and target Lambda function versions are specified in the deployment's AppSpec file.
- **canaryPercentage** *(integer) --*
The percentage of traffic to shift in the first increment of a ``TimeBasedCanary`` deployment.
- **canaryInterval** *(integer) --*
The number of minutes between the first and second traffic shifts of a ``TimeBasedCanary`` deployment.
- **timeBasedLinear** *(dict) --*
A configuration that shifts traffic from one version of a Lambda function to another in equal increments, with an equal number of minutes between each increment. The original and target Lambda function versions are specified in the deployment's AppSpec file.
- **linearPercentage** *(integer) --*
The percentage of traffic that is shifted at the start of each increment of a ``TimeBasedLinear`` deployment.
- **linearInterval** *(integer) --*
The number of minutes between each incremental traffic shift of a ``TimeBasedLinear`` deployment.
:type deploymentConfigName: string
:param deploymentConfigName: **[REQUIRED]**
The name of a deployment configuration associated with the IAM user or AWS account.
:rtype: dict
:returns:
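**Example**
A sketch that inspects one of the predefined configurations named in the note above::
    import boto3

    client = boto3.client('codedeploy')
    response = client.get_deployment_config(
        deploymentConfigName='CodeDeployDefault.OneAtATime'
    )
    cfg = response['deploymentConfigInfo']
    print(cfg['deploymentConfigName'], cfg['minimumHealthyHosts'])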
"""
pass
def get_deployment_group(self, applicationName: str, deploymentGroupName: str) -> Dict:
"""
Gets information about a deployment group.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/GetDeploymentGroup>`_
**Request Syntax**
::
response = client.get_deployment_group(
applicationName='string',
deploymentGroupName='string'
)
**Response Syntax**
::
{
'deploymentGroupInfo': {
'applicationName': 'string',
'deploymentGroupId': 'string',
'deploymentGroupName': 'string',
'deploymentConfigName': 'string',
'ec2TagFilters': [
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
'onPremisesInstanceTagFilters': [
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
'autoScalingGroups': [
{
'name': 'string',
'hook': 'string'
},
],
'serviceRoleArn': 'string',
'targetRevision': {
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
},
'triggerConfigurations': [
{
'triggerName': 'string',
'triggerTargetArn': 'string',
'triggerEvents': [
'DeploymentStart'|'DeploymentSuccess'|'DeploymentFailure'|'DeploymentStop'|'DeploymentRollback'|'DeploymentReady'|'InstanceStart'|'InstanceSuccess'|'InstanceFailure'|'InstanceReady',
]
},
],
'alarmConfiguration': {
'enabled': True|False,
'ignorePollAlarmFailure': True|False,
'alarms': [
{
'name': 'string'
},
]
},
'autoRollbackConfiguration': {
'enabled': True|False,
'events': [
'DEPLOYMENT_FAILURE'|'DEPLOYMENT_STOP_ON_ALARM'|'DEPLOYMENT_STOP_ON_REQUEST',
]
},
'deploymentStyle': {
'deploymentType': 'IN_PLACE'|'BLUE_GREEN',
'deploymentOption': 'WITH_TRAFFIC_CONTROL'|'WITHOUT_TRAFFIC_CONTROL'
},
'blueGreenDeploymentConfiguration': {
'terminateBlueInstancesOnDeploymentSuccess': {
'action': 'TERMINATE'|'KEEP_ALIVE',
'terminationWaitTimeInMinutes': 123
},
'deploymentReadyOption': {
'actionOnTimeout': 'CONTINUE_DEPLOYMENT'|'STOP_DEPLOYMENT',
'waitTimeInMinutes': 123
},
'greenFleetProvisioningOption': {
'action': 'DISCOVER_EXISTING'|'COPY_AUTO_SCALING_GROUP'
}
},
'loadBalancerInfo': {
'elbInfoList': [
{
'name': 'string'
},
],
'targetGroupInfoList': [
{
'name': 'string'
},
],
'targetGroupPairInfoList': [
{
'targetGroups': [
{
'name': 'string'
},
],
'prodTrafficRoute': {
'listenerArns': [
'string',
]
},
'testTrafficRoute': {
'listenerArns': [
'string',
]
}
},
]
},
'lastSuccessfulDeployment': {
'deploymentId': 'string',
'status': 'Created'|'Queued'|'InProgress'|'Succeeded'|'Failed'|'Stopped'|'Ready',
'endTime': datetime(2015, 1, 1),
'createTime': datetime(2015, 1, 1)
},
'lastAttemptedDeployment': {
'deploymentId': 'string',
'status': 'Created'|'Queued'|'InProgress'|'Succeeded'|'Failed'|'Stopped'|'Ready',
'endTime': datetime(2015, 1, 1),
'createTime': datetime(2015, 1, 1)
},
'ec2TagSet': {
'ec2TagSetList': [
[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
]
},
'onPremisesTagSet': {
'onPremisesTagSetList': [
[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
]
},
'computePlatform': 'Server'|'Lambda'|'ECS',
'ecsServices': [
{
'serviceName': 'string',
'clusterName': 'string'
},
]
}
}
**Response Structure**
- *(dict) --*
Represents the output of a GetDeploymentGroup operation.
- **deploymentGroupInfo** *(dict) --*
Information about the deployment group.
- **applicationName** *(string) --*
The application name.
- **deploymentGroupId** *(string) --*
The deployment group ID.
- **deploymentGroupName** *(string) --*
The deployment group name.
- **deploymentConfigName** *(string) --*
The deployment configuration name.
- **ec2TagFilters** *(list) --*
The Amazon EC2 tags on which to filter. The deployment group includes EC2 instances with any of the specified tags.
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **onPremisesInstanceTagFilters** *(list) --*
The on-premises instance tags on which to filter. The deployment group includes on-premises instances with any of the specified tags.
- *(dict) --*
Information about an on-premises instance tag filter.
- **Key** *(string) --*
The on-premises instance tag filter key.
- **Value** *(string) --*
The on-premises instance tag filter value.
- **Type** *(string) --*
The on-premises instance tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **autoScalingGroups** *(list) --*
A list of associated Auto Scaling groups.
- *(dict) --*
Information about an Auto Scaling group.
- **name** *(string) --*
The Auto Scaling group name.
- **hook** *(string) --*
An Auto Scaling lifecycle event hook name.
- **serviceRoleArn** *(string) --*
A service role ARN.
- **targetRevision** *(dict) --*
Information about the deployment group's target revision, including type and location.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
* AppSpecContent: An ``AppSpecContent`` object that contains the contents of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall`` , during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **triggerConfigurations** *(list) --*
Information about triggers associated with the deployment group.
- *(dict) --*
Information about notification triggers for the deployment group.
- **triggerName** *(string) --*
The name of the notification trigger.
- **triggerTargetArn** *(string) --*
The ARN of the Amazon Simple Notification Service topic through which notifications about deployment or instance events are sent.
- **triggerEvents** *(list) --*
The event type or types for which notifications are triggered.
- *(string) --*
- **alarmConfiguration** *(dict) --*
A list of alarms associated with the deployment group.
- **enabled** *(boolean) --*
Indicates whether the alarm configuration is enabled.
- **ignorePollAlarmFailure** *(boolean) --*
Indicates whether a deployment should continue if information about the current state of alarms cannot be retrieved from Amazon CloudWatch. The default value is false.
* true: The deployment proceeds even if alarm status information can't be retrieved from Amazon CloudWatch.
* false: The deployment stops if alarm status information can't be retrieved from Amazon CloudWatch.
- **alarms** *(list) --*
A list of alarms configured for the deployment group. A maximum of 10 alarms can be added to a deployment group.
- *(dict) --*
Information about an alarm.
- **name** *(string) --*
The name of the alarm. Maximum length is 255 characters. Each alarm name can be used only once in a list of alarms.
- **autoRollbackConfiguration** *(dict) --*
Information about the automatic rollback configuration associated with the deployment group.
- **enabled** *(boolean) --*
Indicates whether a defined automatic rollback configuration is currently enabled.
- **events** *(list) --*
The event type or types that trigger a rollback.
- *(string) --*
- **deploymentStyle** *(dict) --*
Information about the type of deployment, either in-place or blue/green, you want to run and whether to route deployment traffic behind a load balancer.
- **deploymentType** *(string) --*
Indicates whether to run an in-place deployment or a blue/green deployment.
- **deploymentOption** *(string) --*
Indicates whether to route deployment traffic behind a load balancer.
- **blueGreenDeploymentConfiguration** *(dict) --*
Information about blue/green deployment options for a deployment group.
- **terminateBlueInstancesOnDeploymentSuccess** *(dict) --*
Information about whether to terminate instances in the original fleet during a blue/green deployment.
- **action** *(string) --*
The action to take on instances in the original environment after a successful blue/green deployment.
* TERMINATE: Instances are terminated after a specified wait time.
* KEEP_ALIVE: Instances are left running after they are deregistered from the load balancer and removed from the deployment group.
- **terminationWaitTimeInMinutes** *(integer) --*
The number of minutes to wait after a successful blue/green deployment before terminating instances from the original environment. The maximum setting is 2880 minutes (2 days).
- **deploymentReadyOption** *(dict) --*
Information about the action to take when newly provisioned instances are ready to receive traffic in a blue/green deployment.
- **actionOnTimeout** *(string) --*
Information about when to reroute traffic from an original environment to a replacement environment in a blue/green deployment.
* CONTINUE_DEPLOYMENT: Register new instances with the load balancer immediately after the new application revision is installed on the instances in the replacement environment.
* STOP_DEPLOYMENT: Do not register new instances with a load balancer unless traffic rerouting is started using ContinueDeployment . If traffic rerouting is not started before the end of the specified wait period, the deployment status is changed to Stopped.
- **waitTimeInMinutes** *(integer) --*
The number of minutes to wait before the status of a blue/green deployment is changed to Stopped if rerouting is not started manually. Applies only to the STOP_DEPLOYMENT option for actionOnTimeout.
- **greenFleetProvisioningOption** *(dict) --*
Information about how instances are provisioned for a replacement environment in a blue/green deployment.
- **action** *(string) --*
The method used to add instances to a replacement environment.
* DISCOVER_EXISTING: Use instances that already exist or will be created manually.
* COPY_AUTO_SCALING_GROUP: Use settings from a specified Auto Scaling group to define and create instances in a new Auto Scaling group.
- **loadBalancerInfo** *(dict) --*
Information about the load balancer to use in a deployment.
- **elbInfoList** *(list) --*
An array that contains information about the load balancer to use for load balancing in a deployment. In Elastic Load Balancing, load balancers are used with Classic Load Balancers.
.. note::
Adding more than one load balancer to the array is not supported.
- *(dict) --*
Information about a load balancer in Elastic Load Balancing to use in a deployment. Instances are registered directly with a load balancer, and traffic is routed to the load balancer.
- **name** *(string) --*
For blue/green deployments, the name of the load balancer that is used to route traffic from original instances to replacement instances in a blue/green deployment. For in-place deployments, the name of the load balancer that instances are deregistered from so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **targetGroupInfoList** *(list) --*
An array that contains information about the target group to use for load balancing in a deployment. In Elastic Load Balancing, target groups are used with Application Load Balancers.
.. note::
Adding more than one target group to the array is not supported.
- *(dict) --*
Information about a target group in Elastic Load Balancing to use in a deployment. Instances are registered as targets in a target group, and traffic is routed to the target group.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **targetGroupPairInfoList** *(list) --*
The target group pair information. This is an array of ``TargetGroupPairInfo`` objects with a maximum size of one.
- *(dict) --*
Information about two target groups and how traffic is routed during an Amazon ECS deployment. An optional test traffic route can be specified.
- **targetGroups** *(list) --*
One pair of target groups. One is associated with the original task set. The second is associated with the task set that serves traffic after the deployment is complete.
- *(dict) --*
Information about a target group in Elastic Load Balancing to use in a deployment. Instances are registered as targets in a target group, and traffic is routed to the target group.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **prodTrafficRoute** *(dict) --*
The path used by a load balancer to route production traffic when an Amazon ECS deployment is complete.
- **listenerArns** *(list) --*
The ARN of one listener. The listener identifies the route between a target group and a load balancer. This is an array of strings with a maximum size of one.
- *(string) --*
- **testTrafficRoute** *(dict) --*
An optional path used by a load balancer to route test traffic after an Amazon ECS deployment. Validation can occur while test traffic is served during a deployment.
- **listenerArns** *(list) --*
The ARN of one listener. The listener identifies the route between a target group and a load balancer. This is an array of strings with a maximum size of one.
- *(string) --*
- **lastSuccessfulDeployment** *(dict) --*
Information about the most recent successful deployment to the deployment group.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **status** *(string) --*
The status of the most recent deployment.
- **endTime** *(datetime) --*
A timestamp that indicates when the most recent deployment to the deployment group was complete.
- **createTime** *(datetime) --*
A timestamp that indicates when the most recent deployment to the deployment group started.
- **lastAttemptedDeployment** *(dict) --*
Information about the most recent attempted deployment to the deployment group.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **status** *(string) --*
The status of the most recent deployment.
- **endTime** *(datetime) --*
A timestamp that indicates when the most recent deployment to the deployment group was complete.
- **createTime** *(datetime) --*
A timestamp that indicates when the most recent deployment to the deployment group started.
- **ec2TagSet** *(dict) --*
Information about groups of tags applied to an EC2 instance. The deployment group includes only EC2 instances identified by all of the tag groups. Cannot be used in the same call as ec2TagFilters.
- **ec2TagSetList** *(list) --*
A list that contains other lists of EC2 instance tag groups. For an instance to be included in the deployment group, it must be identified by all of the tag groups in the list.
- *(list) --*
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **onPremisesTagSet** *(dict) --*
Information about groups of tags applied to an on-premises instance. The deployment group includes only on-premises instances identified by all the tag groups. Cannot be used in the same call as onPremisesInstanceTagFilters.
- **onPremisesTagSetList** *(list) --*
A list that contains other lists of on-premises instance tag groups. For an instance to be included in the deployment group, it must be identified by all of the tag groups in the list.
- *(list) --*
- *(dict) --*
Information about an on-premises instance tag filter.
- **Key** *(string) --*
The on-premises instance tag filter key.
- **Value** *(string) --*
The on-premises instance tag filter value.
- **Type** *(string) --*
The on-premises instance tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
- **computePlatform** *(string) --*
The destination platform type for the deployment group (``Lambda`` , ``Server`` , or ``ECS`` ).
- **ecsServices** *(list) --*
The target Amazon ECS services in the deployment group. This applies only to deployment groups that use the Amazon ECS compute platform. A target Amazon ECS service is specified as an Amazon ECS cluster and service name pair using the format ``<clustername>:<servicename>`` .
- *(dict) --*
Contains the service and cluster names used to identify an Amazon ECS deployment's target.
- **serviceName** *(string) --*
The name of the target Amazon ECS service.
- **clusterName** *(string) --*
The name of the cluster that the Amazon ECS service is associated with.
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of an AWS CodeDeploy application associated with the IAM user or AWS account.
:type deploymentGroupName: string
:param deploymentGroupName: **[REQUIRED]**
The name of a deployment group for the specified application.
:rtype: dict
:returns:
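**Example**
A minimal usage sketch; the application and deployment group names are hypothetical::
    import boto3
    client = boto3.client('codedeploy')
    # Hypothetical names; substitute your own application and group.
    response = client.get_deployment_group(
        applicationName='MyApp',
        deploymentGroupName='MyDeploymentGroup'
    )
    info = response['deploymentGroupInfo']
    print(info.get('deploymentStyle'), info.get('computePlatform'))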
"""
pass
def get_deployment_instance(self, deploymentId: str, instanceId: str) -> Dict:
"""
Gets information about an instance as part of a deployment.
.. danger::
This operation is deprecated and may not function as expected. It should not be used going forward and is kept only for backward compatibility.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/GetDeploymentInstance>`_
**Request Syntax**
::
response = client.get_deployment_instance(
deploymentId='string',
instanceId='string'
)
**Response Syntax**
::
{
'instanceSummary': {
'deploymentId': 'string',
'instanceId': 'string',
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'|'Ready',
'lastUpdatedAt': datetime(2015, 1, 1),
'lifecycleEvents': [
{
'lifecycleEventName': 'string',
'diagnostics': {
'errorCode': 'Success'|'ScriptMissing'|'ScriptNotExecutable'|'ScriptTimedOut'|'ScriptFailed'|'UnknownError',
'scriptName': 'string',
'message': 'string',
'logTail': 'string'
},
'startTime': datetime(2015, 1, 1),
'endTime': datetime(2015, 1, 1),
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'
},
],
'instanceType': 'Blue'|'Green'
}
}
**Response Structure**
- *(dict) --*
Represents the output of a GetDeploymentInstance operation.
- **instanceSummary** *(dict) --*
Information about the instance.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **instanceId** *(string) --*
The instance ID.
- **status** *(string) --*
The deployment status for this instance:
* Pending: The deployment is pending for this instance.
* InProgress: The deployment is in progress for this instance.
* Succeeded: The deployment has succeeded for this instance.
* Failed: The deployment has failed for this instance.
* Skipped: The deployment has been skipped for this instance.
* Unknown: The deployment status is unknown for this instance.
- **lastUpdatedAt** *(datetime) --*
A timestamp that indicates when the instance information was last updated.
- **lifecycleEvents** *(list) --*
A list of lifecycle events for this instance.
- *(dict) --*
Information about a deployment lifecycle event.
- **lifecycleEventName** *(string) --*
The deployment lifecycle event name, such as ApplicationStop, BeforeInstall, AfterInstall, ApplicationStart, or ValidateService.
- **diagnostics** *(dict) --*
Diagnostic information about the deployment lifecycle event.
- **errorCode** *(string) --*
The associated error code:
* Success: The specified script ran.
* ScriptMissing: The specified script was not found in the specified location.
* ScriptNotExecutable: The specified script is not a recognized executable file type.
* ScriptTimedOut: The specified script did not finish running in the specified time period.
* ScriptFailed: The specified script failed to run as expected.
* UnknownError: The specified script did not run for an unknown reason.
- **scriptName** *(string) --*
The name of the script.
- **message** *(string) --*
The message associated with the error.
- **logTail** *(string) --*
The last portion of the diagnostic log.
If available, AWS CodeDeploy returns up to the last 4 KB of the diagnostic log.
- **startTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event started.
- **endTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event ended.
- **status** *(string) --*
The deployment lifecycle event status:
* Pending: The deployment lifecycle event is pending.
* InProgress: The deployment lifecycle event is in progress.
* Succeeded: The deployment lifecycle event ran successfully.
* Failed: The deployment lifecycle event has failed.
* Skipped: The deployment lifecycle event has been skipped.
* Unknown: The deployment lifecycle event is unknown.
- **instanceType** *(string) --*
Information about which environment an instance belongs to in a blue/green deployment.
* BLUE: The instance is part of the original environment.
* GREEN: The instance is part of the replacement environment.
:type deploymentId: string
:param deploymentId: **[REQUIRED]**
The unique ID of a deployment.
:type instanceId: string
:param instanceId: **[REQUIRED]**
The unique ID of an instance in the deployment group.
:rtype: dict
:returns:
"""
pass
def get_deployment_target(self, deploymentId: str = None, targetId: str = None) -> Dict:
"""
Returns information about a deployment target.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/GetDeploymentTarget>`_
**Request Syntax**
::
response = client.get_deployment_target(
deploymentId='string',
targetId='string'
)
**Response Syntax**
::
{
'deploymentTarget': {
'deploymentTargetType': 'InstanceTarget'|'LambdaTarget'|'ECSTarget',
'instanceTarget': {
'deploymentId': 'string',
'targetId': 'string',
'targetArn': 'string',
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'|'Ready',
'lastUpdatedAt': datetime(2015, 1, 1),
'lifecycleEvents': [
{
'lifecycleEventName': 'string',
'diagnostics': {
'errorCode': 'Success'|'ScriptMissing'|'ScriptNotExecutable'|'ScriptTimedOut'|'ScriptFailed'|'UnknownError',
'scriptName': 'string',
'message': 'string',
'logTail': 'string'
},
'startTime': datetime(2015, 1, 1),
'endTime': datetime(2015, 1, 1),
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'
},
],
'instanceLabel': 'Blue'|'Green'
},
'lambdaTarget': {
'deploymentId': 'string',
'targetId': 'string',
'targetArn': 'string',
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'|'Ready',
'lastUpdatedAt': datetime(2015, 1, 1),
'lifecycleEvents': [
{
'lifecycleEventName': 'string',
'diagnostics': {
'errorCode': 'Success'|'ScriptMissing'|'ScriptNotExecutable'|'ScriptTimedOut'|'ScriptFailed'|'UnknownError',
'scriptName': 'string',
'message': 'string',
'logTail': 'string'
},
'startTime': datetime(2015, 1, 1),
'endTime': datetime(2015, 1, 1),
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'
},
]
},
'ecsTarget': {
'deploymentId': 'string',
'targetId': 'string',
'targetArn': 'string',
'lastUpdatedAt': datetime(2015, 1, 1),
'lifecycleEvents': [
{
'lifecycleEventName': 'string',
'diagnostics': {
'errorCode': 'Success'|'ScriptMissing'|'ScriptNotExecutable'|'ScriptTimedOut'|'ScriptFailed'|'UnknownError',
'scriptName': 'string',
'message': 'string',
'logTail': 'string'
},
'startTime': datetime(2015, 1, 1),
'endTime': datetime(2015, 1, 1),
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'
},
],
'status': 'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'|'Ready',
'taskSetsInfo': [
{
'identifer': 'string',
'desiredCount': 123,
'pendingCount': 123,
'runningCount': 123,
'status': 'string',
'trafficWeight': 123.0,
'targetGroup': {
'name': 'string'
},
'taskSetLabel': 'Blue'|'Green'
},
]
}
}
}
**Response Structure**
- *(dict) --*
- **deploymentTarget** *(dict) --*
A deployment target that contains information about a deployment such as its status, lifecycle events, and when it was last updated. It also contains metadata about the deployment target. The deployment target metadata depends on the deployment target's type (``instanceTarget`` , ``lambdaTarget`` , or ``ecsTarget`` ).
- **deploymentTargetType** *(string) --*
The deployment type that is specific to the deployment's compute platform.
- **instanceTarget** *(dict) --*
Information about the target for a deployment that uses the EC2/On-premises compute platform.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **targetId** *(string) --*
The unique ID of a deployment target that has a type of ``instanceTarget`` .
- **targetArn** *(string) --*
The ARN of the target.
- **status** *(string) --*
The status of an EC2/On-premises deployment's target instance.
- **lastUpdatedAt** *(datetime) --*
The date and time when the target instance was updated by a deployment.
- **lifecycleEvents** *(list) --*
The lifecycle events of the deployment to this target instance.
- *(dict) --*
Information about a deployment lifecycle event.
- **lifecycleEventName** *(string) --*
The deployment lifecycle event name, such as ApplicationStop, BeforeInstall, AfterInstall, ApplicationStart, or ValidateService.
- **diagnostics** *(dict) --*
Diagnostic information about the deployment lifecycle event.
- **errorCode** *(string) --*
The associated error code:
* Success: The specified script ran.
* ScriptMissing: The specified script was not found in the specified location.
* ScriptNotExecutable: The specified script is not a recognized executable file type.
* ScriptTimedOut: The specified script did not finish running in the specified time period.
* ScriptFailed: The specified script failed to run as expected.
* UnknownError: The specified script did not run for an unknown reason.
- **scriptName** *(string) --*
The name of the script.
- **message** *(string) --*
The message associated with the error.
- **logTail** *(string) --*
The last portion of the diagnostic log.
If available, AWS CodeDeploy returns up to the last 4 KB of the diagnostic log.
- **startTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event started.
- **endTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event ended.
- **status** *(string) --*
The deployment lifecycle event status:
* Pending: The deployment lifecycle event is pending.
* InProgress: The deployment lifecycle event is in progress.
* Succeeded: The deployment lifecycle event ran successfully.
* Failed: The deployment lifecycle event has failed.
* Skipped: The deployment lifecycle event has been skipped.
* Unknown: The deployment lifecycle event is unknown.
- **instanceLabel** *(string) --*
A label that identifies whether the instance is an original target (``BLUE`` ) or a replacement target (``GREEN`` ).
- **lambdaTarget** *(dict) --*
Information about the target for a deployment that uses the AWS Lambda compute platform.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **targetId** *(string) --*
The unique ID of a deployment target that has a type of ``lambdaTarget`` .
- **targetArn** *(string) --*
The ARN of the target.
- **status** *(string) --*
The status of an AWS Lambda deployment's target Lambda function.
- **lastUpdatedAt** *(datetime) --*
The date and time when the target Lambda function was updated by a deployment.
- **lifecycleEvents** *(list) --*
The lifecycle events of the deployment to this target Lambda function.
- *(dict) --*
Information about a deployment lifecycle event.
- **lifecycleEventName** *(string) --*
The deployment lifecycle event name, such as ApplicationStop, BeforeInstall, AfterInstall, ApplicationStart, or ValidateService.
- **diagnostics** *(dict) --*
Diagnostic information about the deployment lifecycle event.
- **errorCode** *(string) --*
The associated error code:
* Success: The specified script ran.
* ScriptMissing: The specified script was not found in the specified location.
* ScriptNotExecutable: The specified script is not a recognized executable file type.
* ScriptTimedOut: The specified script did not finish running in the specified time period.
* ScriptFailed: The specified script failed to run as expected.
* UnknownError: The specified script did not run for an unknown reason.
- **scriptName** *(string) --*
The name of the script.
- **message** *(string) --*
The message associated with the error.
- **logTail** *(string) --*
The last portion of the diagnostic log.
If available, AWS CodeDeploy returns up to the last 4 KB of the diagnostic log.
- **startTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event started.
- **endTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event ended.
- **status** *(string) --*
The deployment lifecycle event status:
* Pending: The deployment lifecycle event is pending.
* InProgress: The deployment lifecycle event is in progress.
* Succeeded: The deployment lifecycle event ran successfully.
* Failed: The deployment lifecycle event has failed.
* Skipped: The deployment lifecycle event has been skipped.
* Unknown: The deployment lifecycle event is unknown.
- **ecsTarget** *(dict) --*
Information about the target for a deployment that uses the Amazon ECS compute platform.
- **deploymentId** *(string) --*
The unique ID of a deployment.
- **targetId** *(string) --*
The unique ID of a deployment target that has a type of ``ecsTarget`` .
- **targetArn** *(string) --*
The ARN of the target.
- **lastUpdatedAt** *(datetime) --*
The date and time when the target Amazon ECS application was updated by a deployment.
- **lifecycleEvents** *(list) --*
The lifecycle events of the deployment to this target Amazon ECS application.
- *(dict) --*
Information about a deployment lifecycle event.
- **lifecycleEventName** *(string) --*
The deployment lifecycle event name, such as ApplicationStop, BeforeInstall, AfterInstall, ApplicationStart, or ValidateService.
- **diagnostics** *(dict) --*
Diagnostic information about the deployment lifecycle event.
- **errorCode** *(string) --*
The associated error code:
* Success: The specified script ran.
* ScriptMissing: The specified script was not found in the specified location.
* ScriptNotExecutable: The specified script is not a recognized executable file type.
* ScriptTimedOut: The specified script did not finish running in the specified time period.
* ScriptFailed: The specified script failed to run as expected.
* UnknownError: The specified script did not run for an unknown reason.
- **scriptName** *(string) --*
The name of the script.
- **message** *(string) --*
The message associated with the error.
- **logTail** *(string) --*
The last portion of the diagnostic log.
If available, AWS CodeDeploy returns up to the last 4 KB of the diagnostic log.
- **startTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event started.
- **endTime** *(datetime) --*
A timestamp that indicates when the deployment lifecycle event ended.
- **status** *(string) --*
The deployment lifecycle event status:
* Pending: The deployment lifecycle event is pending.
* InProgress: The deployment lifecycle event is in progress.
* Succeeded: The deployment lifecycle event ran successfully.
* Failed: The deployment lifecycle event has failed.
* Skipped: The deployment lifecycle event has been skipped.
* Unknown: The deployment lifecycle event is unknown.
- **status** *(string) --*
The status of an Amazon ECS deployment's target ECS application.
- **taskSetsInfo** *(list) --*
The ``ECSTaskSet`` objects associated with the ECS target.
- *(dict) --*
Information about a set of Amazon ECS tasks in an AWS CodeDeploy deployment. An Amazon ECS task set includes details such as the desired number of tasks, how many tasks are running, and whether the task set serves production traffic. An AWS CodeDeploy application that uses the Amazon ECS compute platform deploys a containerized application in an Amazon ECS service as a task set.
- **identifer** *(string) --*
A unique ID of an ``ECSTaskSet`` .
- **desiredCount** *(integer) --*
The number of tasks in a task set. During a deployment that uses the Amazon ECS compute type, CodeDeploy instructs Amazon ECS to create a new task set and uses this value to determine how many tasks to create. After the updated task set is created, CodeDeploy shifts traffic to the new task set.
- **pendingCount** *(integer) --*
The number of tasks in the task set that are in the ``PENDING`` status during an Amazon ECS deployment. A task in the ``PENDING`` state is preparing to enter the ``RUNNING`` state. A task set enters the ``PENDING`` status when it launches for the first time, or when it is restarted after being in the ``STOPPED`` state.
- **runningCount** *(integer) --*
The number of tasks in the task set that are in the ``RUNNING`` status during an Amazon ECS deployment. A task in the ``RUNNING`` state is running and ready for use.
- **status** *(string) --*
The status of the task set. There are three valid task set statuses:
* ``PRIMARY`` : Indicates the task set is serving production traffic.
* ``ACTIVE`` : Indicates the task set is not serving production traffic.
* ``DRAINING`` : Indicates the tasks in the task set are being stopped and their corresponding targets are being deregistered from their target group.
- **trafficWeight** *(float) --*
The percentage of traffic served by this task set.
- **targetGroup** *(dict) --*
The target group associated with the task set. The target group is used by AWS CodeDeploy to manage traffic to a task set.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **taskSetLabel** *(string) --*
A label that identifies whether the ECS task set is an original target (``BLUE`` ) or a replacement target (``GREEN`` ).
:type deploymentId: string
:param deploymentId:
The unique ID of a deployment.
:type targetId: string
:param targetId:
The unique ID of a deployment target.
:rtype: dict
:returns:
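**Example**
A sketch of inspecting one target of an EC2/On-premises deployment; the deployment and target IDs are hypothetical and would normally come from ``list_deployment_targets``::
    import boto3
    client = boto3.client('codedeploy')
    response = client.get_deployment_target(
        deploymentId='d-EXAMPLE111',        # hypothetical deployment ID
        targetId='i-0123456789abcdef0'      # hypothetical target ID
    )
    target = response['deploymentTarget']
    if target['deploymentTargetType'] == 'InstanceTarget':
        instance = target['instanceTarget']
        print(instance['status'], instance.get('instanceLabel'))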
"""
pass
def get_on_premises_instance(self, instanceName: str) -> Dict:
"""
Gets information about an on-premises instance.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/GetOnPremisesInstance>`_
**Request Syntax**
::
response = client.get_on_premises_instance(
instanceName='string'
)
**Response Syntax**
::
{
'instanceInfo': {
'instanceName': 'string',
'iamSessionArn': 'string',
'iamUserArn': 'string',
'instanceArn': 'string',
'registerTime': datetime(2015, 1, 1),
'deregisterTime': datetime(2015, 1, 1),
'tags': [
{
'Key': 'string',
'Value': 'string'
},
]
}
}
**Response Structure**
- *(dict) --*
Represents the output of a GetOnPremisesInstance operation.
- **instanceInfo** *(dict) --*
Information about the on-premises instance.
- **instanceName** *(string) --*
The name of the on-premises instance.
- **iamSessionArn** *(string) --*
The ARN of the IAM session associated with the on-premises instance.
- **iamUserArn** *(string) --*
The IAM user ARN associated with the on-premises instance.
- **instanceArn** *(string) --*
The ARN of the on-premises instance.
- **registerTime** *(datetime) --*
The time at which the on-premises instance was registered.
- **deregisterTime** *(datetime) --*
If the on-premises instance was deregistered, the time at which the on-premises instance was deregistered.
- **tags** *(list) --*
The tags currently associated with the on-premises instance.
- *(dict) --*
Information about a tag.
- **Key** *(string) --*
The tag's key.
- **Value** *(string) --*
The tag's value.
:type instanceName: string
:param instanceName: **[REQUIRED]**
The name of the on-premises instance about which to get information.
:rtype: dict
:returns:
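**Example**
A short sketch; the instance name is hypothetical::
    import boto3
    client = boto3.client('codedeploy')
    response = client.get_on_premises_instance(
        instanceName='AssetTag12010298EX'  # hypothetical on-premises instance name
    )
    info = response['instanceInfo']
    print(info['instanceArn'])
    print([(tag['Key'], tag['Value']) for tag in info.get('tags', [])])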
"""
pass
def get_paginator(self, operation_name: str = None) -> Paginator:
"""
Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is ``create_foo``, and you\'d normally invoke the
operation as ``client.create_foo(**kwargs)``, if the
``create_foo`` operation can be paginated, you can use the
call ``client.get_paginator(\"create_foo\")``.
:raise OperationNotPageableError: Raised if the operation is not
pageable. You can use the ``client.can_paginate`` method to
check if an operation is pageable.
:rtype: L{botocore.paginate.Paginator}
:return: A paginator object.
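**Example**
A sketch that pages through all deployments instead of following ``nextToken`` by hand; it assumes ``list_deployments`` is pageable on this client::
    import boto3
    client = boto3.client('codedeploy')
    paginator = client.get_paginator('list_deployments')
    # Each page is one ListDeployments response; the paginator follows
    # nextToken automatically.
    for page in paginator.paginate():
        for deployment_id in page['deployments']:
            print(deployment_id)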
"""
pass
def get_waiter(self, waiter_name: str = None) -> Waiter:
"""
Returns an object that can wait for some condition.
:type waiter_name: str
:param waiter_name: The name of the waiter to get. See the waiters
section of the service docs for a list of available waiters.
:returns: The specified waiter object.
:rtype: botocore.waiter.Waiter
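**Example**
A sketch that blocks until a deployment succeeds, assuming the ``deployment_successful`` waiter listed in the waiters section of the service docs; the deployment ID is hypothetical::
    import boto3
    client = boto3.client('codedeploy')
    waiter = client.get_waiter('deployment_successful')
    # Polls the deployment until it reaches Succeeded, or raises a
    # WaiterError on failure or timeout.
    waiter.wait(deploymentId='d-EXAMPLE111')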
"""
pass
def list_application_revisions(self, applicationName: str, sortBy: str = None, sortOrder: str = None, s3Bucket: str = None, s3KeyPrefix: str = None, deployed: str = None, nextToken: str = None) -> Dict:
"""
Lists information about revisions for an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/ListApplicationRevisions>`_
**Request Syntax**
::
response = client.list_application_revisions(
applicationName='string',
sortBy='registerTime'|'firstUsedTime'|'lastUsedTime',
sortOrder='ascending'|'descending',
s3Bucket='string',
s3KeyPrefix='string',
deployed='include'|'exclude'|'ignore',
nextToken='string'
)
**Response Syntax**
::
{
'revisions': [
{
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
},
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a ListApplicationRevisions operation.
- **revisions** *(list) --*
A list of locations that contain the matching revisions.
- *(dict) --*
Information about the location of an application revision.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
* AppSpecContent: An ``AppSpecContent`` object that contains the contents of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
* YAML: A YAML-formatted file.
* JSON: A JSON-formatted file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall`` , during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **nextToken** *(string) --*
If a large amount of information is returned, an identifier is also returned. It can be used in a subsequent list application revisions call to return the next set of application revisions in the list.
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of an AWS CodeDeploy application associated with the IAM user or AWS account.
:type sortBy: string
:param sortBy:
The column name to use to sort the list results:
* registerTime: Sort by the time the revisions were registered with AWS CodeDeploy.
* firstUsedTime: Sort by the time the revisions were first used in a deployment.
* lastUsedTime: Sort by the time the revisions were last used in a deployment.
If not specified or set to null, the results are returned in an arbitrary order.
:type sortOrder: string
:param sortOrder:
The order in which to sort the list results:
* ascending: ascending order.
* descending: descending order.
If not specified, the results are sorted in ascending order.
If set to null, the results are sorted in an arbitrary order.
:type s3Bucket: string
:param s3Bucket:
An Amazon S3 bucket name to limit the search for revisions.
If set to null, all of the user\'s buckets are searched.
:type s3KeyPrefix: string
:param s3KeyPrefix:
A key prefix for the set of Amazon S3 objects to limit the search for revisions.
:type deployed: string
:param deployed:
Whether to list revisions based on whether the revision is the target revision of a deployment group:
* include: List revisions that are target revisions of a deployment group.
* exclude: Do not list revisions that are target revisions of a deployment group.
* ignore: List all revisions.
:type nextToken: string
:param nextToken:
An identifier returned from the previous ``ListApplicationRevisions`` call. It can be used to return the next set of applications in the list.
:rtype: dict
:returns:
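**Example**
A sketch that lists the most recently registered S3 revisions of an application; the application name is hypothetical::
    import boto3
    client = boto3.client('codedeploy')
    response = client.list_application_revisions(
        applicationName='MyApp',   # hypothetical application name
        sortBy='registerTime',
        sortOrder='descending',
        deployed='ignore'
    )
    for revision in response['revisions']:
        if revision['revisionType'] == 'S3':
            location = revision['s3Location']
            print(location['bucket'], location['key'])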
"""
pass
def list_applications(self, nextToken: str = None) -> Dict:
"""
Lists the applications registered with the IAM user or AWS account.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/ListApplications>`_
**Request Syntax**
::
response = client.list_applications(
nextToken='string'
)
**Response Syntax**
::
{
'applications': [
'string',
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a ListApplications operation.
- **applications** *(list) --*
A list of application names.
- *(string) --*
- **nextToken** *(string) --*
If a large amount of information is returned, an identifier is also returned. It can be used in a subsequent list applications call to return the next set of applications in the list.
:type nextToken: string
:param nextToken:
An identifier returned from the previous list applications call. It can be used to return the next set of applications in the list.
:rtype: dict
:returns:
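**Example**
A sketch of the ``nextToken`` convention this operation (and the other list operations) uses::
    import boto3
    client = boto3.client('codedeploy')
    names = []
    kwargs = {}
    while True:
        response = client.list_applications(**kwargs)
        names.extend(response['applications'])
        # A missing nextToken means the last page has been returned.
        if 'nextToken' not in response:
            break
        kwargs['nextToken'] = response['nextToken']
    print(names)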
"""
pass
def list_deployment_configs(self, nextToken: str = None) -> Dict:
"""
Lists the deployment configurations associated with the IAM user or AWS account.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/ListDeploymentConfigs>`_
**Request Syntax**
::
response = client.list_deployment_configs(
nextToken='string'
)
**Response Syntax**
::
{
'deploymentConfigsList': [
'string',
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a ListDeploymentConfigs operation.
- **deploymentConfigsList** *(list) --*
A list of deployment configurations, including built-in configurations such as CodeDeployDefault.OneAtATime.
- *(string) --*
- **nextToken** *(string) --*
If a large amount of information is returned, an identifier is also returned. It can be used in a subsequent list deployment configurations call to return the next set of deployment configurations in the list.
:type nextToken: string
:param nextToken:
An identifier returned from the previous ``ListDeploymentConfigs`` call. It can be used to return the next set of deployment configurations in the list.
:rtype: dict
:returns:
"""
pass
def list_deployment_groups(self, applicationName: str, nextToken: str = None) -> Dict:
"""
Lists the deployment groups for an application registered with the IAM user or AWS account.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/ListDeploymentGroups>`_
**Request Syntax**
::
response = client.list_deployment_groups(
applicationName='string',
nextToken='string'
)
**Response Syntax**
::
{
'applicationName': 'string',
'deploymentGroups': [
'string',
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a ListDeploymentGroups operation.
- **applicationName** *(string) --*
The application name.
- **deploymentGroups** *(list) --*
A list of deployment group names.
- *(string) --*
- **nextToken** *(string) --*
If a large amount of information is returned, an identifier is also returned. It can be used in a subsequent list deployment groups call to return the next set of deployment groups in the list.
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of an AWS CodeDeploy application associated with the IAM user or AWS account.
:type nextToken: string
:param nextToken:
An identifier returned from the previous list deployment groups call. It can be used to return the next set of deployment groups in the list.
:rtype: dict
:returns:
"""
pass
def list_deployment_instances(self, deploymentId: str, nextToken: str = None, instanceStatusFilter: List = None, instanceTypeFilter: List = None) -> Dict:
"""
.. note::
The newer BatchGetDeploymentTargets should be used instead because it works with all compute types. ``ListDeploymentInstances`` throws an exception if it is used with a compute platform other than EC2/On-premises or AWS Lambda.
Lists the instances for a deployment associated with the IAM user or AWS account.
.. danger::
This operation is deprecated and may not function as expected. It should not be used going forward and is kept only for backward compatibility.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/ListDeploymentInstances>`_
**Request Syntax**
::
response = client.list_deployment_instances(
deploymentId='string',
nextToken='string',
instanceStatusFilter=[
'Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'|'Ready',
],
instanceTypeFilter=[
'Blue'|'Green',
]
)
**Response Syntax**
::
{
'instancesList': [
'string',
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a ListDeploymentInstances operation.
- **instancesList** *(list) --*
A list of instance IDs.
- *(string) --*
- **nextToken** *(string) --*
If a large amount of information is returned, an identifier is also returned. It can be used in a subsequent list deployment instances call to return the next set of deployment instances in the list.
:type deploymentId: string
:param deploymentId: **[REQUIRED]**
The unique ID of a deployment.
:type nextToken: string
:param nextToken:
An identifier returned from the previous list deployment instances call. It can be used to return the next set of deployment instances in the list.
:type instanceStatusFilter: list
:param instanceStatusFilter:
A subset of instances to list by status:
* Pending: Include those instances with pending deployments.
* InProgress: Include those instances where deployments are still in progress.
* Succeeded: Include those instances with successful deployments.
* Failed: Include those instances with failed deployments.
* Skipped: Include those instances with skipped deployments.
* Unknown: Include those instances with deployments in an unknown state.
- *(string) --*
:type instanceTypeFilter: list
:param instanceTypeFilter:
The set of instances in a blue/green deployment, either those in the original environment (\"BLUE\") or those in the replacement environment (\"GREEN\"), for which you want to view instance information.
- *(string) --*
:rtype: dict
:returns:
"""
pass
def list_deployment_targets(self, deploymentId: str = None, nextToken: str = None, targetFilters: Dict = None) -> Dict:
"""
Returns an array of target IDs that are associated with a deployment.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/ListDeploymentTargets>`_
**Request Syntax**
::
response = client.list_deployment_targets(
deploymentId='string',
nextToken='string',
targetFilters={
'string': [
'string',
]
}
)
**Response Syntax**
::
{
'targetIds': [
'string',
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
- **targetIds** *(list) --*
The unique IDs of deployment targets.
- *(string) --*
- **nextToken** *(string) --*
If a large amount of information is returned, a token identifier is also returned. It can be used in a subsequent ``ListDeploymentTargets`` call to return the next set of deployment targets in the list.
:type deploymentId: string
:param deploymentId:
The unique ID of a deployment.
:type nextToken: string
:param nextToken:
A token identifier returned from the previous ``ListDeploymentTargets`` call. It can be used to return the next set of deployment targets in the list.
:type targetFilters: dict
:param targetFilters:
A key used to filter the returned targets.
- *(string) --*
- *(list) --*
- *(string) --*
:rtype: dict
:returns:
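**Example**
A sketch that lists only failed targets; the deployment ID is hypothetical, and ``TargetStatus`` is assumed to be one of the service's valid filter keys::
    import boto3
    client = boto3.client('codedeploy')
    response = client.list_deployment_targets(
        deploymentId='d-EXAMPLE111',                # hypothetical deployment ID
        targetFilters={'TargetStatus': ['Failed']}  # assumed filter key
    )
    print(response['targetIds'])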
"""
pass
def list_deployments(self, applicationName: str = None, deploymentGroupName: str = None, includeOnlyStatuses: List = None, createTimeRange: Dict = None, nextToken: str = None) -> Dict:
"""
Lists the deployments in a deployment group for an application registered with the IAM user or AWS account.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/ListDeployments>`_
**Request Syntax**
::
response = client.list_deployments(
applicationName='string',
deploymentGroupName='string',
includeOnlyStatuses=[
'Created'|'Queued'|'InProgress'|'Succeeded'|'Failed'|'Stopped'|'Ready',
],
createTimeRange={
'start': datetime(2015, 1, 1),
'end': datetime(2015, 1, 1)
},
nextToken='string'
)
**Response Syntax**
::
{
'deployments': [
'string',
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a ListDeployments operation.
- **deployments** *(list) --*
A list of deployment IDs.
- *(string) --*
- **nextToken** *(string) --*
If a large amount of information is returned, an identifier is also returned. It can be used in a subsequent list deployments call to return the next set of deployments in the list.
:type applicationName: string
:param applicationName:
The name of an AWS CodeDeploy application associated with the IAM user or AWS account.
:type deploymentGroupName: string
:param deploymentGroupName:
The name of a deployment group for the specified application.
:type includeOnlyStatuses: list
:param includeOnlyStatuses:
A subset of deployments to list by status:
* Created: Include created deployments in the resulting list.
* Queued: Include queued deployments in the resulting list.
* InProgress: Include in-progress deployments in the resulting list.
* Succeeded: Include successful deployments in the resulting list.
* Failed: Include failed deployments in the resulting list.
* Stopped: Include stopped deployments in the resulting list.
- *(string) --*
:type createTimeRange: dict
:param createTimeRange:
A time range (start and end) for returning a subset of the list of deployments.
- **start** *(datetime) --*
The start time of the time range.
.. note::
Specify null to leave the start time open-ended.
- **end** *(datetime) --*
The end time of the time range.
.. note::
Specify null to leave the end time open-ended.
:type nextToken: string
:param nextToken:
An identifier returned from the previous list deployments call. It can be used to return the next set of deployments in the list.
:rtype: dict
:returns:
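**Example**
A sketch that lists failed deployments created since a given date; the application and group names are hypothetical::
    import datetime
    import boto3
    client = boto3.client('codedeploy')
    response = client.list_deployments(
        applicationName='MyApp',                  # hypothetical names
        deploymentGroupName='MyDeploymentGroup',
        includeOnlyStatuses=['Failed'],
        # Omitting 'end' leaves the end of the range open-ended.
        createTimeRange={'start': datetime.datetime(2019, 1, 1)}
    )
    print(response['deployments'])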
"""
pass
def list_git_hub_account_token_names(self, nextToken: str = None) -> Dict:
"""
Lists the names of stored connections to GitHub accounts.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/ListGitHubAccountTokenNames>`_
**Request Syntax**
::
response = client.list_git_hub_account_token_names(
nextToken='string'
)
**Response Syntax**
::
{
'tokenNameList': [
'string',
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a ListGitHubAccountTokenNames operation.
- **tokenNameList** *(list) --*
A list of names of connections to GitHub accounts.
- *(string) --*
- **nextToken** *(string) --*
If a large amount of information is returned, an identifier is also returned. It can be used in a subsequent ListGitHubAccountTokenNames call to return the next set of names in the list.
:type nextToken: string
:param nextToken:
An identifier returned from the previous ListGitHubAccountTokenNames call. It can be used to return the next set of names in the list.
:rtype: dict
:returns:
"""
pass
def list_on_premises_instances(self, registrationStatus: str = None, tagFilters: List = None, nextToken: str = None) -> Dict:
"""
Gets a list of names for one or more on-premises instances.
Unless otherwise specified, both registered and deregistered on-premises instance names are listed. To list only registered or deregistered on-premises instance names, use the registration status parameter.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/ListOnPremisesInstances>`_
**Request Syntax**
::
response = client.list_on_premises_instances(
registrationStatus='Registered'|'Deregistered',
tagFilters=[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
nextToken='string'
)
**Response Syntax**
::
{
'instanceNames': [
'string',
],
'nextToken': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of the list on-premises instances operation.
- **instanceNames** *(list) --*
The list of matching on-premises instance names.
- *(string) --*
- **nextToken** *(string) --*
If a large amount of information is returned, an identifier is also returned. It can be used in a subsequent list on-premises instances call to return the next set of on-premises instances in the list.
:type registrationStatus: string
:param registrationStatus:
The registration status of the on-premises instances:
* Deregistered: Include deregistered on-premises instances in the resulting list.
* Registered: Include registered on-premises instances in the resulting list.
:type tagFilters: list
:param tagFilters:
The on-premises instance tags that are used to restrict the on-premises instance names returned.
- *(dict) --*
Information about an on-premises instance tag filter.
- **Key** *(string) --*
The on-premises instance tag filter key.
- **Value** *(string) --*
The on-premises instance tag filter value.
- **Type** *(string) --*
The on-premises instance tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
:type nextToken: string
:param nextToken:
An identifier returned from the previous list on-premises instances call. It can be used to return the next set of on-premises instances in the list.
:rtype: dict
:returns:
"""
pass
def put_lifecycle_event_hook_execution_status(self, deploymentId: str = None, lifecycleEventHookExecutionId: str = None, status: str = None) -> Dict:
"""
Sets the result of a Lambda validation function. The function validates one or both lifecycle events (``BeforeAllowTraffic`` and ``AfterAllowTraffic`` ) and returns ``Succeeded`` or ``Failed`` .
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/PutLifecycleEventHookExecutionStatus>`_
**Request Syntax**
::
response = client.put_lifecycle_event_hook_execution_status(
deploymentId='string',
lifecycleEventHookExecutionId='string',
status='Pending'|'InProgress'|'Succeeded'|'Failed'|'Skipped'|'Unknown'
)
**Response Syntax**
::
{
'lifecycleEventHookExecutionId': 'string'
}
**Response Structure**
- *(dict) --*
- **lifecycleEventHookExecutionId** *(string) --*
The execution ID of the lifecycle event hook. A hook is specified in the ``hooks`` section of the deployment's AppSpec file.
:type deploymentId: string
:param deploymentId:
The unique ID of a deployment. Pass this ID to a Lambda function that validates a deployment lifecycle event.
:type lifecycleEventHookExecutionId: string
:param lifecycleEventHookExecutionId:
The execution ID of a deployment\'s lifecycle hook. A deployment lifecycle hook is specified in the ``hooks`` section of the AppSpec file.
:type status: string
:param status:
The result of a Lambda function that validates a deployment lifecycle event (``Succeeded`` or ``Failed`` ).
:rtype: dict
:returns:
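**Example**
A sketch of a Lambda validation hook reporting its result back to CodeDeploy. The ``DeploymentId`` and ``LifecycleEventHookExecutionId`` event keys are the ones CodeDeploy passes to hook functions, and ``run_validation_tests`` is a hypothetical stand-in for your own checks::
    import boto3
    client = boto3.client('codedeploy')
    def run_validation_tests():
        # Hypothetical placeholder for real validation logic.
        return True
    def handler(event, context):
        status = 'Succeeded' if run_validation_tests() else 'Failed'
        client.put_lifecycle_event_hook_execution_status(
            deploymentId=event['DeploymentId'],
            lifecycleEventHookExecutionId=event['LifecycleEventHookExecutionId'],
            status=status
        )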
"""
pass
def register_application_revision(self, applicationName: str, revision: Dict, description: str = None):
"""
Registers a revision for the specified application with AWS CodeDeploy.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/RegisterApplicationRevision>`_
**Request Syntax**
::
response = client.register_application_revision(
applicationName='string',
description='string',
revision={
'revisionType': 'S3'|'GitHub'|'String'|'AppSpecContent',
's3Location': {
'bucket': 'string',
'key': 'string',
'bundleType': 'tar'|'tgz'|'zip'|'YAML'|'JSON',
'version': 'string',
'eTag': 'string'
},
'gitHubLocation': {
'repository': 'string',
'commitId': 'string'
},
'string': {
'content': 'string',
'sha256': 'string'
},
'appSpecContent': {
'content': 'string',
'sha256': 'string'
}
}
)
:type applicationName: string
:param applicationName: **[REQUIRED]**
The name of an AWS CodeDeploy application associated with the IAM user or AWS account.
:type description: string
:param description:
A comment about the revision.
:type revision: dict
:param revision: **[REQUIRED]**
Information about the application revision to register, including type and location.
- **revisionType** *(string) --*
The type of application revision:
* S3: An application revision stored in Amazon S3.
* GitHub: An application revision stored in GitHub (EC2/On-premises deployments only).
* String: A YAML-formatted or JSON-formatted string (AWS Lambda deployments only).
* AppSpecContent: An ``AppSpecContent`` object that contains the contents of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **s3Location** *(dict) --*
Information about the location of a revision stored in Amazon S3.
- **bucket** *(string) --*
The name of the Amazon S3 bucket where the application revision is stored.
- **key** *(string) --*
The name of the Amazon S3 object that represents the bundled artifacts for the application revision.
- **bundleType** *(string) --*
The file type of the application revision. Must be one of the following:
* tar: A tar archive file.
* tgz: A compressed tar archive file.
* zip: A zip archive file.
* YAML: A YAML-formatted file.
* JSON: A JSON-formatted file.
- **version** *(string) --*
A specific version of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the version is not specified, the system uses the most recent version by default.
- **eTag** *(string) --*
The ETag of the Amazon S3 object that represents the bundled artifacts for the application revision.
If the ETag is not specified as an input parameter, ETag validation of the object is skipped.
- **gitHubLocation** *(dict) --*
Information about the location of application artifacts stored in GitHub.
- **repository** *(string) --*
The GitHub account and repository pair that stores a reference to the commit that represents the bundled artifacts for the application revision.
Specified as account/repository.
- **commitId** *(string) --*
The SHA1 commit ID of the GitHub commit that represents the bundled artifacts for the application revision.
- **string** *(dict) --*
Information about the location of an AWS Lambda deployment revision stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string. It includes information about which Lambda function to update and optional Lambda functions that validate deployment lifecycle events.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
- **appSpecContent** *(dict) --*
The content of an AppSpec file for an AWS Lambda or Amazon ECS deployment. The content is formatted as JSON or YAML and stored as a RawString.
- **content** *(string) --*
The YAML-formatted or JSON-formatted revision string.
For an AWS Lambda deployment, the content includes a Lambda function name, the alias for its original version, and the alias for its replacement version. The deployment shifts traffic from the original version of the Lambda function to the replacement version.
For an Amazon ECS deployment, the content includes the task name, information about the load balancer that serves traffic to the container, and more.
For both types of deployments, the content can specify Lambda functions that run at specified hooks, such as ``BeforeInstall`` , during a deployment.
- **sha256** *(string) --*
The SHA256 hash value of the revision content.
:returns: None
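**Example**
A sketch that registers a revision bundle already uploaded to Amazon S3; the application name, bucket, and key are hypothetical::
    import boto3
    client = boto3.client('codedeploy')
    client.register_application_revision(
        applicationName='MyApp',          # hypothetical names
        description='Nightly build',
        revision={
            'revisionType': 'S3',
            's3Location': {
                'bucket': 'my-codedeploy-bucket',
                'key': 'myapp/nightly.zip',
                'bundleType': 'zip'
            }
        }
    )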
"""
pass
def register_on_premises_instance(self, instanceName: str, iamSessionArn: str = None, iamUserArn: str = None):
"""
Registers an on-premises instance.
.. note::
Only one IAM ARN (an IAM session ARN or IAM user ARN) is supported in the request. You cannot use both.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/RegisterOnPremisesInstance>`_
**Request Syntax**
::
response = client.register_on_premises_instance(
instanceName='string',
iamSessionArn='string',
iamUserArn='string'
)
:type instanceName: string
:param instanceName: **[REQUIRED]**
The name of the on-premises instance to register.
:type iamSessionArn: string
:param iamSessionArn:
The ARN of the IAM session to associate with the on-premises instance.
:type iamUserArn: string
:param iamUserArn:
The ARN of the IAM user to associate with the on-premises instance.
:returns: None
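**Example**
A sketch that registers an instance using an IAM user ARN (only one of the two IAM ARN parameters may be passed); the name and ARN are hypothetical::
    import boto3
    client = boto3.client('codedeploy')
    client.register_on_premises_instance(
        instanceName='AssetTag12010298EX',                           # hypothetical
        iamUserArn='arn:aws:iam::123456789012:user/CodeDeployUser'   # hypothetical
    )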
"""
pass
def remove_tags_from_on_premises_instances(self, tags: List, instanceNames: List):
"""
Removes one or more tags from one or more on-premises instances.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/RemoveTagsFromOnPremisesInstances>`_
**Request Syntax**
::
response = client.remove_tags_from_on_premises_instances(
tags=[
{
'Key': 'string',
'Value': 'string'
},
],
instanceNames=[
'string',
]
)
:type tags: list
:param tags: **[REQUIRED]**
The tag key-value pairs to remove from the on-premises instances.
- *(dict) --*
Information about a tag.
- **Key** *(string) --*
The tag\'s key.
- **Value** *(string) --*
The tag\'s value.
:type instanceNames: list
:param instanceNames: **[REQUIRED]**
The names of the on-premises instances from which to remove tags.
- *(string) --*
:returns: None
"""
pass
def skip_wait_time_for_instance_termination(self, deploymentId: str = None):
"""
In a blue/green deployment, overrides any specified wait time and starts terminating instances immediately after the traffic routing is complete.
.. danger::
This operation is deprecated and may not function as expected. It should not be used going forward and is kept only for backward compatibility.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/SkipWaitTimeForInstanceTermination>`_
**Request Syntax**
::
response = client.skip_wait_time_for_instance_termination(
deploymentId='string'
)
:type deploymentId: string
:param deploymentId:
The unique ID of a blue/green deployment for which you want to skip the instance termination wait time.
:returns: None
"""
pass
def stop_deployment(self, deploymentId: str, autoRollbackEnabled: bool = None) -> Dict:
"""
Attempts to stop an ongoing deployment.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/StopDeployment>`_
**Request Syntax**
::
response = client.stop_deployment(
deploymentId='string',
autoRollbackEnabled=True|False
)
**Response Syntax**
::
{
'status': 'Pending'|'Succeeded',
'statusMessage': 'string'
}
**Response Structure**
- *(dict) --*
Represents the output of a StopDeployment operation.
- **status** *(string) --*
The status of the stop deployment operation:
* Pending: The stop operation is pending.
* Succeeded: The stop operation was successful.
- **statusMessage** *(string) --*
An accompanying status message.
:type deploymentId: string
:param deploymentId: **[REQUIRED]**
The unique ID of a deployment.
:type autoRollbackEnabled: boolean
:param autoRollbackEnabled:
Indicates whether instances that have been updated should be rolled back to the previous version of the application revision when a deployment is stopped.
:rtype: dict
:returns:
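**Example**
A sketch that stops a deployment and rolls updated instances back; the deployment ID is hypothetical::
    import boto3
    client = boto3.client('codedeploy')
    response = client.stop_deployment(
        deploymentId='d-EXAMPLE111',   # hypothetical deployment ID
        autoRollbackEnabled=True
    )
    print(response['status'], response['statusMessage'])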
"""
pass
def update_application(self, applicationName: str = None, newApplicationName: str = None):
"""
Changes the name of an application.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/UpdateApplication>`_
**Request Syntax**
::
response = client.update_application(
applicationName='string',
newApplicationName='string'
)
:type applicationName: string
:param applicationName:
The current name of the application you want to change.
:type newApplicationName: string
:param newApplicationName:
The new name to give the application.
:returns: None
"""
pass
def update_deployment_group(self, applicationName: str, currentDeploymentGroupName: str, newDeploymentGroupName: str = None, deploymentConfigName: str = None, ec2TagFilters: List = None, onPremisesInstanceTagFilters: List = None, autoScalingGroups: List = None, serviceRoleArn: str = None, triggerConfigurations: List = None, alarmConfiguration: Dict = None, autoRollbackConfiguration: Dict = None, deploymentStyle: Dict = None, blueGreenDeploymentConfiguration: Dict = None, loadBalancerInfo: Dict = None, ec2TagSet: Dict = None, ecsServices: List = None, onPremisesTagSet: Dict = None) -> Dict:
"""
Changes information about a deployment group.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/codedeploy-2014-10-06/UpdateDeploymentGroup>`_
**Request Syntax**
::
response = client.update_deployment_group(
applicationName='string',
currentDeploymentGroupName='string',
newDeploymentGroupName='string',
deploymentConfigName='string',
ec2TagFilters=[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
onPremisesInstanceTagFilters=[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
autoScalingGroups=[
'string',
],
serviceRoleArn='string',
triggerConfigurations=[
{
'triggerName': 'string',
'triggerTargetArn': 'string',
'triggerEvents': [
'DeploymentStart'|'DeploymentSuccess'|'DeploymentFailure'|'DeploymentStop'|'DeploymentRollback'|'DeploymentReady'|'InstanceStart'|'InstanceSuccess'|'InstanceFailure'|'InstanceReady',
]
},
],
alarmConfiguration={
'enabled': True|False,
'ignorePollAlarmFailure': True|False,
'alarms': [
{
'name': 'string'
},
]
},
autoRollbackConfiguration={
'enabled': True|False,
'events': [
'DEPLOYMENT_FAILURE'|'DEPLOYMENT_STOP_ON_ALARM'|'DEPLOYMENT_STOP_ON_REQUEST',
]
},
deploymentStyle={
'deploymentType': 'IN_PLACE'|'BLUE_GREEN',
'deploymentOption': 'WITH_TRAFFIC_CONTROL'|'WITHOUT_TRAFFIC_CONTROL'
},
blueGreenDeploymentConfiguration={
'terminateBlueInstancesOnDeploymentSuccess': {
'action': 'TERMINATE'|'KEEP_ALIVE',
'terminationWaitTimeInMinutes': 123
},
'deploymentReadyOption': {
'actionOnTimeout': 'CONTINUE_DEPLOYMENT'|'STOP_DEPLOYMENT',
'waitTimeInMinutes': 123
},
'greenFleetProvisioningOption': {
'action': 'DISCOVER_EXISTING'|'COPY_AUTO_SCALING_GROUP'
}
},
loadBalancerInfo={
'elbInfoList': [
{
'name': 'string'
},
],
'targetGroupInfoList': [
{
'name': 'string'
},
],
'targetGroupPairInfoList': [
{
'targetGroups': [
{
'name': 'string'
},
],
'prodTrafficRoute': {
'listenerArns': [
'string',
]
},
'testTrafficRoute': {
'listenerArns': [
'string',
]
}
},
]
},
ec2TagSet={
'ec2TagSetList': [
[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
]
},
ecsServices=[
{
'serviceName': 'string',
'clusterName': 'string'
},
],
onPremisesTagSet={
'onPremisesTagSetList': [
[
{
'Key': 'string',
'Value': 'string',
'Type': 'KEY_ONLY'|'VALUE_ONLY'|'KEY_AND_VALUE'
},
],
]
}
)
**Response Syntax**
::
{
'hooksNotCleanedUp': [
{
'name': 'string',
'hook': 'string'
},
]
}
**Response Structure**
- *(dict) --*
Represents the output of an UpdateDeploymentGroup operation.
- **hooksNotCleanedUp** *(list) --*
If the output contains no data, and the corresponding deployment group contained at least one Auto Scaling group, AWS CodeDeploy successfully removed all corresponding Auto Scaling lifecycle event hooks from the AWS account. If the output contains data, AWS CodeDeploy could not remove some Auto Scaling lifecycle event hooks from the AWS account.
- *(dict) --*
Information about an Auto Scaling group.
- **name** *(string) --*
The Auto Scaling group name.
- **hook** *(string) --*
An Auto Scaling lifecycle event hook name.
:type applicationName: string
:param applicationName: **[REQUIRED]**
The application name that corresponds to the deployment group to update.
:type currentDeploymentGroupName: string
:param currentDeploymentGroupName: **[REQUIRED]**
The current name of the deployment group.
:type newDeploymentGroupName: string
:param newDeploymentGroupName:
The new name of the deployment group, if you want to change it.
:type deploymentConfigName: string
:param deploymentConfigName:
The replacement deployment configuration name to use, if you want to change it.
:type ec2TagFilters: list
:param ec2TagFilters:
The replacement set of Amazon EC2 tags on which to filter, if you want to change them. To keep the existing tags, enter their names. To remove tags, do not enter any tag names.
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
:type onPremisesInstanceTagFilters: list
:param onPremisesInstanceTagFilters:
The replacement set of on-premises instance tags on which to filter, if you want to change them. To keep the existing tags, enter their names. To remove tags, do not enter any tag names.
- *(dict) --*
Information about an on-premises instance tag filter.
- **Key** *(string) --*
The on-premises instance tag filter key.
- **Value** *(string) --*
The on-premises instance tag filter value.
- **Type** *(string) --*
The on-premises instance tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
:type autoScalingGroups: list
:param autoScalingGroups:
The replacement list of Auto Scaling groups to be included in the deployment group, if you want to change them. To keep the Auto Scaling groups, enter their names. To remove Auto Scaling groups, do not enter any Auto Scaling group names.
- *(string) --*
:type serviceRoleArn: string
:param serviceRoleArn:
A replacement ARN for the service role, if you want to change it.
:type triggerConfigurations: list
:param triggerConfigurations:
Information about triggers to change when the deployment group is updated. For examples, see `Modify Triggers in an AWS CodeDeploy Deployment Group <https://docs.aws.amazon.com/codedeploy/latest/userguide/how-to-notify-edit.html>`__ in the AWS CodeDeploy User Guide.
- *(dict) --*
Information about notification triggers for the deployment group.
- **triggerName** *(string) --*
The name of the notification trigger.
- **triggerTargetArn** *(string) --*
The ARN of the Amazon Simple Notification Service topic through which notifications about deployment or instance events are sent.
- **triggerEvents** *(list) --*
The event type or types for which notifications are triggered.
- *(string) --*
:type alarmConfiguration: dict
:param alarmConfiguration:
Information to add or change about Amazon CloudWatch alarms when the deployment group is updated.
- **enabled** *(boolean) --*
Indicates whether the alarm configuration is enabled.
- **ignorePollAlarmFailure** *(boolean) --*
Indicates whether a deployment should continue if information about the current state of alarms cannot be retrieved from Amazon CloudWatch. The default value is false.
* true: The deployment proceeds even if alarm status information can't be retrieved from Amazon CloudWatch.
* false: The deployment stops if alarm status information can't be retrieved from Amazon CloudWatch.
- **alarms** *(list) --*
A list of alarms configured for the deployment group. A maximum of 10 alarms can be added to a deployment group.
- *(dict) --*
Information about an alarm.
- **name** *(string) --*
The name of the alarm. Maximum length is 255 characters. Each alarm name can be used only once in a list of alarms.
:type autoRollbackConfiguration: dict
:param autoRollbackConfiguration:
Information for an automatic rollback configuration that is added or changed when a deployment group is updated.
- **enabled** *(boolean) --*
Indicates whether a defined automatic rollback configuration is currently enabled.
- **events** *(list) --*
The event type or types that trigger a rollback.
- *(string) --*
:type deploymentStyle: dict
:param deploymentStyle:
Information about the type of deployment, either in-place or blue/green, you want to run and whether to route deployment traffic behind a load balancer.
- **deploymentType** *(string) --*
Indicates whether to run an in-place deployment or a blue/green deployment.
- **deploymentOption** *(string) --*
Indicates whether to route deployment traffic behind a load balancer.
:type blueGreenDeploymentConfiguration: dict
:param blueGreenDeploymentConfiguration:
Information about blue/green deployment options for a deployment group.
- **terminateBlueInstancesOnDeploymentSuccess** *(dict) --*
Information about whether to terminate instances in the original fleet during a blue/green deployment.
- **action** *(string) --*
The action to take on instances in the original environment after a successful blue/green deployment.
* TERMINATE: Instances are terminated after a specified wait time.
* KEEP_ALIVE: Instances are left running after they are deregistered from the load balancer and removed from the deployment group.
- **terminationWaitTimeInMinutes** *(integer) --*
The number of minutes to wait after a successful blue/green deployment before terminating instances from the original environment. The maximum setting is 2880 minutes (2 days).
- **deploymentReadyOption** *(dict) --*
Information about the action to take when newly provisioned instances are ready to receive traffic in a blue/green deployment.
- **actionOnTimeout** *(string) --*
Information about when to reroute traffic from an original environment to a replacement environment in a blue/green deployment.
* CONTINUE_DEPLOYMENT: Register new instances with the load balancer immediately after the new application revision is installed on the instances in the replacement environment.
* STOP_DEPLOYMENT: Do not register new instances with a load balancer unless traffic rerouting is started using ContinueDeployment. If traffic rerouting is not started before the end of the specified wait period, the deployment status is changed to Stopped.
- **waitTimeInMinutes** *(integer) --*
The number of minutes to wait before the status of a blue/green deployment is changed to Stopped if rerouting is not started manually. Applies only to the STOP_DEPLOYMENT option for actionOnTimeout.
- **greenFleetProvisioningOption** *(dict) --*
Information about how instances are provisioned for a replacement environment in a blue/green deployment.
- **action** *(string) --*
The method used to add instances to a replacement environment.
* DISCOVER_EXISTING: Use instances that already exist or will be created manually.
* COPY_AUTO_SCALING_GROUP: Use settings from a specified Auto Scaling group to define and create instances in a new Auto Scaling group.
:type loadBalancerInfo: dict
:param loadBalancerInfo:
Information about the load balancer used in a deployment.
- **elbInfoList** *(list) --*
An array that contains information about the load balancer to use for load balancing in a deployment. In Elastic Load Balancing, load balancers are used with Classic Load Balancers.
.. note::
Adding more than one load balancer to the array is not supported.
- *(dict) --*
Information about a load balancer in Elastic Load Balancing to use in a deployment. Instances are registered directly with a load balancer, and traffic is routed to the load balancer.
- **name** *(string) --*
For blue/green deployments, the name of the load balancer that is used to route traffic from original instances to replacement instances in a blue/green deployment. For in-place deployments, the name of the load balancer that instances are deregistered from so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **targetGroupInfoList** *(list) --*
An array that contains information about the target group to use for load balancing in a deployment. In Elastic Load Balancing, target groups are used with Application Load Balancers.
.. note::
Adding more than one target group to the array is not supported.
- *(dict) --*
Information about a target group in Elastic Load Balancing to use in a deployment. Instances are registered as targets in a target group, and traffic is routed to the target group.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **targetGroupPairInfoList** *(list) --*
The target group pair information. This is an array of ``TargetGroupPairInfo`` objects with a maximum size of one.
- *(dict) --*
Information about two target groups and how traffic is routed during an Amazon ECS deployment. An optional test traffic route can be specified.
- **targetGroups** *(list) --*
One pair of target groups. One is associated with the original task set. The second is associated with the task set that serves traffic after the deployment is complete.
- *(dict) --*
Information about a target group in Elastic Load Balancing to use in a deployment. Instances are registered as targets in a target group, and traffic is routed to the target group.
- **name** *(string) --*
For blue/green deployments, the name of the target group that instances in the original environment are deregistered from, and instances in the replacement environment are registered with. For in-place deployments, the name of the target group that instances are deregistered from, so they are not serving traffic during a deployment, and then re-registered with after the deployment is complete.
- **prodTrafficRoute** *(dict) --*
The path used by a load balancer to route production traffic when an Amazon ECS deployment is complete.
- **listenerArns** *(list) --*
The ARN of one listener. The listener identifies the route between a target group and a load balancer. This is an array of strings with a maximum size of one.
- *(string) --*
- **testTrafficRoute** *(dict) --*
An optional path used by a load balancer to route test traffic after an Amazon ECS deployment. Validation can occur while test traffic is served during a deployment.
- **listenerArns** *(list) --*
The ARN of one listener. The listener identifies the route between a target group and a load balancer. This is an array of strings with a maximum size of one.
- *(string) --*
:type ec2TagSet: dict
:param ec2TagSet:
Information about groups of tags applied to EC2 instances. The deployment group includes only EC2 instances identified by all the tag groups.
- **ec2TagSetList** *(list) --*
A list that contains other lists of EC2 instance tag groups. For an instance to be included in the deployment group, it must be identified by all of the tag groups in the list.
- *(list) --*
- *(dict) --*
Information about an EC2 tag filter.
- **Key** *(string) --*
The tag filter key.
- **Value** *(string) --*
The tag filter value.
- **Type** *(string) --*
The tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
:type ecsServices: list
:param ecsServices:
The target Amazon ECS services in the deployment group. This applies only to deployment groups that use the Amazon ECS compute platform. A target Amazon ECS service is specified as an Amazon ECS cluster and service name pair using the format ``<clustername>:<servicename>``.
- *(dict) --*
Contains the service and cluster names used to identify an Amazon ECS deployment's target.
- **serviceName** *(string) --*
The name of the target Amazon ECS service.
- **clusterName** *(string) --*
The name of the cluster that the Amazon ECS service is associated with.
:type onPremisesTagSet: dict
:param onPremisesTagSet:
Information about an on-premises instance tag set. The deployment group includes only on-premises instances identified by all the tag groups.
- **onPremisesTagSetList** *(list) --*
A list that contains other lists of on-premises instance tag groups. For an instance to be included in the deployment group, it must be identified by all of the tag groups in the list.
- *(list) --*
- *(dict) --*
Information about an on-premises instance tag filter.
- **Key** *(string) --*
The on-premises instance tag filter key.
- **Value** *(string) --*
The on-premises instance tag filter value.
- **Type** *(string) --*
The on-premises instance tag filter type:
* KEY_ONLY: Key only.
* VALUE_ONLY: Value only.
* KEY_AND_VALUE: Key and value.
:rtype: dict
:returns:
"""
pass
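    # Usage sketch (assumptions: hypothetical application and deployment-group
    # names; only the fields being changed need to be supplied):
    #     response = client.update_deployment_group(
    #         applicationName='MyApp',
    #         currentDeploymentGroupName='MyDG',
    #         newDeploymentGroupName='MyDG-v2',
    #         autoRollbackConfiguration={
    #             'enabled': True,
    #             'events': ['DEPLOYMENT_FAILURE'],
    #         },
    #     )
    #     response['hooksNotCleanedUp']  # empty when all ASG hooks were removed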
hexsha: 7f6f84c0f2f53a0e4476ed42da9b3203d6850e4a | size: 1,594 | ext: py | lang: Python
repo_path: batch_version/_myLib/metrics.py | repo_name: droubo/meta-level-analysis-of-anomaly-detectors | repo_head_hexsha: a64671365b6c98ad14fc82f3430d3082b0455a6c | licenses: ["MIT"]
import numpy as np

def average_precision_at_k(y_true, y_scores, k):
    # Average precision at cutoff k. Note the normalizer is the TOTAL number
    # of positives in y_true (size_ones), one of several AP@k conventions.
    size = len(y_true)
    size_ones = np.ndarray.tolist(y_true).count(1)  # total positive labels
    relevant = 0
    ap = []
    item_counter = 0
    ap_at_k = 0
    # Walk the items from highest to lowest score.
    for sp in np.argsort(y_scores)[::-1]:
        item_counter += 1
        if y_true[sp] == 1:
            relevant += 1
            ap.append(relevant / item_counter)  # precision at this rank
        else:
            ap.append(0)
        if item_counter == min(size, k):
            sum_ap = sum(ap)
            # The sum_ap > 0 guard also avoids dividing by zero when there
            # are no positives at all (then size_ones == 0 and sum_ap == 0).
            ap_at_k = sum_ap / size_ones if sum_ap > 0 else 0
            break
    return ap_at_k

def new_average_precision_at_k(y_true, y_scores, k):
    # Same computation as average_precision_at_k, but for labels stored as
    # one-element rows (shape (n, 1)), hence counting [1] instead of 1.
    size = len(y_true)
    size_ones = np.ndarray.tolist(y_true).count([1])
    relevant = 0
    ap = []
    item_counter = 0
    ap_at_k = 0
    for sp in np.argsort(y_scores)[::-1]:
        item_counter += 1
        if y_true[sp] == 1:
            relevant += 1
            ap.append(relevant / item_counter)
        else:
            ap.append(0)
        if item_counter == min(size, k):
            sum_ap = sum(ap)
            ap_at_k = sum_ap / size_ones if sum_ap > 0 else 0
            break
    return ap_at_k

def precision_at_k(y_true, y_scores, k):
    # Fraction of the top-k scored items whose label is 1.
    size = len(y_true)
    relevant = 0
    item_counter = 0
    p_at_k = 0
    for sp in np.argsort(y_scores)[::-1]:
        item_counter += 1
        if y_true[sp] == 1:
            relevant += 1
        if item_counter == min(size, k):
            p_at_k = relevant / item_counter
            break
    return p_at_k
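
# Minimal usage sketch (hypothetical data, not part of the original module):
# scores rank items, y_true flags anomalies with 1; with k=2 both top-scored
# items are positives, so precision@2 = 1.0 and AP@2 = (1/1 + 2/2) / 2 = 1.0.
if __name__ == "__main__":
    demo_y_true = np.array([0, 1, 0, 1, 0])
    demo_y_scores = np.array([0.1, 0.9, 0.3, 0.8, 0.2])
    print(precision_at_k(demo_y_true, demo_y_scores, 2))          # -> 1.0
    print(average_precision_at_k(demo_y_true, demo_y_scores, 2))  # -> 1.0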
hexsha: 7fbd3c4f82add21e56a537aa8f57f32ce92d267c | size: 46 | ext: py | lang: Python
repo_path: finance_ml/differential/__init__.py | repo_name: xaviergoby/finance_ml | repo_head_hexsha: c348556fa3e13417e8fcf02999f42d5e72f0501b | licenses: ["MIT"] | stars: 1 (2018-12-14)
from .fraction import frac_diff, frac_diff_FFD
hexsha: f6874261b120975cee2d9040530f3c9a189c14a2 | size: 88 | ext: py | lang: Python
repo_path: open_seq2seq/parts/convs2s/__init__.py | repo_name: Kipok/OpenSeq2Seq | repo_head_hexsha: cb48d2552e157c88f005c2a7364548a658e4cdf1 | licenses: ["MIT"]
from . import ffn_wn_layer
from . import conv_wn_layer
from . import attention_wn_layer
hexsha: f6949373a601cb203228b7a4472da27920b709c7 | size: 9,204 | ext: py | lang: Python
repo_path: examples/utilities/06-bis_6D10F.py | repo_name: 1234zou/MOKIT | repo_head_hexsha: 47171b0c168212ef27281882085fbb23055c3ea3 | licenses: ["FSFAP"] | stars: 9 (2022-01-19 .. 2022-01-30) | issues: 1
from pyscf import gto, scf
from fch2py import fch2py
mol = gto.M()
# 12 atom(s)
mol.atom = '''
Cu 0.00000000 1.40000000 0.00000000
Cu 0.00000000 -1.40000000 0.00000000
O 0.00000000 0.00000000 1.15000000
O 0.00000000 0.00000000 -1.15000000
N 0.00000000 3.40000000 0.00000000
N 0.00000000 -3.40000000 0.00000000
H -0.93969300 3.74202000 0.00000000
H 0.93969300 -3.74202000 0.00000000
H 0.46984600 3.74202000 0.81379800
H -0.46984600 -3.74202000 -0.81379800
H 0.46984600 3.74202000 -0.81379800
H -0.46984600 -3.74202000 0.81379800
'''
# NOTE: the generated input repeated each element's basis (and ECP) entry once
# per atom; Python dict literals keep only the last duplicate key, so a single
# copy per element is retained below (the duplicates were byte-identical).
mol.basis = {
'Cu': gto.basis.parse('''
Cu S
0.817600000E+01 -0.421025989E+00
0.256800000E+01 0.738592381E+00
0.958700000E+00 0.552569186E+00
Cu S
0.817600000E+01 0.178766454E+00
0.256800000E+01 -0.359227208E+00
0.958700000E+00 -0.470482380E+00
0.115300000E+00 0.108074072E+01
Cu S
0.396000000E-01 0.100000000E+01
Cu P
0.256300000E+02 -0.489173002E-01
0.316600000E+01 0.627285402E+00
0.102300000E+01 0.471618802E+00
Cu P
0.860000000E-01 0.100000000E+01
Cu P
0.240000000E-01 0.100000000E+01
Cu D
0.413400000E+02 0.465424016E-01
0.114200000E+02 0.222782408E+00
0.383900000E+01 0.453905916E+00
0.123000000E+01 0.531476918E+00
Cu D
0.310200000E+00 0.100000000E+01
'''),
'O': gto.basis.parse('''
O S
0.117200000E+05 0.709645947E-03
0.175900000E+04 0.546727229E-02
0.400800000E+03 0.278231186E-01
0.113700000E+03 0.104747740E+00
0.370300000E+02 0.282920846E+00
0.132700000E+02 0.448495239E+00
0.502500000E+01 0.270816885E+00
0.101300000E+01 0.154502916E-01
O S
0.117200000E+05 -0.314443412E-03
0.175900000E+04 -0.248213768E-02
0.400800000E+03 -0.123163554E-01
0.113700000E+03 -0.505389173E-01
0.370300000E+02 -0.139384903E+00
0.132700000E+02 -0.325077495E+00
0.502500000E+01 -0.229848308E+00
0.101300000E+01 0.109537935E+01
O S
0.302300000E+00 0.100000000E+01
O P
0.177000000E+02 0.626791663E-01
0.385400000E+01 0.333536566E+00
0.104600000E+01 0.741239642E+00
O P
0.275300000E+00 0.100000000E+01
O D
0.118500000E+01 0.100000000E+01
'''),
'N': gto.basis.parse('''
N S
0.904600000E+04 0.699617413E-03
0.135700000E+04 0.538605463E-02
0.309300000E+03 0.273910212E-01
0.877300000E+02 0.103150592E+00
0.285600000E+02 0.278570663E+00
0.102100000E+02 0.448294849E+00
0.383800000E+01 0.278085928E+00
0.746600000E+00 0.154315612E-01
N S
0.904600000E+04 -0.304990096E-03
0.135700000E+04 -0.240802638E-02
0.309300000E+03 -0.119444487E-01
0.877300000E+02 -0.489259929E-01
0.285600000E+02 -0.134472725E+00
0.102100000E+02 -0.315112578E+00
0.383800000E+01 -0.242857833E+00
0.746600000E+00 0.109438221E+01
N S
0.224800000E+00 0.100000000E+01
N P
0.135500000E+02 0.589056768E-01
0.291700000E+01 0.320461107E+00
0.797300000E+00 0.753042062E+00
N P
0.218500000E+00 0.100000000E+01
N D
0.817000000E+00 0.100000000E+01
'''),
'H': gto.basis.parse('''
H S
0.130100000E+02 0.334987264E-01
0.196200000E+01 0.234800801E+00
0.444600000E+00 0.813682958E+00
H S
0.122000000E+00 0.100000000E+01
H P
0.727000000E+00 0.100000000E+01
''')}
mol.ecp = {
'Cu': gto.basis.parse_ecp('''
Cu nelec 10
Cu ul
1 511.99517600 -10.00000000
2 93.28010740 -72.55482820
2 23.22066690 -12.74502310
Cu S
0 173.11808500 3.00000000
1 185.24198900 23.83518250
2 73.15178470 473.89304900
2 14.68841570 157.63458200
Cu P
0 100.71913700 5.00000000
1 130.83456600 6.49909360
2 53.86837200 351.46053900
2 14.09894690 85.50160360
''')}
# Remember to check the charge and spin
mol.charge = 2
mol.spin = 0
mol.verbose = 4
mol.cart = True
mol.build()
mf = scf.UHF(mol)
mf.max_cycle = 1
mf.kernel()
# read MOs from .fch(k) file
nbf = mf.mo_coeff[0].shape[0]
nif = mf.mo_coeff[0].shape[1]
S = mol.intor_symmetric('int1e_ovlp')
Sdiag = S.diagonal()
alpha_coeff = fch2py('02-bis_6D10F.fchk', nbf, nif, Sdiag, 'a')
beta_coeff = fch2py('02-bis_6D10F.fchk', nbf, nif, Sdiag, 'b')
mf.mo_coeff = (alpha_coeff, beta_coeff)
# read done
dm = mf.make_rdm1()
mf.max_cycle = 5
mf.kernel(dm)
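# Optional sanity check (a sketch; mf.converged and mf.e_tot are standard
# PySCF SCF attributes, no reference energy is asserted here):
print('SCF converged:', mf.converged, ' E(UHF) =', mf.e_tot)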
hexsha: f6c1abe9a3f1862402abdec4935128928eb823b7 | size: 8,117 | ext: py | lang: Python
repo_path: tests/test_serialization.py | repo_name: unparalleled-js/ledger-eth-lib | repo_head_hexsha: ec9e4d0e801c98d09e5c8b04983e12540cff1650 | licenses: ["MIT"] | stars: 10 | issues: 13 | forks: 6
"""
Test objects and serialization
"""
from eth_utils import decode_hex
from ledgereth.constants import DEFAULT_CHAIN_ID, DEFAULTS
from ledgereth.objects import (
SignedTransaction,
SignedType1Transaction,
SignedType2Transaction,
Transaction,
Type1Transaction,
Type2Transaction,
)
from ledgereth.transactions import create_transaction, sign_transaction
def test_legacy_serialization(yield_dongle):
"""Test serialization of legacy Transaction objects"""
destination = decode_hex("0xf0155486a14539f784739be1c02e93f28eb8e960")
amount = int(1e17)
gas_limit = int(1e6)
gas_price = int(1e9)
data = b"0xdeadbeef"
nonce = 666
tx = Transaction(
destination=destination,
amount=amount,
gas_limit=gas_limit,
gas_price=gas_price,
data=data,
nonce=nonce,
)
assert tx.nonce == nonce
assert tx.gas_price == gas_price
assert tx.gas_limit == gas_limit
assert tx.destination == destination
assert tx.amount == amount
assert tx.data == data
assert tx.chain_id == DEFAULT_CHAIN_ID
assert tx.dummy1 == DEFAULTS[int]
assert tx.dummy2 == DEFAULTS[int]
def test_signed_legacy_serialization(yield_dongle):
"""Test serialization of legacy SignedTransaction objects"""
destination = decode_hex("0xf0155486a14539f784739be1c02e93f28eb8e960")
amount = int(1e17)
gas_limit = int(1e6)
gas_price = int(1e9)
data = b"0xdeadbeef"
nonce = 666
r = int.from_bytes(
b"#\xdc\x11\x1d|:\xd1\xdf\x98\x06\xce\x1e\x8e\xb4\xf5_W\xdb\xa1\x173\x9cT^u\x93\xd1\xf6\xc3\xb0&b",
"big",
)
s = int.from_bytes(
b"3,9\xdc\xd3\x98\xea4\xa4\x8b\x87\x18\x98\xd5\x89\xf5_\xc4\xc7\xbc\xe0\x05b\xfbg\x0c\x97.~\x1b\x07 ",
"big",
)
v = 1
tx = SignedTransaction(
destination=destination,
amount=amount,
gas_limit=gas_limit,
gas_price=gas_price,
data=data,
nonce=nonce,
v=v,
r=r,
s=s,
)
assert tx.nonce == nonce
assert tx.gas_price == gas_price
assert tx.gas_limit == gas_limit
assert tx.destination == destination
assert tx.amount == amount
assert tx.data == data
assert tx.r == r
assert tx.s == s
assert tx.v == v
assert tx.raw_transaction()
def test_type1_serialization(yield_dongle):
"""Test serialization of Type1Transaction objects"""
destination = decode_hex("0xf0155486a14539f784739be1c02e93f28eb8e960")
amount = int(1e17)
gas_limit = int(1e6)
gas_price = int(1e9)
data = b"0xdeadbeef"
nonce = 666
access_list = [[destination, [10, 200, 3000]]]
tx = Type1Transaction(
chain_id=DEFAULT_CHAIN_ID,
destination=destination,
amount=amount,
gas_limit=gas_limit,
gas_price=gas_price,
data=data,
nonce=nonce,
access_list=access_list,
)
assert tx.nonce == nonce
assert tx.gas_price == gas_price
assert tx.gas_limit == gas_limit
assert tx.destination == destination
assert tx.amount == amount
assert tx.data == data
assert tx.chain_id == DEFAULT_CHAIN_ID
assert type(tx.access_list) == tuple
assert len(tx.access_list) == len(access_list)
assert type(tx.access_list[0]) == tuple
assert tx.access_list[0][0] == destination
assert len(tx.access_list[0][1]) == len(access_list[0][1])
def test_signed_type1_serialization(yield_dongle):
"""Test serialization of SignedType1Transaction objects"""
destination = decode_hex("0xf0155486a14539f784739be1c02e93f28eb8e960")
amount = int(1e17)
gas_limit = int(1e6)
gas_price = int(1e9)
data = b"0xdeadbeef"
nonce = 666
access_list = [[destination, [10, 200, 3000]]]
r = int.from_bytes(
b"#\xdc\x11\x1d|:\xd1\xdf\x98\x06\xce\x1e\x8e\xb4\xf5_W\xdb\xa1\x173\x9cT^u\x93\xd1\xf6\xc3\xb0&b",
"big",
)
s = int.from_bytes(
b"3,9\xdc\xd3\x98\xea4\xa4\x8b\x87\x18\x98\xd5\x89\xf5_\xc4\xc7\xbc\xe0\x05b\xfbg\x0c\x97.~\x1b\x07 ",
"big",
)
v = 1
tx = SignedType1Transaction(
chain_id=DEFAULT_CHAIN_ID,
destination=destination,
amount=amount,
gas_limit=gas_limit,
gas_price=gas_price,
data=data,
nonce=nonce,
access_list=access_list,
sender_r=r,
sender_s=s,
y_parity=v,
)
assert tx.nonce == nonce
assert tx.gas_price == gas_price
assert tx.gas_limit == gas_limit
assert tx.destination == destination
assert tx.amount == amount
assert tx.data == data
assert tx.chain_id == DEFAULT_CHAIN_ID
assert tx.sender_r == r
assert tx.sender_s == s
assert tx.y_parity == v
assert type(tx.access_list) == tuple
assert len(tx.access_list) == len(access_list)
assert type(tx.access_list[0]) == tuple
assert tx.access_list[0][0] == destination
assert len(tx.access_list[0][1]) == len(access_list[0][1])
assert tx.raw_transaction()
def test_type2_serialization(yield_dongle):
"""Test serialization of Type2Transaction objects"""
destination = decode_hex("0xf0155486a14539f784739be1c02e93f28eb8e960")
amount = int(1e17)
gas_limit = int(1e6)
max_fee_per_gas = int(10e9)
max_priority_fee_per_gas = int(1e9)
data = b"0xdeadbeef"
nonce = 666
access_list = [[destination, [10, 200, 3000]]]
tx = Type2Transaction(
chain_id=DEFAULT_CHAIN_ID,
destination=destination,
amount=amount,
gas_limit=gas_limit,
max_fee_per_gas=max_fee_per_gas,
max_priority_fee_per_gas=max_priority_fee_per_gas,
data=data,
nonce=nonce,
access_list=access_list,
)
assert tx.nonce == nonce
assert tx.max_fee_per_gas == max_fee_per_gas
assert tx.max_priority_fee_per_gas == max_priority_fee_per_gas
assert tx.gas_limit == gas_limit
assert tx.destination == destination
assert tx.amount == amount
assert tx.data == data
assert tx.chain_id == DEFAULT_CHAIN_ID
assert type(tx.access_list) == tuple
assert len(tx.access_list) == len(access_list)
assert type(tx.access_list[0]) == tuple
assert tx.access_list[0][0] == destination
assert len(tx.access_list[0][1]) == len(access_list[0][1])
def test_signed_type2_serialization(yield_dongle):
"""Test serialization of SignedType2Transaction objects"""
destination = decode_hex("0xf0155486a14539f784739be1c02e93f28eb8e960")
amount = int(1e17)
gas_limit = int(1e6)
max_fee_per_gas = int(10e9)
max_priority_fee_per_gas = int(1e9)
data = b"0xdeadbeef"
nonce = 666
access_list = [[destination, [10, 200, 3000]]]
r = int.from_bytes(
b"#\xdc\x11\x1d|:\xd1\xdf\x98\x06\xce\x1e\x8e\xb4\xf5_W\xdb\xa1\x173\x9cT^u\x93\xd1\xf6\xc3\xb0&b",
"big",
)
s = int.from_bytes(
b"3,9\xdc\xd3\x98\xea4\xa4\x8b\x87\x18\x98\xd5\x89\xf5_\xc4\xc7\xbc\xe0\x05b\xfbg\x0c\x97.~\x1b\x07 ",
"big",
)
v = 1
tx = SignedType2Transaction(
chain_id=DEFAULT_CHAIN_ID,
destination=destination,
amount=amount,
gas_limit=gas_limit,
max_fee_per_gas=max_fee_per_gas,
max_priority_fee_per_gas=max_priority_fee_per_gas,
data=data,
nonce=nonce,
access_list=access_list,
sender_r=r,
sender_s=s,
y_parity=v,
)
assert tx.nonce == nonce
assert tx.max_fee_per_gas == max_fee_per_gas
assert tx.max_priority_fee_per_gas == max_priority_fee_per_gas
assert tx.gas_limit == gas_limit
assert tx.destination == destination
assert tx.amount == amount
assert tx.data == data
assert tx.chain_id == DEFAULT_CHAIN_ID
assert tx.sender_r == r
assert tx.sender_s == s
assert tx.y_parity == v
assert type(tx.access_list) == tuple
assert len(tx.access_list) == len(access_list)
assert type(tx.access_list[0]) == tuple
assert tx.access_list[0][0] == destination
assert len(tx.access_list[0][1]) == len(access_list[0][1])
assert tx.raw_transaction()
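# Refactoring sketch (illustrative only, not part of the original test module):
# the six tests above rebuild the same fixture values; a shared mapping could
# remove that duplication without changing any assertion, e.g.:
#
# COMMON = dict(
#     destination=decode_hex("0xf0155486a14539f784739be1c02e93f28eb8e960"),
#     amount=int(1e17), gas_limit=int(1e6), data=b"0xdeadbeef", nonce=666,
# )
# tx = Transaction(gas_price=int(1e9), **COMMON)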
hexsha: f6c2069b5620c0b0589f19078bc5a3d9eb5f314c | size: 30,856 | ext: py | lang: Python
repo_path: pkgs/conf-pkg/src/genie/libs/conf/bgp/iosxe/tests/test_bgp.py | repo_name: miott/genielibs | repo_head_hexsha: 6464642cdd67aa2367bdbb12561af4bb060e5e62 | licenses: ["Apache-2.0"]
#!/usr/bin/env python
# import python
import unittest
# import genie
from genie.conf.tests import TestCase
from genie.conf import Genie
from genie.conf.base import Testbed, Device, Interface
# import genie.libs
from genie.libs.conf.bgp import Bgp
from genie.libs.conf.vrf import Vrf
class test_bgp(TestCase):
# Old code test
def test_init(self):
Genie.testbed = testbed = Testbed()
dev1 = Device(testbed=testbed, name='PE1', os='iosxe')
intf1 = Interface(device=dev1, name='GigabitEthernet0/0/1',
ipv4='10.1.0.1/24')
intf2 = Interface(device=dev1, name='GigabitEthernet0/0/2',
ipv4='10.2.0.1/24')
dev2 = Device(testbed=testbed, name='PE2', os='iosxe')
intf3 = Interface(device=dev2, name='GigabitEthernet0/0/3',
ipv4='10.1.0.2/24')
intf4 = Interface(device=dev2, name='GigabitEthernet0/0/4',
ipv4='10.2.0.2/24')
vrf1 = Vrf(name='vrf1')
vrf2 = Vrf(name='a')
with self.assertNoWarnings():
Genie.testbed = None
with self.assertRaises(TypeError):
bgp = Bgp()
with self.assertRaises(TypeError):
bgp = Bgp(testbed=testbed)
Genie.testbed = testbed
with self.assertRaises(TypeError):
bgp = Bgp()
bgp = Bgp(asn=100)
self.assertIs(bgp.testbed, testbed)
Genie.testbed = testbed
bgp = Bgp(asn=100)
self.assertIs(bgp.testbed, Genie.testbed)
self.assertIs(bgp.testbed, testbed)
dev1.add_feature(bgp)
cfgs = bgp.build_config(apply=False)
self.assertCountEqual(cfgs.keys(), [dev1.name])
self.assertMultiLineEqual(str(cfgs[dev1.name]), '\n'.join([
'router bgp 100',
' address-family ipv4 unicast',
' exit',
' exit',
]))
dev2.add_feature(bgp)
cfgs = bgp.build_config(apply=False)
self.assertCountEqual(cfgs.keys(), [dev1.name, dev2.name])
self.assertMultiLineEqual(str(cfgs[dev1.name]), '\n'.join([
'router bgp 100',
' address-family ipv4 unicast',
' exit',
' exit',
]))
self.assertMultiLineEqual(str(cfgs[dev2.name]), '\n'.join([
'router bgp 100',
' address-family ipv4 unicast',
' exit',
' exit',
]))
bgp.device_attr[dev1].add_neighbor(intf3.ipv4)
bgp.device_attr[dev1].add_vrf(vrf1)
bgp.device_attr[dev1].vrf_attr[vrf1].add_neighbor(intf4.ipv4)
bgp.device_attr[dev1].vrf_attr[vrf1].neighbor_attr[intf4.ipv4].\
address_family_attr['ipv4 unicast'].activate = True
bgp.device_attr[dev1].add_vrf(vrf2)
cfgs = bgp.build_config(apply=False)
self.assertCountEqual(cfgs.keys(), [dev1.name, dev2.name])
self.assertMultiLineEqual(str(cfgs[dev1.name]), '\n'.join([
'router bgp 100',
' neighbor 10.1.0.2 remote-as 100',
' address-family ipv4 unicast',
' exit',
' address-family ipv4 unicast vrf a',
' exit',
' neighbor 10.2.0.2 remote-as 100',
' address-family ipv4 unicast vrf vrf1',
' neighbor 10.2.0.2 activate',
' exit',
' address-family ipv4 unicast vrf vrf1',
' exit',
' exit',
]))
self.assertMultiLineEqual(str(cfgs[dev2.name]), '\n'.join([
'router bgp 100',
' address-family ipv4 unicast',
' exit',
' exit',
]))
cfgs = bgp.build_config(apply=False,
attributes='device_attr__PE1__vrf_attr__default')
self.assertCountEqual(cfgs.keys(), [dev1.name])
self.assertMultiLineEqual(str(cfgs[dev1.name]), '\n'.join([
'router bgp 100',
' neighbor 10.1.0.2 remote-as 100',
' address-family ipv4 unicast',
' exit',
' exit',
]))
cfgs = bgp.build_config(apply=False, attributes={
'device_attr': {
'*': (),
},
})
self.assertCountEqual(cfgs.keys(), [dev1.name, dev2.name])
self.assertMultiLineEqual(str(cfgs[dev1.name]), '\n'.join([
'router bgp 100',
' exit',
]))
self.assertMultiLineEqual(str(cfgs[dev2.name]), '\n'.join([
'router bgp 100',
' exit',
]))
cfgs = bgp.build_config(apply=False, attributes={
'device_attr': {
'PE1': 'vrf_attr__default',
},
})
self.assertCountEqual(cfgs.keys(), [dev1.name])
self.assertMultiLineEqual(str(cfgs[dev1.name]), '\n'.join([
'router bgp 100',
' neighbor 10.1.0.2 remote-as 100',
' address-family ipv4 unicast',
' exit',
' exit',
]))
cfgs = bgp.build_config(apply=False, attributes={
'device_attr': {
'PE1': 'vrf_attr__default__neighbor_attr__10.1.0.2',
},
})
self.assertCountEqual(cfgs.keys(), [dev1.name])
self.assertMultiLineEqual(str(cfgs[dev1.name]), '\n'.join([
'router bgp 100',
' neighbor 10.1.0.2 remote-as 100',
' exit',
]))
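    # Note on the attributes filters used throughout these tests (a sketch of
    # the behaviour observed here, not official genie documentation):
    # dictionary keys name attribute levels (device_attr, vrf_attr,
    # neighbor_attr, ...), '*' matches every key at that level, and a string
    # leaf such as 'vrf_attr__default' selects a single '__'-joined path, so
    # build_config/build_unconfig emit only the matching configuration subtree.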
def test_cfg(self):
Genie.testbed = testbed = Testbed()
dev1 = Device(testbed=testbed, name='PE1', os='iosxe')
intf1 = Interface(device=dev1, name='Ethernet0/0/1',
ipv4='10.1.0.1/24')
intf2 = Interface(device=dev1, name='Ethernet0/0/2',
ipv4='10.2.0.1/24')
dev2 = Device(testbed=testbed, name='PE2', os='iosxe')
intf3 = Interface(device=dev2, name='Ethernet0/0/3',
ipv4='10.1.0.2/24', ipv6='2001:111:222::/64')
intf4 = Interface(device=dev2, name='Ethernet0/0/4',
ipv4='10.2.0.2/24')
with self.assertNoWarnings():
Genie.testbed = None
with self.assertRaises(TypeError):
bgp = Bgp()
with self.assertRaises(TypeError):
bgp = Bgp(testbed=testbed)
Genie.testbed = testbed
with self.assertRaises(TypeError):
bgp = Bgp()
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, testbed)
Genie.testbed = testbed
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, Genie.testbed)
self.assertIs(bgp.testbed, testbed)
# Defining attributes
af_name = 'ipv4 unicast'
af_name2 = 'link-state'
bgp.device_attr[dev1]
bgp.device_attr[dev1].vrf_attr[None].always_compare_med = True
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name]
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name2]
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\
af_label_allocation_mode = 'per-vrf'
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\
af_dampening = True
neighbor_id = intf4.ipv4
bgp.device_attr[dev1].vrf_attr[None].add_neighbor(neighbor_id)
bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id]
bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\
nbr_remote_as = 200
bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\
nbr_fall_over_bfd = True
bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\
nbr_shutdown = True
bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\
nbr_update_source = 'loopback0'
bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\
nbr_remove_private_as = True
bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\
nbr_remove_private_as_af_name = 'ipv4 unicast'
nbr_af_name = 'ipv4 multicast'
bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\
address_family_attr[nbr_af_name]
bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\
address_family_attr[nbr_af_name].nbr_af_allowas_in = True
dev1.add_feature(bgp)
cfgs = bgp.build_config(apply=False)
self.assertCountEqual(cfgs.keys(), [dev1.name])
self.assertMultiLineEqual(str(cfgs[dev1.name]), '\n'.join(
['router bgp 100',
' bgp always-compare-med',
' neighbor 10.2.0.2 fall-over bfd',
' neighbor 10.2.0.2 remote-as 200',
' address-family ipv4 unicast',
' neighbor 10.2.0.2 remove-private-as',
' exit',
' neighbor 10.2.0.2 shutdown',
' neighbor 10.2.0.2 update-source loopback0',
' address-family ipv4 multicast',
' neighbor 10.2.0.2 activate',
' neighbor 10.2.0.2 allowas-in',
' exit',
' address-family ipv4 unicast',
' bgp dampening',
' exit',
' address-family link-state',
' exit',
' exit',
'mpls label mode vrf default protocol bgp-vpnv4 per-vrf',
]))
def test_partial_cfg(self):
Genie.testbed = testbed = Testbed()
dev1 = Device(testbed=testbed, name='PE1', os='iosxe')
with self.assertNoWarnings():
Genie.testbed = None
with self.assertRaises(TypeError):
bgp = Bgp()
with self.assertRaises(TypeError):
bgp = Bgp(testbed=testbed)
Genie.testbed = testbed
with self.assertRaises(TypeError):
bgp = Bgp()
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, testbed)
Genie.testbed = testbed
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, Genie.testbed)
self.assertIs(bgp.testbed, testbed)
# Defining attributes
bgp.device_attr[dev1]
dev1.add_feature(bgp)
ps_name = 'PEER-SESSION'
bgp.device_attr[dev1].peer_session_attr[ps_name].ps_fall_over_bfd=\
True
partial_cfg1 = bgp.build_config(
apply=False,
attributes={'device_attr':\
{'*':{'peer_session_attr':\
{'*':"ps_fall_over_bfd"}}}})
self.assertCountEqual(partial_cfg1.keys(), [dev1.name])
self.assertMultiLineEqual(str(partial_cfg1[dev1.name]), '\n'.\
join([
'router bgp 100',
' template peer-session PEER-SESSION',
' fall-over bfd',
' exit',
' exit',
]))
def test_cfg2(self):
Genie.testbed = testbed = Testbed()
dev1 = Device(testbed=testbed, name='PE1', os='iosxe')
intf1 = Interface(device=dev1, name='Ethernet0/0/1',
ipv4='10.1.0.1/24')
intf2 = Interface(device=dev1, name='Ethernet0/0/2',
ipv4='10.2.0.1/24',
ipv6='2001::1')
dev2 = Device(testbed=testbed, name='PE2', os='iosxe')
intf3 = Interface(device=dev2, name='Ethernet0/0/3',
ipv4='10.1.0.2/24', ipv6='2001:111:222::/64')
intf4 = Interface(device=dev2, name='Ethernet0/0/4',
ipv4='10.2.0.2/24')
with self.assertNoWarnings():
Genie.testbed = None
with self.assertRaises(TypeError):
bgp = Bgp()
with self.assertRaises(TypeError):
bgp = Bgp(testbed=testbed)
Genie.testbed = testbed
with self.assertRaises(TypeError):
bgp = Bgp()
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, testbed)
Genie.testbed = testbed
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, Genie.testbed)
self.assertIs(bgp.testbed, testbed)
# Defining attributes
vrf_id = 'vrf1'
vrf1 = Vrf(vrf_id)
pp_name = 'PEER-POLICY'
ps_name = 'PEER-SESSION'
bgp.device_attr[dev2].peer_policy_attr[pp_name].pp_allowas_in =\
True
bgp.device_attr[dev2].peer_session_attr[ps_name].ps_fall_over_bfd=\
True
bgp.device_attr[dev2].vrf_attr[None].cluster_id = '150'
af_name = 'ipv6 unicast'
bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name]
bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\
af_nexthop_route_map = 'test'
bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\
af_dampening = True
bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\
af_dampening_half_life_time = 1
bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\
af_dampening_reuse_time = 10
bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\
af_dampening_suppress_time = 30
bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\
af_dampening_max_suppress_time = 2
neighbor_id = intf2.ipv4
bgp.device_attr[dev2].vrf_attr[vrf1].add_neighbor(neighbor_id)
bgp.device_attr[dev2].vrf_attr[vrf1].neighbor_attr[neighbor_id].\
nbr_suppress_four_byte_as_capability = True
dev2.add_feature(bgp)
cfgs = bgp.build_config(apply=False)
self.assertCountEqual(cfgs.keys(), [dev2.name])
self.maxDiff = None
self.assertEqual(str(cfgs[dev2.name]), '\n'.join(
['router bgp 100',
' bgp cluster-id 150',
' address-family ipv6 unicast',
' bgp dampening 1 10 30 2',
' bgp nexthop route-map test',
' exit',
' address-family ipv4 unicast vrf vrf1',
' neighbor 10.2.0.1 dont-capability-negotiate four-octets-as',
' exit',
' template peer-session PEER-SESSION',
' fall-over bfd',
' exit',
' template peer-policy PEER-POLICY',
' allowas-in',
' exit',
' exit',
]))
def test_cfg3(self):
Genie.testbed = testbed = Testbed()
dev1 = Device(testbed=testbed, name='PE1', os='iosxe')
intf1 = Interface(device=dev1, name='Ethernet0/0/1',
ipv4='10.1.0.1/24')
intf2 = Interface(device=dev1, name='Ethernet0/0/2',
ipv4='10.2.0.1/24',
ipv6='2001::1')
dev2 = Device(testbed=testbed, name='PE2', os='iosxe')
intf3 = Interface(device=dev2, name='Ethernet0/0/3',
ipv4='10.1.0.2/24', ipv6='2001:111:222::/64')
intf4 = Interface(device=dev2, name='Ethernet0/0/4',
ipv4='10.2.0.2/24')
with self.assertNoWarnings():
Genie.testbed = None
with self.assertRaises(TypeError):
bgp = Bgp()
with self.assertRaises(TypeError):
bgp = Bgp(testbed=testbed)
Genie.testbed = testbed
with self.assertRaises(TypeError):
bgp = Bgp()
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, testbed)
Genie.testbed = testbed
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, Genie.testbed)
self.assertIs(bgp.testbed, testbed)
# Defining attributes
vrf_id = 'vrf1'
vrf1 = Vrf(vrf_id)
pp_name = 'PEER-POLICY'
ps_name = 'PEER-SESSION'
bgp.device_attr[dev2].peer_policy_attr[pp_name].pp_as_override =\
True
bgp.device_attr[dev2].peer_session_attr[ps_name].ps_remote_as=\
12
bgp.device_attr[dev2].vrf_attr[None].cluster_id = '150'
af_name = 'ipv6 unicast'
bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name]
bgp.device_attr[dev2].vrf_attr[None].address_family_attr[af_name].\
af_client_to_client_reflection = True
nbr_af_name = 'ipv6 unicast'
neighbor_id2 = intf2.ipv6
bgp.device_attr[dev2].vrf_attr[vrf1].neighbor_attr[neighbor_id2].\
address_family_attr[nbr_af_name]
bgp.device_attr[dev2].vrf_attr[vrf1].neighbor_attr[neighbor_id2].\
address_family_attr[nbr_af_name].\
nbr_af_maximum_prefix_max_prefix_no = 300000
dev2.add_feature(bgp)
cfgs = bgp.build_config(apply=False)
self.assertCountEqual(cfgs.keys(), [dev2.name])
self.maxDiff = None
self.assertEqual(str(cfgs[dev2.name]), '\n'.join(
['router bgp 100',
' bgp cluster-id 150',
' address-family ipv6 unicast',
' bgp client-to-client reflection',
' exit',
' address-family ipv6 unicast vrf vrf1',
' neighbor 2001::1 activate',
' neighbor 2001::1 maximum-prefix 300000',
' exit',
' template peer-session PEER-SESSION',
' remote-as 12',
' exit',
' template peer-policy PEER-POLICY',
' as-override',
' exit',
' exit',
]))
def test_uncfg(self):
Genie.testbed = testbed = Testbed()
dev1 = Device(testbed=testbed, name='PE1', os='iosxe')
intf1 = Interface(device=dev1, name='Ethernet0/0/1',
ipv4='10.1.0.1/24')
intf2 = Interface(device=dev1, name='Ethernet0/0/2',
ipv4='10.2.0.1/24')
dev2 = Device(testbed=testbed, name='PE2', os='iosxe')
intf3 = Interface(device=dev2, name='Ethernet0/0/3',
ipv4='10.1.0.2/24', ipv6='2001:111:222::/64')
intf4 = Interface(device=dev2, name='Ethernet0/0/4',
ipv4='10.2.0.2/24')
with self.assertNoWarnings():
Genie.testbed = None
with self.assertRaises(TypeError):
bgp = Bgp()
with self.assertRaises(TypeError):
bgp = Bgp(testbed=testbed)
Genie.testbed = testbed
with self.assertRaises(TypeError):
bgp = Bgp()
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, testbed)
Genie.testbed = testbed
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, Genie.testbed)
self.assertIs(bgp.testbed, testbed)
dev1.add_feature(bgp)
uncfgs = bgp.build_unconfig(apply=False)
self.assertCountEqual(uncfgs.keys(), [dev1.name])
self.assertMultiLineEqual(str(uncfgs[dev1.name]), '\n'.join([
'no router bgp 100',
]))
# Defining attributes
af_name = 'vpnv4 unicast'
af_name2 = 'link-state'
vrf_id = 'vrf1'
vrf1 = Vrf(vrf_id)
bgp.device_attr[dev1]
bgp.device_attr[dev1].vrf_attr[None].always_compare_med = True
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name]
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name2]
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\
af_dampening = True
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\
af_dampening_half_life_time = 1
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\
af_dampening_reuse_time = 10
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\
af_dampening_suppress_time = 30
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\
af_dampening_max_suppress_time = 2
neighbor_id = intf4.ipv4
bgp.device_attr[dev1].vrf_attr[vrf1].add_neighbor(neighbor_id)
bgp.device_attr[dev1].vrf_attr[vrf1].neighbor_attr[neighbor_id]
bgp.device_attr[dev1].vrf_attr[vrf1].neighbor_attr[neighbor_id].\
nbr_fall_over_bfd = True
partial_uncfg1 = bgp.build_unconfig(
apply=False,
attributes={'device_attr':\
{'*':{'vrf_attr':\
{'*':"always_compare_med"}}}})
self.assertCountEqual(partial_uncfg1.keys(), [dev1.name])
self.assertMultiLineEqual(str(partial_uncfg1[dev1.name]), '\n'.\
join([
'router bgp 100',
' no bgp always-compare-med',
' exit',
]))
partial_uncfg2 = bgp.build_unconfig(\
apply=False,
attributes={'device_attr':\
{'*':{'vrf_attr':'*'}}})
self.assertCountEqual(partial_uncfg2.keys(), [dev1.name])
self.assertMultiLineEqual(str(partial_uncfg2[dev1.name]), '\n'.\
join([
'router bgp 100',
' no bgp always-compare-med',
' no address-family link-state',
' no address-family vpnv4 unicast',
' no neighbor 10.2.0.2',
' no address-family ipv4 unicast vrf vrf1',
' exit',
]))
partial_uncfg3 = bgp.build_unconfig(
apply=False,
attributes={'device_attr':\
{'*': {'vrf_attr':\
{'*': {'neighbor_attr':\
{'*':"nbr_fall_over_bfd"}}}}}})
self.assertCountEqual(partial_uncfg3.keys(), [dev1.name])
self.assertMultiLineEqual(str(partial_uncfg3[dev1.name]), '\n'.\
join([
'router bgp 100',
' address-family ipv4 unicast vrf vrf1',
' no neighbor 10.2.0.2 fall-over bfd',
' exit',
' exit',
]))
# Defining neighbor address family
nbr_af_name = 'ipv4 unicast'
bgp.device_attr[dev1].vrf_attr[vrf1].neighbor_attr[neighbor_id].\
address_family_attr[nbr_af_name]
bgp.device_attr[dev1].vrf_attr[vrf1].neighbor_attr[neighbor_id].\
address_family_attr[nbr_af_name].nbr_af_allowas_in = True
partial_uncfg4 = bgp.build_unconfig(
apply=False,
attributes={'device_attr': {'*': {'vrf_attr':
{'*': {'neighbor_attr':\
{'*': {'address_family_attr':\
{'*':"nbr_af_allowas_in"}}}}}}}})
self.assertCountEqual(partial_uncfg4.keys(), [dev1.name])
self.assertMultiLineEqual(str(partial_uncfg4[dev1.name]), '\n'.\
join([
'router bgp 100',
' address-family ipv4 unicast vrf vrf1',
' no neighbor 10.2.0.2 activate',
' no neighbor 10.2.0.2 allowas-in',
' exit',
' exit',
]))
partial_uncfg5 = bgp.build_unconfig(
apply=False,
attributes={'device_attr':\
{'*':{'vrf_attr':\
{'*':{'address_family_attr':\
{'*':"af_dampening"}}}}}})
self.assertCountEqual(partial_uncfg5.keys(), [dev1.name])
self.assertMultiLineEqual(str(partial_uncfg5[dev1.name]), '\n'.\
join([
'router bgp 100',
' address-family vpnv4 unicast',
' no bgp dampening',
' exit',
' exit',
]))
def test_uncfg2(self):
Genie.testbed = testbed = Testbed()
dev1 = Device(testbed=testbed, name='PE1', os='iosxe')
with self.assertNoWarnings():
Genie.testbed = None
with self.assertRaises(TypeError):
bgp = Bgp()
with self.assertRaises(TypeError):
bgp = Bgp(testbed=testbed)
Genie.testbed = testbed
with self.assertRaises(TypeError):
bgp = Bgp()
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, testbed)
Genie.testbed = testbed
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, Genie.testbed)
self.assertIs(bgp.testbed, testbed)
dev1.add_feature(bgp)
# Defining attributes
af_name = 'ipv4 unicast'
bgp.device_attr[dev1]
bgp.device_attr[dev1].vrf_attr[None].always_compare_med = True
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name]
bgp.device_attr[dev1].vrf_attr[None].address_family_attr[af_name].\
af_label_allocation_mode = 'per-vrf'
uncfg = bgp.build_unconfig(apply=False)
self.assertCountEqual(uncfg.keys(), [dev1.name])
self.assertMultiLineEqual(str(uncfg[dev1.name]), '\n'.\
join([
'no router bgp 100',
'no mpls label mode vrf default protocol bgp-vpnv4 per-vrf',
]))
def test_uncfg3(self):
Genie.testbed = testbed = Testbed()
dev1 = Device(testbed=testbed, name='PE1', os='iosxe')
dev2 = Device(testbed=testbed, name='PE2', os='iosxe')
intf4 = Interface(device=dev2, name='Ethernet0/0/4',
ipv4='10.2.0.2/24')
with self.assertNoWarnings():
Genie.testbed = None
with self.assertRaises(TypeError):
bgp = Bgp()
with self.assertRaises(TypeError):
bgp = Bgp(testbed=testbed)
Genie.testbed = testbed
with self.assertRaises(TypeError):
bgp = Bgp()
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, testbed)
Genie.testbed = testbed
bgp = Bgp(bgp_id=100)
self.assertIs(bgp.testbed, Genie.testbed)
self.assertIs(bgp.testbed, testbed)
dev1.add_feature(bgp)
# Defining attributes
bgp.device_attr[dev1]
neighbor_id = intf4.ipv4
bgp.device_attr[dev1].vrf_attr[None].add_neighbor(neighbor_id)
bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id]
partial_uncfg = bgp.build_unconfig(
apply=False,
attributes={'device_attr':\
{'*':{'vrf_attr':\
{'*':{'neighbor_attr':'*'}}}}})
self.assertCountEqual(partial_uncfg.keys(), [dev1.name])
self.assertMultiLineEqual(str(partial_uncfg[dev1.name]), '\n'.\
join([
'router bgp 100',
' no neighbor 10.2.0.2',
' exit',
]))
def test_cfg_l2vpn_vpls(self):
Genie.testbed = testbed = Testbed()
dev1 = Device(testbed=testbed, name='PE1', os='iosxe')
bgp = Bgp(bgp_id=100)
# Defining attributes
nbr_af_name = 'l2vpn vpls'
neighbor_id = '10.2.0.2'
bgp.device_attr[dev1].vrf_attr[None].neighbor_attr[neighbor_id].\
address_family_attr[nbr_af_name].nbr_af_suppress_signaling_protocol_ldp = True
bgp.device_attr[dev1]
self.assertIs(bgp.testbed, testbed)
dev1.add_feature(bgp)
cfgs = bgp.build_config(apply=False)
self.assertCountEqual(cfgs.keys(), [dev1.name])
self.assertMultiLineEqual(str(cfgs[dev1.name]), '\n'.join(
['router bgp 100',
' address-family l2vpn vpls',
' neighbor 10.2.0.2 activate',
' neighbor 10.2.0.2 suppress-signaling-protocol ldp',
' exit',
' exit',
]))
uncfgs = bgp.build_unconfig(
apply=False,
attributes={'device_attr': {'*': {'vrf_attr':
{'*': {'neighbor_attr': \
{'*': {'address_family_attr': \
{'*': "nbr_af_suppress_signaling_protocol_ldp"}}}}}}}})
self.assertCountEqual(uncfgs.keys(), [dev1.name])
self.maxDiff = None
self.assertMultiLineEqual(str(uncfgs[dev1.name]), '\n'.join(
['router bgp 100',
' address-family l2vpn vpls',
' no neighbor 10.2.0.2 activate',
' exit',
' exit',
]))
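# Illustrative helper (an assumption for exposition; not part of the original
# suite): every nested ``attributes`` dict passed to build_config/build_unconfig
# above shares one shape -- each key names a sub-attributes level, '*' wildcards
# over that level's keys, and a string leaf selects the single attribute to
# (un)configure.
def _make_nbr_af_attr_filter(leaf_attribute):
    """Build the device -> vrf -> neighbor -> address-family wildcard filter."""
    return {'device_attr': {'*': {'vrf_attr': {'*': {'neighbor_attr':
            {'*': {'address_family_attr': {'*': leaf_attribute}}}}}}}}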
if __name__ == '__main__':
unittest.main()
| 41.528937
| 127
| 0.508977
| 3,229
| 30,856
| 4.688139
| 0.053887
| 0.050865
| 0.05582
| 0.051658
| 0.889153
| 0.86993
| 0.835976
| 0.785176
| 0.76014
| 0.743097
| 0
| 0.047882
| 0.372569
| 30,856
| 742
| 128
| 41.584906
| 0.734039
| 0.008815
| 0
| 0.787597
| 0
| 0
| 0.144068
| 0.005463
| 0
| 0
| 0
| 0
| 0.15814
| 1
| 0.013953
| false
| 0
| 0.009302
| 0
| 0.024806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12120ae8c10d9df475fbb0300cf9ffb578735d3e
| 6,792
|
py
|
Python
|
tests/test_datetime_truncate.py
|
mediapop/datetime_truncate
|
da0285b2de2f30acf8a1c8a4263c1f07894caeed
|
[
"MIT"
] | 7
|
2015-05-07T19:22:58.000Z
|
2021-12-23T09:29:14.000Z
|
tests/test_datetime_truncate.py
|
mediapop/datetime_truncate
|
da0285b2de2f30acf8a1c8a4263c1f07894caeed
|
[
"MIT"
] | 4
|
2015-09-03T17:54:37.000Z
|
2020-08-06T20:20:57.000Z
|
tests/test_datetime_truncate.py
|
mediapop/datetime_truncate
|
da0285b2de2f30acf8a1c8a4263c1f07894caeed
|
[
"MIT"
] | 5
|
2015-09-02T20:28:34.000Z
|
2020-04-23T08:36:59.000Z
|
from datetime import datetime
import unittest
from datetime_truncate import truncate
from datetime_truncate import truncate_half_year
from datetime_truncate import truncate_quarter
from datetime_truncate import truncate_week
DEFAULT_DT = datetime(2012, 7, 12, 12, 14, 14, 342)
class TestDatetimeTruncate(unittest.TestCase):
def setUp(self):
self.default_dt = datetime(2012, 7, 12, 12, 14, 14, 342)
def test_truncate_to_second(self):
self.assertEqual(truncate(self.default_dt, 'second'),
self.default_dt.replace(microsecond=0))
def test_truncate_to_minute(self):
self.assertEqual(truncate(self.default_dt, 'minute'),
self.default_dt.replace(second=0, microsecond=0))
def test_truncate_to_hour(self):
self.assertEqual(truncate(self.default_dt, 'hour'),
self.default_dt.replace(minute=0, second=0,
microsecond=0))
def test_truncate_to_day(self):
self.assertEqual(truncate(self.default_dt, 'day'),
self.default_dt.replace(hour=0, minute=0,
second=0, microsecond=0))
def test_truncate_to_month(self):
self.assertEqual(truncate(self.default_dt, 'month'),
self.default_dt.replace(day=1, hour=0, minute=0,
second=0, microsecond=0))
def test_truncate_to_year(self):
self.assertEqual(truncate(self.default_dt, 'year'),
self.default_dt.replace(month=1, day=1, hour=0,
minute=0, second=0,
microsecond=0))
def test_truncate_to_week(self):
self.assertEqual(truncate(self.default_dt, 'week'),
self.default_dt.replace(day=9, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(truncate(self.default_dt.replace(day=9), 'week'),
self.default_dt.replace(day=9, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(truncate(self.default_dt.replace(day=16), 'week'),
self.default_dt.replace(day=16, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(truncate_week(self.default_dt),
self.default_dt.replace(day=9, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(truncate_week(self.default_dt.replace(day=9)),
self.default_dt.replace(day=9, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(truncate_week(self.default_dt.replace(day=16)),
self.default_dt.replace(day=16, hour=0,
minute=0, second=0,
microsecond=0))
def test_truncate_to_quarter(self):
self.assertEqual(truncate(self.default_dt.replace(month=2), 'quarter'),
self.default_dt.replace(month=1, day=1, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(truncate(self.default_dt.replace(month=6), 'quarter'),
self.default_dt.replace(month=4, day=1, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(truncate(self.default_dt, 'quarter'),
self.default_dt.replace(month=7, day=1, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(
truncate(self.default_dt.replace(month=10), 'quarter'),
self.default_dt.replace(month=10, day=1, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(truncate_quarter(self.default_dt.replace(month=2)),
self.default_dt.replace(month=1, day=1, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(truncate_quarter(self.default_dt.replace(month=6)),
self.default_dt.replace(month=4, day=1, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(truncate_quarter(self.default_dt),
self.default_dt.replace(month=7, day=1, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(
truncate_quarter(self.default_dt.replace(month=10)),
self.default_dt.replace(month=10, day=1, hour=0,
minute=0, second=0,
microsecond=0))
    def test_truncate_to_half_year(self):
self.assertEqual(
truncate(self.default_dt.replace(month=6), 'half_year'),
self.default_dt.replace(month=1, day=1, hour=0,
minute=0, second=0,
microsecond=0)
)
self.assertEqual(
truncate_half_year(self.default_dt.replace(month=6)),
self.default_dt.replace(month=1, day=1, hour=0,
minute=0, second=0,
microsecond=0)
)
self.assertEqual(truncate(self.default_dt, 'half_year'),
self.default_dt.replace(month=7, day=1, hour=0,
minute=0, second=0,
microsecond=0))
self.assertEqual(truncate_half_year(self.default_dt),
self.default_dt.replace(month=7, day=1, hour=0,
minute=0, second=0,
microsecond=0))
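# A quick illustrative demo (not part of the test suite above): truncating the
# same default datetime the tests use down to successively coarser fields.
if __name__ == '__main__':
    dt = datetime(2012, 7, 12, 12, 14, 14, 342)
    print(truncate(dt, 'hour'))   # -> 2012-07-12 12:00:00
    print(truncate(dt, 'month'))  # -> 2012-07-01 00:00:00
    print(truncate_quarter(dt))   # -> 2012-07-01 00:00:00 (Q3 begins in July)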
| 52.246154
| 79
| 0.464959
| 663
| 6,792
| 4.613876
| 0.06184
| 0.147107
| 0.208238
| 0.235371
| 0.906832
| 0.855835
| 0.837202
| 0.749591
| 0.722131
| 0.68748
| 0
| 0.046174
| 0.441991
| 6,792
| 129
| 80
| 52.651163
| 0.76095
| 0
| 0
| 0.561404
| 0
| 0
| 0.012662
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 1
| 0.087719
| false
| 0
| 0.052632
| 0
| 0.149123
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c3c42a6c469794515fc5af95ce791a069d64da87
| 13,251
|
py
|
Python
|
tensorflow_testing/histpdf_data.py
|
NLeSC/parallel-roofit-scripts
|
70de07edfd8e400650af4cb34789dbb8b8fc9574
|
[
"Apache-2.0"
] | 1
|
2017-11-21T10:24:51.000Z
|
2017-11-21T10:24:51.000Z
|
tensorflow_testing/histpdf_data.py
|
NLeSC/parallel-roofit-scripts
|
70de07edfd8e400650af4cb34789dbb8b8fc9574
|
[
"Apache-2.0"
] | 2
|
2017-07-02T12:51:57.000Z
|
2017-09-07T14:35:43.000Z
|
tensorflow_testing/histpdf_data.py
|
roofit-dev/parallel-roofit-scripts
|
70de07edfd8e400650af4cb34789dbb8b8fc9574
|
[
"Apache-2.0"
] | null | null | null |
gaussian_raw = [
{'x_bin': -9.9, 'weight': 1, 'vol': 0.2},
{'x_bin': -9.7, 'weight': 0, 'vol': 0.2},
{'x_bin': -9.5, 'weight': 1, 'vol': 0.2},
{'x_bin': -9.3, 'weight': 0, 'vol': 0.2},
{'x_bin': -9.1, 'weight': 1, 'vol': 0.2},
{'x_bin': -8.9, 'weight': 0, 'vol': 0.2},
{'x_bin': -8.7, 'weight': 1, 'vol': 0.2},
{'x_bin': -8.5, 'weight': 1, 'vol': 0.2},
{'x_bin': -8.3, 'weight': 1, 'vol': 0.2},
{'x_bin': -8.1, 'weight': 0, 'vol': 0.2},
{'x_bin': -7.9, 'weight': 1, 'vol': 0.2},
{'x_bin': -7.7, 'weight': 0, 'vol': 0.2},
{'x_bin': -7.5, 'weight': 0, 'vol': 0.2},
{'x_bin': -7.3, 'weight': 0, 'vol': 0.2},
{'x_bin': -7.1, 'weight': 1, 'vol': 0.2},
{'x_bin': -6.9, 'weight': 5, 'vol': 0.2},
{'x_bin': -6.7, 'weight': 1, 'vol': 0.2},
{'x_bin': -6.5, 'weight': 1, 'vol': 0.2},
{'x_bin': -6.3, 'weight': 4, 'vol': 0.2},
{'x_bin': -6.1, 'weight': 2, 'vol': 0.2},
{'x_bin': -5.9, 'weight': 3, 'vol': 0.2},
{'x_bin': -5.7, 'weight': 2, 'vol': 0.2},
{'x_bin': -5.5, 'weight': 4, 'vol': 0.2},
{'x_bin': -5.3, 'weight': 4, 'vol': 0.2},
{'x_bin': -5.1, 'weight': 6, 'vol': 0.2},
{'x_bin': -4.9, 'weight': 7, 'vol': 0.2},
{'x_bin': -4.7, 'weight': 12, 'vol': 0.2},
{'x_bin': -4.5, 'weight': 6, 'vol': 0.2},
{'x_bin': -4.3, 'weight': 10, 'vol': 0.2},
{'x_bin': -4.1, 'weight': 6, 'vol': 0.2},
{'x_bin': -3.9, 'weight': 12, 'vol': 0.2},
{'x_bin': -3.7, 'weight': 15, 'vol': 0.2},
{'x_bin': -3.5, 'weight': 12, 'vol': 0.2},
{'x_bin': -3.3, 'weight': 19, 'vol': 0.2},
{'x_bin': -3.1, 'weight': 11, 'vol': 0.2},
{'x_bin': -2.9, 'weight': 15, 'vol': 0.2},
{'x_bin': -2.7, 'weight': 12, 'vol': 0.2},
{'x_bin': -2.5, 'weight': 26, 'vol': 0.2},
{'x_bin': -2.3, 'weight': 17, 'vol': 0.2},
{'x_bin': -2.1, 'weight': 19, 'vol': 0.2},
{'x_bin': -1.9, 'weight': 18, 'vol': 0.2},
{'x_bin': -1.7, 'weight': 22, 'vol': 0.2},
{'x_bin': -1.5, 'weight': 24, 'vol': 0.2},
{'x_bin': -1.3, 'weight': 26, 'vol': 0.2},
{'x_bin': -1.1, 'weight': 20, 'vol': 0.2},
{'x_bin': -0.9, 'weight': 18, 'vol': 0.2},
{'x_bin': -0.7, 'weight': 36, 'vol': 0.2},
{'x_bin': -0.5, 'weight': 32, 'vol': 0.2},
{'x_bin': -0.3, 'weight': 29, 'vol': 0.2},
{'x_bin': -0.1, 'weight': 21, 'vol': 0.2},
{'x_bin': 0.1, 'weight': 19, 'vol': 0.2},
{'x_bin': 0.3, 'weight': 22, 'vol': 0.2},
{'x_bin': 0.5, 'weight': 22, 'vol': 0.2},
{'x_bin': 0.7, 'weight': 26, 'vol': 0.2},
{'x_bin': 0.9, 'weight': 31, 'vol': 0.2},
{'x_bin': 1.1, 'weight': 35, 'vol': 0.2},
{'x_bin': 1.3, 'weight': 27, 'vol': 0.2},
{'x_bin': 1.5, 'weight': 25, 'vol': 0.2},
{'x_bin': 1.7, 'weight': 26, 'vol': 0.2},
{'x_bin': 1.9, 'weight': 17, 'vol': 0.2},
{'x_bin': 2.1, 'weight': 22, 'vol': 0.2},
{'x_bin': 2.3, 'weight': 25, 'vol': 0.2},
{'x_bin': 2.5, 'weight': 14, 'vol': 0.2},
{'x_bin': 2.7, 'weight': 22, 'vol': 0.2},
{'x_bin': 2.9, 'weight': 16, 'vol': 0.2},
{'x_bin': 3.1, 'weight': 12, 'vol': 0.2},
{'x_bin': 3.3, 'weight': 15, 'vol': 0.2},
{'x_bin': 3.5, 'weight': 17, 'vol': 0.2},
{'x_bin': 3.7, 'weight': 12, 'vol': 0.2},
{'x_bin': 3.9, 'weight': 10, 'vol': 0.2},
{'x_bin': 4.1, 'weight': 10, 'vol': 0.2},
{'x_bin': 4.3, 'weight': 11, 'vol': 0.2},
{'x_bin': 4.5, 'weight': 15, 'vol': 0.2},
{'x_bin': 4.7, 'weight': 8, 'vol': 0.2},
{'x_bin': 4.9, 'weight': 4, 'vol': 0.2},
{'x_bin': 5.1, 'weight': 12, 'vol': 0.2},
{'x_bin': 5.3, 'weight': 9, 'vol': 0.2},
{'x_bin': 5.5, 'weight': 6, 'vol': 0.2},
{'x_bin': 5.7, 'weight': 1, 'vol': 0.2},
{'x_bin': 5.9, 'weight': 5, 'vol': 0.2},
{'x_bin': 6.1, 'weight': 0, 'vol': 0.2},
{'x_bin': 6.3, 'weight': 1, 'vol': 0.2},
{'x_bin': 6.5, 'weight': 1, 'vol': 0.2},
{'x_bin': 6.7, 'weight': 7, 'vol': 0.2},
{'x_bin': 6.9, 'weight': 0, 'vol': 0.2},
{'x_bin': 7.1, 'weight': 3, 'vol': 0.2},
{'x_bin': 7.3, 'weight': 0, 'vol': 0.2},
{'x_bin': 7.5, 'weight': 1, 'vol': 0.2},
{'x_bin': 7.7, 'weight': 2, 'vol': 0.2},
{'x_bin': 7.9, 'weight': 1, 'vol': 0.2},
{'x_bin': 8.1, 'weight': 0, 'vol': 0.2},
{'x_bin': 8.3, 'weight': 0, 'vol': 0.2},
{'x_bin': 8.5, 'weight': 0, 'vol': 0.2},
{'x_bin': 8.7, 'weight': 0, 'vol': 0.2},
{'x_bin': 8.9, 'weight': 0, 'vol': 0.2},
{'x_bin': 9.1, 'weight': 2, 'vol': 0.2},
{'x_bin': 9.3, 'weight': 1, 'vol': 0.2},
{'x_bin': 9.5, 'weight': 0, 'vol': 0.2},
{'x_bin': 9.7, 'weight': 0, 'vol': 0.2},
{'x_bin': 9.9, 'weight': 0, 'vol': 0.2},
]
uniform_raw = [
{'x_bin': -9.9, 'weight': 13, 'vol': 0.2},
{'x_bin': -9.7, 'weight': 13, 'vol': 0.2},
{'x_bin': -9.5, 'weight': 8, 'vol': 0.2},
{'x_bin': -9.3, 'weight': 15, 'vol': 0.2},
{'x_bin': -9.1, 'weight': 10, 'vol': 0.2},
{'x_bin': -8.9, 'weight': 14, 'vol': 0.2},
{'x_bin': -8.7, 'weight': 9, 'vol': 0.2},
{'x_bin': -8.5, 'weight': 11, 'vol': 0.2},
{'x_bin': -8.3, 'weight': 10, 'vol': 0.2},
{'x_bin': -8.1, 'weight': 15, 'vol': 0.2},
{'x_bin': -7.9, 'weight': 8, 'vol': 0.2},
{'x_bin': -7.7, 'weight': 7, 'vol': 0.2},
{'x_bin': -7.5, 'weight': 4, 'vol': 0.2},
{'x_bin': -7.3, 'weight': 7, 'vol': 0.2},
{'x_bin': -7.1, 'weight': 14, 'vol': 0.2},
{'x_bin': -6.9, 'weight': 7, 'vol': 0.2},
{'x_bin': -6.7, 'weight': 10, 'vol': 0.2},
{'x_bin': -6.5, 'weight': 12, 'vol': 0.2},
{'x_bin': -6.3, 'weight': 8, 'vol': 0.2},
{'x_bin': -6.1, 'weight': 6, 'vol': 0.2},
{'x_bin': -5.9, 'weight': 11, 'vol': 0.2},
{'x_bin': -5.7, 'weight': 12, 'vol': 0.2},
{'x_bin': -5.5, 'weight': 9, 'vol': 0.2},
{'x_bin': -5.3, 'weight': 6, 'vol': 0.2},
{'x_bin': -5.1, 'weight': 7, 'vol': 0.2},
{'x_bin': -4.9, 'weight': 5, 'vol': 0.2},
{'x_bin': -4.7, 'weight': 7, 'vol': 0.2},
{'x_bin': -4.5, 'weight': 12, 'vol': 0.2},
{'x_bin': -4.3, 'weight': 10, 'vol': 0.2},
{'x_bin': -4.1, 'weight': 10, 'vol': 0.2},
{'x_bin': -3.9, 'weight': 12, 'vol': 0.2},
{'x_bin': -3.7, 'weight': 9, 'vol': 0.2},
{'x_bin': -3.5, 'weight': 13, 'vol': 0.2},
{'x_bin': -3.3, 'weight': 14, 'vol': 0.2},
{'x_bin': -3.1, 'weight': 7, 'vol': 0.2},
{'x_bin': -2.9, 'weight': 15, 'vol': 0.2},
{'x_bin': -2.7, 'weight': 7, 'vol': 0.2},
{'x_bin': -2.5, 'weight': 8, 'vol': 0.2},
{'x_bin': -2.3, 'weight': 10, 'vol': 0.2},
{'x_bin': -2.1, 'weight': 8, 'vol': 0.2},
{'x_bin': -1.9, 'weight': 11, 'vol': 0.2},
{'x_bin': -1.7, 'weight': 8, 'vol': 0.2},
{'x_bin': -1.5, 'weight': 10, 'vol': 0.2},
{'x_bin': -1.3, 'weight': 6, 'vol': 0.2},
{'x_bin': -1.1, 'weight': 10, 'vol': 0.2},
{'x_bin': -0.9, 'weight': 11, 'vol': 0.2},
{'x_bin': -0.7, 'weight': 9, 'vol': 0.2},
{'x_bin': -0.5, 'weight': 7, 'vol': 0.2},
{'x_bin': -0.3, 'weight': 9, 'vol': 0.2},
{'x_bin': -0.1, 'weight': 14, 'vol': 0.2},
{'x_bin': 0.1, 'weight': 6, 'vol': 0.2},
{'x_bin': 0.3, 'weight': 13, 'vol': 0.2},
{'x_bin': 0.5, 'weight': 9, 'vol': 0.2},
{'x_bin': 0.7, 'weight': 8, 'vol': 0.2},
{'x_bin': 0.9, 'weight': 9, 'vol': 0.2},
{'x_bin': 1.1, 'weight': 9, 'vol': 0.2},
{'x_bin': 1.3, 'weight': 9, 'vol': 0.2},
{'x_bin': 1.5, 'weight': 10, 'vol': 0.2},
{'x_bin': 1.7, 'weight': 12, 'vol': 0.2},
{'x_bin': 1.9, 'weight': 10, 'vol': 0.2},
{'x_bin': 2.1, 'weight': 10, 'vol': 0.2},
{'x_bin': 2.3, 'weight': 9, 'vol': 0.2},
{'x_bin': 2.5, 'weight': 13, 'vol': 0.2},
{'x_bin': 2.7, 'weight': 16, 'vol': 0.2},
{'x_bin': 2.9, 'weight': 12, 'vol': 0.2},
{'x_bin': 3.1, 'weight': 12, 'vol': 0.2},
{'x_bin': 3.3, 'weight': 16, 'vol': 0.2},
{'x_bin': 3.5, 'weight': 10, 'vol': 0.2},
{'x_bin': 3.7, 'weight': 11, 'vol': 0.2},
{'x_bin': 3.9, 'weight': 6, 'vol': 0.2},
{'x_bin': 4.1, 'weight': 6, 'vol': 0.2},
{'x_bin': 4.3, 'weight': 9, 'vol': 0.2},
{'x_bin': 4.5, 'weight': 9, 'vol': 0.2},
{'x_bin': 4.7, 'weight': 5, 'vol': 0.2},
{'x_bin': 4.9, 'weight': 13, 'vol': 0.2},
{'x_bin': 5.1, 'weight': 12, 'vol': 0.2},
{'x_bin': 5.3, 'weight': 11, 'vol': 0.2},
{'x_bin': 5.5, 'weight': 10, 'vol': 0.2},
{'x_bin': 5.7, 'weight': 9, 'vol': 0.2},
{'x_bin': 5.9, 'weight': 11, 'vol': 0.2},
{'x_bin': 6.1, 'weight': 8, 'vol': 0.2},
{'x_bin': 6.3, 'weight': 10, 'vol': 0.2},
{'x_bin': 6.5, 'weight': 9, 'vol': 0.2},
{'x_bin': 6.7, 'weight': 7, 'vol': 0.2},
{'x_bin': 6.9, 'weight': 15, 'vol': 0.2},
{'x_bin': 7.1, 'weight': 16, 'vol': 0.2},
{'x_bin': 7.3, 'weight': 16, 'vol': 0.2},
{'x_bin': 7.5, 'weight': 16, 'vol': 0.2},
{'x_bin': 7.7, 'weight': 8, 'vol': 0.2},
{'x_bin': 7.9, 'weight': 8, 'vol': 0.2},
{'x_bin': 8.1, 'weight': 13, 'vol': 0.2},
{'x_bin': 8.3, 'weight': 7, 'vol': 0.2},
{'x_bin': 8.5, 'weight': 7, 'vol': 0.2},
{'x_bin': 8.7, 'weight': 15, 'vol': 0.2},
{'x_bin': 8.9, 'weight': 12, 'vol': 0.2},
{'x_bin': 9.1, 'weight': 3, 'vol': 0.2},
{'x_bin': 9.3, 'weight': 6, 'vol': 0.2},
{'x_bin': 9.5, 'weight': 13, 'vol': 0.2},
{'x_bin': 9.7, 'weight': 7, 'vol': 0.2},
{'x_bin': 9.9, 'weight': 11, 'vol': 0.2},
]
combined_raw = [
{'x_bin': -9.9, 'weight': 7, 'vol': 0.2},
{'x_bin': -9.7, 'weight': 5, 'vol': 0.2},
{'x_bin': -9.5, 'weight': 7, 'vol': 0.2},
{'x_bin': -9.3, 'weight': 10, 'vol': 0.2},
{'x_bin': -9.1, 'weight': 8, 'vol': 0.2},
{'x_bin': -8.9, 'weight': 10, 'vol': 0.2},
{'x_bin': -8.7, 'weight': 8, 'vol': 0.2},
{'x_bin': -8.5, 'weight': 10, 'vol': 0.2},
{'x_bin': -8.3, 'weight': 5, 'vol': 0.2},
{'x_bin': -8.1, 'weight': 4, 'vol': 0.2},
{'x_bin': -7.9, 'weight': 3, 'vol': 0.2},
{'x_bin': -7.7, 'weight': 1, 'vol': 0.2},
{'x_bin': -7.5, 'weight': 3, 'vol': 0.2},
{'x_bin': -7.3, 'weight': 3, 'vol': 0.2},
{'x_bin': -7.1, 'weight': 9, 'vol': 0.2},
{'x_bin': -6.9, 'weight': 6, 'vol': 0.2},
{'x_bin': -6.7, 'weight': 3, 'vol': 0.2},
{'x_bin': -6.5, 'weight': 6, 'vol': 0.2},
{'x_bin': -6.3, 'weight': 3, 'vol': 0.2},
{'x_bin': -6.1, 'weight': 5, 'vol': 0.2},
{'x_bin': -5.9, 'weight': 7, 'vol': 0.2},
{'x_bin': -5.7, 'weight': 8, 'vol': 0.2},
{'x_bin': -5.5, 'weight': 5, 'vol': 0.2},
{'x_bin': -5.3, 'weight': 7, 'vol': 0.2},
{'x_bin': -5.1, 'weight': 9, 'vol': 0.2},
{'x_bin': -4.9, 'weight': 4, 'vol': 0.2},
{'x_bin': -4.7, 'weight': 8, 'vol': 0.2},
{'x_bin': -4.5, 'weight': 11, 'vol': 0.2},
{'x_bin': -4.3, 'weight': 13, 'vol': 0.2},
{'x_bin': -4.1, 'weight': 6, 'vol': 0.2},
{'x_bin': -3.9, 'weight': 11, 'vol': 0.2},
{'x_bin': -3.7, 'weight': 8, 'vol': 0.2},
{'x_bin': -3.5, 'weight': 13, 'vol': 0.2},
{'x_bin': -3.3, 'weight': 16, 'vol': 0.2},
{'x_bin': -3.1, 'weight': 3, 'vol': 0.2},
{'x_bin': -2.9, 'weight': 17, 'vol': 0.2},
{'x_bin': -2.7, 'weight': 12, 'vol': 0.2},
{'x_bin': -2.5, 'weight': 18, 'vol': 0.2},
{'x_bin': -2.3, 'weight': 10, 'vol': 0.2},
{'x_bin': -2.1, 'weight': 12, 'vol': 0.2},
{'x_bin': -1.9, 'weight': 13, 'vol': 0.2},
{'x_bin': -1.7, 'weight': 15, 'vol': 0.2},
{'x_bin': -1.5, 'weight': 17, 'vol': 0.2},
{'x_bin': -1.3, 'weight': 13, 'vol': 0.2},
{'x_bin': -1.1, 'weight': 15, 'vol': 0.2},
{'x_bin': -0.9, 'weight': 16, 'vol': 0.2},
{'x_bin': -0.7, 'weight': 18, 'vol': 0.2},
{'x_bin': -0.5, 'weight': 16, 'vol': 0.2},
{'x_bin': -0.3, 'weight': 14, 'vol': 0.2},
{'x_bin': -0.1, 'weight': 21, 'vol': 0.2},
{'x_bin': 0.1, 'weight': 18, 'vol': 0.2},
{'x_bin': 0.3, 'weight': 12, 'vol': 0.2},
{'x_bin': 0.5, 'weight': 16, 'vol': 0.2},
{'x_bin': 0.7, 'weight': 19, 'vol': 0.2},
{'x_bin': 0.9, 'weight': 20, 'vol': 0.2},
{'x_bin': 1.1, 'weight': 24, 'vol': 0.2},
{'x_bin': 1.3, 'weight': 15, 'vol': 0.2},
{'x_bin': 1.5, 'weight': 17, 'vol': 0.2},
{'x_bin': 1.7, 'weight': 23, 'vol': 0.2},
{'x_bin': 1.9, 'weight': 8, 'vol': 0.2},
{'x_bin': 2.1, 'weight': 11, 'vol': 0.2},
{'x_bin': 2.3, 'weight': 17, 'vol': 0.2},
{'x_bin': 2.5, 'weight': 15, 'vol': 0.2},
{'x_bin': 2.7, 'weight': 24, 'vol': 0.2},
{'x_bin': 2.9, 'weight': 22, 'vol': 0.2},
{'x_bin': 3.1, 'weight': 10, 'vol': 0.2},
{'x_bin': 3.3, 'weight': 18, 'vol': 0.2},
{'x_bin': 3.5, 'weight': 18, 'vol': 0.2},
{'x_bin': 3.7, 'weight': 9, 'vol': 0.2},
{'x_bin': 3.9, 'weight': 6, 'vol': 0.2},
{'x_bin': 4.1, 'weight': 7, 'vol': 0.2},
{'x_bin': 4.3, 'weight': 7, 'vol': 0.2},
{'x_bin': 4.5, 'weight': 9, 'vol': 0.2},
{'x_bin': 4.7, 'weight': 8, 'vol': 0.2},
{'x_bin': 4.9, 'weight': 8, 'vol': 0.2},
{'x_bin': 5.1, 'weight': 11, 'vol': 0.2},
{'x_bin': 5.3, 'weight': 16, 'vol': 0.2},
{'x_bin': 5.5, 'weight': 10, 'vol': 0.2},
{'x_bin': 5.7, 'weight': 5, 'vol': 0.2},
{'x_bin': 5.9, 'weight': 9, 'vol': 0.2},
{'x_bin': 6.1, 'weight': 3, 'vol': 0.2},
{'x_bin': 6.3, 'weight': 4, 'vol': 0.2},
{'x_bin': 6.5, 'weight': 8, 'vol': 0.2},
{'x_bin': 6.7, 'weight': 9, 'vol': 0.2},
{'x_bin': 6.9, 'weight': 12, 'vol': 0.2},
{'x_bin': 7.1, 'weight': 10, 'vol': 0.2},
{'x_bin': 7.3, 'weight': 2, 'vol': 0.2},
{'x_bin': 7.5, 'weight': 6, 'vol': 0.2},
{'x_bin': 7.7, 'weight': 6, 'vol': 0.2},
{'x_bin': 7.9, 'weight': 1, 'vol': 0.2},
{'x_bin': 8.1, 'weight': 10, 'vol': 0.2},
{'x_bin': 8.3, 'weight': 5, 'vol': 0.2},
{'x_bin': 8.5, 'weight': 5, 'vol': 0.2},
{'x_bin': 8.7, 'weight': 5, 'vol': 0.2},
{'x_bin': 8.9, 'weight': 9, 'vol': 0.2},
{'x_bin': 9.1, 'weight': 3, 'vol': 0.2},
{'x_bin': 9.3, 'weight': 7, 'vol': 0.2},
{'x_bin': 9.5, 'weight': 2, 'vol': 0.2},
{'x_bin': 9.7, 'weight': 6, 'vol': 0.2},
{'x_bin': 9.9, 'weight': 3, 'vol': 0.2},
]
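# Minimal sketch (illustrative; the variable names below are assumptions) of
# unpacking these binned records into parallel lists, one per histogram column,
# before handing them to a fit or a plot.
gaussian_centers = [row['x_bin'] for row in gaussian_raw]
gaussian_counts = [row['weight'] for row in gaussian_raw]
gaussian_bin_widths = [row['vol'] for row in gaussian_raw]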
| 43.162866
| 44
| 0.44374
| 2,706
| 13,251
| 2.060976
| 0.015152
| 0.215169
| 0.268962
| 0.319527
| 0.993366
| 0.993366
| 0.985655
| 0.980993
| 0.522682
| 0.495607
| 0
| 0.151767
| 0.182024
| 13,251
| 306
| 45
| 43.303922
| 0.362764
| 0
| 0
| 0.111111
| 0
| 0
| 0.316957
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c3f4dbabc68791c69b8b1d34570a2c97486f33de
| 231
|
py
|
Python
|
title.py
|
jrgessiel/requestStatusCode
|
9a68f5e9fea3c4c32f2a4117767ce39d29c1340b
|
[
"MIT"
] | null | null | null |
title.py
|
jrgessiel/requestStatusCode
|
9a68f5e9fea3c4c32f2a4117767ce39d29c1340b
|
[
"MIT"
] | null | null | null |
title.py
|
jrgessiel/requestStatusCode
|
9a68f5e9fea3c4c32f2a4117767ce39d29c1340b
|
[
"MIT"
] | null | null | null |
def title():
    print('-----------------------------------------')
    print('- Welcome to the URL status checker 1.0 -')
    print('-----------------------------------------')
print()
| 38.5
| 67
| 0.285714
| 17
| 231
| 3.882353
| 0.764706
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010256
| 0.155844
| 231
| 5
| 68
| 46.2
| 0.328205
| 0
| 0
| 0.4
| 0
| 0
| 0.70354
| 0.469027
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0
| 0
| 0.2
| 0.8
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
61a02b5646b820b66d11d2350077c4a65d462696
| 7,065
|
py
|
Python
|
rolling/minmax.py
|
ajcr/rolling
|
7bbac93809a0ad1cd6e128cfd64b70a99d26ea8c
|
[
"MIT"
] | 189
|
2018-03-12T00:31:19.000Z
|
2022-03-26T00:17:38.000Z
|
rolling/minmax.py
|
ajcr/rolling
|
7bbac93809a0ad1cd6e128cfd64b70a99d26ea8c
|
[
"MIT"
] | 23
|
2017-12-31T17:50:48.000Z
|
2021-11-27T15:31:54.000Z
|
rolling/minmax.py
|
ajcr/rolling
|
7bbac93809a0ad1cd6e128cfd64b70a99d26ea8c
|
[
"MIT"
] | 7
|
2019-01-28T02:53:49.000Z
|
2021-11-11T18:34:45.000Z
|
from collections import deque
from heapq import heapify, heappush, heappop
from itertools import islice
from operator import itemgetter
from .base import RollingObject
# A tuple in a window has a value and an index at which it will exit the window
_value = itemgetter(0)
_death = itemgetter(1)
class Min(RollingObject):
"""
Iterator object that computes the minimum
of a rolling window over a Python iterable.
Parameters
----------
iterable : any iterable object
window_size : integer, the size of the rolling
window moving over the iterable
Complexity
----------
Update time: O(1)
Memory usage: O(k)
where k is the size of the rolling window
Notes
-----
This method uses the algorithms outlined in [1] to
maintain a deque of ascending minima.
[1] http://www.richardhartersworld.com/cri/2001/slidingmin.html
"""
# Note: _obs must be tracked separately, we cannot just use
# the size of the buffer as the algorithm may overwrite existing
# values with a new value, rather than appending the value
def _init_fixed(self, iterable, window_size, **kwargs):
self._i = -1
self._window_obs = 0
self._buffer = deque()
for new in islice(self._iterator, window_size - 1):
self._add_new(new)
def _init_variable(self, iterable, window_size, **kwargs):
self._i = -1
self._window_obs = 0
self._buffer = deque()
def _update_window(self, new):
self._i += 1
new_pair = (new, self._i + self.window_size)
# remove larger values from the end of the buffer
while self._buffer and _value(self._buffer[-1]) >= new:
self._buffer.pop()
self._buffer.append(new_pair)
# remove any minima that die on this iteration
if _death(self._buffer[0]) <= self._i:
self._buffer.popleft()
def _add_new(self, new):
self._i += 1
self._window_obs += 1
new_pair = (new, self._i + self.window_size)
# remove larger values from the end of the buffer
while self._buffer and _value(self._buffer[-1]) >= new:
self._buffer.pop()
self._buffer.append(new_pair)
def _remove_old(self):
self._i += 1
self._window_obs -= 1
# remove any minima that die on this iteration
while _death(self._buffer[0]) <= self._i:
self._buffer.popleft()
@property
def _obs(self):
return self._window_obs
@property
def current_value(self):
return _value(self._buffer[0])
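# Standalone sketch (illustrative only; not part of the module's public API) of
# the ascending-minima technique the Min class implements: an O(n) sliding
# minimum kept in a deque of (value, death_index) pairs, where death_index is
# the iteration at which the value leaves the window. Reuses the deque import
# at the top of this module.
def sliding_min(seq, k):
    buf, out = deque(), []
    for i, x in enumerate(seq):
        while buf and buf[-1][0] >= x:
            buf.pop()             # larger values can never become the minimum
        buf.append((x, i + k))    # x leaves the window once index i + k arrives
        if buf[0][1] <= i:
            buf.popleft()         # evict the expired front minimum
        if i >= k - 1:
            out.append(buf[0][0])
    return out                    # e.g. sliding_min([3, 1, 2, 4], 2) == [1, 1, 2]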
class Max(RollingObject):
"""
Iterator object that computes the maximum
of a rolling window over a Python iterable.
Parameters
----------
iterable : any iterable object
window_size : integer, the size of the rolling
window moving over the iterable
Complexity
----------
Update time: O(1)
Memory usage: O(k)
where k is the size of the rolling window
Notes
-----
This method uses the algorithms outlined in [1] to
maintain a deque of descending maxima.
[1] http://www.richardhartersworld.com/cri/2001/slidingmin.html
"""
# Note: _obs must be tracked separately, we cannot just use
# the size of the buffer as the algorithm may overwrite existing
# values with a new value, rather than appending the value
def _init_fixed(self, iterable, window_size, **kwargs):
self._i = -1
self._window_obs = 0
self._buffer = deque()
for new in islice(self._iterator, window_size - 1):
self._add_new(new)
def _init_variable(self, iterable, window_size, **kwargs):
self._buffer = deque()
self._i = -1
self._window_obs = 0
def _update_window(self, new):
self._i += 1
new_pair = (new, self._i + self.window_size)
# remove smaller values from the end of the buffer
while self._buffer and _value(self._buffer[-1]) <= new:
self._buffer.pop()
self._buffer.append(new_pair)
# remove any maxima that die on this iteration
if _death(self._buffer[0]) <= self._i:
self._buffer.popleft()
def _add_new(self, new):
self._i += 1
self._window_obs += 1
new_pair = (new, self._i + self.window_size)
# remove smaller values from the end of the buffer
while self._buffer and _value(self._buffer[-1]) <= new:
self._buffer.pop()
self._buffer.append(new_pair)
def _remove_old(self):
self._i += 1
self._window_obs -= 1
# remove any maxima that die on this iteration
while _death(self._buffer[0]) <= self._i:
self._buffer.popleft()
@property
def _obs(self):
return self._window_obs
@property
def current_value(self):
return _value(self._buffer[0])
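# The same sketch with the comparison flipped gives the descending-maxima
# variant that Max implements (illustrative only; mirrors sliding_min above).
def sliding_max(seq, k):
    buf, out = deque(), []
    for i, x in enumerate(seq):
        while buf and buf[-1][0] <= x:
            buf.pop()             # smaller values can never become the maximum
        buf.append((x, i + k))
        if buf[0][1] <= i:
            buf.popleft()
        if i >= k - 1:
            out.append(buf[0][0])
    return out                    # e.g. sliding_max([3, 1, 2, 4], 2) == [3, 2, 4]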
class MinHeap(RollingObject):
"""
Iterator object that computes the minimum value
of a rolling window over a Python iterable.
Parameters
----------
iterable : any iterable object
window_size : integer, the size of the rolling
window moving over the iterable
Complexity
----------
Update time: O(1)
Memory usage: O(k) (if the iterable is unordered)
where k is the size of the rolling window
Notes
-----
This method uses a heap to keep track of the minimum
values in the rolling window (as opposed to a deque
used by the Min class).
Items that expire are lazily deleted, which can mean
that the heap can grow to be larger than the specified
window size, k, in cases where data is ordered.
"""
def _init_fixed(self, iterable, window_size, **kwargs):
head = islice(self._iterator, window_size - 1)
# faster to create the heap this way, rather than repeat _add_new()
self._heap = [(value, i + window_size) for i, value in enumerate(head)]
heapify(self._heap)
self._i = len(self._heap) - 1
self._window_obs = len(self._heap)
def _init_variable(self, iterable, window_size, **kwargs):
self._heap = []
self._i = -1
self._window_obs = 0
def _update_window(self, new):
self._i += 1
new_pair = (new, self._i + self.window_size)
heappush(self._heap, new_pair)
# remove any minima that die on this iteration
while _death(self._heap[0]) <= self._i:
heappop(self._heap)
def _add_new(self, new):
self._i += 1
self._window_obs += 1
new_pair = (new, self._i + self.window_size)
heappush(self._heap, new_pair)
def _remove_old(self):
self._i += 1
self._window_obs -= 1
# remove any minima that die on this iteration
while _death(self._heap[0]) <= self._i:
heappop(self._heap)
@property
def _obs(self):
return self._window_obs
@property
def current_value(self):
return _value(self._heap[0])
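# Standalone sketch (illustrative only) of the lazy-deletion strategy MinHeap
# uses: expired (value, death_index) pairs stay on the heap until they reach
# the root, so on ascending input the heap can grow beyond the window size k.
# Reuses heappush/heappop imported at the top of this module.
def sliding_min_heap(seq, k):
    heap, out = [], []
    for i, x in enumerate(seq):
        heappush(heap, (x, i + k))    # each pair carries its own expiry index
        while heap[0][1] <= i:        # only expired roots are eagerly removed
            heappop(heap)
        if i >= k - 1:
            out.append(heap[0][0])
    return out                        # sliding_min_heap([3, 1, 2, 4], 2) == [1, 1, 2]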
| 28.26
| 79
| 0.622647
| 965
| 7,065
| 4.359585
| 0.160622
| 0.07131
| 0.046351
| 0.039933
| 0.831947
| 0.831947
| 0.81507
| 0.791776
| 0.779415
| 0.768243
| 0
| 0.011651
| 0.283227
| 7,065
| 249
| 80
| 28.373494
| 0.819115
| 0.37155
| 0
| 0.850877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.184211
| false
| 0
| 0.04386
| 0.052632
| 0.307018
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
61af61e22801311775772a99c30bc4523ac4e7b9
| 28,283
|
py
|
Python
|
tests/mssql_test.py
|
wesrog/simple-db-migrate
|
5d5637cbb96424676571431bb688f8b977b0837d
|
[
"Apache-2.0"
] | 1
|
2017-12-14T22:20:30.000Z
|
2017-12-14T22:20:30.000Z
|
tests/mssql_test.py
|
wesrog/simple-db-migrate
|
5d5637cbb96424676571431bb688f8b977b0837d
|
[
"Apache-2.0"
] | null | null | null |
tests/mssql_test.py
|
wesrog/simple-db-migrate
|
5d5637cbb96424676571431bb688f8b977b0837d
|
[
"Apache-2.0"
] | null | null | null |
import os
import unittest
import simple_db_migrate
from mock import patch, Mock, MagicMock, call
from simple_db_migrate.config import *
from simple_db_migrate.mssql import *
from tests import BaseTest, create_file, create_migration_file, delete_files, create_config
class MSSQLTest(BaseTest):
def setUp(self):
self.execute_returns = {'select count(*) from __db_version__;': 0}
self.close_returns = {}
        self.last_execute_command = ''
self.config_dict = {'database_script_encoding': 'utf8',
'database_encoding': 'utf8',
'database_host': 'localhost',
'database_user': 'root',
'database_password': '',
'database_name': 'migration_test',
'database_version_table': '__db_version__',
'drop_db_first': False
}
self.config_mock = MagicMock(spec_set=dict, wraps=self.config_dict)
self.db_mock = MagicMock(**{"execute_scalar": Mock(side_effect=self.execute_side_effect),
"execute_non_query": Mock(side_effect=self.execute_side_effect),
"execute_query": Mock(side_effect=self.execute_side_effect),
"execute_row": Mock(side_effect=self.execute_side_effect),
"close": Mock(side_effect=self.close_side_effect),
"__iter__":Mock(side_effect=self.iter_side_effect)})
self.db_driver_mock = Mock(**{"connect.return_value": self.db_mock})
def test_it_should_use_mssql_as_driver(self):
mssql_mock = MagicMock()
with patch.dict('sys.modules', _mssql=mssql_mock):
mssql = MSSQL(self.config_mock)
self.assertNotEqual(0, mssql_mock.connect.call_count)
def test_it_should_stop_process_when_an_error_occur_during_connect_database(self):
self.db_driver_mock.connect.side_effect = Exception("error when connecting")
try:
mssql = MSSQL(self.config_mock, self.db_driver_mock)
self.fail("it should not get here")
        except Exception as e:
self.assertEqual("could not connect to database: error when connecting", str(e))
self.assertEqual(0, self.db_mock.close.call_count)
self.assertEqual(0, self.db_mock.execute_scalar.call_count)
self.assertEqual(0, self.db_mock.execute_non_query.call_count)
self.assertEqual(0, self.db_mock.execute_query.call_count)
def test_it_should_create_database_and_version_table_on_init_if_not_exists(self):
mssql = MSSQL(self.config_mock, self.db_driver_mock)
expected_query_calls = [
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')")
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(4, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;')
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
def test_it_should_drop_database_on_init_if_its_asked(self):
self.config_dict["drop_db_first"] = True
mssql = MSSQL(self.config_mock, self.db_driver_mock)
expected_query_calls = [
call("if exists ( select 1 from sysdatabases where name = 'migration_test' ) drop database migration_test;"),
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')")
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(5, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;')
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
def test_it_should_stop_process_when_an_error_occur_during_drop_database(self):
self.config_dict["drop_db_first"] = True
self.db_mock.execute_non_query.side_effect = Exception("error when dropping")
try:
mssql = MSSQL(self.config_mock, self.db_driver_mock)
self.fail("it should not get here")
        except Exception as e:
self.assertEqual("can't drop database 'migration_test'; \nerror when dropping", str(e))
expected_query_calls = [
call("if exists ( select 1 from sysdatabases where name = 'migration_test' ) drop database migration_test;")
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.assertEqual(1, self.db_mock.close.call_count)
def test_it_should_execute_migration_up_and_update_schema_version(self):
mssql = MSSQL(self.config_mock, self.db_driver_mock)
mssql.change("create table spam();", "20090212112104", "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration", "create table spam();", "drop table spam;")
expected_query_calls = [
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')"),
call('create table spam()'),
call('insert into __db_version__ (version, label, name, sql_up, sql_down) values (%s, %s, %s, %s, %s);', ('20090212112104', None, '20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration', 'create table spam();', 'drop table spam;'))
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(6, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;')
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
def test_it_should_execute_migration_down_and_update_schema_version(self):
mssql = MSSQL(self.config_mock, self.db_driver_mock)
mssql.change("drop table spam;", "20090212112104", "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration", "create table spam();", "drop table spam;", False)
expected_query_calls = [
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')"),
call('drop table spam'),
call("delete from __db_version__ where version = %s;", ('20090212112104',))
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(6, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;')
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
def test_it_should_use_label_version_when_updating_schema_version(self):
mssql = MSSQL(self.config_mock, self.db_driver_mock)
mssql.change("create table spam();", "20090212112104", "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration", "create table spam();", "drop table spam;", label_version="label")
expected_query_calls = [
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')"),
call('create table spam()'),
call('insert into __db_version__ (version, label, name, sql_up, sql_down) values (%s, %s, %s, %s, %s);', ('20090212112104', 'label', '20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration', 'create table spam();', 'drop table spam;'))
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(6, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;')
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
    def test_it_should_raise_when_migration_sql_has_a_syntax_error(self):
mssql = MSSQL(self.config_mock, self.db_driver_mock)
self.assertRaisesWithMessage(Exception, "error executing migration: invalid sql syntax 'create table foo(); create table spam());'", mssql.change,
"create table foo(); create table spam());", "20090212112104", "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration", "create table spam());", "drop table spam;", label_version="label")
def test_it_should_stop_process_when_an_error_occur_during_database_change(self):
self.execute_returns["insert into spam"] = Exception("invalid sql")
try:
mssql = MSSQL(self.config_mock, self.db_driver_mock)
mssql.change("create table spam(); insert into spam", "20090212112104", "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration", "create table spam();", "drop table spam;", label_version="label")
        except Exception as e:
self.assertEqual("error executing migration: invalid sql\n\n[ERROR DETAILS] SQL command was:\ninsert into spam", str(e))
self.assertTrue(isinstance(e, simple_db_migrate.core.exceptions.MigrationException))
expected_query_calls = [
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')"),
call('create table spam()'),
call('insert into spam')
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(1, self.db_mock.cancel.call_count)
self.assertEqual(5, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;')
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
def test_it_should_stop_process_when_an_error_occur_during_log_schema_version(self):
self.execute_returns['insert into __db_version__ (version, label, name, sql_up, sql_down) values (%s, %s, %s, %s, %s);'] = Exception("invalid sql")
try:
mssql = MSSQL(self.config_mock, self.db_driver_mock)
mssql.change("create table spam();", "20090212112104", "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration", "create table spam();", "drop table spam;", label_version="label")
        except Exception as e:
self.assertEqual('error logging migration: invalid sql\n\n[ERROR DETAILS] SQL command was:\n20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration', str(e))
self.assertTrue(isinstance(e, simple_db_migrate.core.exceptions.MigrationException))
expected_query_calls = [
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')"),
call('create table spam()'),
call('insert into __db_version__ (version, label, name, sql_up, sql_down) values (%s, %s, %s, %s, %s);', ('20090212112104', 'label', '20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration', 'create table spam();', 'drop table spam;'))
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(1, self.db_mock.cancel.call_count)
self.assertEqual(6, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;'),
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
def test_it_should_log_execution_when_a_function_is_given_when_updating_schema_version(self):
execution_log_mock = Mock()
mssql = MSSQL(self.config_mock, self.db_driver_mock)
mssql.change("create table spam();", "20090212112104", "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration", "create table spam();", "drop table spam;", execution_log=execution_log_mock)
expected_execution_log_calls = [
call('create table spam()\n-- 1 row(s) affected\n'),
call('migration 20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration registered\n')
]
self.assertEqual(expected_execution_log_calls, execution_log_mock.mock_calls)
def test_it_should_get_current_schema_version(self):
self.execute_returns = {'select count(*) from __db_version__;': 0, 'select top 1 version from __db_version__ order by id desc': "0"}
mssql = MSSQL(self.config_mock, self.db_driver_mock)
self.assertEquals("0", mssql.get_current_schema_version())
expected_query_calls = [
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')")
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(5, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;'),
call("select top 1 version from __db_version__ order by id desc")
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
def test_it_should_get_all_schema_versions(self):
expected_versions = []
expected_versions.append("0")
expected_versions.append("20090211120001")
expected_versions.append("20090211120002")
expected_versions.append("20090211120003")
db_versions = [{'version':version} for version in expected_versions]
self.execute_returns = {'select count(*) from __db_version__;': 0, 'select version from __db_version__ order by id;': db_versions}
mssql = MSSQL(self.config_mock, self.db_driver_mock)
schema_versions = mssql.get_all_schema_versions()
        self.assertEqual(len(expected_versions), len(schema_versions))
for version in schema_versions:
self.assertTrue(version in expected_versions)
expected_query_calls = [
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')")
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(5, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;')
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
expected_execute_calls = [
call("select version from __db_version__ order by id;")
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_query.mock_calls)
def test_it_should_get_all_schema_migrations(self):
expected_versions = []
expected_versions.append([1, "0", None, None, None, None])
expected_versions.append([2, "20090211120001", "label", "20090211120001_name", "sql_up", "sql_down"])
db_versions = [{'id': db_version[0], 'version':db_version[1], 'label':db_version[2], 'name':db_version[3], 'sql_up':db_version[4], 'sql_down':db_version[5]} for db_version in expected_versions]
self.execute_returns = {'select count(*) from __db_version__;': 0, 'select id, version, label, name, cast(sql_up as text) as sql_up, cast(sql_down as text) as sql_down from __db_version__ order by id;': db_versions}
mssql = MSSQL(self.config_mock, self.db_driver_mock)
schema_migrations = mssql.get_all_schema_migrations()
        self.assertEqual(len(expected_versions), len(schema_migrations))
for index, migration in enumerate(schema_migrations):
self.assertEqual(migration.id, expected_versions[index][0])
self.assertEqual(migration.version, expected_versions[index][1])
self.assertEqual(migration.label, expected_versions[index][2])
self.assertEqual(migration.file_name, expected_versions[index][3])
self.assertEqual(migration.sql_up, expected_versions[index][4] and expected_versions[index][4] or "")
self.assertEqual(migration.sql_down, expected_versions[index][5] and expected_versions[index][5] or "")
expected_query_calls = [
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')")
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(5, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;'),
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
expected_execute_calls = [
call('select id, version, label, name, cast(sql_up as text) as sql_up, cast(sql_down as text) as sql_down from __db_version__ order by id;')
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_query.mock_calls)
def test_it_should_parse_sql_statements(self):
statements = MSSQL._parse_sql_statements('; ; create table eggs; drop table spam; ; ;')
self.assertEqual(2, len(statements))
self.assertEqual('create table eggs', statements[0])
self.assertEqual('drop table spam', statements[1])
def test_it_should_parse_sql_statements_with_html_inside(self):
sql = u"""
create table eggs;
INSERT INTO widget_parameter_domain (widget_parameter_id, label, value)
VALUES ((SELECT MAX(widget_parameter_id)
FROM widget_parameter), "Carros", '<div class="box-zap-geral">
<div class="box-zap box-zap-autos">
<a class="logo" target="_blank" title="ZAP" href="http://www.zap.com.br/Parceiros/g1/RedirG1.aspx?CodParceriaLink=42&URL=http://www.zap.com.br">');
drop table spam;
"""
statements = MSSQL._parse_sql_statements(sql)
expected_sql_with_html = """INSERT INTO widget_parameter_domain (widget_parameter_id, label, value)
VALUES ((SELECT MAX(widget_parameter_id)
FROM widget_parameter), "Carros", '<div class="box-zap-geral">
<div class="box-zap box-zap-autos">
<a class="logo" target="_blank" title="ZAP" href="http://www.zap.com.br/Parceiros/g1/RedirG1.aspx?CodParceriaLink=42&URL=http://www.zap.com.br">')"""
self.assertEqual(3, len(statements))
self.assertEqual('create table eggs', statements[0])
self.assertEqual(expected_sql_with_html, statements[1])
self.assertEqual('drop table spam', statements[2])
def test_it_should_get_none_for_a_non_existent_version_in_database(self):
mssql = MSSQL(self.config_mock, self.db_driver_mock)
ret = mssql.get_version_id_from_version_number('xxx')
self.assertEqual(None, ret)
expected_query_calls = [
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')")
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(5, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;'),
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
expected_execute_calls = [
call("select id from __db_version__ where version = 'xxx' order by id desc;")
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_row.mock_calls)
    def test_it_should_get_most_recent_version_for_an_existent_label_in_database(self):
        self.execute_returns = {'select count(*) from __db_version__;': 0, "select version from __db_version__ where label = 'xxx' order by id desc": {'version': "version"}}
mssql = MSSQL(self.config_mock, self.db_driver_mock)
ret = mssql.get_version_number_from_label('xxx')
self.assertEqual("vesion", ret)
expected_query_calls = [
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')")
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(5, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;'),
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
expected_execute_calls = [
call("select version from __db_version__ where label = 'xxx' order by id desc")
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_row.mock_calls)
def test_it_should_get_none_for_a_non_existent_label_in_database(self):
mssql = MSSQL(self.config_mock, self.db_driver_mock)
ret = mssql.get_version_number_from_label('xxx')
self.assertEqual(None, ret)
expected_query_calls = [
call("if not exists ( select 1 from sysdatabases where name = 'migration_test' ) create database migration_test;"),
call("if not exists ( select 1 from sysobjects where name = '__db_version__' and type = 'u' ) create table __db_version__ ( id INT IDENTITY(1,1) NOT NULL PRIMARY KEY, version varchar(20) NOT NULL default '0', label varchar(255), name varchar(255), sql_up NTEXT, sql_down NTEXT)"),
call("insert into __db_version__ (version) values ('0')")
]
self.assertEqual(expected_query_calls, self.db_mock.execute_non_query.mock_calls)
self.db_mock.select_db.assert_called_with('migration_test')
self.assertEqual(5, self.db_mock.close.call_count)
expected_execute_calls = [
call('select count(*) from __db_version__;'),
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_scalar.mock_calls)
expected_execute_calls = [
call("select version from __db_version__ where label = 'xxx' order by id desc")
]
self.assertEqual(expected_execute_calls, self.db_mock.execute_row.mock_calls)
def side_effect(self, returns, default_value):
result = returns.get(self.last_execute_command, default_value)
if isinstance(result, Exception):
raise result
return result
def iter_side_effect(self, *args):
return iter(self.side_effect(self.execute_returns, []))
def execute_side_effect(self, *args):
self.last_execute_command = args[0]
return self.side_effect(self.execute_returns, 0)
def close_side_effect(self, *args):
return self.side_effect(self.close_returns, None)
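# Illustrative sketch (an assumption for exposition; the suite above wires the
# equivalent logic through instance methods): a factory for the lookup-table
# side_effect pattern used throughout -- canned results keyed by the SQL
# command, raising any value that is an Exception instead of returning it.
def make_side_effect(returns, default=None):
    def _side_effect(command, *args):
        result = returns.get(command, default)
        if isinstance(result, Exception):
            raise result
        return result
    return _side_effect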
if __name__ == "__main__":
unittest.main()
| 61.484783
| 292
| 0.69639
| 3,722
| 28,283
| 4.922085
| 0.063407
| 0.028493
| 0.037118
| 0.036845
| 0.84738
| 0.820579
| 0.793504
| 0.780568
| 0.774672
| 0.761354
| 0
| 0.02845
| 0.199661
| 28,283
| 459
| 293
| 61.618736
| 0.78088
| 0
| 0
| 0.526596
| 0
| 0.06117
| 0.412863
| 0.04554
| 0
| 0
| 0
| 0
| 0.25
| 0
| null | null | 0.00266
| 0.018617
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
61b6dfe4e98a08af4d7a9d8ce94cb3db74ff22b4
| 13,133
|
py
|
Python
|
workout_tracker/tests/functional/auth/test_views.py
|
e-dang/Workout-Tracker
|
00a27597ea628cff62b320d616f56b2df4f344a0
|
[
"MIT"
] | null | null | null |
workout_tracker/tests/functional/auth/test_views.py
|
e-dang/Workout-Tracker
|
00a27597ea628cff62b320d616f56b2df4f344a0
|
[
"MIT"
] | null | null | null |
workout_tracker/tests/functional/auth/test_views.py
|
e-dang/Workout-Tracker
|
00a27597ea628cff62b320d616f56b2df4f344a0
|
[
"MIT"
] | null | null | null |
import pytest
from rest_framework.authtoken.models import Token
from rest_framework.reverse import reverse
from tests.utils import invalidate_credentials
from users.models import User
@pytest.mark.django_db
def test_login(api_client, user_factory, test_password):
url = reverse('rest_login')
user = user_factory()
resp = api_client.post(url, {'username': user.username, 'password': test_password})
assert resp.status_code == 200
assert len(resp.data) == 1
assert 'key' in resp.data
assert len(resp.data['key']) == 40
@pytest.mark.django_db
def test_login_fail(api_client, user_factory, test_password):
url = reverse('rest_login')
user = user_factory()
resp = api_client.post(url, {'email': user.email, 'password': test_password + 'wrong'})
assert resp.status_code == 400
@pytest.mark.django_db
def test_logout(auto_login_user):
url = reverse('rest_logout')
api_client, _ = auto_login_user()
resp = api_client.post(url)
assert resp.status_code == 200
assert len(Token.objects.all()) == 0
@pytest.mark.django_db
def test_logout_fail_invalid_credentials(auto_login_user):
url = reverse('rest_logout')
api_client, _ = auto_login_user()
invalidate_credentials(api_client)
resp = api_client.post(url)
assert resp.status_code == 401
assert len(Token.objects.all()) == 1
@pytest.mark.django_db
def test_logout_fail_not_logged_in(auto_login_user):
url = reverse('rest_logout')
api_client, _ = auto_login_user()
api_client.credentials()
resp = api_client.post(url)
assert resp.status_code == 200
assert len(Token.objects.all()) == 1
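# In the parametrized tests below, api_client is an indirect fixture: the
# first element of each parameter tuple is routed to the fixture, while data
# is passed to the test function directly.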
@pytest.mark.parametrize('api_client, data', [
(None, {'username': 'testname1', 'email': 'JohnDoe@demo.com', 'password1': 'thisisatest123',
'password2': 'thisisatest123', 'first_name': 'John', 'last_name': 'Doe'}),
(None, {'username': 'testname2', 'email': 'Johnoe@demo.com',
'password1': 'thisisatest123', 'password2': 'thisisatest123'}),
],
indirect=['api_client'],
ids=['with_names', 'without_names'])
@pytest.mark.django_db
def test_registration(api_client, data):
url = reverse('rest_register')
before = len(User.objects.all())
resp = api_client.post(url, data)
assert resp.status_code == 201
user = User.objects.get(email=data['email'])
assert len(User.objects.all()) == before + 1
assert resp.data['key'] == Token.objects.get(user=user.id).key
@pytest.mark.django_db
def test_rest_user_detail_get(auto_login_user):
url = reverse('rest_user_details')
api_client, user = auto_login_user()
resp = api_client.get(url)
assert resp.status_code == 200
assert resp.data['username'] == user.username
assert resp.data['email'] == user.email
assert resp.data['first_name'] == user.first_name
assert resp.data['last_name'] == user.last_name
@pytest.mark.django_db
def test_rest_user_detail_get_fail_invalid_credentials(auto_login_user):
url = reverse('rest_user_details')
api_client, _ = auto_login_user()
invalidate_credentials(api_client)
resp = api_client.get(url)
assert resp.status_code == 401
@pytest.mark.django_db
def test_rest_user_detail_get_fail_not_logged_in(auto_login_user):
url = reverse('rest_user_details')
api_client, _ = auto_login_user()
api_client.credentials()
resp = api_client.get(url)
assert resp.status_code == 401
@pytest.mark.django_db
def test_rest_user_detail_put(auto_login_user):
url = reverse('rest_user_details')
api_client, user = auto_login_user()
data = {'username': user.username, 'email': user.email,
'first_name': 'new_first_name', 'last_name': 'new_last_name'}
resp = api_client.put(url, data)
assert resp.status_code == 200
assert resp.data['username'] == data['username']
assert resp.data['email'] == data['email']
assert resp.data['first_name'] != user.first_name
assert resp.data['last_name'] != user.last_name
assert resp.data['first_name'] == data['first_name']
assert resp.data['last_name'] == data['last_name']
@pytest.mark.django_db
def test_rest_user_detail_put_fail_invalid_credentials(auto_login_user):
url = reverse('rest_user_details')
api_client, user = auto_login_user()
invalidate_credentials(api_client)
data = {'username': user.username, 'email': user.email,
'first_name': 'new_first_name', 'last_name': 'new_last_name'}
resp = api_client.put(url, data)
assert resp.status_code == 401
@pytest.mark.django_db
def test_rest_user_detail_put_fail_not_logged_in(auto_login_user):
url = reverse('rest_user_details')
api_client, user = auto_login_user()
api_client.credentials()
data = {'username': user.username, 'email': user.email,
'first_name': 'new_first_name', 'last_name': 'new_last_name'}
resp = api_client.put(url, data)
assert resp.status_code == 401
assert resp.data['detail'].code == 'not_authenticated'
@pytest.mark.django_db
def test_rest_user_detail_put_fail_not_full_state(auto_login_user):
url = reverse('rest_user_details')
api_client, _ = auto_login_user()
data = {'first_name': 'new_first_name', 'last_name': 'new_last_name'}
resp = api_client.put(url, data)
assert resp.status_code == 400
@pytest.mark.django_db
def test_rest_user_detail_patch(auto_login_user):
url = reverse('rest_user_details')
api_client, user = auto_login_user()
data = {'first_name': 'new_first_name', 'last_name': 'new_last_name'}
resp = api_client.patch(url, data)
assert resp.status_code == 200
assert resp.data['username'] == user.username
assert resp.data['email'] == user.email
assert resp.data['first_name'] != user.first_name
assert resp.data['last_name'] != user.last_name
assert resp.data['first_name'] == data['first_name']
assert resp.data['last_name'] == data['last_name']
@pytest.mark.django_db
def test_rest_user_detail_patch_fail_invalid_credentials(auto_login_user):
url = reverse('rest_user_details')
api_client, _ = auto_login_user()
invalidate_credentials(api_client)
data = {'first_name': 'new_first_name', 'last_name': 'new_last_name'}
resp = api_client.patch(url, data)
assert resp.status_code == 401
@pytest.mark.django_db
def test_rest_user_detail_patch_fail_not_logged_in(auto_login_user):
url = reverse('rest_user_details')
api_client, _ = auto_login_user()
api_client.credentials()
data = {'first_name': 'new_first_name', 'last_name': 'new_last_name'}
resp = api_client.patch(url, data)
assert resp.status_code == 401
@pytest.mark.django_db
def test_user_change_password(auto_login_user, test_password):
url = reverse('rest_password_change')
api_client, user = auto_login_user()
new_password = 'thisisanewpassword123'
data = {'new_password1': new_password, 'new_password2': new_password, 'old_password': test_password}
resp = api_client.post(url, data)
assert resp.status_code == 200
assert User.objects.get(email=user.email).check_password(new_password)
@pytest.mark.django_db
def test_user_change_password_fail_invalid_credentials(auto_login_user, test_password):
url = reverse('rest_password_change')
api_client, _ = auto_login_user()
invalidate_credentials(api_client)
new_password = 'thisisanewpassword123'
data = {'new_password1': new_password, 'new_password2': new_password, 'old_password': test_password}
resp = api_client.post(url, data)
assert resp.status_code == 401
@pytest.mark.django_db
def test_user_change_password_fail_not_logged_in(auto_login_user, test_password):
url = reverse('rest_password_change')
api_client, _ = auto_login_user()
api_client.credentials()
new_password = 'thisisanewpassword123'
data = {'new_password1': new_password, 'new_password2': new_password, 'old_password': test_password}
resp = api_client.post(url, data)
assert resp.status_code == 401
@pytest.mark.parametrize('auto_login_user, test_password, data, error_field', [
(None, None, {'new_password2': 'a_different_test_password123', 'old_password': None}, 'new_password1'),
(None, None, {'new_password1': 'a_unique_test_password123', 'old_password': None}, 'new_password2'),
(None, None, {'new_password1': 'a_unique_test_password123',
'new_password2': 'a_unique_test_password123'}, 'old_password'),
(None, None, {'new_password1': 'a_unique_test_password123',
'new_password2': 'a_different_test_password123', 'old_password': None}, 'new_password2'),
(None, None, {'new_password1': 'a_unique_test_password123',
'new_password2': 'a_unique_test_password123', 'old_password': 'invalid_password'}, 'old_password')
],
indirect=['auto_login_user', 'test_password'],
ids=['missing new_password1', 'missing new_password2', 'missing old_password', 'mismatching new passwords', 'invalid old_password'])
@pytest.mark.django_db
def test_user_change_password_fail_invalid_input(auto_login_user, test_password, data, error_field):
url = reverse('rest_password_change')
api_client, _ = auto_login_user()
if 'old_password' in data and data['old_password'] is None:
data['old_password'] = test_password
resp = api_client.post(url, data)
assert resp.status_code == 400
assert error_field in resp.data
@pytest.mark.parametrize('auto_login_user, logout', [
(None, True),
(None, False)
], indirect=['auto_login_user'], ids=['logged out', 'logged in'])
@pytest.mark.django_db
def test_password_reset_request(auto_login_user, logout):
url = reverse('rest_password_reset')
api_client, user = auto_login_user()
if logout:
api_client.credentials()
resp = api_client.post(url, {'email': user.email})
assert resp.status_code == 200
assert resp.context['user'].email == user.email
assert 'uid' in resp.context
assert 'token' in resp.context
@pytest.mark.django_db
def test_password_reset_request_fail_invalid_credentials(auto_login_user):
url = reverse('rest_password_reset')
api_client, user = auto_login_user()
invalidate_credentials(api_client)
resp = api_client.post(url, {'email': user.email})
assert resp.status_code == 401
@pytest.mark.parametrize('auto_login_user, logout', [
(None, True),
(None, False)
], indirect=['auto_login_user'], ids=['logged out', 'logged in'])
@pytest.mark.django_db
def test_password_reset_confirm(auto_login_user, logout):
url = reverse('rest_password_reset')
api_client, user = auto_login_user()
if logout:
api_client.credentials()
resp = api_client.post(url, {'email': user.email})
url = reverse('password_reset_confirm', kwargs={'token': resp.context['token'], 'uidb64': resp.context['uid']})
new_password = 'thisisanewpassword123'
data = {'new_password1': new_password, 'new_password2': new_password,
'token': resp.context['token'], 'uid': resp.context['uid']}
resp = api_client.post(url, data)
assert resp.status_code == 200
assert User.objects.get(email=user.email).check_password(new_password)
@pytest.mark.django_db
def test_password_reset_confirm_fail_invalid_credentials(auto_login_user):
url = reverse('rest_password_reset')
api_client, user = auto_login_user()
resp = api_client.post(url, {'email': user.email})
url = reverse('password_reset_confirm', kwargs={'token': resp.context['token'], 'uidb64': resp.context['uid']})
new_password = 'thisisanewpassword123'
data = {'new_password1': new_password, 'new_password2': new_password,
'token': resp.context['token'], 'uid': resp.context['uid']}
invalidate_credentials(api_client)
resp = api_client.post(url, data)
assert resp.status_code == 401
@pytest.mark.parametrize('auto_login_user, test_password, password1, password2, token, uid, error_field', [
(None, None, 'thisisanewpassword123', 'thisisanewpassword', None, None, 'new_password2'),
(None, None, 'thisisanewpassword123', 'thisisanewpassword123', 'WRONG_TOKEN', None, 'token'),
(None, None, 'thisisanewpassword123', 'thisisanewpassword123', None, 'WRONG_UID', 'uid'),
],
indirect=['auto_login_user', 'test_password'],
ids=['mismatch passwords', 'wrong token', 'wrong uid'])
@pytest.mark.django_db
def test_password_reset_confirm_fail_invalid_input(auto_login_user, test_password, password1, password2, token, uid, error_field):
url = reverse('rest_password_reset')
api_client, user = auto_login_user()
resp = api_client.post(url, {'email': user.email})
url = reverse('password_reset_confirm', kwargs={'token': resp.context['token'], 'uidb64': resp.context['uid']})
data = {'new_password1': password1, 'new_password2': password2,
'token': token or resp.context['token'], 'uid': uid or resp.context['uid']}
resp = api_client.post(url, data)
assert resp.status_code == 400
assert error_field in resp.data
assert User.objects.get(email=user.email).check_password(test_password)
hexsha: 140103793cafa936150a2c7c6d23a69e07006deb | size: 15692 | ext: py | lang: Python
path: BPt/main/tests/test_BPtEvaluator.py | repo: sahahn/BPt | head_hexsha: 1a2967f4ca3fa070b7417a4f59a218ae171daadd | licenses: ["MIT"]
stars: 6 (2020-11-06 to 2022-03-08) | issues: 14 (2020-10-20 to 2022-01-25) | forks: 2 (2020-10-23 to 2020-11-06)
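# Tests for BPt's evaluate() results object (BPtEvaluator): verbosity, repr,
# prediction storage, progress bars, scoring, cross-result comparison,
# subset_by, permutation importance, and pickle round-trips.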
from .test_evaluate import get_fake_dataset
from ..input import (Model, Pipeline, Scaler, CV)
from ...dataset.Dataset import Dataset
from ..funcs import evaluate
from ..BPtEvaluator import is_notebook
import pytest
import numpy as np
from tempfile import gettempdir
import os
import pickle as pkl
def test_is_notebook():
assert not is_notebook()
def test_bpt_evaluator_neg_verbose():
dataset = get_fake_dataset()
pipe = Pipeline(Model('dt'))
evaluate(pipeline=pipe,
dataset=dataset,
progress_bar=False,
random_state=2,
cv=2,
eval_verbose=-2)
def test_bpt_evaluator_repr():
dataset = get_fake_dataset()
pipe = Pipeline(Model('dt'))
results = evaluate(pipeline=pipe,
dataset=dataset,
progress_bar=False,
cv=2)
rr = repr(results)
assert 'BPtEvaluator' in rr
assert 'all_train_subjects' not in rr
assert 'all_val_subjects' not in rr
assert 'coef_' not in rr
def test_bpt_evaluator_repr2():
dataset = get_fake_dataset()
pipe = Pipeline(Model('linear'))
results = evaluate(pipeline=pipe,
dataset=dataset,
progress_bar=False,
cv=2)
rr = repr(results)
assert 'coef_' in rr
def test_bpt_evaluator_store_preds_false():
dataset = get_fake_dataset()
pipe = Pipeline(Model('dt'))
results = evaluate(pipeline=pipe,
dataset=dataset,
store_preds=False,
progress_bar=False,
cv=2)
assert results.preds is None
def test_bpt_evaluator_progress_bars():
dataset = get_fake_dataset()
pipe = Pipeline(Model('dt'))
# No repeats
evaluate(pipeline=pipe,
dataset=dataset,
progress_bar=True,
random_state=2,
cv=CV(splits=2, n_repeats=1))
# With repeats
evaluate(pipeline=pipe,
dataset=dataset,
progress_bar=True,
random_state=2,
cv=CV(splits=2, n_repeats=2))
def test_bpt_evaluator_score():
dataset = get_fake_dataset()
pipe = Pipeline(Model('dt'))
results = evaluate(pipeline=pipe,
dataset=dataset,
progress_bar=False,
random_state=2,
cv=2)
# Make sure score attribute works
first_scorer = list(results.mean_scores)[0]
assert results.score == results.mean_scores[first_scorer]
results = evaluate(pipeline=pipe,
dataset=dataset,
progress_bar=False,
random_state=2,
scorer='neg_mean_squared_error',
cv=2)
assert results.mean_scores['neg_mean_squared_error'] == results.score
def test_bpt_evaluator_compare_fail():
pipe1 = Pipeline([Scaler('standard'), Model('linear')])
pipe2 = Pipeline([Scaler('standard'), Model('dt')])
dataset = get_fake_dataset()
results1 = evaluate(pipeline=pipe1,
dataset=dataset,
progress_bar=False,
random_state=2,
cv=2)
results2 = evaluate(pipeline=pipe2,
dataset=dataset,
progress_bar=False,
random_state=2,
cv=2)
with pytest.raises(RuntimeError):
results1.compare(results2)
def test_bpt_evaluator_compare():
pipe1 = Pipeline([Scaler('standard'), Model('linear')])
pipe2 = Pipeline([Model('dt')])
dataset = get_fake_dataset()
dataset['3'] = np.random.random(len(dataset))
results1 = evaluate(pipeline=pipe1,
dataset=dataset,
progress_bar=False,
scorer=['neg_mean_squared_error',
'explained_variance'],
random_state=2,
cv=2)
dataset['3'] = np.random.random(len(dataset))
results2 = evaluate(pipeline=pipe2,
dataset=dataset,
progress_bar=False,
random_state=2,
scorer=['neg_mean_squared_error',
'explained_variance'],
cv=2)
# Just in case, force the two results to differ
results2.mean_scores['explained_variance'] = .9
results2.scores['explained_variance'] = [.8, 1]
compare_df = results1.compare(results2)
assert compare_df.shape == (2, 7)
def test_bpt_evaluator_compare_non_overlap_metric():
pipe1 = Pipeline([Scaler('standard'), Model('linear')])
pipe2 = Pipeline([Model('dt')])
dataset = get_fake_dataset()
dataset['3'] = np.random.random(len(dataset))
results1 = evaluate(pipeline=pipe1,
dataset=dataset,
progress_bar=False,
scorer=['neg_mean_squared_error'],
random_state=2,
cv=2)
# Just in case, force the two results to differ
dataset['3'] = np.random.random(len(dataset))
results2 = evaluate(pipeline=pipe2,
dataset=dataset,
progress_bar=False,
random_state=2,
scorer=['neg_mean_squared_error',
'explained_variance'],
cv=2)
compare_df = results1.compare(results2)
assert compare_df.shape == (1, 7)
def test_bpt_evaluator_compare_non_overlap_cv1():
pipe1 = Pipeline([Scaler('standard'), Model('linear')])
pipe2 = Pipeline([Model('dt')])
dataset = get_fake_dataset()
dataset['3'] = np.random.random(len(dataset))
results1 = evaluate(pipeline=pipe1,
dataset=dataset,
progress_bar=False,
scorer=['neg_mean_squared_error',
'explained_variance'],
random_state=2,
cv=2)
# Just in case, force the two results to differ
dataset['3'] = np.random.random(len(dataset))
results2 = evaluate(pipeline=pipe2,
dataset=dataset,
progress_bar=False,
random_state=2,
scorer=['neg_mean_squared_error',
'explained_variance'],
cv=3)
results2.mean_scores['explained_variance'] = .9
results2.scores['explained_variance'] = [.8, 1, .9]
compare_df = results1.compare(results2)
assert compare_df.shape == (2, 2)
def test_bpt_evaluator_compare_non_overlap_cv2():
pipe1 = Pipeline([Scaler('standard'), Model('linear')])
pipe2 = Pipeline([Model('dt')])
dataset = get_fake_dataset()
dataset['3'] = np.random.random(len(dataset))
results1 = evaluate(pipeline=pipe1,
dataset=dataset,
progress_bar=False,
scorer=['neg_mean_squared_error',
'explained_variance'],
random_state=2,
cv=2)
# Just in case, force the two results to differ
dataset['3'] = np.random.random(len(dataset))
results2 = evaluate(pipeline=pipe2,
dataset=dataset,
progress_bar=False,
random_state=10,
scorer=['neg_mean_squared_error',
'explained_variance'],
cv=2)
results2.mean_scores['explained_variance'] = .9
results2.scores['explained_variance'] = [.8, 1]
compare_df = results1.compare(results2)
assert compare_df.shape == (2, 2)
def test_multiclass_get_preds_df():
df = get_fake_dataset()
df['3'] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2]
df.ordinalize('3', inplace=True)
pipe = Pipeline([Model('linear')])
results = evaluate(pipeline=pipe,
dataset=df,
progress_bar=False,
scorer='roc_auc_ovr',
cv=CV(splits=2))
assert len(results.preds['predict']) == 2
assert len(results.preds['predict_proba']) == 2
assert len(results.preds['predict'][0]) == 10
assert len(results.preds['predict_proba'][0]) == 10
assert len(results.preds['predict_proba'][0][0]) == 3
# Test the per-fold prediction DataFrames from get_preds_dfs
r_df = results.get_preds_dfs()
assert r_df[0].shape == (10, 8)
assert r_df[1].shape == (10, 8)
def test_permutation_feature_importance():
pipe = Pipeline([Scaler('standard'), Model('linear')])
dataset = get_fake_dataset()
results = evaluate(pipeline=pipe,
dataset=dataset,
progress_bar=False,
scorer='neg_mean_squared_error',
random_state=2,
cv=2)
fis = results.permutation_importance(dataset, n_repeats=10)
assert fis['importances_mean'].shape == (2, 2)
assert fis['importances_std'].shape == (2, 2)
def test_subset_by_fail():
data = np.array([[1, 1, 1, 1, 1, 1],
[2, 2, 2, 2, 2, 2],
[.1, .2, .3, .4, .5, .6],
[1, 1, 1, 2, 2, 2]])
data = data.transpose((1, 0))
data = Dataset(data=data,
columns=['1', '2', 't', 'grp'],
targets='t', non_inputs='grp')
data = data.to_binary('grp')
pipe = Pipeline([Scaler('standard'), Model('linear')])
results = evaluate(pipeline=pipe,
dataset=data,
store_preds=False,
progress_bar=False,
random_state=2,
cv=2)
with pytest.raises(RuntimeError):
results.subset_by('grp', data)
def test_subset_by():
data = np.array([[1, 1, 1, 1, 1, 1],
[2, 2, 2, 2, 2, 2],
[.1, .2, .3, .4, .5, .6],
[1, 1, 1, 2, 2, 2]])
data = data.transpose((1, 0))
data = Dataset(data=data,
columns=['1', '2', 't', 'grp'],
targets='t', non_inputs='grp')
data = data.to_binary('grp')
pipe = Pipeline([Scaler('standard'), Model('linear')])
results = evaluate(pipeline=pipe,
dataset=data,
progress_bar=False,
random_state=2,
cv=2)
subsets = results.subset_by('grp', data)
g1 = subsets['1.0']
g2 = subsets['2.0']
assert len(g1.scores['explained_variance']) == 2
assert len(g2.scores['explained_variance']) == 2
assert len(g1.mean_scores) == 2
assert len(g2.mean_scores) == 2
assert len(g1.train_subjects) == 2
assert len(g2.train_subjects) == 2
assert len(g1.val_subjects) == 2
assert len(g2.val_subjects) == 2
assert len(g1.val_subjects[0].intersection(g2.val_subjects[0])) == 0
assert len(g1.val_subjects[1].intersection(g2.val_subjects[1])) == 0
g1_preds = g1.get_preds_dfs()
g2_preds = g2.get_preds_dfs()
assert list(g1_preds[0]) == list(g2_preds[0])
assert list(g1_preds[1]) == list(g2_preds[1])
def test_subset_by_binary():
data = np.array([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1],
[1, 1, 1, 2, 2, 2, 1, 1, 1, 2, 2, 2]])
data = data.transpose((1, 0))
data = Dataset(data=data,
columns=['1', '2', 't', 'grp'],
targets='t', non_inputs='grp')
data = data.to_binary('grp')
data = data.to_binary('t')
pipe = Pipeline([Scaler('standard'), Model('linear')])
results = evaluate(pipeline=pipe,
dataset=data,
progress_bar=False,
random_state=2,
scorer='roc_auc',
problem_type='binary',
cv=2)
subsets = results.subset_by('grp', data)
g1 = subsets['1']
g2 = subsets['2']
assert len(g1.scores['roc_auc']) == 2
assert len(g2.scores['roc_auc']) == 2
assert len(g1.mean_scores) == 1
assert len(g2.mean_scores) == 1
assert len(g1.train_subjects) == 2
assert len(g2.train_subjects) == 2
assert len(g1.val_subjects) == 2
assert len(g2.val_subjects) == 2
assert len(g1.val_subjects[0].intersection(g2.val_subjects[0])) == 0
assert len(g1.val_subjects[1].intersection(g2.val_subjects[1])) == 0
g1_preds = g1.get_preds_dfs()
g2_preds = g2.get_preds_dfs()
assert list(g1_preds[0]) == list(g2_preds[0])
assert list(g1_preds[1]) == list(g2_preds[1])
def test_subset_by_categorical():
data = np.array([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2],
[1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1,
1, 1, 1, 2, 2, 2, 3, 3, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1],
[1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1]]
)
data = data.transpose((1, 0))
data = Dataset(data=data,
columns=['1', '2', 't', 'grp'],
targets='t', non_inputs='grp')
data = data.to_binary('grp')
data = data.ordinalize('t')
pipe = Pipeline([Scaler('standard'), Model('linear')])
results = evaluate(pipeline=pipe,
dataset=data,
progress_bar=False,
random_state=1,
scorer='roc_auc_ovr',
problem_type='categorical',
cv=2)
subsets = results.subset_by('grp', data)
g1 = subsets['1']
g2 = subsets['2']
assert len(g1.scores['roc_auc_ovr']) == 2
assert len(g2.scores['roc_auc_ovr']) == 2
assert len(g1.mean_scores) == 1
assert len(g2.mean_scores) == 1
assert len(g1.train_subjects) == 2
assert len(g2.train_subjects) == 2
assert len(g1.val_subjects) == 2
assert len(g2.val_subjects) == 2
assert len(g1.val_subjects[0].intersection(g2.val_subjects[0])) == 0
assert len(g1.val_subjects[1].intersection(g2.val_subjects[1])) == 0
g1_preds = g1.get_preds_dfs()
g2_preds = g2.get_preds_dfs()
assert list(g1_preds[0]) == list(g2_preds[0])
assert list(g1_preds[1]) == list(g2_preds[1])
assert 'BPtEvaluatorSubset(grp=1)' in repr(g1)
assert 'BPtEvaluatorSubset(grp=2)' in repr(g2)
def test_bpt_evaluator_to_pickle():
dataset = get_fake_dataset()
pipe = Pipeline(Model('dt'))
results = evaluate(pipeline=pipe,
dataset=dataset,
cv=2)
assert results.score is not None
# Save to temp spot
temp_dr = gettempdir()
temp_save_loc = os.path.join(temp_dr, 'temp.pkl')
results.to_pickle(temp_save_loc)
# Then load it back in, mostly just testing that no errors are thrown
with open(temp_save_loc, 'rb') as f:
results_loaded = pkl.load(f)
assert results_loaded.score == results.score
hexsha: 14332d1422905c1ab3faa1c7e37a1f936b252866 | size: 24 | ext: py | lang: Python
path: python/ql/test/library-tests/stmts/general/subexpr_test.py | repo: vadi2/codeql | head_hexsha: a806a4f08696d241ab295a286999251b56a6860c | licenses: ["MIT"]
stars: 4036 (2020-04-29 to 2022-03-31) | issues: 2970 (2020-04-28 to 2022-03-31) | forks: 794 via ScriptBox99/github-codeql @ 2ecf0d3264db8fb4904b2056964da469372a235c (2020-04-29 to 2022-03-30)
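# Delete statements over each target kind: subscript, plain name, and attribute.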
del a[b]
del x
del a.b
hexsha: 146aca4f3b90a5ebbf6a82d3a7343d6bb8a476c9 | size: 112066 | ext: py | lang: Python
path: ml4a/nicer_model.py | repo: golmschenk/ml4a | head_hexsha: 4e0b7576d131c63fc4c8b94158d5a0052adae651 | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null
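# Keras models assembled from dense and transposed-convolution blocks that
# decode a small feature vector into a fixed 64-sample 1D output.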
from tensorflow.keras import Model, regularizers
import tensorflow as tf
from tensorflow.keras.layers import LeakyReLU, Dense, Conv1DTranspose, Reshape, Cropping1D, Layer, Dropout,\
SpatialDropout1D, BatchNormalization, GaussianDropout, AlphaDropout
from tensorflow.keras.layers import GaussianNoise
class SimpleModel(Model):
def __init__(self, number_of_label_types=1):
super().__init__()
leaky_relu = LeakyReLU(alpha=0.01)
# l2_regularizer = l2(0.001)
self.dense0 = Dense(11, activation=leaky_relu)
self.dense1 = Dense(11, activation=leaky_relu)
self.dense2 = Dense(11, activation=leaky_relu)
self.reshape0 = Reshape([1, 11])
self.transposed_convolution0 = Conv1DTranspose(filters=8, kernel_size=4, strides=2, activation=leaky_relu)
self.transposed_convolution1 = Conv1DTranspose(filters=4, kernel_size=4, strides=2, activation=leaky_relu)
self.transposed_convolution2 = Conv1DTranspose(filters=2, kernel_size=4, strides=2, activation=leaky_relu)
self.transposed_convolution3 = Conv1DTranspose(filters=1, kernel_size=4, strides=3, activation=leaky_relu)
self.cropping0 = Cropping1D((0, 3))
self.reshape1 = Reshape([64])
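# With default (valid) padding, the transposed convolutions expand the single
# step to 4 -> 10 -> 22 -> 67 samples; Cropping1D((0, 3)) then trims this to
# the 64-length output.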
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Conv1DTransposeBlock(Layer):
def __init__(self, filters: int, kernel_size: int, strides: int, dropout_rate: float = 0.1,
batch_normalization: bool = True, spatial: bool = True):
super().__init__()
leaky_relu = LeakyReLU(alpha=0.01)
self.convolution = Conv1DTranspose(filters, kernel_size=kernel_size, strides=strides, activation=leaky_relu, kernel_regularizer=regularizers.L2(l2=0.0001))
if dropout_rate > 0:
if spatial:
self.dropout = SpatialDropout1D(dropout_rate)
else:
self.dropout = Dropout(dropout_rate)
else:
self.dropout = None
if batch_normalization:
self.batch_normalization = BatchNormalization(scale=False)
if not spatial:
self.batch_normalization_input_reshape = Reshape([-1])
self.batch_normalization_output_reshape = Reshape([-1, filters])
else:
self.batch_normalization_input_reshape = None
self.batch_normalization_output_reshape = None
else:
self.batch_normalization = None
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.convolution(x, training=training)
if self.batch_normalization is not None:
if self.batch_normalization_input_reshape is not None:
x = self.batch_normalization_input_reshape(x, training=training)
x = self.batch_normalization(x, training=training)
if self.batch_normalization_output_reshape is not None:
x = self.batch_normalization_output_reshape(x, training=training)
if self.dropout is not None:
x = self.dropout(x, training=training)
return x
class DenseBlock(Layer):
def __init__(self, filters: int, dropout_rate: float = 0.1,
batch_normalization: bool = True, spatial: bool = False):
super().__init__()
leaky_relu = LeakyReLU(alpha=0.01)
self.dense = Dense(filters, activation=leaky_relu, kernel_regularizer=regularizers.L2(l2=0.0001))
if dropout_rate > 0:
self.dropout = Dropout(dropout_rate)
else:
self.dropout = None
if batch_normalization:
self.batch_normalization = BatchNormalization(scale=False)
if not spatial:
self.batch_normalization_input_reshape = Reshape([-1, filters])
self.batch_normalization_output_reshape = Reshape([-1])
else:
self.batch_normalization_input_reshape = None
self.batch_normalization_output_reshape = None
else:
self.batch_normalization = None
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense(x, training=training)
if self.batch_normalization is not None:
if self.batch_normalization_input_reshape is not None:
x = self.batch_normalization_input_reshape(x, training=training)
x = self.batch_normalization(x, training=training)
if self.batch_normalization_output_reshape is not None:
x = self.batch_normalization_output_reshape(x, training=training)
if self.dropout is not None:
x = self.dropout(x, training=training)
return x
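# Note on the Reshape/BatchNormalization sandwich used by both block types:
# the flatten/unflatten around BatchNormalization controls which axes the
# normalization statistics are pooled over, and scale=False defers learned
# scaling to the next layer's weights.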
class WiderWithDropoutModel(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(20, batch_normalization=False)
self.dense1 = DenseBlock(30, batch_normalization=False)
self.dense2 = DenseBlock(40, batch_normalization=False, spatial=True)
self.reshape0 = Reshape([1, 40])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=20, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=10, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=3, batch_normalization=False, dropout_rate=0)
self.cropping0 = Cropping1D((0, 3))
self.reshape1 = Reshape([64])
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(20, batch_normalization=False)
self.dense1 = DenseBlock(30, batch_normalization=False)
self.dense2 = DenseBlock(40, batch_normalization=False)
self.dense3 = DenseBlock(50, batch_normalization=True)
self.dense4 = DenseBlock(60, batch_normalization=True)
self.dense5 = DenseBlock(70, batch_normalization=True, spatial=True)
self.reshape0 = Reshape([1, 70])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=60, kernel_size=2, strides=1, batch_normalization=True)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=50, kernel_size=3, strides=1, batch_normalization=True)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=40, kernel_size=4, strides=1, batch_normalization=True)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=20, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
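# Length bookkeeping with valid padding (out = (in - 1) * stride + kernel):
# 1 -> 2 -> 4 -> 7 -> 16 -> 34 -> 70, then Cropping1D((3, 3)) trims to 64.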
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx2(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(30, batch_normalization=False)
self.dense1 = DenseBlock(60, batch_normalization=False)
self.dense2 = DenseBlock(120, batch_normalization=False)
self.dense3 = DenseBlock(200, batch_normalization=True)
self.dense4 = DenseBlock(400, batch_normalization=True)
self.dense5 = DenseBlock(800, batch_normalization=True, spatial=True)
self.reshape0 = Reshape([1, 800])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=2, strides=1, batch_normalization=True)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200, kernel_size=3, strides=1, batch_normalization=True)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120, kernel_size=4, strides=1, batch_normalization=True)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx3(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(30, batch_normalization=False)
self.dense1 = DenseBlock(60, batch_normalization=False)
self.dense2 = DenseBlock(120, batch_normalization=False)
self.dense3 = DenseBlock(200, batch_normalization=False)
self.dense4 = DenseBlock(400, batch_normalization=False)
self.dense5 = DenseBlock(800, batch_normalization=False, spatial=True)
self.reshape0 = Reshape([1, 800])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=2, strides=1, batch_normalization=False)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200, kernel_size=3, strides=1, batch_normalization=False)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120, kernel_size=4, strides=1, batch_normalization=False)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx4(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(30, batch_normalization=False, dropout_rate=0)
self.dense1 = DenseBlock(60, batch_normalization=False, dropout_rate=0)
self.dense2 = DenseBlock(120, batch_normalization=False, dropout_rate=0)
self.dense3 = DenseBlock(200, batch_normalization=True, dropout_rate=0)
self.dense4 = DenseBlock(400, batch_normalization=True, dropout_rate=0)
self.dense5 = DenseBlock(800, batch_normalization=True, spatial=True, dropout_rate=0)
self.reshape0 = Reshape([1, 800])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=2, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200, kernel_size=3, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120, kernel_size=4, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx5(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(30, batch_normalization=False, dropout_rate=0)
self.dense1 = DenseBlock(60, batch_normalization=False, dropout_rate=0)
self.dense2 = DenseBlock(120, batch_normalization=True, dropout_rate=0)
self.dense3 = DenseBlock(200, batch_normalization=True, dropout_rate=0)
self.dense4 = DenseBlock(400, batch_normalization=True, dropout_rate=0)
self.dense5 = DenseBlock(800, batch_normalization=True, spatial=True, dropout_rate=0)
self.reshape0 = Reshape([1, 800])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=2, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200, kernel_size=3, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120, kernel_size=4, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx4Narrow(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(15, batch_normalization=False, dropout_rate=0)
self.dense1 = DenseBlock(20, batch_normalization=False, dropout_rate=0)
self.dense2 = DenseBlock(25, batch_normalization=False, dropout_rate=0)
self.dense3 = DenseBlock(30, batch_normalization=True, dropout_rate=0)
self.dense4 = DenseBlock(35, batch_normalization=True, dropout_rate=0)
self.dense5 = DenseBlock(40, batch_normalization=True, spatial=True, dropout_rate=0)
self.reshape0 = Reshape([1, 40])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=35, kernel_size=2, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=30, kernel_size=3, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=25, kernel_size=4, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=20, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=15, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx6(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(30, batch_normalization=False)
self.dense1 = DenseBlock(60, batch_normalization=False)
self.dense2 = DenseBlock(120, batch_normalization=False)
self.dense3 = DenseBlock(200, batch_normalization=True)
self.dense4 = DenseBlock(400, batch_normalization=True)
self.dense5 = DenseBlock(800, batch_normalization=True, spatial=True)
self.reshape0 = Reshape([1, 800])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=800, kernel_size=2, strides=1, batch_normalization=True)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=800, kernel_size=3, strides=1, batch_normalization=True)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=800, kernel_size=4, strides=1, batch_normalization=True)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=800, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=800, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx7(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000, batch_normalization=False)
self.dense1 = DenseBlock(900, batch_normalization=True)
self.dense2 = DenseBlock(800, batch_normalization=True)
self.dense3 = DenseBlock(700, batch_normalization=True)
self.dense4 = DenseBlock(600, batch_normalization=True)
self.dense5 = DenseBlock(500, batch_normalization=True, spatial=True)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=2, strides=1, batch_normalization=True)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200, kernel_size=3, strides=1, batch_normalization=True)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120, kernel_size=4, strides=1, batch_normalization=True)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx8(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(900, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(800, batch_normalization=True)
self.dense3 = DenseBlock(700, batch_normalization=True)
self.dense4 = DenseBlock(600, batch_normalization=True)
self.dense5 = DenseBlock(500, batch_normalization=True, spatial=True)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=2, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200, kernel_size=3, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120, kernel_size=4, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx9(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(4000, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(3000, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(2000, batch_normalization=False, dropout_rate=0.5)
self.dense3 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense4 = DenseBlock(750, batch_normalization=False)
self.dense5 = DenseBlock(500, batch_normalization=False, spatial=True)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=2, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200, kernel_size=3, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120, kernel_size=4, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx10(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(4000, batch_normalization=False)
self.dense1 = DenseBlock(3000, batch_normalization=True)
self.dense2 = DenseBlock(2000, batch_normalization=True)
self.dense3 = DenseBlock(1000, batch_normalization=True)
self.dense4 = DenseBlock(750, batch_normalization=True)
self.dense5 = DenseBlock(500, batch_normalization=True, spatial=True)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=2, strides=1, batch_normalization=True)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200, kernel_size=3, strides=1, batch_normalization=True)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120, kernel_size=4, strides=1, batch_normalization=True)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
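# Decoder length arithmetic, as a sketch: assuming Conv1DTransposeBlock (defined
# earlier in this module) wraps a 'valid'-padded Conv1DTranspose, each block maps
# a length-L input to (L - 1) * strides + kernel_size samples. The kernel_size
# 2/3/4 stack above therefore grows the single latent step as
# 1 -> 2 -> 4 -> 7 -> 16 -> 34 -> 70, and Cropping1D((3, 3)) trims the result to
# the 64 samples that Reshape([64]) expects. The helpers below are illustrative,
# not part of the original model zoo.
def _transposed_convolution_output_length(length, kernel_size, strides):
    """Output length of a 'valid'-padded transposed 1D convolution."""
    return (length - 1) * strides + kernel_size

def _check_kernel_234_decoder_length():
    """Checks the length arithmetic of the kernel_size 2/3/4 decoder stack."""
    length = 1  # Reshape([1, 500]) emits a single time step.
    for kernel_size, strides in [(2, 1), (3, 1), (4, 1), (4, 2), (4, 2), (4, 2)]:
        length = _transposed_convolution_output_length(length, kernel_size, strides)
    assert length == 70
    assert length - (3 + 3) == 64  # Cropping1D((3, 3)) removes three samples per side.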
class Nyx11(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(900, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(800, batch_normalization=False, dropout_rate=0.5)
self.dense3 = DenseBlock(700, batch_normalization=True, dropout_rate=0)
self.dense4 = DenseBlock(600, batch_normalization=True, dropout_rate=0)
self.dense5 = DenseBlock(500, batch_normalization=True, spatial=True, dropout_rate=0)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=2, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200, kernel_size=3, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120, kernel_size=4, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=2, batch_normalization=True, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
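# Minimal smoke-test sketch: the dense stacks build lazily around the first batch
# they see, so the batch size and input width below are illustrative assumptions
# rather than requirements; only the 64-sample output is fixed by the decoder.
def _smoke_test(model_class=Nyx10, batch_size=8, input_width=10):
    """Runs one random batch through a model and checks the output shape."""
    import numpy as np
    model = model_class()
    parameters = np.random.normal(size=[batch_size, input_width]).astype('float32')
    curves = model(parameters, training=False)
    assert tuple(curves.shape) == (batch_size, 64)
    return curves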
class Nyx12(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(900, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(800, batch_normalization=False, dropout_rate=0.5)
self.dense3 = DenseBlock(700, batch_normalization=False, dropout_rate=0.5)
self.dense4 = DenseBlock(600, batch_normalization=False, dropout_rate=0.5)
self.dense5 = DenseBlock(500, batch_normalization=False, spatial=True, dropout_rate=0.5)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=2, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200, kernel_size=3, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120, kernel_size=4, strides=1, batch_normalization=True, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=2, batch_normalization=True, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx13(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(4000, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(3000, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(2000, batch_normalization=False, dropout_rate=0.5)
self.dense3 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense4 = DenseBlock(750, batch_normalization=False)
self.dense5 = DenseBlock(500, batch_normalization=False, spatial=True)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=4, strides=1, batch_normalization=False)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=350, kernel_size=4, strides=1, batch_normalization=False)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=300, kernel_size=4, strides=1, batch_normalization=False)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=250, kernel_size=4, strides=2, batch_normalization=False)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=200, kernel_size=4, strides=1, batch_normalization=False)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=150, kernel_size=4, strides=1, batch_normalization=False)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=100, kernel_size=4, strides=1, batch_normalization=False)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=50, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution8 = Conv1DTranspose(filters=1, kernel_size=4, strides=1, activation='linear')
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
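# The nine-block kernel_size=4 decoder introduced here follows the same length
# arithmetic (again assuming 'valid' padding): 1 -> 4 -> 7 -> 10 -> 22 -> 25
# -> 28 -> 31 -> 64 -> 67 samples, after which Cropping1D((1, 2)) trims one
# sample from the front and two from the back to reach the 64-sample output.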
class Nyx14(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(4000, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(3500, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(3000, batch_normalization=False, dropout_rate=0.5)
self.dense3 = DenseBlock(2500, batch_normalization=False, dropout_rate=0.5)
self.dense4 = DenseBlock(2000, batch_normalization=False, dropout_rate=0.5)
self.dense5 = DenseBlock(1500, batch_normalization=False, dropout_rate=0.5)
self.dense6 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense7 = DenseBlock(750, batch_normalization=False)
self.dense8 = DenseBlock(500, batch_normalization=False, spatial=True)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=2, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200, kernel_size=3, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120, kernel_size=4, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx15(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000, batch_normalization=False)
self.dense1 = DenseBlock(1000, batch_normalization=False)
self.dense2 = DenseBlock(900, batch_normalization=True)
self.dense3 = DenseBlock(900, batch_normalization=True)
self.dense4 = DenseBlock(800, batch_normalization=True)
self.dense5 = DenseBlock(800, batch_normalization=True)
self.dense6 = DenseBlock(700, batch_normalization=True)
self.dense7 = DenseBlock(700, batch_normalization=True)
self.dense8 = DenseBlock(600, batch_normalization=True)
self.dense9 = DenseBlock(600, batch_normalization=True)
self.dense10 = DenseBlock(500, batch_normalization=True)
self.dense11 = DenseBlock(500, batch_normalization=True, spatial=True)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=4, strides=1,
batch_normalization=True)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=350, kernel_size=4, strides=1,
batch_normalization=True)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=300, kernel_size=4, strides=1,
batch_normalization=True)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=250, kernel_size=4, strides=2,
batch_normalization=True)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=200, kernel_size=4, strides=1,
batch_normalization=True)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=150, kernel_size=4, strides=1,
batch_normalization=False)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=100, kernel_size=4, strides=1,
batch_normalization=False)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=50, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution8 = Conv1DTranspose(filters=1, kernel_size=4, strides=1, activation='linear')
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.dense9(x, training=training)
x = self.dense10(x, training=training)
x = self.dense11(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx16(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(900, batch_normalization=True, dropout_rate=0.5)
self.dense3 = DenseBlock(900, batch_normalization=True, dropout_rate=0.5)
self.dense4 = DenseBlock(800, batch_normalization=True, dropout_rate=0.5)
self.dense5 = DenseBlock(800, batch_normalization=True, dropout_rate=0.5)
self.dense6 = DenseBlock(700, batch_normalization=True, dropout_rate=0.5)
self.dense7 = DenseBlock(700, batch_normalization=True, dropout_rate=0.5)
self.dense8 = DenseBlock(600, batch_normalization=True, dropout_rate=0.5)
self.dense9 = DenseBlock(600, batch_normalization=True, dropout_rate=0.5)
self.dense10 = DenseBlock(500, batch_normalization=True, dropout_rate=0.5)
self.dense11 = DenseBlock(500, batch_normalization=True, spatial=True, dropout_rate=0.5)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0.5)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=350, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0.5)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=300, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0.5)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=250, kernel_size=4, strides=2,
batch_normalization=True, dropout_rate=0.5)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=200, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0.5)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=150, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=100, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=50, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution8 = Conv1DTranspose(filters=1, kernel_size=4, strides=1, activation='linear')
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.dense9(x, training=training)
x = self.dense10(x, training=training)
x = self.dense11(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx17(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(900, batch_normalization=False, dropout_rate=0.5)
self.dense3 = DenseBlock(900, batch_normalization=False, dropout_rate=0.5)
self.dense4 = DenseBlock(800, batch_normalization=False, dropout_rate=0.5)
self.dense5 = DenseBlock(800, batch_normalization=False, dropout_rate=0.5)
self.dense6 = DenseBlock(700, batch_normalization=False, dropout_rate=0.5)
self.dense7 = DenseBlock(700, batch_normalization=False, dropout_rate=0.5)
self.dense8 = DenseBlock(600, batch_normalization=False, dropout_rate=0.5)
self.dense9 = DenseBlock(600, batch_normalization=False, dropout_rate=0.5)
self.dense10 = DenseBlock(500, batch_normalization=False, dropout_rate=0.5)
self.dense11 = DenseBlock(500, batch_normalization=False, spatial=True, dropout_rate=0.5)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=350, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=300, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=250, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=200, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=150, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=100, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=50, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution8 = Conv1DTranspose(filters=1, kernel_size=4, strides=1, activation='linear')
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.dense9(x, training=training)
x = self.dense10(x, training=training)
x = self.dense11(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
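# Nyx15, Nyx16, and Nyx17 share the same twelve-block dense stack and nine-block
# decoder and differ only in regularization: Nyx15 relies on batch normalization
# (leaving the blocks' default dropout in place), Nyx16 adds an explicit 0.5
# dropout on top of the batch normalization, and Nyx17 keeps the 0.5 dropout but
# disables batch normalization entirely.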
class Nyx18(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000*4, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(1000*4, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(900*4, batch_normalization=False, dropout_rate=0.5)
self.dense3 = DenseBlock(900*4, batch_normalization=False, dropout_rate=0.5)
self.dense4 = DenseBlock(800*4, batch_normalization=False, dropout_rate=0.5)
self.dense5 = DenseBlock(800*4, batch_normalization=False, dropout_rate=0.5)
self.dense6 = DenseBlock(700*4, batch_normalization=False, dropout_rate=0.5)
self.dense7 = DenseBlock(700*4, batch_normalization=False, dropout_rate=0.5)
self.dense8 = DenseBlock(600*4, batch_normalization=False, dropout_rate=0.5)
self.dense9 = DenseBlock(600*4, batch_normalization=False, dropout_rate=0.5)
self.dense10 = DenseBlock(500*4, batch_normalization=False, dropout_rate=0.5)
self.dense11 = DenseBlock(500*4, batch_normalization=False, spatial=True, dropout_rate=0.5)
self.reshape0 = Reshape([1, 500*4])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=350*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=300*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=250*4, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=200*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=150*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=100*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=50*4, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution8 = Conv1DTranspose(filters=1, kernel_size=4, strides=1, activation='linear')
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.dense9(x, training=training)
x = self.dense10(x, training=training)
x = self.dense11(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx19(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000*4, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(1000*4, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(900*4, batch_normalization=True, dropout_rate=0.5)
self.dense3 = DenseBlock(900*4, batch_normalization=True, dropout_rate=0.5)
self.dense4 = DenseBlock(800*4, batch_normalization=True, dropout_rate=0.5)
self.dense5 = DenseBlock(800*4, batch_normalization=True, dropout_rate=0.5)
self.dense6 = DenseBlock(700*4, batch_normalization=True, dropout_rate=0.5)
self.dense7 = DenseBlock(700*4, batch_normalization=True, dropout_rate=0.5)
self.dense8 = DenseBlock(600*4, batch_normalization=True, dropout_rate=0.5)
self.dense9 = DenseBlock(600*4, batch_normalization=True, dropout_rate=0.5)
self.dense10 = DenseBlock(500*4, batch_normalization=True, dropout_rate=0.5)
self.dense11 = DenseBlock(500*4, batch_normalization=True, spatial=True, dropout_rate=0.5)
self.reshape0 = Reshape([1, 500*4])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400*4, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0.5)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=350*4, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0.5)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=300*4, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0.5)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=250*4, kernel_size=4, strides=2,
batch_normalization=True, dropout_rate=0.5)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=200*4, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0.5)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=150*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=100*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.5)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=50*4, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution8 = Conv1DTranspose(filters=1, kernel_size=4, strides=1, activation='linear')
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.dense9(x, training=training)
x = self.dense10(x, training=training)
x = self.dense11(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx20(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000*4, batch_normalization=False, dropout_rate=0.9)
self.dense1 = DenseBlock(1000*4, batch_normalization=False, dropout_rate=0.9)
self.dense2 = DenseBlock(900*4, batch_normalization=False, dropout_rate=0.9)
self.dense3 = DenseBlock(900*4, batch_normalization=False, dropout_rate=0.9)
self.dense4 = DenseBlock(800*4, batch_normalization=False, dropout_rate=0.9)
self.dense5 = DenseBlock(800*4, batch_normalization=False, dropout_rate=0.9)
self.dense6 = DenseBlock(700*4, batch_normalization=False, dropout_rate=0.9)
self.dense7 = DenseBlock(700*4, batch_normalization=False, dropout_rate=0.9)
self.dense8 = DenseBlock(600*4, batch_normalization=False, dropout_rate=0.9)
self.dense9 = DenseBlock(600*4, batch_normalization=False, dropout_rate=0.9)
self.dense10 = DenseBlock(500*4, batch_normalization=False, dropout_rate=0.9)
self.dense11 = DenseBlock(500*4, batch_normalization=False, spatial=True, dropout_rate=0.9)
self.reshape0 = Reshape([1, 500*4])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.9)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=350*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.9)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=300*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.9)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=250*4, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0.9)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=200*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.9)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=150*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.9)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=100*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.9)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=50*4, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution8 = Conv1DTranspose(filters=1, kernel_size=4, strides=1, activation='linear')
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.dense9(x, training=training)
x = self.dense10(x, training=training)
x = self.dense11(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
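# Nyx20 repeats the 4x-wide, batch-normalization-free Nyx18 topology with
# dropout raised from 0.5 to 0.9 (the penultimate block and the linear output
# layer stay dropout-free); Nyx21 below keeps the 0.9 rate only on the first six
# dense blocks and disables dropout everywhere else.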
class Nyx21(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000*4, batch_normalization=False, dropout_rate=0.9)
self.dense1 = DenseBlock(1000*4, batch_normalization=False, dropout_rate=0.9)
self.dense2 = DenseBlock(900*4, batch_normalization=False, dropout_rate=0.9)
self.dense3 = DenseBlock(900*4, batch_normalization=False, dropout_rate=0.9)
self.dense4 = DenseBlock(800*4, batch_normalization=False, dropout_rate=0.9)
self.dense5 = DenseBlock(800*4, batch_normalization=False, dropout_rate=0.9)
self.dense6 = DenseBlock(700*4, batch_normalization=False, dropout_rate=0)
self.dense7 = DenseBlock(700*4, batch_normalization=False, dropout_rate=0)
self.dense8 = DenseBlock(600*4, batch_normalization=False, dropout_rate=0)
self.dense9 = DenseBlock(600*4, batch_normalization=False, dropout_rate=0)
self.dense10 = DenseBlock(500*4, batch_normalization=False, dropout_rate=0)
self.dense11 = DenseBlock(500*4, batch_normalization=False, spatial=True, dropout_rate=0)
self.reshape0 = Reshape([1, 500*4])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=350*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=300*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=250*4, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=200*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=150*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=100*4, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=50*4, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution8 = Conv1DTranspose(filters=1, kernel_size=4, strides=1, activation='linear')
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.dense9(x, training=training)
x = self.dense10(x, training=training)
x = self.dense11(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx22(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(900, batch_normalization=True, dropout_rate=0.5)
self.dense3 = DenseBlock(900, batch_normalization=True, dropout_rate=0.5)
self.dense4 = DenseBlock(800, batch_normalization=True, dropout_rate=0.5)
self.dense5 = DenseBlock(800, batch_normalization=True, dropout_rate=0.5)
self.dense6 = DenseBlock(700, batch_normalization=True, dropout_rate=0)
self.dense7 = DenseBlock(700, batch_normalization=True, dropout_rate=0)
self.dense8 = DenseBlock(600, batch_normalization=True, dropout_rate=0)
self.dense9 = DenseBlock(600, batch_normalization=True, dropout_rate=0)
self.dense10 = DenseBlock(500, batch_normalization=True, dropout_rate=0)
self.dense11 = DenseBlock(500, batch_normalization=True, spatial=True, dropout_rate=0)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=350, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=300, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=250, kernel_size=4, strides=2,
batch_normalization=True, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=200, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=150, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=100, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=50, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution8 = Conv1DTranspose(filters=1, kernel_size=4, strides=1, activation='linear')
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.dense9(x, training=training)
x = self.dense10(x, training=training)
x = self.dense11(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx23(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(1000, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(900, batch_normalization=True, dropout_rate=0.5)
self.dense3 = DenseBlock(900, batch_normalization=True, dropout_rate=0.5)
self.dense4 = DenseBlock(800, batch_normalization=True, dropout_rate=0.5)
self.dense5 = DenseBlock(800, batch_normalization=True, dropout_rate=0.5)
self.dense6 = DenseBlock(700, batch_normalization=True, dropout_rate=0)
self.dense7 = DenseBlock(700, batch_normalization=True, dropout_rate=0)
self.dense8 = DenseBlock(600, batch_normalization=True, dropout_rate=0)
self.dense9 = DenseBlock(600, batch_normalization=True, dropout_rate=0)
self.dense10 = DenseBlock(500, batch_normalization=True, dropout_rate=0)
self.dense11 = DenseBlock(500, batch_normalization=True, spatial=True, dropout_rate=0)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=350, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=300, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=250, kernel_size=4, strides=2,
batch_normalization=True, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=200, kernel_size=4, strides=1,
batch_normalization=True, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=150, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=100, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=50, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution8 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.dense9(x, training=training)
x = self.dense10(x, training=training)
x = self.dense11(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
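# Nyx23 is Nyx22 with a single change: the linear Conv1DTranspose output layer is
# replaced by a Conv1DTransposeBlock, so the output passes through whatever
# activation and post-processing that block applies.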
class Eos0(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(20, batch_normalization=False, dropout_rate=0)
self.dense1 = DenseBlock(30, batch_normalization=False, dropout_rate=0)
self.dense2 = DenseBlock(40, batch_normalization=False, dropout_rate=0)
self.dense3 = DenseBlock(50, batch_normalization=False, dropout_rate=0)
self.dense4 = DenseBlock(100, batch_normalization=False, dropout_rate=0.5)
self.dense5 = DenseBlock(100, batch_normalization=False, dropout_rate=0.5)
self.dense6 = DenseBlock(200, batch_normalization=False, dropout_rate=0.5)
self.dense7 = DenseBlock(200, batch_normalization=False, dropout_rate=0.5)
self.dense8 = DenseBlock(300, batch_normalization=False, dropout_rate=0.5)
self.dense9 = DenseBlock(300, batch_normalization=False, dropout_rate=0.5)
self.dense10 = DenseBlock(500, batch_normalization=False, dropout_rate=0.5)
self.dense11 = DenseBlock(500, batch_normalization=False, spatial=True, dropout_rate=0.5)
self.reshape0 = Reshape([1, 500])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=350, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=300, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=250, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=200, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=150, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=100, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=50, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution8 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.dense9(x, training=training)
x = self.dense10(x, training=training)
x = self.dense11(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
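# Unlike the Nyx models, whose dense stacks narrow toward the latent Reshape,
# Eos0 widens from 20 to 500 units before the decoder, applies dropout only on
# the wider (100-to-500-unit) dense blocks, and uses a nine-block decoder that
# matches Nyx23's filter progression with batch normalization disabled
# throughout.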
class Eos1(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(20, batch_normalization=False, dropout_rate=0)
self.dense1 = DenseBlock(30, batch_normalization=False, dropout_rate=0)
self.dense2 = DenseBlock(40, batch_normalization=False, dropout_rate=0)
self.dense3 = DenseBlock(50, batch_normalization=False, dropout_rate=0)
self.dense4 = DenseBlock(60, batch_normalization=False, dropout_rate=0)
self.dense5 = DenseBlock(60, batch_normalization=False, dropout_rate=0)
self.dense6 = DenseBlock(60, batch_normalization=False, dropout_rate=0)
self.dense7 = DenseBlock(60, batch_normalization=False, dropout_rate=0)
self.dense8 = DenseBlock(60, batch_normalization=False, dropout_rate=0)
self.dense9 = DenseBlock(60, batch_normalization=False, dropout_rate=0)
self.dense10 = DenseBlock(60, batch_normalization=False, dropout_rate=0)
self.dense11 = DenseBlock(60, batch_normalization=False, spatial=True, dropout_rate=0)
self.reshape0 = Reshape([1, 60])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=50, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=40, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=20, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=10, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0)
self.transposed_convolution8 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.dense9(x, training=training)
x = self.dense10(x, training=training)
x = self.dense11(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Eos2(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(20, batch_normalization=False, dropout_rate=0.1)
self.dense1 = DenseBlock(30, batch_normalization=False, dropout_rate=0.1)
self.dense2 = DenseBlock(40, batch_normalization=False, dropout_rate=0.1)
self.dense3 = DenseBlock(50, batch_normalization=False, dropout_rate=0.1)
self.dense4 = DenseBlock(60, batch_normalization=False, dropout_rate=0.1)
self.dense5 = DenseBlock(60, batch_normalization=False, dropout_rate=0.1)
self.dense6 = DenseBlock(60, batch_normalization=False, dropout_rate=0.1)
self.dense7 = DenseBlock(60, batch_normalization=False, dropout_rate=0.1)
self.dense8 = DenseBlock(60, batch_normalization=False, dropout_rate=0.1)
self.dense9 = DenseBlock(60, batch_normalization=False, dropout_rate=0.1)
self.dense10 = DenseBlock(60, batch_normalization=False, dropout_rate=0.1)
self.dense11 = DenseBlock(60, batch_normalization=False, spatial=True, dropout_rate=0.1)
self.reshape0 = Reshape([1, 60])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.1)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.1)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=60, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.1)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=50, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0.1)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=40, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.1)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=30, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.1)
self.transposed_convolution6 = Conv1DTransposeBlock(filters=20, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0.1)
self.transposed_convolution7 = Conv1DTransposeBlock(filters=10, kernel_size=4, strides=2,
batch_normalization=False, dropout_rate=0.1)
self.transposed_convolution8 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=1,
batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((1, 2))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.dense6(x, training=training)
x = self.dense7(x, training=training)
x = self.dense8(x, training=training)
x = self.dense9(x, training=training)
x = self.dense10(x, training=training)
x = self.dense11(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.transposed_convolution6(x, training=training)
x = self.transposed_convolution7(x, training=training)
x = self.transposed_convolution8(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
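# Eos1 and Eos2 shrink the stack further to a 20-to-60-unit dense profile with a
# matching narrow decoder (60 filters tapering to 1); Eos2 repeats Eos1 with a
# light 0.1 dropout on every block except the final one.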
class Nyx9Narrow(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(4000//2, batch_normalization=False, dropout_rate=0.5)
self.dense1 = DenseBlock(3000//2, batch_normalization=False, dropout_rate=0.5)
self.dense2 = DenseBlock(2000//2, batch_normalization=False, dropout_rate=0.5)
self.dense3 = DenseBlock(1000//2, batch_normalization=False, dropout_rate=0.5)
self.dense4 = DenseBlock(750//2, batch_normalization=False)
self.dense5 = DenseBlock(500//2, batch_normalization=False, spatial=True)
self.reshape0 = Reshape([1, 500//2])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400//2, kernel_size=2, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200//2, kernel_size=3, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120//2, kernel_size=4, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60//2, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30//2, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
        The forward pass of the model.
        :param inputs: The input tensor.
        :param training: A boolean specifying whether the model should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx9Wide(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(4000*2, batch_normalization=False, dropout_rate=0)
self.dense1 = DenseBlock(3000*2, batch_normalization=False, dropout_rate=0)
self.dense2 = DenseBlock(2000*2, batch_normalization=False, dropout_rate=0)
self.dense3 = DenseBlock(1000*2, batch_normalization=False, dropout_rate=0)
self.dense4 = DenseBlock(750*2, batch_normalization=False, dropout_rate=0)
self.dense5 = DenseBlock(500*2, batch_normalization=False, spatial=True, dropout_rate=0)
self.reshape0 = Reshape([1, 500*2])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400*2, kernel_size=2, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200*2, kernel_size=3, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120*2, kernel_size=4, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60*2, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30*2, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
class Nyx9Wider(Model):
def __init__(self):
super().__init__()
self.dense0 = DenseBlock(4000*8, batch_normalization=False, dropout_rate=0.0)
self.dense1 = DenseBlock(3000*8, batch_normalization=False, dropout_rate=0.0)
self.dense2 = DenseBlock(2000*8, batch_normalization=False, dropout_rate=0.0)
self.dense3 = DenseBlock(1000*8, batch_normalization=False, dropout_rate=0.0)
self.dense4 = DenseBlock(750*8, batch_normalization=False, dropout_rate=0.0)
self.dense5 = DenseBlock(500*8, batch_normalization=False, spatial=True, dropout_rate=0.0)
# self.noise0 = GaussianNoise(0.05)
self.reshape0 = Reshape([1, 500*8])
self.transposed_convolution0 = Conv1DTransposeBlock(filters=400*8, kernel_size=2, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution1 = Conv1DTransposeBlock(filters=200*8, kernel_size=3, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution2 = Conv1DTransposeBlock(filters=120*8, kernel_size=4, strides=1, batch_normalization=False, dropout_rate=0)
self.transposed_convolution3 = Conv1DTransposeBlock(filters=60*8, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution4 = Conv1DTransposeBlock(filters=30*8, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.transposed_convolution5 = Conv1DTransposeBlock(filters=1, kernel_size=4, strides=2, batch_normalization=False, dropout_rate=0)
self.reshape1 = Reshape([64])
self.cropping0 = Cropping1D((3, 3))
def call(self, inputs, training=False, mask=None):
"""
The forward pass of the layer.
:param inputs: The input tensor.
:param training: A boolean specifying if the layer should be in training mode.
:param mask: A mask for the input tensor.
:return: The output tensor of the layer.
"""
x = inputs
# ones = tf.ones([1, 11])
# noise_multiplier = self.noise0(ones, training=training)
# x = x * noise_multiplier
x = self.dense0(x, training=training)
x = self.dense1(x, training=training)
x = self.dense2(x, training=training)
x = self.dense3(x, training=training)
x = self.dense4(x, training=training)
x = self.dense5(x, training=training)
x = self.reshape0(x, training=training)
x = self.transposed_convolution0(x, training=training)
x = self.transposed_convolution1(x, training=training)
x = self.transposed_convolution2(x, training=training)
x = self.transposed_convolution3(x, training=training)
x = self.transposed_convolution4(x, training=training)
x = self.transposed_convolution5(x, training=training)
x = self.cropping0(x, training=training)
x = self.reshape1(x, training=training)
return x
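# --- Hedged usage sketch (not part of the original file) ---
# Exercises one of the Nyx9 decoder variants end to end. It assumes the
# DenseBlock / Conv1DTransposeBlock / Reshape / Cropping1D blocks defined
# earlier in this file, and an 11-feature input vector, which is only an
# assumption inferred from the commented-out noise code in Nyx9Wider.call.
import tensorflow as tf

example_model = Nyx9Narrow()
dummy_inputs = tf.random.normal([8, 11])               # batch of 8 parameter vectors
outputs = example_model(dummy_inputs, training=False)  # 70 time steps, cropped by (3, 3), reshaped to (8, 64)
print(outputs.shape)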
| 59.325569
| 163
| 0.663172
| 13,036
| 112,066
| 5.557073
| 0.014268
| 0.131415
| 0.139394
| 0.137655
| 0.986555
| 0.983504
| 0.975663
| 0.967671
| 0.964482
| 0.952804
| 0
| 0.049972
| 0.239305
| 112,066
| 1,888
| 164
| 59.356992
| 0.799808
| 0.070351
| 0
| 0.836921
| 0
| 0
| 0.000528
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044357
| false
| 0
| 0.002609
| 0
| 0.091324
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
148a8fcebc22e06d57e31ca1eb35ee3894a7bebe
| 10,717
|
py
|
Python
|
10.py
|
rulitka/clean_code
|
23e23990a48d13edf58d9d669a7becabaf70e044
|
[
"MIT"
] | null | null | null |
10.py
|
rulitka/clean_code
|
23e23990a48d13edf58d9d669a7becabaf70e044
|
[
"MIT"
] | null | null | null |
10.py
|
rulitka/clean_code
|
23e23990a48d13edf58d9d669a7becabaf70e044
|
[
"MIT"
] | null | null | null |
1. Task No. 4.
// Variable initialization moved closer to the point of use:
// Before
lst_new = []
lst_curr = []
// ... lots of code in between
for i in range(len(Tele_new)):
if i <= center:
lst_new.append(Tele_new[i])
if i > center:
lst_curr.append(Tele_new[i])
// After
lst_new = []
lst_curr = []
for i in range(len(Tele_new)):
if i <= center:
lst_new.append(Tele_new[i])
if i > center:
lst_curr.append(Tele_new[i])
2. Task No. 4.
// Removed a line that assigned the variable a value ahead of time:
// Before
string_result = 0
// ... lots of code in between
string_result = ''
old_with_null = '0'
new_no_null = ''
for i in string_no_point:
if i == old_with_null:
i = new_no_null
string_result += i
// After
string_result = ''
old_with_null = '0'
new_no_null = ''
for i in string_no_point:
if i == old_with_null:
i = new_no_null
string_result += i
3. Task No. 15.
// Variable initialization moved closer to the point of use:
// Before
pre_result_S1 = create_matrix(H1, W1, S1)
pre_result_S2 = create_matrix(H2, W2, S2)
// ... lots of code in between
for i in range(len(pre_result_S1)):
for j in range(len(pre_result_S1[i])):
if pre_result_S2 == tmp:
// ... more code follows
// After
pre_result_S1 = create_matrix(H1, W1, S1)
pre_result_S2 = create_matrix(H2, W2, S2)
for i in range(len(pre_result_S1)):
for j in range(len(pre_result_S1[i])):
if pre_result_S2 == tmp:
// ... more code follows
4. Task No. 15.
// Removed a line that assigned the variable a value ahead of time:
// Before
pre_result_S1 = create_matrix(H1, W1, S1)
pre_result_S2 = create_matrix(H2, W2, S2)
tmp = [[0] * W2 for i in range(H2)]
x = 0
y = 0
num = 0
lum = 0
for i in range(len(pre_result_S1)):
for j in range(len(pre_result_S1[i])):
// ... more code follows
// After
pre_result_S1 = create_matrix(H1, W1, S1)
pre_result_S2 = create_matrix(H2, W2, S2)
tmp = [[0] * W2 for i in range(H2)]
x = 0
y = 0
for i in range(len(pre_result_S1)):
for j in range(len(pre_result_S1[i])):
// ... more code follows
5. Task No. 15.
// Removed a line that assigned the variable a value ahead of time:
// Before
result_S = []
result_S = [0] * H
// After
result_S = [0] * H
6. Task No. 15.
// Variable initialization moved closer to the point of use:
// Before
def create_matrix(H, W, S):
result_S = []
result_S = [0] * H
a = 0
j = 0
for i in range(H):
result_S[i] = [0] * W
for i in range(len(S)):
if S[i] == ' ':
a += 1
j = 0
pass
else:
result_S[a][j] = int(S[i])
j += 1
continue
return result_S
// After
def create_matrix(H, W, S):
result_S = [0] * H
for i in range(H):
result_S[i] = [0] * W
a = 0
j = 0
for i in range(len(S)):
if S[i] == ' ':
a += 1
j = 0
pass
else:
result_S[a][j] = int(S[i])
j += 1
continue
return result_S
7. Task No. 20:
// Variable initialization moved closer to its point of use:
// Before
def BastShoe(command):
check_str = check_string(command)
final_elem = ''
global final_string
if check_str[0] == 1:
final_string = add_string(check_str)
if check_str[0] == 2:
final_string = del_elements(check_str)
if check_str[0] == 3:
final_elem = get_index_element(check_str)
return final_elem
if check_str[0] == 4:
final_string = undo_position()
if check_str[0] == 5:
final_string = redo_position(check_str)
return final_string
// After
def BastShoe(command):
check_str = check_string(command)
global final_string
if check_str[0] == 1:
final_string = add_string(check_str)
if check_str[0] == 2:
final_string = del_elements(check_str)
final_elem = ''
if check_str[0] == 3:
final_elem = get_index_element(check_str)
return final_elem
if check_str[0] == 4:
final_string = undo_position()
if check_str[0] == 5:
final_string = redo_position(check_str)
return final_string
8. Task No. 21
// Removed an extra variable that was never used later in the program:
// Before
def oddswap(in_put):
temp_matrix = list(in_put)
full_matrix = []
part_matrix= []
// After
def oddswap(in_put):
temp_matrix = list(in_put)
full_matrix = []
9. Task No. 26
// Variable initialization moved closer to its point of use (note: it must stay before combinations(), which populates the global list):
// Before
def BalancedParentheses(N):
global combs
combs = []
combinations(N, left = N, right = N, toClose = 0, str="")
result = ' '.join(combs)
return result
// After
def BalancedParentheses(N):
    global combs
    combs = []  # must stay before combinations(), which appends to the global list
    combinations(N, left = N, right = N, toClose = 0, str="")
    result = ' '.join(combs)
    return result
10. Task No. 27
// Fixed a bug: the variable length was initialized but never used:
// Before
def rule_one(F):
Flag = True
n = 0
m = 0
length = len(F)
pos1 = 0
pos2 = 0
for i in range(1, len(F)):
// More code follows
// After
def rule_one(F):
Flag = True
n = 0
m = 0
length = len(F)
pos1 = 0
pos2 = 0
for i in range(1, length):
// More code follows
11. Task No. 2
// The counter variable is placed immediately before the loop:
// Before
num = 0
sum_distance = 0
distance_all = 0
for index, number in enumerate(N):
if index%2 == 0:
speed = N[index]
else:
if num == 0:
time = N[index]
num += 1
distance = speed*time
else:
time_new = N[index] - time
time = N[index]
distance = speed*time_new
sum_distance += distance
distance_all += sum_distance
return distance_all
else:
exit()
// After
sum_distance = 0
distance_all = 0
num = 0
for index, number in enumerate(N):
if index%2 == 0:
speed = N[index]
else:
if num == 0:
time = N[index]
num += 1
distance = speed*time
else:
time_new = N[index] - time
time = N[index]
distance = speed*time_new
sum_distance += distance
distance_all += sum_distance
return distance_all
else:
exit()
12. Task No. 3
// The counter variable is placed immediately before the loop:
// Before
def ConquestCampaign(N, M, L, battalion):
board = [[0]*M for i in range(N)]
day = 1
count = N*M
for i in range(0, 2*L, 2):
x = battalion[i] - 1
y = battalion[i+1] - 1
if board[x][y] == 0:
board[x][y] = 1
count -= 1
while count != 0:
new_board = [[board[i][j] for j in range(M)] for i in range(N)]
for i in range(N):
for j in range(M):
if board[i][j] == 1:
for x, y in (i-1, j), (i, j-1), (i, j+1), (i+1, j):
if 0 <= x < N and 0 <= y < M and new_board[x][y] == 0:
new_board[x][y] = 1
count -= 1
board = new_board
day += 1
return day
// After
def ConquestCampaign(N, M, L, battalion):
board = [[0]*M for i in range(N)]
count = N*M
for i in range(0, 2*L, 2):
x = battalion[i] - 1
y = battalion[i+1] - 1
if board[x][y] == 0:
board[x][y] = 1
count -= 1
day = 1
while count != 0:
new_board = [[board[i][j] for j in range(M)] for i in range(N)]
for i in range(N):
for j in range(M):
if board[i][j] == 1:
for x, y in (i-1, j), (i, j-1), (i, j+1), (i+1, j):
if 0 <= x < N and 0 <= y < M and new_board[x][y] == 0:
new_board[x][y] = 1
count -= 1
board = new_board
day += 1
return day
13. Task No. 12
// The counter variable is placed immediately before the loop:
// Before
def MassVote(N, Votes):
mx = Votes[0]
n = 0
for i in range(len(Votes)):
if Votes[i] > mx:
mx = Votes[i]
for i in range(len(Votes)):
if mx == Votes[i]:
n += 1
// More code follows
// After
def MassVote(N, Votes):
mx = Votes[0]
for i in range(len(Votes)):
if Votes[i] > mx:
mx = Votes[i]
n = 0
for i in range(len(Votes)):
if mx == Votes[i]:
n += 1
// More code follows
14. Task No. 14
// The counter variable is placed immediately before the loop:
// Before
def Unmanned(L, N, track):
num = 0
number_of_tr_light = 0
real_time = 0
for i in range(1, L+1):
tr_light = track[number_of_tr_light][0]
if i == tr_light:
real_time += 1
num += 1
// After
def Unmanned(L, N, track):
number_of_tr_light = 0
real_time = 0
num = 0
for i in range(1, L+1):
tr_light = track[number_of_tr_light][0]
if i == tr_light:
real_time += 1
num += 1
15. Task No. 16
// The counter variable is placed immediately before the loop:
// Before
def create_matrix(N, price):
H = 3
W = (int(N / H) + (N % H > 0))
result_S = []
result_S = [0] * W
a = 0
j = 0
n_elem = 0
for i in range(W):
result_S[i] = [0] * H
for i in range(len(price)):
if n_elem < 3:
result_S[a][j] = int(price[i])
j += 1
n_elem += 1
if n_elem == 3:
a += 1
j = 0
n_elem = 0
return result_S
// After
def create_matrix(N, price):
H = 3
W = (int(N / H) + (N % H > 0))
result_S = []
result_S = [0] * W
a = 0
j = 0
for i in range(W):
result_S[i] = [0] * H
n_elem = 0
for i in range(len(price)):
if n_elem < 3:
result_S[a][j] = int(price[i])
j += 1
n_elem += 1
if n_elem == 3:
a += 1
j = 0
n_elem = 0
return result_S
| 26.075426
| 85
| 0.510404
| 1,524
| 10,717
| 3.46063
| 0.116798
| 0.053091
| 0.03868
| 0.066743
| 0.911832
| 0.884907
| 0.868221
| 0.841107
| 0.750284
| 0.737012
| 0
| 0.039525
| 0.372026
| 10,717
| 410
| 86
| 26.139024
| 0.739673
| 0
| 0
| 0.943445
| 0
| 0
| 0.00056
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.005141
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
14bd1e9ef230480a30da9e67629a6d6a576ab445
| 54
|
py
|
Python
|
hypernetworks/utils/HTTransform.py
|
rdchar/HypernetworkTheory
|
d18696c5ac8db3c8633d4441b8932b9a4c1efbd4
|
[
"MIT"
] | 1
|
2022-03-30T18:30:01.000Z
|
2022-03-30T18:30:01.000Z
|
hypernetworks/utils/HTTransform.py
|
rdchar/hypernetworks
|
c49882aae05ba9c1a4d50f0d0214e6533124984f
|
[
"MIT"
] | null | null | null |
hypernetworks/utils/HTTransform.py
|
rdchar/hypernetworks
|
c49882aae05ba9c1a4d50f0d0214e6533124984f
|
[
"MIT"
] | null | null | null |
# TODO: the from_R/to_R conversions below are stubs and not implemented yet.
def from_R():
pass
def to_R():
pass
| 6
| 13
| 0.518519
| 9
| 54
| 2.888889
| 0.666667
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.351852
| 54
| 8
| 14
| 6.75
| 0.742857
| 0.074074
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
1ad4d7d675f19fbf7e59e161f8fdd57934bb8cba
| 2,277
|
py
|
Python
|
test/functions/test_select_items.py
|
rjagerman/chainerltr
|
1fdb6a0a304a465d27149011951a01a5e3de4bbc
|
[
"MIT"
] | 1
|
2019-04-10T03:18:23.000Z
|
2019-04-10T03:18:23.000Z
|
test/functions/test_select_items.py
|
rjagerman/chainerltr
|
1fdb6a0a304a465d27149011951a01a5e3de4bbc
|
[
"MIT"
] | null | null | null |
test/functions/test_select_items.py
|
rjagerman/chainerltr
|
1fdb6a0a304a465d27149011951a01a5e3de4bbc
|
[
"MIT"
] | null | null | null |
import numpy as np
from chainer import as_variable
from chainer.testing import assert_allclose
from chainerltr.functions import select_items_per_row, inverse_select_items_per_row
def test_select_items_identity():
idx = as_variable(np.array([[0, 1, 2, 3], [0, 1, 2, 3]]))
val = as_variable(np.array([[0.5, 3.14, 0.0, -9.9], [1.0, -1.0, 1.0, 4.0]]))
out = select_items_per_row(val, idx)
assert_allclose(out.data, val.data)
def test_select_items_none():
idx = as_variable(np.array([[], []], dtype=np.int32))
val = as_variable(np.array([[0.5, 3.14, 0.0, -9.9], [1.0, -1.0, 1.0, 4.0]]))
out = select_items_per_row(val, idx)
assert_allclose(out.data, np.array([[], []], dtype=np.int32))
def test_select_items_permuted():
idx = as_variable(np.array([[3, 1, 0, 2], [1, 0, 3, 2]]))
val = as_variable(np.array([[0.5, 3.14, 0.0, -9.9], [1.0, -1.0, 1.0, 4.0]]))
exp = as_variable(np.array([[-9.9, 3.14, 0.5, 0.0], [-1.0, 1.0, 4.0, 1.0]]))
out = select_items_per_row(val, idx)
assert_allclose(out.data, exp.data)
def test_select_items_less_idx():
idx = as_variable(np.array([[3, 1], [1, 3]]))
val = as_variable(np.array([[0.5, 3.14, 0.0, -9.9], [1.0, -1.0, 1.0, 4.0]]))
exp = as_variable(np.array([[-9.9, 3.14], [-1.0, 4.0]]))
out = select_items_per_row(val, idx)
assert_allclose(out.data, exp.data)
def test_inv_select_items_identity():
idx = as_variable(np.array([[], []], dtype=np.int32))
val = as_variable(np.array([[0.5, 3.14, 0.0, -9.9], [1.0, -1.0, 1.0, 4.0]]))
out = inverse_select_items_per_row(val, idx)
assert_allclose(out.data, val.data)
def test_inv_select_items_none():
idx = as_variable(np.array([[0, 1, 2, 3], [0, 1, 2, 3]], dtype=np.int32))
val = as_variable(np.array([[0.5, 3.14, 0.0, -9.9], [1.0, -1.0, 1.0, 4.0]]))
out = inverse_select_items_per_row(val, idx)
assert_allclose(out.data, np.array([[], []], dtype=np.int32))
def test_inv_select_items_less_idx():
idx = as_variable(np.array([[3, 1], [1, 3]]))
val = as_variable(np.array([[0.5, 3.14, 0.0, -9.9], [1.0, -1.0, 1.0, 4.0]]))
exp = as_variable(np.array([[0.5, 0.0], [1.0, 1.0]]))
out = inverse_select_items_per_row(val, idx)
assert_allclose(out.data, exp.data)
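# --- Hedged sketch (not chainerltr's implementation) ---
# The tests above pin down the semantics of select_items_per_row: for each
# row i, gather values[i, indices[i, j]]. A NumPy-only equivalent of that
# behaviour, for illustration:
def select_items_per_row_sketch(values, indices):
    # Row-wise gather; mirrors the behaviour the tests exercise.
    return np.take_along_axis(values, indices.astype(np.int64), axis=1)

_vals = np.array([[0.5, 3.14, 0.0, -9.9], [1.0, -1.0, 1.0, 4.0]])
_idx = np.array([[3, 1], [1, 3]])
print(select_items_per_row_sketch(_vals, _idx))  # [[-9.9, 3.14], [-1.0, 4.0]]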
| 32.070423
| 83
| 0.613966
| 438
| 2,277
| 3.002283
| 0.091324
| 0.044106
| 0.043346
| 0.219772
| 0.879087
| 0.858555
| 0.853992
| 0.835741
| 0.772624
| 0.772624
| 0
| 0.10247
| 0.164251
| 2,277
| 70
| 84
| 32.528571
| 0.588544
| 0
| 0
| 0.595238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 1
| 0.166667
| false
| 0
| 0.095238
| 0
| 0.261905
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1af7e3fb2724efbcb451a7e10fcfc357fc78d9a0
| 2,546
|
py
|
Python
|
gnetwork/hadamard_base_matrices.py
|
vicariousgreg/neuroceril
|
708be54f665b28bf80b0bd7ec3a384d794cbf49a
|
[
"MIT"
] | 3
|
2020-11-18T11:04:03.000Z
|
2021-03-31T06:58:49.000Z
|
gnetwork/hadamard_base_matrices.py
|
vicariousgreg/neuroceril
|
708be54f665b28bf80b0bd7ec3a384d794cbf49a
|
[
"MIT"
] | null | null | null |
gnetwork/hadamard_base_matrices.py
|
vicariousgreg/neuroceril
|
708be54f665b28bf80b0bd7ec3a384d794cbf49a
|
[
"MIT"
] | 4
|
2019-07-23T04:42:55.000Z
|
2022-02-07T02:57:59.000Z
|
import numpy as np
H_20 = np.array([
[+1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1],
[+1, +1, -1, +1, +1, -1, -1, -1, -1, +1, -1, +1, -1, +1, +1, +1, +1, -1, -1, +1],
[+1, +1, +1, -1, +1, +1, -1, -1, -1, -1, +1, -1, +1, -1, +1, +1, +1, +1, -1, -1],
[+1, -1, +1, +1, -1, +1, +1, -1, -1, -1, -1, +1, -1, +1, -1, +1, +1, +1, +1, -1],
[+1, -1, -1, +1, +1, -1, +1, +1, -1, -1, -1, -1, +1, -1, +1, -1, +1, +1, +1, +1],
[+1, +1, -1, -1, +1, +1, -1, +1, +1, -1, -1, -1, -1, +1, -1, +1, -1, +1, +1, +1],
[+1, +1, +1, -1, -1, +1, +1, -1, +1, +1, -1, -1, -1, -1, +1, -1, +1, -1, +1, +1],
[+1, +1, +1, +1, -1, -1, +1, +1, -1, +1, +1, -1, -1, -1, -1, +1, -1, +1, -1, +1],
[+1, +1, +1, +1, +1, -1, -1, +1, +1, -1, +1, +1, -1, -1, -1, -1, +1, -1, +1, -1],
[+1, -1, +1, +1, +1, +1, -1, -1, +1, +1, -1, +1, +1, -1, -1, -1, -1, +1, -1, +1],
[+1, +1, -1, +1, +1, +1, +1, -1, -1, +1, +1, -1, +1, +1, -1, -1, -1, -1, +1, -1],
[+1, -1, +1, -1, +1, +1, +1, +1, -1, -1, +1, +1, -1, +1, +1, -1, -1, -1, -1, +1],
[+1, +1, -1, +1, -1, +1, +1, +1, +1, -1, -1, +1, +1, -1, +1, +1, -1, -1, -1, -1],
[+1, -1, +1, -1, +1, -1, +1, +1, +1, +1, -1, -1, +1, +1, -1, +1, +1, -1, -1, -1],
[+1, -1, -1, +1, -1, +1, -1, +1, +1, +1, +1, -1, -1, +1, +1, -1, +1, +1, -1, -1],
[+1, -1, -1, -1, +1, -1, +1, -1, +1, +1, +1, +1, -1, -1, +1, +1, -1, +1, +1, -1],
[+1, -1, -1, -1, -1, +1, -1, +1, -1, +1, +1, +1, +1, -1, -1, +1, +1, -1, +1, +1],
[+1, +1, -1, -1, -1, -1, +1, -1, +1, -1, +1, +1, +1, +1, -1, -1, +1, +1, -1, +1],
[+1, +1, +1, -1, -1, -1, -1, +1, -1, +1, -1, +1, +1, +1, +1, -1, -1, +1, +1, -1],
[+1, -1, +1, +1, -1, -1, -1, -1, +1, -1, +1, -1, +1, +1, +1, +1, -1, -1, +1, +1]])
H_12=np.array([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1,-1,-1, 1,-1,-1,-1, 1, 1, 1,-1, 1],
[1,-1, 1,-1,-1,-1, 1, 1, 1,-1, 1,-1],
[1, 1,-1,-1,-1, 1, 1, 1,-1, 1,-1,-1],
[1,-1,-1,-1, 1, 1, 1,-1, 1,-1,-1, 1],
[1,-1,-1, 1, 1, 1,-1, 1,-1,-1, 1,-1],
[1,-1, 1, 1, 1,-1, 1,-1,-1, 1,-1,-1],
[1, 1, 1, 1,-1, 1,-1,-1, 1,-1,-1,-1],
[1, 1, 1,-1, 1,-1,-1, 1,-1,-1,-1, 1],
[1, 1,-1, 1,-1,-1, 1,-1,-1,-1, 1, 1],
[1,-1, 1,-1,-1, 1,-1,-1,-1, 1, 1, 1],
[1, 1,-1,-1, 1,-1,-1,-1, 1, 1, 1,-1]])
H_4=np.array([[1, 1, 1, 1],
[1,-1, 1,-1],
[1, 1,-1,-1],
[1,-1,-1, 1]
])
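# --- Hedged sanity check (not part of the original file) ---
# A Hadamard matrix H of order n satisfies H @ H.T == n * I. If the matrices
# above were transcribed correctly, this check should pass for all three.
if __name__ == '__main__':
    for H in (H_4, H_12, H_20):
        n = H.shape[0]
        assert np.array_equal(H.dot(H.T), n * np.eye(n, dtype=H.dtype)), "order %d" % n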
| 57.863636
| 86
| 0.238413
| 576
| 2,546
| 1.048611
| 0.017361
| 1.844371
| 2.751656
| 3.649007
| 0.965232
| 0.965232
| 0.965232
| 0.965232
| 0.965232
| 0.965232
| 0
| 0.327346
| 0.322074
| 2,546
| 43
| 87
| 59.209302
| 0.022596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.025641
| 0
| 0.025641
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
21375a55839af6dbc9a5c0a735e5818e051e1521
| 14,342
|
py
|
Python
|
apps/tests/test_category.py
|
anyric/Yummy-Recipes-Api
|
08e8b75e5aabee45eead44a3c949830f0dcd782d
|
[
"MIT"
] | null | null | null |
apps/tests/test_category.py
|
anyric/Yummy-Recipes-Api
|
08e8b75e5aabee45eead44a3c949830f0dcd782d
|
[
"MIT"
] | 17
|
2018-01-03T12:57:34.000Z
|
2018-03-21T15:04:43.000Z
|
apps/tests/test_category.py
|
anyric/Yummy-Recipes-Api
|
08e8b75e5aabee45eead44a3c949830f0dcd782d
|
[
"MIT"
] | 1
|
2018-03-17T14:38:22.000Z
|
2018-03-17T14:38:22.000Z
|
"""module to test category view"""
from flask_testing import TestCase
import json
from apps import app, db
from apps import config
from apps.models.category import Category
from apps.models.user import Users
class CategoryTests(TestCase):
"""class to test category views"""
def create_app(self):
return app
def setUp(self):
"""function to setup app variables"""
config_name = config.TestingConfig
app.config.from_object(config_name)
self.app = app
self.test_client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
self.name = 'test user'
self.email = 'testuser@example.com'
self.test_username = 'testuser'
self.test_password = 'test1234'
with self.app_context:
db.create_all()
def tearDown(self):
"""function to delete test_db after every test case"""
db.session.remove()
db.drop_all()
self.app_context.pop()
def get_header_token(self):
"""function to retrieve token from header"""
data = {"username":self.test_username, "password":self.test_password}
response = self.test_client.post('/recipe/api/v1.0/user/login', data=json.dumps(data), \
content_type='application/json')
res = json.loads(response.data.decode())['token']
token = {"x-access-token": res}
return token
def register_new_user(self, name, email, username, password):
"""function to register_new_user view"""
data = {'name': name, 'email':email, \
'username':username, 'password': password}
response = self.client.post('/recipe/api/v1.0/user/register', data=json.dumps(data), \
content_type='application/json')
return response
def create_new_category(self, user_id, name, description):
"""function to test create_new_category view"""
data = {'user_id':user_id, 'name':name, 'description':description}
response = self.test_client.post('/recipe/api/v1.0/category', headers=\
self.get_header_token(), data=json.dumps(data), content_type='application/json')
return response
def test_create_new_category(self):
"""function to test create_new_category view"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
response = self.create_new_category(user.id, new_category_name, description)
self.assertEqual(response.status_code, 201)
self.assertEqual(Category.query.count(), 1)
def test_missing_fields_values(self):
"""function to test for missing values during creation of category"""
self.register_new_user(self.name, self.email,
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
response = self.create_new_category(user.id, 'indian food', '')
self.assertEqual(response.status_code, 400)
def test_category_exists(self):
"""function to test if a category exists"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
# first category values
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
# second category values
new_category_name1 = 'local food'
description1 = 'list of local foods'
response = self.create_new_category(user.id, new_category_name1, description1)
self.assertEqual(response.status_code, 400)
def test_view_category(self):
"""function to test view_category view"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
get_response = self.test_client.get('/recipe/api/v1.0/category/', headers=\
self.get_header_token())
self.assertEqual(get_response.status_code, 200)
self.assertEqual('application/json', get_response.headers['Content-Type'])
def test_view_category_page(self):
"""function to test view_category with wrong page number"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
get_response = self.test_client.get('/recipe/api/v1.0/category/?page=3', headers=\
self.get_header_token())
self.assertEqual(get_response.status_code, 404)
self.assertEqual('application/json', get_response.headers['Content-Type'])
def test_view_category_search_page(self):
"""function to test view_category with search and page parameters"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
get_response = self.test_client.get('/recipe/api/v1.0/category/?page=3&q=l', headers=\
self.get_header_token())
self.assertEqual(get_response.status_code, 404)
self.assertEqual('application/json', get_response.headers['Content-Type'])
def test_category_search_valid(self):
"""function to test view_category search work ok"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
get_response = self.test_client.get('/recipe/api/v1.0/category/?q=f', headers=\
self.get_header_token())
self.assertEqual(get_response.status_code, 200)
def test_category_search_invalid(self):
"""function to test view_category view"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
get_response = self.test_client.get('/recipe/api/v1.0/category/?q=p', headers=\
self.get_header_token())
self.assertEqual(get_response.status_code, 404)
def test_invalid_view_category(self):
"""function to test view_category view"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
get_response = self.test_client.get('/recipe/api/v1.0/category/?', headers=\
self.get_header_token())
self.assertEqual(get_response.status_code, 404)
def test_view_category_by_id(self):
"""function to test view_category_by_id"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
get_response = self.test_client.get('/recipe/api/v1.0/category/1', headers=\
self.get_header_token())
self.assertEqual(get_response.status_code, 200)
def test_category_by_invalid_id(self):
"""function to test view_category_by_id with invalid id"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
get_response = self.test_client.get('/recipe/api/v1.0/category/2', headers=\
self.get_header_token())
self.assertEqual(get_response.status_code, 404)
def test_category_unauthorized_user(self):
"""function to test view_category_by_id with invalid user"""
get_response = self.test_client.get('/recipe/api/v1.0/category/1')
self.assertEqual(get_response.status_code, 401)
def test_view_category_not_found(self):
"""function to test view_category_by_id badrequest"""
self.register_new_user(self.name, self.email,\
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
get_response = self.test_client.get('/recipe/api/v1.0/category/0', headers=\
self.get_header_token())
self.assertEqual(get_response.status_code, 404)
def test_update_category_ok(self):
"""function to test update_category"""
self.register_new_user(self.name, self.email,\
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
update_data = {'name':'ethiopian food', 'description':'list of ethiopian food'}
response = self.test_client.put('/recipe/api/v1.0/category/1', headers=\
self.get_header_token(), data=json.dumps(update_data), content_type='application/json')
self.assertEqual(response.status_code, 201)
def test_update_category_no_record(self):
"""function to test update_category no record for update"""
self.register_new_user(self.name, self.email,\
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
update_data = {'name':'ethiopian food', 'description':'list of ethiopian food'}
response = self.test_client.put('/recipe/api/v1.0/category/2', headers=\
self.get_header_token(), data=json.dumps(update_data), content_type='application/json')
self.assertEqual(response.status_code, 400)
def test_update_category_missing(self):
"""function to test update_category variable missing """
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
update_data = {'name':'ethiopian food', 'description':''}
response = self.test_client.put('/recipe/api/v1.0/category/1', headers=\
self.get_header_token(), data=json.dumps(update_data), content_type='application/json')
self.assertEqual(response.status_code, 400)
def test_delete_category_no_record(self):
"""function to test delete_category record not found"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
response = self.test_client.delete('/recipe/api/v1.0/category/2', headers=\
self.get_header_token(), content_type='application/json')
self.assertEqual(response.status_code, 404)
def test_delete_category_ok(self):
"""function to test delete_category deletes ok"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
response = self.test_client.delete('/recipe/api/v1.0/category/1', headers=\
self.get_header_token(), content_type='application/json')
self.assertEqual(response.status_code, 200)
def test_delete_category_not_found(self):
"""function to test update_category"""
self.register_new_user(self.name, self.email, \
self.test_username, self.test_password)
user = Users.query.filter_by(username=self.test_username).first()
new_category_name = 'local food'
description = 'list of local foods'
self.create_new_category(user.id, new_category_name, description)
response = self.test_client.delete('/recipe/api/v1.0/category/0', headers=\
self.get_header_token(), content_type='application/json')
self.assertEqual(response.status_code, 404)
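# --- Hypothetical helper (not part of the original suite) ---
# Nearly every test above repeats the same register-user / create-category
# preamble; a mixin like this could factor it out of CategoryTests.
class CategoryFixtureMixin(object):
    def _register_and_create_category(self, name='local food',
                                      description='list of local foods'):
        """Register the fixture user, create one category, return the user."""
        self.register_new_user(self.name, self.email,
                               self.test_username, self.test_password)
        user = Users.query.filter_by(username=self.test_username).first()
        self.create_new_category(user.id, name, description)
        return user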
| 50.678445
| 96
| 0.665876
| 1,809
| 14,342
| 5.03759
| 0.075732
| 0.066718
| 0.064962
| 0.025019
| 0.829584
| 0.822561
| 0.795347
| 0.770877
| 0.744651
| 0.733677
| 0
| 0.010433
| 0.224725
| 14,342
| 282
| 97
| 50.858156
| 0.809156
| 0.081788
| 0
| 0.631111
| 0
| 0
| 0.116975
| 0.041037
| 0
| 0
| 0
| 0
| 0.102222
| 1
| 0.111111
| false
| 0.097778
| 0.026667
| 0.004444
| 0.16
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
215e5d026f53cd8d66443021f9c815646c02cc56
| 3,398
|
py
|
Python
|
bin/GamespyQuery.py
|
radiosmersh/DiscordGSM
|
cbce54c46f4a0df31417a0b0a934793d5f59b743
|
[
"MIT"
] | null | null | null |
bin/GamespyQuery.py
|
radiosmersh/DiscordGSM
|
cbce54c46f4a0df31417a0b0a934793d5f59b743
|
[
"MIT"
] | null | null | null |
bin/GamespyQuery.py
|
radiosmersh/DiscordGSM
|
cbce54c46f4a0df31417a0b0a934793d5f59b743
|
[
"MIT"
] | null | null | null |
import socket
import time
import struct
import sys
import re
class GamespyV1Query(object):
def __init__(self, addr, port=23000, timeout=5.0):
self.ip, self.port, self.timeout = socket.gethostbyname(addr), port, timeout
self.sock = False
def disconnect(self):
if self.sock:
self.sock.close()
self.sock = False
def connect(self):
self.disconnect()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.sock.settimeout(self.timeout)
self.sock.connect((self.ip, self.port))
def getInfo(self):
self.connect()
# request
try:
self.sock.send(b'\\info\\')
response = self.sock.recv(1400)
except Exception as e:
print(e)
return False
try:
response = response[1:].decode('ascii').split('\\')
result = dict(zip(response[::2], response[1::2]))
return result
except Exception as e:
print(e)
return False
class GamespyV2Query(object):
def __init__(self, addr, port=29900, timeout=5.0):
self.ip, self.port, self.timeout = socket.gethostbyname(addr), port, timeout
self.sock = False
def disconnect(self):
if self.sock:
self.sock.close()
self.sock = False
def connect(self):
self.disconnect()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.sock.settimeout(self.timeout)
self.sock.connect((self.ip, self.port))
def getInfo(self):
self.connect()
# request
try:
self.sock.send(b'\xFE\xFD\x00\x43\x4F\x52\x59\xFF\x00\x00')
response = self.sock.recv(1400)
except Exception as e:
print(e)
return False
try:
response = response[5:].decode('ascii').split('\x00')
result = dict(zip(response[::2], response[1::2]))
return result
except Exception as e:
print(e)
return False
class GamespyV3Query(object):
def __init__(self, addr, port=29900, timeout=5.0):
self.ip, self.port, self.timeout = socket.gethostbyname(addr), port, timeout
self.sock = False
def disconnect(self):
if self.sock:
self.sock.close()
self.sock = False
def connect(self):
self.disconnect()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.sock.settimeout(self.timeout)
self.sock.connect((self.ip, self.port))
def getInfo(self):
self.connect()
# request
try:
timestamp = b'\x10\x20\x30\x40' # timestamp
query = b'\xFE\xFD\x00' + timestamp + b'\xFF\x00\x00\x00'
self.sock.send(query)
response = self.sock.recv(1400)
except Exception as e:
print(e)
return False
try:
response = response[5:-2].decode('ascii').split('\x00')
result = dict(zip(response[::2], response[1::2]))
return result
except Exception as e:
print(e)
return False
if __name__ == '__main__':
query = GamespyV3Query('bfewaw.com', 29900)
print(query.getInfo())
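# --- Hedged refactor sketch (not in the original file) ---
# The three query classes share connect()/disconnect() verbatim and differ
# only in the request payload and in how the response is sliced before being
# parsed into key/value pairs. Relies on the `import socket` at the top of
# this file.
class GamespyQueryBase(object):
    REQUEST = b'\\info\\'       # overridden per protocol version
    PAYLOAD = slice(1, None)    # response bytes carrying the key/value data
    SEPARATOR = '\\'

    def __init__(self, addr, port, timeout=5.0):
        self.ip, self.port, self.timeout = socket.gethostbyname(addr), port, timeout
        self.sock = False

    def disconnect(self):
        if self.sock:
            self.sock.close()
            self.sock = False

    def connect(self):
        self.disconnect()
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.settimeout(self.timeout)
        self.sock.connect((self.ip, self.port))

    def getInfo(self):
        self.connect()
        try:
            self.sock.send(self.REQUEST)
            response = self.sock.recv(1400)
            fields = response[self.PAYLOAD].decode('ascii').split(self.SEPARATOR)
            return dict(zip(fields[::2], fields[1::2]))
        except Exception as e:
            print(e)
            return False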
| 26.341085
| 84
| 0.55827
| 401
| 3,398
| 4.665835
| 0.184539
| 0.115446
| 0.032068
| 0.044896
| 0.839658
| 0.839658
| 0.826296
| 0.826296
| 0.826296
| 0.826296
| 0
| 0.038145
| 0.321071
| 3,398
| 129
| 85
| 26.341085
| 0.772865
| 0.011477
| 0
| 0.793814
| 0
| 0
| 0.040238
| 0.011923
| 0
| 0
| 0
| 0
| 0
| 1
| 0.123711
| false
| 0
| 0.051546
| 0
| 0.329897
| 0.072165
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2161239bc4815fd233b12aeb260fe1547664fd2d
| 124
|
py
|
Python
|
svar_block_mc/__init__.py
|
QBatista/SVARBlockMC.py
|
025dfa30e10ac551acc4ae2f9752c1a1cc3d228f
|
[
"BSD-3-Clause"
] | null | null | null |
svar_block_mc/__init__.py
|
QBatista/SVARBlockMC.py
|
025dfa30e10ac551acc4ae2f9752c1a1cc3d228f
|
[
"BSD-3-Clause"
] | null | null | null |
svar_block_mc/__init__.py
|
QBatista/SVARBlockMC.py
|
025dfa30e10ac551acc4ae2f9752c1a1cc3d228f
|
[
"BSD-3-Clause"
] | 3
|
2019-05-24T11:30:45.000Z
|
2020-04-05T04:34:11.000Z
|
"""
Import the main name to the top level.
"""
from .sampling import gen_samples_A_L, gen_samples_A_ii_0, gen_samples_C_i
| 17.714286
| 74
| 0.774194
| 24
| 124
| 3.583333
| 0.708333
| 0.348837
| 0.255814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009434
| 0.145161
| 124
| 6
| 75
| 20.666667
| 0.801887
| 0.306452
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dcc85e232dbd6dce55fd1b739ee5441d48f6dfa2
| 2,498
|
py
|
Python
|
client/test/test_code_eps.py
|
estcube/telemetry-forwarding-client
|
be659c8dd8e4bd26d1d1974d63f90acffd150e34
|
[
"MIT"
] | 3
|
2020-06-11T12:34:25.000Z
|
2020-09-16T12:06:32.000Z
|
client/test/test_code_eps.py
|
estcube/telemetry-forwarding-client
|
be659c8dd8e4bd26d1d1974d63f90acffd150e34
|
[
"MIT"
] | 57
|
2020-09-16T09:11:04.000Z
|
2022-02-28T01:32:13.000Z
|
client/test/test_code_eps.py
|
estcube/Telemetry-Forwarding-Client
|
be659c8dd8e4bd26d1d1974d63f90acffd150e34
|
[
"MIT"
] | null | null | null |
from hk_eps import *
from numberGen import generate
class EpsData():
def createData(self):
hk_packet = bytearray(hk_eps_enabled([1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0]))
hk_packet.extend(hk_eps_errors([1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0]))
hk_packet.extend(hk_eps_bus_voltage(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_avg_power_balance(generate(333, -32768, 32767)))
hk_packet.extend(hk_eps_battery_status([1, 1, 1, 1, 1, 1, 1, 1]))
hk_packet.extend(hk_eps_bat_curr_a(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_bat_curr_b(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_bat_curr_c(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_bat_curr_d(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_bat_volt_a(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_bat_volt_b(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_bat_volt_c(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_bat_volt_d(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_bat_temp_a(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_bat_temp_b(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_bat_temp_c(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_bat_temp_d(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_obc_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_com_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_eps_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_st_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_x_plus_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_x_minus_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_y_plus_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_y_minus_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_z_plus_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_z_minus_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_cdp_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_cam_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_hscom_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_cre_curr_cons(generate(3333, 0, 65535)))
hk_packet.extend(hk_eps_cgp_curr_cons(generate(3333, 0, 65535)))
return hk_packet
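# --- Hedged alternative (not the project's code) ---
# The long run of hk_packet.extend(...) calls above could be driven by a
# list of the uint16 field builders, all of which take the same
# generate(3333, 0, 65535) argument. Builder names come from the
# `from hk_eps import *` at the top of this file.
UINT16_BUILDERS = [
    hk_eps_bus_voltage,
    hk_eps_bat_curr_a, hk_eps_bat_curr_b, hk_eps_bat_curr_c, hk_eps_bat_curr_d,
    hk_eps_bat_volt_a, hk_eps_bat_volt_b, hk_eps_bat_volt_c, hk_eps_bat_volt_d,
    hk_eps_bat_temp_a, hk_eps_bat_temp_b, hk_eps_bat_temp_c, hk_eps_bat_temp_d,
    hk_eps_obc_curr_cons, hk_eps_com_curr_cons, hk_eps_eps_curr_cons,
    hk_eps_st_curr_cons, hk_eps_x_plus_curr_cons, hk_eps_x_minus_curr_cons,
    hk_eps_y_plus_curr_cons, hk_eps_y_minus_curr_cons, hk_eps_z_plus_curr_cons,
    hk_eps_z_minus_curr_cons, hk_eps_cdp_curr_cons, hk_eps_cam_curr_cons,
    hk_eps_hscom_curr_cons, hk_eps_cre_curr_cons, hk_eps_cgp_curr_cons,
]

def create_uint16_fields():
    # Same bytes as the corresponding extend() calls above, minus the
    # enabled/errors/avg_power_balance/battery_status fields.
    packet = bytearray()
    for build in UINT16_BUILDERS:
        packet.extend(build(generate(3333, 0, 65535)))
    return packet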
| 53.148936
| 95
| 0.700961
| 428
| 2,498
| 3.712617
| 0.123832
| 0.103839
| 0.273128
| 0.312146
| 0.869729
| 0.857772
| 0.84141
| 0.820013
| 0.820013
| 0.796728
| 0
| 0.16025
| 0.168135
| 2,498
| 46
| 96
| 54.304348
| 0.604427
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027027
| false
| 0
| 0.054054
| 0
| 0.135135
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
dcdd00d093b56500535c0881f1bff25f83a0933f
| 58,512
|
py
|
Python
|
tcga_encoder/models/survival_analysis.py
|
tedmeeds/tcga_encoder
|
805f9a5bcc422a43faea45baa0996c88d346e3b4
|
[
"MIT"
] | 2
|
2017-12-19T15:32:46.000Z
|
2018-01-12T11:24:24.000Z
|
tcga_encoder/models/survival_analysis.py
|
tedmeeds/tcga_encoder
|
805f9a5bcc422a43faea45baa0996c88d346e3b4
|
[
"MIT"
] | null | null | null |
tcga_encoder/models/survival_analysis.py
|
tedmeeds/tcga_encoder
|
805f9a5bcc422a43faea45baa0996c88d346e3b4
|
[
"MIT"
] | null | null | null |
from tcga_encoder.utils.helpers import *
from tcga_encoder.definitions.locations import *
import sklearn
from sklearn.cluster import KMeans, SpectralClustering
from sklearn.model_selection import KFold
from tcga_encoder.models.lda import LinearDiscriminantAnalysis
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as sk_LinearDiscriminantAnalysis
from tcga_encoder.models.survival import *
import pdb
from lifelines import KaplanMeierFitter
import torch
from torch.autograd import Variable
from tcga_encoder.models.tf.weibull_survival import WeibullSurvivalModel
from tcga_encoder.models.tf.main import main as tf_main
#from tcga_encoder.models.pytorch.weibull_survival import WeibullSurvivalModel,WeibullSurvivalModelNeuralNetwork
from tcga_encoder.models.pytorch.lasso_regression import PytorchLasso
from tcga_encoder.models.pytorch.bootstrap_linear_regression import BootstrapLinearRegression, BootstrapLassoRegression
from tcga_encoder.models.pytorch.dropout_linear_regression import DropoutLinearRegression
#from tcga_encoder.models.pytorch.lasso_regression_gprior_ard import PytorchLasso
#import autograd.numpy as np
#from autograd import grad
#def cost_linear_reg_ard( X, y, w, b, h ):
def linear_reg_gprior_ard( X, y, alpha, lr = 1e-4, iters = 10, verbose = False, eps = 1e-6, w_init = None ):
n,d = X.shape
XTX = np.dot( X.T, X )
XXT = np.dot( X, X.T )
XX = X*X
sX = XX.sum(0)
if w_init is not None:
w = w_init
else:
w = np.zeros(d,dtype=float)
h = np.zeros(d,dtype=float)
z = np.exp(h)
b = np.mean(y)
old_cost = np.inf
for i in range(iters):
y_hat = np.dot( X, w ) + b
cost = np.sum( np.square( y - y_hat ) )
# if np.abs(old_cost - cost) < 0.001:
# print "stopping at ", i, old_cost, cost
# break
old_cost = cost
if verbose:
print "Error = ", np.sum( np.square( y - y_hat ) ), w[:5], b, z[:5]
g_w = - np.dot( X.T, (y-y_hat) )/n + (z+eps)*np.sign(w)/d
g_b = np.sum(y-np.dot( X, w ))
##g_h = np.abs(w)*z - alpha*np.dot( X.T, np.dot( X, (z+eps)**-2 ))*z
g_h = np.abs(w)*z/d - np.dot( sX.T, alpha*z*(z+eps)**-2 )/d
#g_w = - np.dot( X.T, (y-y_hat) ) + z*np.sign(w)
#g_b = np.sum(y-np.dot( X, w ))
#g_h = np.abs(w)*z - alpha*np.dot( X.T, np.dot( X, z**-1 ))
#g_h = np.abs(w)*z - np.dot( sX.T, alpha*z**-1 )
if np.any(np.isnan(g_w)):
pdb.set_trace()
if np.any(np.isinf(g_w)):
pdb.set_trace()
if np.any(np.isnan(g_h)):
pdb.set_trace()
if np.any(np.isinf(g_h)):
pdb.set_trace()
h = np.maximum( h - lr*g_h, -20 )
old_w = cost
w = w - lr*g_w
b = b - lr*g_b
z = np.exp(h)
# dif_w = np.linalg.norm( w-old_w )
#
# if i > 10 and dif_w < 1e-3:
# print "stopping at ", i, dif_w
# break
if np.any(np.isnan(w)):
pdb.set_trace()
y_hat = np.dot( X, w ) + b
if verbose:
print "Final Error = ", np.sum( np.square( y - y_hat ) )
#pdb.set_trace()
return w, b
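# --- Hedged usage sketch for linear_reg_gprior_ard (synthetic data) ---
# Illustrative only: alpha/lr/iters are untuned, and since this file uses
# Python 2 print statements, the sketch targets the same Python 2 era.
def _demo_linear_reg_gprior_ard():
    rng = np.random.RandomState(0)
    X = rng.randn(200, 5)
    true_w = np.array([1.0, -2.0, 0.0, 0.0, 0.5])
    y = np.dot(X, true_w) + 3.0 + 0.01 * rng.randn(200)
    w, b = linear_reg_gprior_ard(X, y, alpha=0.1, lr=1e-3, iters=500)
    return w, b  # w should roughly recover true_w; b the intercept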
def make_bootstraps( x, m ):
# samples from arange(n) with replacement, m times.
#x = np.arange(n, dtype=int)
n = len(x)
N = np.zeros( (m,n), dtype=int)
for i in range(m):
N[i,:] = sklearn.utils.resample( x, replace = True )
return N
def xval_folds( n, K, randomize = False, seed = None ):
if randomize is True:
print("XVAL RANDOMLY PERMUTING")
if seed is not None:
print( "XVAL SETTING SEED = %d"%(seed) )
np.random.seed(seed)
x = np.random.permutation(n)
else:
print( "XVAL JUST IN ARANGE ORDER")
x = np.arange(n,dtype=int)
kf = KFold( K )
train = []
test = []
for train_ids, test_ids in kf.split( x ):
#train_ids = np.setdiff1d( x, test_ids )
train.append( x[train_ids] )
test.append( x[test_ids] )
#pdb.set_trace()
return train, test
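# --- Usage sketch for xval_folds (illustrative only) ---
# Returns K parallel lists of train/test index arrays over n samples.
def _demo_xval_folds():
    train, test = xval_folds(10, 5, randomize=True, seed=0)
    for k in range(len(train)):
        print("fold %d: train=%d test=%d" % (k, len(train[k]), len(test[k])))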
def kmeans_survival( X, y, K ):
kmeans = KMeans(n_clusters=K ).fit(X.astype(float))
predictions = kmeans.predict(X)
# f = pp.figure()
# kmf = KaplanMeierFitter()
# ax1 = f.add_subplot(311)
# ax2 = f.add_subplot(312)
# ax3 = f.add_subplot(313)
#
# test_labels = []
# if len(Z_test) > 0:
# test_labels = kmeans.predict( Z_test.astype(float) )
# #pdb.set_trace()
#
# colours = "brgkmcbrgkmcbrgkmcbrgkmcbrgkmcbrgkmcbrgkmc"
# for k in range(K):
# I = pp.find( kmeans.labels_==k)
# Ti=T_train[I]
# Ei=E_train[I]
#
# if len(Ti)>0:
# kmf.fit(Ti, event_observed=Ei, label = "train_k=%d"%k)
# ax1=kmf.plot(ax=ax1, color=colours[k])
#
# if len(test_labels) > 0:
# I_test = pp.find( test_labels==k)
# Ti_test=T_test[I_test]
# Ei_test=E_test[I_test]
#
# if len(Ti_test)>0:
# kmf.fit(Ti_test, event_observed=Ei_test, label = "test_k=%d"%k)
# ax2=kmf.plot(ax=ax2, color=colours[k])
#
# T = np.hstack( (Ti,Ti_test))
# E = np.hstack( (Ei,Ei_test))
# if len(T)>0:
# kmf.fit(T, event_observed=E, label = "all_k=%d"%k)
# ax3=kmf.plot(ax=ax3, color=colours[k])
# #pdb.set_trace()
# pp.suptitle("%s"%(disease))
return predictions
def lda_with_xval_and_bootstrap( X, y, k_fold = 10, n_bootstraps = 10, randomize = True, seed = 0, epsilon = 1e-12 ):
print "epsilon", epsilon
n,d = X.shape
assert len(y) == n, "incorrect sizes"
train_folds, test_folds = xval_folds( n, k_fold, randomize = randomize, seed = seed )
avg_projection = np.zeros( n, dtype=float )
avg_probability = np.zeros( n, dtype=float )
mean_projections = np.zeros( n, dtype=float )
var_projections = np.zeros( n, dtype=float )
mean_probabilities = np.zeros( n, dtype=float )
var_probabilities = np.zeros( n, dtype=float )
# for each fold, compute mean and variances
w_mean = np.zeros( (k_fold,d), dtype = float )
w_var = np.zeros( (k_fold,d), dtype = float )
for k, train_ids, test_ids in zip( range(k_fold), train_folds, test_folds ):
X_test = X[test_ids,:]
bootstrap_ids = make_bootstraps( train_ids, n_bootstraps )
for bootstrap_train_ids in bootstrap_ids:
#pdb.set_trace()
X_train = X[bootstrap_train_ids,:]
y_train = y[bootstrap_train_ids]
lda = LinearDiscriminantAnalysis(epsilon=epsilon)
lda.fit( X_train, y_train )
w = lda.w_prop_to
sk_lda = sk_LinearDiscriminantAnalysis(solver='lsqr', shrinkage='auto')
sk_lda.fit( X_train, y_train )
try:
sk_test_proj = np.squeeze(sk_lda.predict_log_proba( X_test ))[:,1]
test_proj = sk_test_proj #lda.transform( X_test )
test_prob = np.squeeze(sk_lda.predict_proba( X_test )[:,1]) # lda.predict( X_test )
except:
test_proj = lda.transform( X_test )
test_prob = lda.prob( X_test )
#pdb.set_trace()
#ranked = np.argsort(test_proj).astype(float) / len(test_proj)
#test_proj = ranked
#test_prob = lda.prob( X_test )
#test_proj = sk_test_proj
#I=pp.find( np.isinf(test_prob) )
#test_prob[I] = 1
#test_prob = np.squeeze(sk_lda.predict_proba( X_test )[:,1]) # lda.predict( X_test )
#test_predic
mean_projections[ test_ids ] += test_proj
mean_probabilities[ test_ids ] += test_prob
var_projections[ test_ids ] += np.square( test_proj )
var_probabilities[ test_ids ] += np.square( test_prob )
w_mean[k] += w
w_var[k] += np.square(w)
w_mn = w_mean[k] / n_bootstraps
lda = LinearDiscriminantAnalysis(epsilon=epsilon)
lda.fit( X[train_ids,:], y[train_ids] )
lda.w_prop_to = w_mn
lda.fit_density()
avg_projection[ test_ids ] = lda.transform( X_test )
avg_probability[ test_ids ] = lda.prob( X_test )
I=pp.find( np.isinf(avg_probability) )
avg_probability[I] = 1
w_mean /= n_bootstraps
w_var /= n_bootstraps
w_var -= np.square( w_mean )
print "xval w = ", w_mean.mean(0), w_var.mean(0)
mean_projections /= n_bootstraps
var_projections /= n_bootstraps
mean_probabilities /= n_bootstraps
var_probabilities /= n_bootstraps
var_projections -= np.square( mean_projections )
var_probabilities -= np.square( mean_probabilities )
return (mean_projections,var_projections),(mean_probabilities,var_probabilities),(w_mean,w_var),(avg_projection,avg_probability)
def predict_groups_with_loo_with_regression_gprior( X, y, C ):
#print "epsilon", epsilon
n,d = X.shape
assert len(y) == n, "incorrect sizes"
#train_folds, test_folds = xval_folds( n, k_fold, randomize = randomize, seed = seed )
avg_projection = np.zeros( n, dtype=float )
avg_probability = np.zeros( n, dtype=float )
mean_projections = np.zeros( n, dtype=float )
var_projections = np.zeros( n, dtype=float )
mean_probabilities = np.zeros( n, dtype=float )
var_probabilities = np.zeros( n, dtype=float )
# for each fold, compute mean and variances
w_mean = np.zeros( (n,d), dtype = float )
w_var = np.zeros( (n,d), dtype = float )
all_I = np.arange(n,dtype=int)
Ws = []
bs = []
for i in xrange(n):
train_ids = np.setdiff1d( all_I, i )
test_ids = [i]
X_test = X[test_ids,:]
#bootstrap_ids = bootstraps( train_ids, n_bootstraps )
X_train = X[train_ids,:]
y_train = y[train_ids]
#sklearn.linear_model.LogisticRegression()
penalty="l2"
#
#sk_lda = sklearn.linear_model.ARDRegression(alpha=0.5, fit_intercept=True, verbose=True)
#sk_lda = sklearn.linear_model.ElasticNet(alpha=0.5, fit_intercept=True)
#sk_lda = sklearn.linear_model.Ridge(alpha=1.5, fit_intercept=True)
sk_lda = sklearn.linear_model.Lasso(alpha=C, fit_intercept=True)
#sklearn.linear_model.BayesianRidge
#sk_lda = sklearn.linear_model.BayesianRidge(fit_intercept=False, verbose=True)
sk_lda.fit( X_train, y_train )
#pdb.set_trace()
sk_test_proj = np.squeeze(sk_lda.predict( X_test ))
test_proj = sk_test_proj #lda.transform( X_test )
#test_prob = np.squeeze(sk_lda.predict_proba( X_test ))[1] # lda.predict( X_test )
mean_projections[ i ] += test_proj
#mean_probabilities[ i ] += test_prob
var_projections[ i ] += np.square( test_proj )
#var_probabilities[ i ] += np.square( test_prob )
w = np.squeeze( sk_lda.coef_ )
Ws.append( w )
bs.append( sk_lda.intercept_ )
w_mean[i] += w
w_var[i] += np.square(w)
Ws = np.array(Ws)
bs = np.array(bs)
w_mn = w_mean.mean(0)
w_var = w_mean.var(0)
#print "loo w = ", w_mn
#print "loo w_var = ", w_var
var_projections -= np.square( mean_projections )
#var_probabilities -= np.square( mean_probabilities )
avg_projection=mean_projections
#avg_probability=mean_probabilities
return (mean_projections,var_projections),(w_mn,w_var,Ws,bs),(avg_projection,)
def tensorflow_survival_train_val( train, val, spec = None ):
l1 = 0
n_epochs = 1000
lr = 1e-3
logging_frequency = 2000
testing_frequency = 100
if spec is not None:
if spec.has_key( "lr" ):
lr = float( spec["lr"] )
if spec.has_key( "l1" ):
l1 = float( spec["l1"] )
if spec.has_key( "n_epochs" ):
n_epochs = int( spec["n_epochs"] )
if spec.has_key( "logging_frequency" ):
logging_frequency = int( spec["logging_frequency"] )
if spec.has_key( "testing_frequency" ):
testing_frequency = int( spec["testing_frequency"] )
print( "Running : pytorch_survival_train_val")
print (" with ")
print( " l1 = " + str(l1))
print( " n_epochs = " + str(n_epochs))
print( " lr = " + str(lr))
print( " logging_frequency = " + str(logging_frequency))
print( " testing_frequency = " + str(testing_frequency))
Z_train = train[0]
T_train = train[1]
E_train = train[2]
tissue_train = train[3]
Z_val = val[0]
T_val = val[1]
E_val = val[2]
tissue_val = val[3]
#Z_val -= Z_train.mean(0)
#Z_val /= Z_train.std(0)
#Z_train -= Z_train.mean(0)
#Z_train /= Z_train.std(0)
#print "epsilon", epsilon
n,dim = Z_val.shape
assert len(T_val) == n, "incorrect sizes"
assert len(E_val) == n, "incorrect sizes"
model, sess = tf_main( train, val, spec)
model.sess = sess
print "Done: model.fit()"
w = model.GetWeights( sess) #np.zeros(5) #model.w.data.numpy().flatten() #beta.data.numpy()
#pdb.set_trace()
test_proj = np.squeeze( model.LogTime( sess, Z_val, tissue_val, at_time=0.5 ) )
train_proj = np.squeeze( model.LogTime( sess, Z_train, tissue_train, at_time=0.5 ) )
train_cost = model.ComputeCost(sess, train)
test_cost = model.ComputeCost(sess, val)
test_time = np.exp( test_proj )
#T_test_proj = Variable( torch.FloatTensor( test_time ) )
#test_prob = model.LogLikelihood( E_test_py, T_test_py, Z_test_py ).data.numpy()
#train_proj = np.squeeze( model.LogTime( Z_train_py, at_time=0.5 ).data.numpy() )
train_time = np.exp( train_proj )
#T_train_proj = Variable( torch.FloatTensor( train_time ) )
#train_prob = model.LogLikelihood( E_train_py, T_train_py, Z_train_py ).data.numpy()
#pdb.set_trace()
return (train_proj, test_proj), (train_cost, test_cost), (train_time, test_time), (w,model)
def pytorch_survival_train_val( train, val, spec = None ):
l1 = 0
n_epochs = 1000
lr = 1e-3
logging_frequency = 2000
testing_frequency = 100
if spec is not None:
if spec.has_key( "lr" ):
lr = float( spec["lr"] )
if spec.has_key( "l1" ):
l1 = float( spec["l1"] )
if spec.has_key( "n_epochs" ):
n_epochs = int( spec["n_epochs"] )
if spec.has_key( "logging_frequency" ):
logging_frequency = int( spec["logging_frequency"] )
if spec.has_key( "testing_frequency" ):
testing_frequency = int( spec["testing_frequency"] )
print( "Running : pytorch_survival_train_val")
print (" with ")
print( " l1 = " + str(l1))
print( " n_epochs = " + str(n_epochs))
print( " lr = " + str(lr))
print( " logging_frequency = " + str(logging_frequency))
print( " testing_frequency = " + str(testing_frequency))
Z_train = train[0]
T_train = train[1]
E_train = train[2]
Z_val = val[0]
T_val = val[1]
E_val = val[2]
#Z_val -= Z_train.mean(0)
#Z_val /= Z_train.std(0)
#Z_train -= Z_train.mean(0)
#Z_train /= Z_train.std(0)
#print "epsilon", epsilon
n,dim = Z_val.shape
assert len(T_val) == n, "incorrect sizes"
assert len(E_val) == n, "incorrect sizes"
avg_projection = np.zeros( n, dtype=float )
avg_probability = np.zeros( n, dtype=float )
mean_projections = np.zeros( n, dtype=float )
var_projections = np.zeros( n, dtype=float )
mean_probabilities = np.zeros( n, dtype=float )
var_probabilities = np.zeros( n, dtype=float )
K = 10
# for each fold, compute mean and variances
w_mean = np.zeros( dim, dtype = float )
w_var = np.zeros( dim, dtype = float )
Z_test_py = Variable( torch.FloatTensor( Z_val ) )
T_test_py = Variable( torch.FloatTensor( T_val ) )
E_test_py = Variable( torch.FloatTensor( E_val ) )
Z_train_py = Variable( torch.FloatTensor( Z_train ) )
E_train_py = Variable( torch.FloatTensor( E_train ) )
T_train_py = Variable( torch.FloatTensor( T_train ) )
model = WeibullSurvivalModel( dim )
#model = WeibullSurvivalModelNeuralNetwork( dim, K )
model.add_test(E_test_py,T_test_py,Z_test_py)
#model.fit( E_train, T_train, Z_train, lr = 1e-3, logging_frequency = 2000, l1 = l1, n_epochs = n_epochs, normalize=False )
model.fit( E_train, T_train, Z_train, lr = lr, \
logging_frequency = logging_frequency, \
l1 = l1, \
n_epochs = n_epochs, \
normalize=False, testing_frequency=testing_frequency )
print "Done: model.fit()"
w = model.w.data.numpy().flatten() #beta.data.numpy()
#pdb.set_trace()
test_proj = np.squeeze( model.LogTime( Z_test_py, at_time=0.5 ).data.numpy() )
test_time = np.exp( test_proj )
T_test_proj = Variable( torch.FloatTensor( test_time ) )
test_prob = model.LogLikelihood( E_test_py, T_test_py, Z_test_py ).data.numpy()
train_proj = np.squeeze( model.LogTime( Z_train_py, at_time=0.5 ).data.numpy() )
train_time = np.exp( train_proj )
T_train_proj = Variable( torch.FloatTensor( train_time ) )
train_prob = model.LogLikelihood( E_train_py, T_train_py, Z_train_py ).data.numpy()
return (train_proj, test_proj), (train_prob, test_prob), (train_time, test_time), w
#return (mean_projections,var_projections),(mean_probabilities,var_probabilities),(w_mean,w_var),(avg_projection,avg_probability)
def pytorch_survival_xval( E, T, Z_orig, k_fold = 10, n_bootstraps = 10, randomize = True, seed = 0, l1 = 0.0, n_epochs = 1000, normalize = False ):
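"""K-fold cross-validation of the PyTorch Weibull survival model.

Each fold fits on the training split, plots Kaplan-Meier and model survival
curves, and accumulates held-out projections and log-likelihoods as first
and second moments; the second moments are converted to variances before
returning."""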
#print "epsilon", epsilon
n,dim = Z_orig.shape
assert len(T) == n, "incorrect sizes"
assert len(E) == n, "incorrect sizes"
train_folds, test_folds = xval_folds( n, k_fold, randomize = True, seed=0 )
avg_projection = np.zeros( n, dtype=float )
avg_probability = np.zeros( n, dtype=float )
mean_projections = np.zeros( n, dtype=float )
var_projections = np.zeros( n, dtype=float )
mean_probabilities = np.zeros( n, dtype=float )
var_probabilities = np.zeros( n, dtype=float )
K = 10
# for each fold, compute mean and variances
w_mean = np.zeros( (k_fold,dim), dtype = float )
w_var = np.zeros( (k_fold,dim), dtype = float )
for k, train_ids, test_ids in zip( range(k_fold), train_folds, test_folds ):
Z = Z_orig.copy()
mn_z = Z[train_ids,:].mean(0)
std_z = Z[train_ids,:].std(0)
if normalize is True:
print( "normalizing" )
Z -= mn_z
Z /= std_z
Z_test = Variable( torch.FloatTensor( Z[test_ids,:] ) )
T_test = Variable( torch.FloatTensor( T[test_ids] ) )
E_test = Variable( torch.FloatTensor( E[test_ids] ) )
Z_train = Variable( torch.FloatTensor( Z[train_ids,:] ) )
E_train = Variable( torch.FloatTensor( E[train_ids] ) )
T_train = Variable( torch.FloatTensor( T[train_ids] ) )
#pdb.set_trace()
Z_train_val = Z[train_ids,:]
T_train_val = T[train_ids]
E_train_val = E[train_ids]
mean_E_train = E_train_val.sum()
mean_E_test = E[test_ids].sum()
print("events train %d events test %d"%(mean_E_train,mean_E_test))
#pdb.set_trace()
model = WeibullSurvivalModel( dim )
#model = WeibullSurvivalModelNeuralNetwork( dim, K )
model.add_test(E_test,T_test,Z_test)
#model.fit( E_train, T_train, Z_train, lr = 1e-3, logging_frequency = 2000, l1 = l1, n_epochs = n_epochs, normalize=False )
model.fit( E_train_val, T_train_val, Z_train_val, lr = 1e-3, logging_frequency = 2000, l1 = l1, n_epochs = n_epochs, normalize=False )
w = model.w.data.numpy().flatten() #beta.data.numpy()
#pdb.set_trace()
test_proj = np.squeeze( model.LogTime( Z_test, at_time=0.5 ).data.numpy() )
time_proj = np.exp( test_proj )
T_test_proj = Variable( torch.FloatTensor( time_proj ) )
S_test_proj = np.squeeze(model.Survival( T_test_proj, Z_test ).data.numpy())
S_test = np.squeeze(model.Survival( T_test, Z_test ).data.numpy())
#test_proj /= 365.0
#test_proj = np.log(test_proj)
#test_proj -= np.median( test_proj )
# pp.figure()
#
f = pp.figure()
ax1 = f.add_subplot(111)
kmf = KaplanMeierFitter()
kmf.fit(T_train.data.numpy(), event_observed=E_train.data.numpy(), label = "train" )
ax1=kmf.plot(ax=ax1,at_risk_counts=False,show_censors=True, color='blue')
kmf.fit(T_test.data.numpy(), event_observed=E_test.data.numpy(), label = "test" )
ax1=kmf.plot(ax=ax1,at_risk_counts=False,show_censors=True, color='red')
model.PlotSurvival( E_train, T_train, Z_train, ax=ax1, color = "b" )
ax=model.PlotSurvival( E_test, T_test, Z_test, ax=ax1, color = "r" )
#ax.vlines(time_proj,0,1)
#ax.plot( np.vstack( (T_test.data.numpy(), time_proj) ), np.vstack( (S_test, S_test_proj) ), 'm-')
pp.title("TRAIN")
#pp.show()
#pdb.set_trace()
#pp.close('all')
test_prob = model.LogLikelihood( E_test, T_test, Z_test ).data.numpy()
#pdb.set_trace()
mean_projections[ test_ids ] += test_proj
mean_probabilities[ test_ids ] += test_prob
var_projections[ test_ids ] += np.square( test_proj )
var_probabilities[ test_ids ] += np.square( test_prob )
w_mean[k] += w
w_var[k] += np.square(w)
w_mn = w_mean[k] / n_bootstraps
#I=pp.find( np.isinf(avg_probability) )
#avg_probability[I] = 1
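# Convert the accumulated second moments into variances: Var[x] = E[x^2] - E[x]^2.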
w_var -= np.square( w_mean )
var_projections -= np.square( mean_projections )
var_probabilities -= np.square( mean_probabilities )
return (mean_projections,var_projections),(mean_probabilities,var_probabilities),(w_mean,w_var),(avg_projection,avg_probability)
def predict_groups_with_xval_with_regression( X_orig, y_orig, l1, k_fold=10, randomize = True, seed = 0, use_cuda=False ):
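"""K-fold cross-validated regression with DropoutLinearRegression.

Each fold fits the model on the training split and predicts the held-out
targets; per-sample predictions, weights (Ws) and biases (bs) are collected.
Returns projection means/variances, weight statistics and the averaged
projections."""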
#print "epsilon", epsilon
n,d = X_orig.shape
assert len(y_orig) == n, "incorrect sizes"
train_folds, test_folds = xval_folds( n, k_fold, randomize = randomize, seed = seed )
avg_projection = np.zeros( n, dtype=float )
avg_probability = np.zeros( n, dtype=float )
mean_projections = np.zeros( n, dtype=float )
var_projections = np.zeros( n, dtype=float )
mean_probabilities = np.zeros( n, dtype=float )
var_probabilities = np.zeros( n, dtype=float )
# for each fold, compute mean and variances
w_mean = np.zeros( (n,d), dtype = float )
w_var = np.zeros( (n,d), dtype = float )
all_I = np.arange(n,dtype=int)
Ws = np.zeros( (n,d) )
bs = np.zeros( (n,) )
for k, train_ids, test_ids in zip( range(k_fold), train_folds, test_folds ):
X = X_orig.copy()
y = y_orig.copy()
#y -= y.mean()
#X -= X.mean(0)
#X /= X.std(0)
X_test_val = X[test_ids,:]
y_test_val = y[test_ids]
X_train = X[train_ids,:]
y_train = y[train_ids]
#y_train -= np.median(y[train_ids])
#y_test_val -= np.median(y[train_ids])
# X_train = Variable( torch.FloatTensor( X_train ) )
if use_cuda is True:
X_test = Variable( torch.FloatTensor( X_test_val ) ).cuda()
y_test = Variable( torch.FloatTensor( y_test_val ) ).cuda()
else:
X_test = Variable( torch.FloatTensor( X_test_val ) )
y_test = Variable( torch.FloatTensor( y_test_val ) )
#penalty="l2"
#model = BootstrapLinearRegression( d, l1 )
#model = BootstrapLassoRegression( d, l1 )
model = DropoutLinearRegression( d, use_cuda )
#pdb.set_trace()
model.add_test( X_test, y_test )
model.fit( X_train, y_train, \
n_epochs=10000, \
min_epochs = 2000, \
logging_frequency = 500, \
testing_frequency = 100, \
lr=0.005, l1=l1 ,l2=0.00 ) #n_epochs=2000, lr = 0.01, logging_frequency = 500 )
#sk_lda = sklearn.linear_model.Lasso(alpha=l1, fit_intercept=True, normalize=True)
#sk_lda.fit( X_train, y_train )
#w_ard, b_ard = linear_reg_gprior_ard( X_train, y_train, C, lr = 0.001, iters=1500, verbose = False )
#sk_test_proj2 = model.predict( X_test ) #np.squeeze(model.predict( X_test ).data.numpy())
#sk_test_proj = np.dot( X_test, w_ard ) + b_ard
test_proj = np.squeeze( model.predict( X_test ) )
#pdb.set_trace()
#w_est_linear = np.dot( np.linalg.inv( np.dot(X_train.T,X_train) ), np.dot( X_train.T, y_train ) )
#pdb.set_trace()
#y_est_linear = np.dot( X_test_val, w_est_linear)
w = np.squeeze( model.get_w() ) #.data.numpy() )
#y_est_model = np.dot( X_test_val, w)+model.get_b()
#test_proj = y_est_model
#pdb.set_trace()
mean_projections[ test_ids ] += test_proj
var_projections[ test_ids ] += np.square( test_proj )
#w = w_ard #
#pdb.set_trace()
Ws[test_ids,:] = w
bs[test_ids] = model.get_b() #sk_lda.intercept_
#bs[test_ids] = model.bias.data.numpy()
w_mean[test_ids] += w
w_var[test_ids] += np.square(w)
#Ws = np.array(Ws)
#bs = np.array(bs)
w_mn = w_mean.mean(0)
w_var = w_mean.var(0)
#print "loo w = ", w_mn
#print "loo w_var = ", w_var
var_projections -= np.square( mean_projections )
#var_probabilities -= np.square( mean_probabilities )
avg_projection=mean_projections
#avg_probability=mean_probabilities
return (mean_projections,var_projections),(w_mn,w_var,Ws,bs),(avg_projection,)
def predict_groups_with_loo_with_regression( X, y, C ):
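"""Leave-one-out regression. For each held-out sample a Lasso fit (alpha=C)
initializes an ARD g-prior linear regression (linear_reg_gprior_ard) on the
remaining samples, whose weights produce the held-out prediction. Returns
per-sample projections plus the per-fit weights and intercepts."""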
#print "epsilon", epsilon
n,d = X.shape
assert len(y) == n, "incorrect sizes"
#train_folds, test_folds = xval_folds( n, k_fold, randomize = randomize, seed = seed )
avg_projection = np.zeros( n, dtype=float )
avg_probability = np.zeros( n, dtype=float )
mean_projections = np.zeros( n, dtype=float )
var_projections = np.zeros( n, dtype=float )
mean_probabilities = np.zeros( n, dtype=float )
var_probabilities = np.zeros( n, dtype=float )
# for each fold, compute mean and variances
w_mean = np.zeros( (n,d), dtype = float )
w_var = np.zeros( (n,d), dtype = float )
all_I = np.arange(n,dtype=int)
Ws = []
bs = []
for i in range(n):
train_ids = np.setdiff1d( all_I, i )
test_ids = [i]
X_test = X[test_ids,:]
#bootstrap_ids = bootstraps( train_ids, n_bootstraps )
X_train = X[train_ids,:]
y_train = y[train_ids]
# mn_x = X_train.mean(0)
# std_x = X_train.std(0); i_bad = pp.find( std_x == 0 ); std_x[i_bad]=1.0
# mn_y = y_train.mean()
# std_y = y_train.std()
#X_train -= mn_x;
#X_train /= std_x
#X_test -= mn_x;
#X_test /= std_x
#X_train -= 0.5
#X_test -= 0.5
# I_ = X_test < -0.5
# X_test = (1-I_)*X_test - I_
# I_ = X_test > 0.5
# X_test = (1-I_)*X_test + I_
#y_train -= mn_y;# y_train /= std_y
#sklearn.linear_model.LogisticRegression()
penalty="l2"
#
#sk_lda = sklearn.linear_model.ARDRegression(alpha=0.5, fit_intercept=True, verbose=True)
#sk_lda = sklearn.linear_model.ElasticNet(alpha=0.5, fit_intercept=True)
#sk_lda = sklearn.linear_model.Ridge(alpha=1.5, fit_intercept=True)
sk_lda = sklearn.linear_model.Lasso(alpha=C, fit_intercept=True)
#sklearn.linear_model.BayesianRidge
#sk_lda = sklearn.linear_model.BayesianRidge(fit_intercept=False, verbose=True)
sk_lda.fit( X_train, y_train )
w_ard, b_ard = linear_reg_gprior_ard( X_train, y_train, C, lr = 0.001, iters=1500, verbose = False, w_init = np.squeeze( sk_lda.coef_ ) )
sk_test_proj = np.squeeze(sk_lda.predict( X_test ))
sk_test_proj = np.dot( X_test, w_ard ) + b_ard
test_proj = sk_test_proj #lda.transform( X_test )
#test_prob = np.squeeze(sk_lda.predict_proba( X_test ))[1] # lda.predict( X_test )
mean_projections[ i ] += test_proj
#mean_probabilities[ i ] += test_prob
var_projections[ i ] += np.square( test_proj )
#var_probabilities[ i ] += np.square( test_prob )
w = w_ard #np.squeeze( sk_lda.coef_ )
#pdb.set_trace()
Ws.append( w )
bs.append( sk_lda.intercept_ )
w_mean[i] += w
w_var[i] += np.square(w)
Ws = np.array(Ws)
bs = np.array(bs)
w_mn = w_mean.mean(0)
w_var = w_mean.var(0)
#print "loo w = ", w_mn
#print "loo w_var = ", w_var
var_projections -= np.square( mean_projections )
#var_probabilities -= np.square( mean_probabilities )
avg_projection=mean_projections
#avg_probability=mean_probabilities
return (mean_projections,var_projections),(w_mn,w_var,Ws,bs),(avg_projection,)
def predict_groups_with_loo( X, y, C ):
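"""Leave-one-out logistic regression (sklearn, liblinear solver; l1 or l2
penalty chosen by the local `penalty` flag). Records each held-out sample's
class-1 log-probability and probability along with the fitted coefficient
vectors."""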
#print "epsilon", epsilon
n,d = X.shape
assert len(y) == n, "incorrect sizes"
#train_folds, test_folds = xval_folds( n, k_fold, randomize = randomize, seed = seed )
avg_projection = np.zeros( n, dtype=float )
avg_probability = np.zeros( n, dtype=float )
mean_projections = np.zeros( n, dtype=float )
var_projections = np.zeros( n, dtype=float )
mean_probabilities = np.zeros( n, dtype=float )
var_probabilities = np.zeros( n, dtype=float )
# for each fold, compute mean and variances
w_mean = np.zeros( (n,d), dtype = float )
w_var = np.zeros( (n,d), dtype = float )
all_I = np.arange(n,dtype=int)
Ws = []
for i in range(n):
train_ids = np.setdiff1d( all_I, i )
test_ids = [i]
X_test = X[test_ids,:]
#bootstrap_ids = bootstraps( train_ids, n_bootstraps )
X_train = X[train_ids,:]
y_train = y[train_ids]
# X_train -= 0.5
# X_test -= 0.5
# mn_x = X_train.mean(0)
# std_x = X_train.std(0); i_bad = pp.find( std_x == 0 ); std_x[i_bad]=1.0
# mn_y = y_train.mean()
# std_y = y_train.std()
# X_train -= mn_x;
# X_train /= std_x
#
# X_test -= mn_x;
# X_test /= std_x
#sklearn.linear_model.LogisticRegression()
penalty="l2"
if penalty == "l1":
sk_lda = sklearn.linear_model.LogisticRegression(C=C, penalty='l1',solver='liblinear', fit_intercept=True)
else:
sk_lda = sklearn.linear_model.LogisticRegression(solver='liblinear', C=C, penalty='l2', fit_intercept=True)
sk_lda.fit( X_train, y_train )
#pdb.set_trace()
sk_test_proj = np.squeeze(sk_lda.predict_log_proba( X_test ))[1]
test_proj = sk_test_proj #lda.transform( X_test )
test_prob = np.squeeze(sk_lda.predict_proba( X_test ))[1] # lda.predict( X_test )
mean_projections[ i ] += test_proj
mean_probabilities[ i ] += test_prob
var_projections[ i ] += np.square( test_proj )
var_probabilities[ i ] += np.square( test_prob )
w = np.squeeze( sk_lda.coef_ )
Ws.append( w )
w_mean[i] += w
w_var[i] += np.square(w)
w_mn = w_mean.mean(0)
w_var = w_mean.var(0)
Ws = np.array(Ws)
#print "loo w = ", w_mn
#print "loo w_var = ", w_var
var_projections -= np.square( mean_projections )
var_probabilities -= np.square( mean_probabilities )
avg_projection=mean_projections
avg_probability=mean_probabilities
return (mean_projections,var_projections),(mean_probabilities,var_probabilities),(w_mn,w_var,Ws),(avg_projection,avg_probability)
def lda_with_loo( X, y, epsilon = 1e-12 ):
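"""Leave-one-out LDA using sklearn's shrinkage LDA ('lsqr' solver). Stores
each held-out sample's class-1 log-probability and probability and the
fitted coefficients."""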
print "epsilon", epsilon
n,d = X.shape
assert len(y) == n, "incorrect sizes"
#train_folds, test_folds = xval_folds( n, k_fold, randomize = randomize, seed = seed )
avg_projection = np.zeros( n, dtype=float )
avg_probability = np.zeros( n, dtype=float )
mean_projections = np.zeros( n, dtype=float )
var_projections = np.zeros( n, dtype=float )
mean_probabilities = np.zeros( n, dtype=float )
var_probabilities = np.zeros( n, dtype=float )
# for each fold, compute mean and variances
w_mean = np.zeros( (n,d), dtype = float )
w_var = np.zeros( (n,d), dtype = float )
all_I = np.arange(n,dtype=int)
for i in range(n):
train_ids = np.setdiff1d( all_I, i )
test_ids = [i]
X_test = X[test_ids,:]
#bootstrap_ids = bootstraps( train_ids, n_bootstraps )
X_train = X[train_ids,:]
y_train = y[train_ids]
sk_lda = sk_LinearDiscriminantAnalysis(solver='lsqr', shrinkage='auto')
sk_lda.fit( X_train, y_train )
#pdb.set_trace()
sk_test_proj = np.squeeze(sk_lda.predict_log_proba( X_test ))[1]
test_proj = sk_test_proj #lda.transform( X_test )
test_prob = np.squeeze(sk_lda.predict_proba( X_test ))[1] # lda.predict( X_test )
mean_projections[ i ] += test_proj
mean_probabilities[ i ] += test_prob
var_projections[ i ] += np.square( test_proj )
var_probabilities[ i ] += np.square( test_prob )
w = np.squeeze( sk_lda.coef_ )
w_mean[i] += w
w_var[i] += np.square(w)
w_mn = w_mean.mean(0)
w_var = w_mean.var(0)
#print "loo w = ", w_mn
#print "loo w_var = ", w_var
var_projections -= np.square( mean_projections )
var_probabilities -= np.square( mean_probabilities )
avg_projection=mean_projections
avg_probability=mean_probabilities
return (mean_projections,var_projections),(mean_probabilities,var_probabilities),(w_mn,w_var),(avg_projection,avg_probability)
def lda_on_train( X, y, k_fold = 10, n_bootstraps = 10, randomize = True, seed = 0, epsilon = 1e-12 ):
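"""Fit LDA on the full data set and report in-sample projections and
probabilities. Despite the k_fold/n_bootstraps arguments, no
cross-validation is performed here."""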
n,d = X.shape
assert len(y) == n, "incorrect sizes"
train_folds, test_folds = xval_folds( n, k_fold, randomize = randomize, seed = seed )
avg_projection = np.zeros( n, dtype=float )
avg_probability = np.zeros( n, dtype=float )
mean_projections = np.zeros( n, dtype=float )
var_projections = np.zeros( n, dtype=float )
mean_probabilities = np.zeros( n, dtype=float )
var_probabilities = np.zeros( n, dtype=float )
# for each fold, compute mean and variances
w_mean = np.zeros( (k_fold,d), dtype = float )
w_var = np.zeros( (k_fold,d), dtype = float )
lda = LinearDiscriminantAnalysis(epsilon=epsilon)
lda.fit( X, y )
w = lda.w_prop_to
sk_lda = sk_LinearDiscriminantAnalysis(solver='lsqr', shrinkage='auto')
sk_lda.fit( X, y )
sk_test_proj = np.squeeze(sk_lda.predict_log_proba( X ))
w = lda.w_prop_to
test_proj = lda.transform( X )
#pdb.set_trace()
ranked = np.argsort(test_proj).astype(float) / len(test_proj)
#test_proj = ranked
test_prob = lda.prob( X )
I=pp.find( np.isinf(test_prob) )
test_prob[I] = 1
test_predict = lda.predict( X )
mean_projections = test_proj
mean_probabilities = test_prob
var_projections = np.square( test_proj )
var_probabilities = np.square( test_predict )
w_mean = w
w_var = np.square(w)
print "train w = ", w
I=pp.find( np.isinf(avg_probability) )
avg_probability[I] = 1
return (mean_projections,var_projections),(mean_probabilities,var_probabilities),(w_mean,w_var),(avg_projection,avg_probability)
def run_survival_analysis( disease_list, fill_store, data_store, k_fold = 10, n_bootstraps = 10, epsilon = 1e-12 ):
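"""Assemble a survival table from the clinical store and the latent
representation in fill_store, derive T (follow-up days plus days to death),
E (event indicator) and a binary label y = (T below the mean time), then run
lda_with_xval_and_bootstrap on the latent features."""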
fill_store.open()
data_store.open()
ALL_SURVIVAL = data_store["/CLINICAL/data"][["patient.days_to_last_followup","patient.days_to_death"]]
tissue_barcodes = np.array( ALL_SURVIVAL.index.tolist(), dtype=str )
surv_barcodes = np.array([ x+"_"+y for x,y in tissue_barcodes])
NEW_SURVIVAL = pd.DataFrame( ALL_SURVIVAL.values, index =surv_barcodes, columns = ALL_SURVIVAL.columns )
val_survival = pd.concat( [NEW_SURVIVAL, fill_store["/Z/VAL/Z/mu"]], axis=1, join = 'inner' )
fill_store.close()
data_store.close()
#-------
predict_survival_train = val_survival #pd.concat( [test_survival, val_survival], axis=0, join = 'outer' )
predict_barcodes_train = predict_survival_train.index
splt = np.array( [ [s.split("_")[0], s.split("_")[1]] for s in predict_barcodes_train ] )
predict_survival_train = pd.DataFrame( predict_survival_train.values, index = splt[:,1], columns = predict_survival_train.columns )
predict_survival_train["disease"] = splt[:,0]
Times_train = predict_survival_train[ "patient.days_to_last_followup" ].fillna(0).values.astype(int)+predict_survival_train[ "patient.days_to_death" ].fillna(0).values.astype(int)
predict_survival_train["T"] = Times_train
Events_train = (1-np.isnan( predict_survival_train[ "patient.days_to_death" ].astype(float)) ).astype(int)
predict_survival_train["E"] = Events_train
X_columns = val_survival.columns[2:]
X = predict_survival_train[X_columns].values.astype(float)
i_event = pp.find(predict_survival_train["E"].values)
#median_time = np.median( predict_survival_train["T"].values[i_event] )
median_time = np.mean( predict_survival_train["T"].values )
i_less = pp.find(predict_survival_train["T"].values<median_time)
#y = predict_survival_train["E"].values.astype(int)
y = np.zeros( len(predict_survival_train["T"].values) )
y[i_less] = 1
projections, probabilties, weights, averages = lda_with_xval_and_bootstrap( X, y, k_fold = k_fold, n_bootstraps = n_bootstraps )
return projections, probabilties, weights, averages, X, y, Events_train, Times_train
def run_survival_analysis_lda( disease_list, fill_store, data_store, k_fold = 10, n_bootstraps = 10, epsilon = 1e-12 ):
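"""As run_survival_analysis, but y is the raw event indicator and the given
epsilon is passed through to lda_with_xval_and_bootstrap."""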
fill_store.open()
data_store.open()
ALL_SURVIVAL = data_store["/CLINICAL/data"][["patient.days_to_last_followup","patient.days_to_death"]]
tissue_barcodes = np.array( ALL_SURVIVAL.index.tolist(), dtype=str )
surv_barcodes = np.array([ x+"_"+y for x,y in tissue_barcodes])
NEW_SURVIVAL = pd.DataFrame( ALL_SURVIVAL.values, index =surv_barcodes, columns = ALL_SURVIVAL.columns )
val_survival = pd.concat( [NEW_SURVIVAL, fill_store["/Z/VAL/Z/mu"]], axis=1, join = 'inner' )
fill_store.close()
data_store.close()
#-------
predict_survival_train = val_survival #pd.concat( [test_survival, val_survival], axis=0, join = 'outer' )
predict_barcodes_train = predict_survival_train.index
splt = np.array( [ [s.split("_")[0], s.split("_")[1]] for s in predict_barcodes_train ] )
predict_survival_train = pd.DataFrame( predict_survival_train.values, index = splt[:,1], columns = predict_survival_train.columns )
predict_survival_train["disease"] = splt[:,0]
Times_train = predict_survival_train[ "patient.days_to_last_followup" ].fillna(0).values.astype(int)+predict_survival_train[ "patient.days_to_death" ].fillna(0).values.astype(int)
predict_survival_train["T"] = Times_train
Events_train = (1-np.isnan( predict_survival_train[ "patient.days_to_death" ].astype(float)) ).astype(int)
predict_survival_train["E"] = Events_train
X_columns = val_survival.columns[2:]
X = predict_survival_train[X_columns].values.astype(float)
i_event = pp.find(predict_survival_train["E"].values)
#median_time = np.median( predict_survival_train["T"].values[i_event] )
median_time = np.mean( predict_survival_train["T"].values )
i_less = pp.find(predict_survival_train["T"].values<median_time)
y = predict_survival_train["E"].values.astype(int)
#y = np.zeros( len(predict_survival_train["T"].values) )
#y[i_less] = 1
projections, probabilties, weights, averages = lda_with_xval_and_bootstrap( X, y, k_fold = k_fold, n_bootstraps = n_bootstraps, epsilon=epsilon )
return projections, probabilties, weights, averages, X, y, Events_train, Times_train
def run_survival_analysis_lda_loo( disease_list, fill_store, data_store, k_fold = 10, n_bootstraps = 10, epsilon = 1e-12 ):
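"""As run_survival_analysis_lda, but evaluated with leave-one-out LDA
(lda_with_loo)."""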
fill_store.open()
data_store.open()
ALL_SURVIVAL = data_store["/CLINICAL/data"][["patient.days_to_last_followup","patient.days_to_death"]]
tissue_barcodes = np.array( ALL_SURVIVAL.index.tolist(), dtype=str )
surv_barcodes = np.array([ x+"_"+y for x,y in tissue_barcodes])
NEW_SURVIVAL = pd.DataFrame( ALL_SURVIVAL.values, index =surv_barcodes, columns = ALL_SURVIVAL.columns )
val_survival = pd.concat( [NEW_SURVIVAL, fill_store["/Z/VAL/Z/mu"]], axis=1, join = 'inner' )
fill_store.close()
data_store.close()
#-------
predict_survival_train = val_survival #pd.concat( [test_survival, val_survival], axis=0, join = 'outer' )
predict_barcodes_train = predict_survival_train.index
splt = np.array( [ [s.split("_")[0], s.split("_")[1]] for s in predict_barcodes_train ] )
predict_survival_train = pd.DataFrame( predict_survival_train.values, index = splt[:,1], columns = predict_survival_train.columns )
predict_survival_train["disease"] = splt[:,0]
Times_train = predict_survival_train[ "patient.days_to_last_followup" ].fillna(0).values.astype(int)+predict_survival_train[ "patient.days_to_death" ].fillna(0).values.astype(int)
predict_survival_train["T"] = Times_train
Events_train = (1-np.isnan( predict_survival_train[ "patient.days_to_death" ].astype(float)) ).astype(int)
predict_survival_train["E"] = Events_train
X_columns = val_survival.columns[2:]
X = predict_survival_train[X_columns].values.astype(float)
i_event = pp.find(predict_survival_train["E"].values)
#median_time = np.median( predict_survival_train["T"].values[i_event] )
median_time = np.mean( predict_survival_train["T"].values )
i_less = pp.find(predict_survival_train["T"].values<median_time)
y = predict_survival_train["E"].values.astype(int)
#y = np.zeros( len(predict_survival_train["T"].values) )
#y[i_less] = 1
projections, probabilties, weights, averages = lda_with_loo( X, y, epsilon=epsilon )
return projections, probabilties, weights, averages, X, y, Events_train, Times_train
def run_pytorch_survival_train_val( train_survival, val_survival, spec = None ):
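"""Extract [Z, T, E] arrays from the train/val survival DataFrames (times
clipped to at least 1 day) and delegate to pytorch_survival_train_val."""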
# projections, \
# probabilties, \
# weights, averages, X, y, E_train, T_train =
X_columns = val_survival.columns[2:]
X_train = train_survival[X_columns].values.astype(float)
X_val = val_survival[X_columns].values.astype(float)
i_event = pp.find(train_survival["E"].values)
E_train = train_survival["E"].values.astype(int)
i_event = pp.find(val_survival["E"].values)
E_val = val_survival["E"].values.astype(int)
#E = predict_survival_train["E"].values
T_train = np.maximum( 1, train_survival["T"].values )
T_val = np.maximum( 1, val_survival["T"].values )
train = [X_train, T_train, E_train]
val = [X_val, T_val, E_val]
#(train_proj, test_proj), (train_prob, test_prob), (train_time, test_time), w
projections, probabilties, times, w = pytorch_survival_train_val( train, val, spec = spec )
return projections, probabilties, times, w, train, val
def run_tensorflow_survival_train_val( train_survival, tissue_train, val_survival, tissue_val, spec = None ):
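"""TensorFlow counterpart of run_pytorch_survival_train_val; additionally
threads tissue labels through the train/val tuples."""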
# projections, \
# probabilties, \
# weights, averages, X, y, E_train, T_train =
X_columns = val_survival.columns[2:]
X_train = train_survival[X_columns].values.astype(float)
X_val = val_survival[X_columns].values.astype(float)
i_event = pp.find(train_survival["E"].values)
E_train = train_survival["E"].values.astype(int)
i_event = pp.find(val_survival["E"].values)
E_val = val_survival["E"].values.astype(int)
#E = predict_survival_train["E"].values
T_train = np.maximum( 1, train_survival["T"].values )
T_val = np.maximum( 1, val_survival["T"].values )
train = [X_train, T_train, E_train, tissue_train.values]
val = [X_val, T_val, E_val, tissue_val.values]
#(train_proj, test_proj), (train_prob, test_prob), (train_time, test_time), w
projections, probabilties, times, w = tensorflow_survival_train_val( train, val, spec = spec )
return projections, probabilties, times, w, train, val
def run_pytorch_survival_folds( disease_list, fill_store, data_store, \
k_fold = 10, \
n_bootstraps = 10, \
l1 = 0.0, \
n_epochs=1000, \
normalize = False, seed = 0):
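"""Build the survival table as in run_survival_analysis, then run
pytorch_survival_xval on the latent features with the given fold count and
regularization settings."""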
fill_store.open()
data_store.open()
ALL_SURVIVAL = data_store["/CLINICAL/data"][["patient.days_to_last_followup","patient.days_to_death"]]
tissue_barcodes = np.array( ALL_SURVIVAL.index.tolist(), dtype=str )
surv_barcodes = np.array([ x+"_"+y for x,y in tissue_barcodes])
NEW_SURVIVAL = pd.DataFrame( ALL_SURVIVAL.values, index =surv_barcodes, columns = ALL_SURVIVAL.columns )
val_survival = pd.concat( [NEW_SURVIVAL, fill_store["/Z/VAL/Z/mu"]], axis=1, join = 'inner' )
fill_store.close()
data_store.close()
#-------
predict_survival_train = val_survival #pd.concat( [test_survival, val_survival], axis=0, join = 'outer' )
predict_barcodes_train = predict_survival_train.index
splt = np.array( [ [s.split("_")[0], s.split("_")[1]] for s in predict_barcodes_train ] )
predict_survival_train = pd.DataFrame( predict_survival_train.values, index = splt[:,1], columns = predict_survival_train.columns )
predict_survival_train["disease"] = splt[:,0]
Times_train = predict_survival_train[ "patient.days_to_last_followup" ].fillna(0).values.astype(int)+predict_survival_train[ "patient.days_to_death" ].fillna(0).values.astype(int)
predict_survival_train["T"] = Times_train
Events_train = (1-np.isnan( predict_survival_train[ "patient.days_to_death" ].astype(float)) ).astype(int)
predict_survival_train["E"] = Events_train
X_columns = val_survival.columns[2:]
X = predict_survival_train[X_columns].values.astype(float)
i_event = pp.find(predict_survival_train["E"].values)
#median_time = np.median( predict_survival_train["T"].values[i_event] )
median_time = np.mean( predict_survival_train["T"].values )
i_less = pp.find(predict_survival_train["T"].values<median_time)
y = predict_survival_train["E"].values.astype(int)
#y = np.zeros( len(predict_survival_train["T"].values) )
#y[i_less] = 1
E = predict_survival_train["E"].values
T = np.maximum( 1, predict_survival_train["T"].values )
Z = X
projections, probabilties, weights, averages = pytorch_survival_xval( E, T, Z, k_fold, l1=l1, n_epochs=n_epochs, normalize=normalize, seed=seed )
return projections, probabilties, weights, averages, X, y, Events_train, Times_train
def run_survival_prediction_loo( disease_list, fill_store, data_store, group0, group1, data_keys, data_names, C = 1 ):
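"""Concatenate the requested data matrices for the validation cohort, label
the samples in group1 as class 1, and run leave-one-out logistic regression
(predict_groups_with_loo) with regularization strength C."""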
fill_store.open()
data_store.open()
ALL_SURVIVAL = data_store["/CLINICAL/data"][["patient.days_to_last_followup","patient.days_to_death"]]
tissue_barcodes = np.array( ALL_SURVIVAL.index.tolist(), dtype=str )
surv_barcodes = np.array([ x+"_"+y for x,y in tissue_barcodes])
NEW_SURVIVAL = pd.DataFrame( ALL_SURVIVAL.values, index =surv_barcodes, columns = ALL_SURVIVAL.columns )
val_survival = pd.concat( [NEW_SURVIVAL, fill_store["/Z/VAL/Z/mu"]], axis=1, join = 'inner' )
datas = []
for data_key, data_name in zip( data_keys, data_names):
datas.append( data_store[data_key].loc[val_survival.index].fillna(0) )
data_columns = {}
for b in data_store[data_key].columns:
if len(data_keys)>1:
data_columns[b] = "%s_%s"%(data_name,b)
else:
data_columns[b] = "%s"%(b)
datas[-1].rename( columns = data_columns, inplace=True)
data_train = pd.concat(datas, axis=1)
fill_store.close()
data_store.close()
#pdb.set_trace()
X_columns = data_train.columns
X = data_train[X_columns].values.astype(float)
y = np.zeros(len(X),dtype=int)
y[group1] = 1
#pdb.set_trace()
predictions, probabilties, weights, averages = predict_groups_with_loo( X, y, C )
return predictions, probabilties, weights, averages, data_train, y
def run_survival_prediction_loo_regression( disease_list, fill_store, data_store, targets, data_keys, data_names, C = 1 ):
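"""As run_survival_prediction_loo, but regresses the supplied continuous
targets via predict_groups_with_loo_with_regression."""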
fill_store.open()
data_store.open()
ALL_SURVIVAL = data_store["/CLINICAL/data"][["patient.days_to_last_followup","patient.days_to_death"]]
tissue_barcodes = np.array( ALL_SURVIVAL.index.tolist(), dtype=str )
surv_barcodes = np.array([ x+"_"+y for x,y in tissue_barcodes])
NEW_SURVIVAL = pd.DataFrame( ALL_SURVIVAL.values, index =surv_barcodes, columns = ALL_SURVIVAL.columns )
val_survival = pd.concat( [NEW_SURVIVAL, fill_store["/Z/VAL/Z/mu"]], axis=1, join = 'inner' )
datas = []
for data_key, data_name in zip( data_keys, data_names):
datas.append( data_store[data_key].loc[val_survival.index].fillna(0) )
data_columns = {}
for b in data_store[data_key].columns:
if len(data_keys)>1:
data_columns[b] = "%s_%s"%(data_name,b)
else:
data_columns[b] = "%s"%(b)
datas[-1].rename( columns = data_columns, inplace=True)
data_train = pd.concat(datas, axis=1)
fill_store.close()
data_store.close()
#pdb.set_trace()
X_columns = data_train.columns
X = data_train[X_columns].values.astype(float)
#y = np.zeros(len(X),dtype=int)
#y[group1] = 1
y=targets
#pdb.set_trace()
assert len(y) == len(X), "made different sizes"
predictions, weights, averages = predict_groups_with_loo_with_regression( X, y, C )
#(mean_projections,var_projections),(w_mn,w_var),(avg_projection,)
return predictions, weights, averages, data_train, y
def run_survival_prediction_xval_regression( disease_list, \
fill_store, data_store, targets, data_keys, \
data_names, l1 = 0.0, k_fold = 10, seed=0, use_cuda = False ):
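"""Like run_survival_prediction_loo_regression but k-fold cross-validated;
rows with missing data are replaced from the /Fill/VAL/<data_type> tables in
fill_store when available before regressing on the supplied targets."""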
fill_store.open()
data_store.open()
ALL_SURVIVAL = data_store["/CLINICAL/data"][["patient.days_to_last_followup","patient.days_to_death"]]
tissue_barcodes = np.array( ALL_SURVIVAL.index.tolist(), dtype=str )
surv_barcodes = np.array([ x+"_"+y for x,y in tissue_barcodes])
NEW_SURVIVAL = pd.DataFrame( ALL_SURVIVAL.values, index =surv_barcodes, columns = ALL_SURVIVAL.columns )
val_survival = pd.concat( [NEW_SURVIVAL, fill_store["/Z/VAL/Z/mu"]], axis=1, join = 'inner' )
fill_type = "VAL"
na_datas = []
datas = []
for data_key, data_name in zip( data_keys, data_names):
all_bad = False
try:
na_datas.append( data_store[data_key].loc[val_survival.index])
datas.append( data_store[data_key].loc[val_survival.index].fillna(0) )
bad_ids = pp.find( pp.isnan(na_datas[-1].values.sum(1)))
except Exception:
# data missing entirely for this key; handled via the all_bad flag below
all_bad = True
bad_ids = []
if len(bad_ids) > 0 or all_bad is True:
if all_bad is True:
bad_bcs = val_survival.index
else:
bad_bcs = na_datas[-1].index.values[bad_ids]
data_type = data_key.split("/")[1]
key = "/Fill/%s/%s"%(fill_type,data_type)
#pdb.set_trace()
if key in fill_store:
x_fill = fill_store[key].loc[bad_bcs]
if all_bad is False:
XX = na_datas[-1].values
XX[bad_ids,:] = x_fill.values
datas[-1] = pd.DataFrame( XX, columns = na_datas[-1].columns, index=na_datas[-1].index )
else:
XX = x_fill.values
datas.append( pd.DataFrame( XX, columns = x_fill.columns, index=x_fill.index ) )
else:
print "skipping filling in %s for ids "%(data_key), bad_bcs
#pdb.set_trace()
#pdb.set_trace()
data_columns = {}
for b in data_store[data_key].columns:
if len(data_keys)>1:
data_columns[b] = "%s_%s"%(data_name,b)
else:
data_columns[b] = "%s"%(b)
datas[-1].rename( columns = data_columns, inplace=True)
data_train = pd.concat(datas, axis=1)
fill_store.close()
data_store.close()
#pdb.set_trace()
X_columns = data_train.columns
X = data_train[X_columns].values.astype(float)
#y = np.zeros(len(X),dtype=int)
#y[group1] = 1
y=targets
#pdb.set_trace()
assert len(y) == len(X), "made different sizes"
predictions, weights, averages = predict_groups_with_xval_with_regression( X, y, l1, k_fold=k_fold, use_cuda=use_cuda )
#(mean_projections,var_projections),(w_mn,w_var),(avg_projection,)
return predictions, weights, averages, data_train, y
def run_survival_analysis_lda_train( disease_list, fill_store, data_store, k_fold = 10, n_bootstraps = 10, epsilon = 1e-12 ):
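"""As run_survival_analysis_lda, but fits on the full data via lda_on_train
(no held-out evaluation)."""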
fill_store.open()
data_store.open()
ALL_SURVIVAL = data_store["/CLINICAL/data"][["patient.days_to_last_followup","patient.days_to_death"]]
tissue_barcodes = np.array( ALL_SURVIVAL.index.tolist(), dtype=str )
surv_barcodes = np.array([ x+"_"+y for x,y in tissue_barcodes])
NEW_SURVIVAL = pd.DataFrame( ALL_SURVIVAL.values, index =surv_barcodes, columns = ALL_SURVIVAL.columns )
val_survival = pd.concat( [NEW_SURVIVAL, fill_store["/Z/VAL/Z/mu"]], axis=1, join = 'inner' )
fill_store.close()
data_store.close()
#-------
predict_survival_train = val_survival #pd.concat( [test_survival, val_survival], axis=0, join = 'outer' )
predict_barcodes_train = predict_survival_train.index
splt = np.array( [ [s.split("_")[0], s.split("_")[1]] for s in predict_barcodes_train ] )
predict_survival_train = pd.DataFrame( predict_survival_train.values, index = splt[:,1], columns = predict_survival_train.columns )
predict_survival_train["disease"] = splt[:,0]
Times_train = predict_survival_train[ "patient.days_to_last_followup" ].fillna(0).values.astype(int)+predict_survival_train[ "patient.days_to_death" ].fillna(0).values.astype(int)
predict_survival_train["T"] = Times_train
Events_train = (1-np.isnan( predict_survival_train[ "patient.days_to_death" ].astype(float)) ).astype(int)
predict_survival_train["E"] = Events_train
X_columns = val_survival.columns[2:]
X = predict_survival_train[X_columns].values.astype(float)
i_event = pp.find(predict_survival_train["E"].values)
#median_time = np.median( predict_survival_train["T"].values[i_event] )
median_time = np.mean( predict_survival_train["T"].values )
i_less = pp.find(predict_survival_train["T"].values<median_time)
y = predict_survival_train["E"].values.astype(int)
#y = np.zeros( len(predict_survival_train["T"].values) )
#y[i_less] = 1
projections, probabilties, weights, averages = lda_on_train( X, y, k_fold = k_fold, n_bootstraps = n_bootstraps, epsilon=epsilon )
return projections, probabilties, weights, averages, X, y, Events_train, Times_train
def run_survival_analysis_kmeans( disease_list, fill_store, data_store, k_fold, K ):
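"""Build the survival table and cluster the latent features into K groups
with kmeans_survival."""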
fill_store.open()
data_store.open()
ALL_SURVIVAL = data_store["/CLINICAL/data"][["patient.days_to_last_followup","patient.days_to_death"]]
tissue_barcodes = np.array( ALL_SURVIVAL.index.tolist(), dtype=str )
surv_barcodes = np.array([ x+"_"+y for x,y in tissue_barcodes])
NEW_SURVIVAL = pd.DataFrame( ALL_SURVIVAL.values, index =surv_barcodes, columns = ALL_SURVIVAL.columns )
val_survival = pd.concat( [NEW_SURVIVAL, fill_store["/Z/VAL/Z/mu"]], axis=1, join = 'inner' )
fill_store.close()
data_store.close()
#-------
predict_survival_train = val_survival #pd.concat( [test_survival, val_survival], axis=0, join = 'outer' )
predict_barcodes_train = predict_survival_train.index
splt = np.array( [ [s.split("_")[0], s.split("_")[1]] for s in predict_barcodes_train ] )
predict_survival_train = pd.DataFrame( predict_survival_train.values, index = splt[:,1], columns = predict_survival_train.columns )
predict_survival_train["disease"] = splt[:,0]
Times_train = predict_survival_train[ "patient.days_to_last_followup" ].fillna(0).values.astype(int)+predict_survival_train[ "patient.days_to_death" ].fillna(0).values.astype(int)
predict_survival_train["T"] = Times_train
Events_train = (1-np.isnan( predict_survival_train[ "patient.days_to_death" ].astype(float)) ).astype(int)
predict_survival_train["E"] = Events_train
X_columns = val_survival.columns[2:]
X = predict_survival_train[X_columns].values.astype(float)
i_event = pp.find(predict_survival_train["E"].values)
#median_time = np.median( predict_survival_train["T"].values[i_event] )
median_time = np.mean( predict_survival_train["T"].values )
i_less = pp.find(predict_survival_train["T"].values<median_time)
y = predict_survival_train["E"].values.astype(int)
#y = np.zeros( len(predict_survival_train["T"].values) )
#y[i_less] = 1
#projections, probabilties, weights, averages = lda_with_xval_and_bootstrap( X, y, k_fold = k_fold, n_bootstraps = n_bootstraps )
predictions = kmeans_survival( X, y, K = K )
return predictions, X, y, Events_train, Times_train
if __name__ == "__main__":
disease = "blca"
data_file = "pan_tiny_multi_set"
experiment_name = "tiny_leave_%s_out"%(disease)
if len(sys.argv) == 4:
disease = sys.argv[1]
data_file = sys.argv[2]
#experiment_name = sys.argv[3]
data_location = os.path.join( HOME_DIR, "data/broad_processed_post_recomb/20160128/%s/data.h5"%(data_file) )
fill_location = os.path.join( HOME_DIR, "results/tcga_vae_post_recomb/leave_out/medium/leave_out_%s/full_vae_fill.h5"%(disease) )
survival_location = os.path.join( HOME_DIR, "results/tcga_vae_post_recomb/leave_out/medium/leave_out_%s/full_vae_survival.h5"%(disease) )
savename = os.path.join( HOME_DIR, "results/tcga_vae_post_recomb/leave_out/medium/leave_out_%s/survival_xval.png"%(disease))
else:
data_location = os.path.join( HOME_DIR, "data/broad_processed_post_recomb/20160128/%s/data.h5"%(data_file) )
fill_location = os.path.join( HOME_DIR, "results/tcga_vae_post_recomb/leave_out_sandbox/tiny/leave_out_%s/full_vae_fill.h5"%(disease) )
survival_location = os.path.join( HOME_DIR, "results/tcga_vae_post_recomb/leave_out_sandbox/tiny/leave_out_%s/full_vae_survival.h5"%(disease) )
savename = os.path.join( HOME_DIR, "results/tcga_vae_post_recomb/leave_out/tiny/leave_out_%s/survival_xval.png"%(disease))
s=pd.HDFStore( survival_location, "r" )
d=pd.HDFStore( data_location, "r" )
f=pd.HDFStore( fill_location, "r" )
projections, probabilties, weights, averages, X, y, E_train, T_train = run_survival_analysis( [disease], f, d, k_fold = 20, n_bootstraps = 10, epsilon= 0.1 )
avg_proj = averages[0]
avg_prob = averages[1]
f = pp.figure()
mn_proj = projections[0]
std_proj = np.sqrt(projections[1])
mn_prob = probabilties[0]
std_prob = np.sqrt(probabilties[1])
mn_w = weights[0]
std_w = np.sqrt(weights[1])
ax1 = f.add_subplot(211)
I = np.argsort(-mn_proj)
ax1.plot( mn_proj[I], mn_prob[I], 'o')
ax2 = f.add_subplot(212)
ax2.plot( mn_w, 'o-')
#I = np.argsort( mn_prob )
I1 = pp.find( mn_prob > np.median(mn_prob) )
I0 = pp.find( mn_prob <= np.median(mn_prob) )
#I1 = pp.find( avg_prob > np.median(avg_prob) )
#I0 = pp.find( avg_prob <= np.median(avg_prob) )
f = pp.figure()
ax3 = f.add_subplot(111)
kmf = KaplanMeierFitter()
if len(I1) > 0:
kmf.fit(T_train[I1], event_observed=E_train[I1], label = "lda_1 E=%d C=%d"%(E_train[I1].sum(),len(I1)-E_train[I1].sum()))
ax3=kmf.plot(ax=ax3,at_risk_counts=False,show_censors=True, color='red')
if len(I0) > 0:
kmf.fit(T_train[I0], event_observed=E_train[I0], label = "lda_0 E=%d C=%d"%(E_train[I0].sum(),len(I0)-E_train[I0].sum()))
ax3=kmf.plot(ax=ax3,at_risk_counts=False,show_censors=True, color='blue')
pp.savefig(savename, dpi=300, format='png')
print "ROC mn_prob ", roc_auc_score(y,mn_prob)
print "ROC avg_prob ", roc_auc_score(y,avg_prob)
pp.show()
# ---------------------------------------------------------------------------
# nodasf/migrations/0024_auto_20191010_1521.py
# repo: bopopescu/nodasf | Python | MIT license | 1,808 bytes
# ---------------------------------------------------------------------------
# Generated by Django 2.2.4 on 2019-10-10 22:21
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('nodasf', '0023_auto_20190904_1042'),
]
operations = [
migrations.AlterField(
model_name='stf',
name='city',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='nodasf.City'),
),
migrations.AlterField(
model_name='stf',
name='county',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='nodasf.County'),
),
migrations.AlterField(
model_name='stf',
name='hub',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='nodasf.STF_Hub'),
),
migrations.AlterField(
model_name='stf',
name='issue',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='nodasf.Issue'),
),
migrations.AlterField(
model_name='stf_hub',
name='city',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='nodasf.City'),
),
migrations.AlterField(
model_name='stf_hub',
name='county',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='nodasf.County'),
),
migrations.AlterField(
model_name='stf_hub',
name='issue',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='nodasf.Issue'),
),
]
# ---------------------------------------------------------------------------
# application/src/pytest/python/modules/locations/routes_public_test.py
# repo: okebinda/base.api.python | Python | Apache-2.0 license | 17,038 bytes
# ---------------------------------------------------------------------------
from copy import copy
import pytest
from sqlalchemy.orm.exc import NoResultFound
from fixtures import Fixtures
from app import create_app
from config import Config
from modules.locations.routes_public import get_countries, get_regions
from modules.locations.model import Country, Region
from modules.app_keys.model import AppKey
@pytest.fixture
def app(request):
config = copy(Config)
config.TESTING = True
config.APP_TYPE = 'admin' if 'admin_api' in request.keywords else 'public'
app = create_app(config)
if 'unit' in request.keywords:
yield app
else:
fixtures = Fixtures(app)
fixtures.setup()
yield app
fixtures.teardown()
# UNIT TESTS
@pytest.mark.unit
def test_get_countries(app, mocker):
expected_status = 200
expected_length = 2
expected_properties = ['code_2', 'code_3', 'id', 'name', 'regions_uri']
expected_limit = 250
expected_page = 1
expected_total = 2
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
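# The chained return_value attributes mirror the SQLAlchemy call chain
# query.filter(...).order_by(...).limit(...).offset(...) presumably issued
# by get_countries, so iterating the final mock yields Country stubs.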
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.limit.return_value \
.offset.return_value \
.__iter__.return_value = [Country()] * expected_length
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.count.return_value = expected_total
result = get_countries()
assert result[1] == expected_status
assert len(result[0].json['countries']) == expected_length
assert result[0].json['countries'][0] == {
x: None for x in expected_properties}
assert result[0].json['limit'] == expected_limit
assert result[0].json['page'] == expected_page
assert result[0].json['total'] == expected_total
@pytest.mark.unit
def test_get_countries_limit_10_page_2_of_3(app, mocker):
expected_status = 200
expected_length = 10
expected_properties = ['code_2', 'code_3', 'id', 'name', 'regions_uri']
expected_limit = 10
expected_page = 2
expected_total = 25
expected_previous_uri = 'http://localhost/countries/1/10'
expected_next_uri = 'http://localhost/countries/3/10'
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.limit.return_value \
.offset.return_value \
.__iter__.return_value = [Country()] * expected_length
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.count.return_value = expected_total
result = get_countries(expected_page, expected_limit)
assert result[1] == expected_status
assert len(result[0].json['countries']) == expected_length
assert result[0].json['countries'][0] == {
x: None for x in expected_properties}
assert result[0].json['previous_uri'] == expected_previous_uri
assert result[0].json['next_uri'] == expected_next_uri
assert result[0].json['limit'] == expected_limit
assert result[0].json['page'] == expected_page
assert result[0].json['total'] == expected_total
@pytest.mark.unit
def test_get_countries_empty(app, mocker):
expected_status = 204
expected_content = ''
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.limit.return_value \
.offset.return_value \
.__iter__.return_value = []
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.count.return_value = 15
result = get_countries(5, 10)
assert result[1] == expected_status
assert result[0] == expected_content
@pytest.mark.unit
def test_get_countries_route_ok(app, mocker, client):
expected_status = 200
expected_length = 8
expected_limit = 250
expected_page = 1
expected_total = 8
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
# mock app key authorization db query
query_mock.return_value \
.filter.return_value \
.one.return_value = AppKey()
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.limit.return_value \
.offset.return_value \
.__iter__.return_value = [Country()] * expected_length
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.count.return_value = expected_total
response = client.get("/countries?app_key=123")
assert response.status_code == expected_status
assert len(response.json['countries']) == expected_length
assert response.json['limit'] == expected_limit
assert response.json['page'] == expected_page
assert response.json['total'] == expected_total
@pytest.mark.unit
def test_get_countries_limit_5_page_2_of_3_route(app, mocker, client):
expected_status = 200
expected_length = 5
expected_limit = 5
expected_page = 2
expected_total = 12
expected_next_uri = 'http://localhost/countries/3/5'
expected_previous_uri = 'http://localhost/countries/1/5'
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
# mock app key authorization db query
query_mock.return_value \
.filter.return_value \
.one.return_value = AppKey()
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.limit.return_value \
.offset.return_value \
.__iter__.return_value = [Country()] * expected_length
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.count.return_value = expected_total
response = client.get(
"/countries/{}/{}?app_key=123".format(expected_page,
expected_limit))
assert response.status_code == expected_status
assert len(response.json['countries']) == expected_length
assert response.json['limit'] == expected_limit
assert response.json['page'] == expected_page
assert response.json['total'] == expected_total
assert response.json['next_uri'] == expected_next_uri
assert response.json['previous_uri'] == expected_previous_uri
@pytest.mark.unit
def test_get_countries_empty_route(app, mocker, client):
expected_status = 204
expected_json = None
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
# mock app key authorization db query
query_mock.return_value \
.filter.return_value \
.one.return_value = AppKey()
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.limit.return_value \
.offset.return_value \
.__iter__.return_value = []
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.count.return_value = 15
response = client.get("/countries/3?app_key=123")
assert response.status_code == expected_status
assert response.json == expected_json
@pytest.mark.unit
def test_get_countries_route_no_app_key(app, client):
expected_status = 401
response = client.get("/countries")
assert response.status_code == expected_status
assert 'error' in response.json
@pytest.mark.unit
def test_get_countries_route_bad_app_key(app, mocker, client):
expected_status = 401
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
# mock app key authorization db query
query_mock.return_value \
.filter.return_value \
.one.side_effect = NoResultFound()
response = client.get("/countries?app_key=BAD_KEY")
assert response.status_code == expected_status
assert 'error' in response.json
@pytest.mark.unit
def test_get_regions(app, mocker):
expected_status = 200
expected_length = 3
expected_properties = ['code_2', 'id', 'name']
expected_limit = 100
expected_page = 1
expected_total = 3
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.filter.return_value \
.limit.return_value \
.offset.return_value \
.__iter__.return_value = [Region()] * expected_length
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.filter.return_value \
.count.return_value = expected_total
result = get_regions('US')
assert result[1] == expected_status
assert len(result[0].json['regions']) == expected_length
assert result[0].json['regions'][0] == {
x: None for x in expected_properties}
assert result[0].json['limit'] == expected_limit
assert result[0].json['page'] == expected_page
assert result[0].json['total'] == expected_total
@pytest.mark.unit
def test_get_regions_limit_10_page_2_of_3(app, mocker):
expected_status = 200
expected_length = 3
expected_properties = ['code_2', 'id', 'name']
expected_limit = 10
expected_page = 2
expected_total = 25
expected_previous_uri = 'http://localhost/regions/US/1/10'
expected_next_uri = 'http://localhost/regions/US/3/10'
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.filter.return_value \
.limit.return_value \
.offset.return_value \
.__iter__.return_value = [Region()] * expected_length
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.filter.return_value \
.count.return_value = expected_total
result = get_regions('US', expected_page, expected_limit)
assert result[1] == expected_status
assert len(result[0].json['regions']) == expected_length
assert result[0].json['regions'][0] == {
x: None for x in expected_properties}
assert result[0].json['previous_uri'] == expected_previous_uri
assert result[0].json['next_uri'] == expected_next_uri
assert result[0].json['limit'] == expected_limit
assert result[0].json['page'] == expected_page
assert result[0].json['total'] == expected_total
@pytest.mark.unit
def test_get_regions_empty(app, mocker):
expected_status = 204
expected_content = ''
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.filter.return_value \
.limit.return_value \
.offset.return_value \
.__iter__.return_value = []
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.filter.return_value \
.count.return_value = 15
result = get_regions('US', 5, 10)
assert result[1] == expected_status
assert result[0] == expected_content
@pytest.mark.unit
def test_get_regions_route_ok(app, mocker, client):
expected_status = 200
expected_length = 8
expected_limit = 100
expected_page = 1
expected_total = 8
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
# mock app key authorization db query
query_mock.return_value \
.filter.return_value \
.one.return_value = AppKey()
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.filter.return_value \
.limit.return_value \
.offset.return_value \
.__iter__.return_value = [Region()] * expected_length
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.filter.return_value \
.count.return_value = expected_total
response = client.get("/regions/US?app_key=123")
assert response.status_code == expected_status
assert len(response.json['regions']) == expected_length
assert response.json['limit'] == expected_limit
assert response.json['page'] == expected_page
assert response.json['total'] == expected_total
@pytest.mark.unit
def test_get_regions_limit_5_page_2_of_3_route(app, mocker, client):
expected_status = 200
expected_length = 5
expected_limit = 5
expected_page = 2
expected_total = 12
expected_next_uri = 'http://localhost/regions/US/3/5'
expected_previous_uri = 'http://localhost/regions/US/1/5'
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
# mock app key authorization db query
query_mock.return_value \
.filter.return_value \
.one.return_value = AppKey()
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.filter.return_value \
.limit.return_value \
.offset.return_value \
.__iter__.return_value = [Region()] * expected_length
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.filter.return_value \
.count.return_value = expected_total
response = client.get(
"/regions/US/{}/{}?app_key=123".format(expected_page,
expected_limit))
assert response.status_code == expected_status
assert len(response.json['regions']) == expected_length
assert response.json['limit'] == expected_limit
assert response.json['page'] == expected_page
assert response.json['total'] == expected_total
assert response.json['next_uri'] == expected_next_uri
assert response.json['previous_uri'] == expected_previous_uri
@pytest.mark.unit
def test_get_regions_empty_route(app, mocker, client):
expected_status = 204
expected_json = None
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
# mock app key authorization db query
query_mock.return_value \
.filter.return_value \
.one.return_value = AppKey()
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.filter.return_value \
.limit.return_value \
.offset.return_value \
.__iter__.return_value = []
query_mock.return_value \
.filter.return_value \
.order_by.return_value \
.filter.return_value \
.count.return_value = 15
response = client.get("/regions/US/3?app_key=123")
assert response.status_code == expected_status
assert response.json == expected_json
@pytest.mark.unit
def test_get_regions_route_no_app_key(app, client):
expected_status = 401
response = client.get("/regions/US")
assert response.status_code == expected_status
assert 'error' in response.json
@pytest.mark.unit
def test_get_regions_route_bad_app_key(app, mocker, client):
expected_status = 401
query_mock = mocker.patch('flask_sqlalchemy._QueryProperty.__get__')
# mock app key authorization db query
query_mock.return_value \
.filter.return_value \
.one.side_effect = NoResultFound()
response = client.get("/regions/US?app_key=BAD_KEY")
assert response.status_code == expected_status
assert 'error' in response.json
# INTEGRATION TESTS
@pytest.mark.integration
def test_get_countries_route(client):
expected_status = 200
expected_json = {
"countries": [
{
"code_2": "CA",
"code_3": "CAN",
"id": 3,
"name": "Canada",
"regions_uri": "http://localhost/regions/CA"
},
{
"code_2": "MX",
"code_3": "MEX",
"id": 2,
"name": "Mexico",
"regions_uri": "http://localhost/regions/MX"
},
{
"code_2": "US",
"code_3": "USA",
"id": 1,
"name": "United States",
"regions_uri": "http://localhost/regions/US"
}
],
"limit": 250,
"page": 1,
"total": 3
}
response = client.get("/countries?app_key=7sv3aPS45Ck8URGRKUtBdMWgKFN4ahfW")
assert response.status_code == expected_status
assert response.json == expected_json
@pytest.mark.integration
def test_get_regions_route(client):
expected_status = 200
expected_json = {
"limit": 100,
"page": 1,
"regions": [
{
"code_2": "CA",
"id": 1,
"name": "California"
},
{
"code_2": "OR",
"id": 2,
"name": "Oregon"
},
{
"code_2": "WA",
"id": 3,
"name": "Washington"
}
],
"total": 3
}
response = client.get("/regions/US?app_key=7sv3aPS45Ck8URGRKUtBdMWgKFN4ahfW")
assert response.status_code == expected_status
assert response.json == expected_json
| 30.316726
| 81
| 0.654361
| 2,022
| 17,038
| 5.172601
| 0.063798
| 0.161966
| 0.071517
| 0.096759
| 0.926284
| 0.913089
| 0.904771
| 0.876852
| 0.856583
| 0.850942
| 0
| 0.019103
| 0.238056
| 17,038
| 561
| 82
| 30.370766
| 0.786551
| 0.018547
| 0
| 0.769231
| 0
| 0
| 0.111424
| 0.051044
| 0
| 0
| 0
| 0
| 0.162896
| 1
| 0.042986
| false
| 0
| 0.020362
| 0
| 0.063348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b4b6f32e68b0f305e300de595c4d4406066a67d4
| 5,244
|
py
|
Python
|
notebooks/submission_generator_tools.py
|
sagoyal2/edit_axondeepseg
|
73ac9759fbe4887d87c0baa701828f9e24283b04
|
[
"MIT"
] | null | null | null |
notebooks/submission_generator_tools.py
|
sagoyal2/edit_axondeepseg
|
73ac9759fbe4887d87c0baa701828f9e24283b04
|
[
"MIT"
] | null | null | null |
notebooks/submission_generator_tools.py
|
sagoyal2/edit_axondeepseg
|
73ac9759fbe4887d87c0baa701828f9e24283b04
|
[
"MIT"
] | null | null | null |
import json
import os
import numpy as np
from AxonDeepSeg.config_tools import *
## ----------------------------------------------------------------------------------------------------------------
def generate_heliosjob(path_project, path_venv, bashname, configfile, config, path_trainingset, path_model, path_model_init = None, walltime=43200, gpu_per=1.0):
"""Generate config file given a config dict. Generate the corresponding submission."""
if not os.path.exists(path_model):
os.makedirs(path_model)
with open(os.path.join(path_model, configfile), 'w') as f:
json.dump(config, f, indent=2)
name_model = path_model.split('/')[-1]
file = open(os.path.join(path_model, bashname),"w")
file.write("#!/bin/bash \n")
file.write("#PBS -N "+ name_model +" \n")
file.write("#PBS -A rrp-355-aa \n")
file.write("#PBS -l walltime="+str(walltime)+" \n")
file.write("#PBS -l nodes=1:gpus=1 \n")
file.write("#PBS -l feature=k80 \n")
file.write("cd $SCRATCH/"+path_project+ "/axondeepseg/models/" + name_model + "/ \n")
file.write("source "+path_venv+"/bin/activate \n")
file.write("module load compilers/gcc/4.8.5 compilers/java/1.8 apps/buildtools compilers/swig apps/git apps/bazel/0.4.3 \n")
file.write("module load cuda/7.5 \n")
file.write("module load libs/cuDNN/5 \n")
file.write("python ../../AxonDeepSeg/trainingforhelios.py -co ")
file.write(str(configfile))
file.write(" -t ")
file.write(str(path_trainingset))
file.write(" -m ")
file.write(str(path_model))
if path_model_init:
file.write(" -i ")
file.write(str(path_model_init))
if gpu_per != 1.0:
file.write(" -g ")
file.write(str(gpu_per))
    print(name_model + ' created ...')
file.close()
## ----------------------------------------------------------------------------------------------------------------
def generate_guilliminjob(path_project, path_venv, bashname, configfile, config, path_trainingset, path_model,path_model_init = None, walltime=43200, gpu_per=1.0):
"""Generate config file given a config dict. Generate the corresponding submission."""
if not os.path.exists(path_model):
os.makedirs(path_model)
with open(os.path.join(path_model, configfile), 'w') as f:
json.dump(config, f, indent=2)
name_model = path_model.split('/')[-1]
file = open(os.path.join(path_model, bashname),"w")
file.write("#!/bin/bash \n")
file.write("#PBS -N "+ name_model +" \n")
file.write("#PBS -A rrp-355-aa \n")
file.write("#PBS -l walltime="+str(walltime)+" \n")
file.write("#PBS -l nodes=1:gpus=1 \n")
file.write("cd "+path_project+"/axondeepseg/models/" + name_model + "/ \n")
file.write("module load foss/2015b Python/2.7.12 \n")
file.write("source "+path_venv+"/bin/activate \n")
file.write("module load GCC/5.3.0-2.26 Bazel/0.4.4 CUDA/7.5.18 \n")
file.write("module load Tensorflow/1.0.0-Python-2.7.12 \n")
file.write("python ../../AxonDeepSeg/trainingforhelios.py -co ")
file.write(str(configfile))
file.write(" -t ")
file.write(str(path_trainingset))
file.write(" -m ")
file.write(str(path_model))
if path_model_init:
file.write(" -i ")
file.write(str(path_model_init))
if gpu_per != 1.0:
file.write(" -g ")
file.write(str(gpu_per))
    print(name_model + ' created ...')
file.close()
## ----------------------------------------------------------------------------------------------------------------
def generate_cedarjob(path_project, path_venv, bashname, configfile, config, path_trainingset, path_model, path_model_init = None, walltime=43200, gpu_per=1.0):
"""Generate config file given a config dict. Generate the corresponding submission."""
if not os.path.exists(path_model):
os.makedirs(path_model)
with open(os.path.join(path_model, configfile), 'w') as f:
json.dump(config, f, indent=2)
name_model = path_model.split('/')[-1]
    h, m = divmod(walltime, 3600)
    m = str(int(m / 60))
    h = str(h).zfill(2)  # zero-pad hours for the SBATCH "days-hours:minutes" time format
file = open(os.path.join(path_model, bashname),"w")
file.write("#!/bin/bash \n")
file.write("#SBATCH --account=def-jcohen \n")
file.write("#SBATCH --time 0-" + h + ":" + m + " \n")
file.write("#SBATCH --gres=gpu:1 \n")
file.write("#SBATCH --mem=11700M \n")
file.write("#SBATCH --output="+ name_model +".out \n")
file.write("cd /home/maxwab/scratch/"+path_project+ "/axondeepseg/models/" + name_model + "/ \n")
file.write("source "+path_venv+"/bin/activate \n")
file.write("python ../../AxonDeepSeg/trainingforhelios.py -co ")
file.write(str(configfile))
file.write(" -t ")
file.write(str(path_trainingset))
file.write(" -m ")
file.write(str(path_model))
if path_model_init:
file.write(" -i ")
file.write(str(path_model_init))
if gpu_per != 1.0:
file.write(" -g ")
file.write(str(gpu_per))
    print(name_model + ' created ...')
file.close()
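## ----------------------------------------------------------------------------------------------------------------
# Minimal usage sketch (illustrative paths and config values only): each
# generator dumps the config JSON into `path_model` and writes a matching
# scheduler submission script next to it.
if __name__ == '__main__':
    demo_config = {'network_learning_rate': 0.001}  # placeholder config dict
    generate_cedarjob(
        path_project='my_project',
        path_venv='/home/user/venvs/ads',
        bashname='submit_cedar.sh',
        configfile='config_network.json',
        config=demo_config,
        path_trainingset='/scratch/user/trainingset',
        path_model='/scratch/user/models/demo_model',
        walltime=7200,  # 2 hours -> "0-02:0" in the SBATCH time header
    )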
| 36.671329
| 163
| 0.57704
| 711
| 5,244
| 4.140647
| 0.174402
| 0.180367
| 0.098505
| 0.039742
| 0.839334
| 0.814198
| 0.814198
| 0.807405
| 0.807405
| 0.79144
| 0
| 0.024471
| 0.197368
| 5,244
| 142
| 164
| 36.929577
| 0.674982
| 0.064455
| 0
| 0.718447
| 0
| 0.019417
| 0.228615
| 0.035553
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.038835
| null | null | 0.029126
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b4fae755156edc4bb3cf706b2aadc3b247b630fd
| 121
|
py
|
Python
|
sectrails/__init__.py
|
SteveMcGrath/python-sectrails-lib
|
3f8d69064fbfe58bdda058c6ef994dc762e75134
|
[
"MIT"
] | 1
|
2021-07-26T14:23:36.000Z
|
2021-07-26T14:23:36.000Z
|
sectrails/__init__.py
|
SteveMcGrath/python-sectrails-lib
|
3f8d69064fbfe58bdda058c6ef994dc762e75134
|
[
"MIT"
] | null | null | null |
sectrails/__init__.py
|
SteveMcGrath/python-sectrails-lib
|
3f8d69064fbfe58bdda058c6ef994dc762e75134
|
[
"MIT"
] | null | null | null |
from .version import version as __version__
from .version import author as __author__
from .session import SecurityTrails
| 40.333333
| 43
| 0.85124
| 16
| 121
| 5.9375
| 0.4375
| 0.231579
| 0.357895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123967
| 121
| 3
| 44
| 40.333333
| 0.896226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2e9b350dffb3e16d5e388faab095daead3a6081b
| 1,025
|
py
|
Python
|
src/helpers/paths/rgb.py
|
markrofail/multi-modal-deep-learning-for-vehicle-sensor-data-abstraction-and-attack-detection
|
2f252c072f3091bb27506978dd90311f7f82f386
|
[
"MIT"
] | null | null | null |
src/helpers/paths/rgb.py
|
markrofail/multi-modal-deep-learning-for-vehicle-sensor-data-abstraction-and-attack-detection
|
2f252c072f3091bb27506978dd90311f7f82f386
|
[
"MIT"
] | 6
|
2020-09-25T22:41:00.000Z
|
2021-06-08T21:50:37.000Z
|
src/helpers/paths/rgb.py
|
markrofail/multi-modal-deep-learning-for-vehicle-sensor-data-abstraction-and-attack-detection
|
2f252c072f3091bb27506978dd90311f7f82f386
|
[
"MIT"
] | null | null | null |
from .. import paths
def external_frame(drive_date, drive_number, frame):
drive_string = '{}_drive_{:04d}_sync'.format(drive_date, drive_number)
frame_string = '{:010d}.png'.format(frame)
path = paths.DATA_EXTERNAL_PATH.joinpath('KITTI', drive_date, drive_string,
'image_02', 'data', frame_string)
return path
def interim_frame(drive_date, drive_number, frame):
drive_string = '{}_drive_{:04d}_sync'.format(drive_date, drive_number)
frame_string = '{:010d}.png'.format(frame)
path = paths.DATA_INTERIM_PATH.joinpath('KITTI', drive_date, drive_string,
'rgb', frame_string)
return path
def processed_tensor(drive_date, drive_number, frame):
drive_string = '{}_drive_{:04d}_sync'.format(drive_date, drive_number)
frame_string = '{:010d}.npy'.format(frame)
path = paths.DATA_PROCESSED_PATH.joinpath('KITTI', drive_date, drive_string,
'rgb', frame_string)
return path
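# Usage sketch (illustrative KITTI drive identifiers): the helpers zero-pad
# the drive and frame numbers into the directory and file names.
if __name__ == '__main__':
    print(external_frame('2011_09_26', 1, 0))
    # ends in .../KITTI/2011_09_26/2011_09_26_drive_0001_sync/image_02/data/0000000000.png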
| 35.344828
| 78
| 0.656585
| 125
| 1,025
| 5.016
| 0.216
| 0.129187
| 0.200957
| 0.191388
| 0.862839
| 0.781499
| 0.781499
| 0.722488
| 0.722488
| 0.722488
| 0
| 0.021303
| 0.221463
| 1,025
| 28
| 79
| 36.607143
| 0.764411
| 0
| 0
| 0.526316
| 0
| 0
| 0.122927
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157895
| false
| 0
| 0.052632
| 0
| 0.368421
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2c2bce3d557d301dbf7d21649a90ae5071eaa09c
| 7,414
|
py
|
Python
|
server/tests/test_xss.py
|
gprime31/XSS-Catcher
|
caffe46eb633c37cd4081e21f7ac26384d6f19a0
|
[
"MIT"
] | null | null | null |
server/tests/test_xss.py
|
gprime31/XSS-Catcher
|
caffe46eb633c37cd4081e21f7ac26384d6f19a0
|
[
"MIT"
] | null | null | null |
server/tests/test_xss.py
|
gprime31/XSS-Catcher
|
caffe46eb633c37cd4081e21f7ac26384d6f19a0
|
[
"MIT"
] | null | null | null |
import base64
import json
from app.models import XSS, Client
from .fixtures import client
from .functions import *
def test_generate_payload(client):
access_header, _ = login_get_headers(client, "admin", "xss")
create_client(client, access_header, name="name1", description="desc1")
client_name1 = Client.query.first()
rv = generate_payload(
client,
access_header,
client_id=1,
url="http://127.0.0.1",
xss_type="s",
to_gather=["cookies", "local_storage", "session_storage", "origin_url", "referrer"],
code_type="html",
tags=["tag1", "tag2"],
)
expected_response = f'\'>"><script>new Image().src="http://127.0.0.1/api/x/s/{client_name1.uid}?tags=tag1,tag2&cookies="+encodeURIComponent(document.cookie)+"&local_storage="+encodeURIComponent(JSON.stringify(localStorage))+"&session_storage="+encodeURIComponent(JSON.stringify(sessionStorage))+"&origin_url="+encodeURIComponent(location.href)+"&referrer="+encodeURIComponent(document.referrer)</script>'
assert rv.get_json()["detail"] == expected_response
rv = generate_payload(
client,
access_header,
client_id=1,
code_type="js",
xss_type="r",
to_gather=["cookies", "local_storage", "session_storage", "origin_url", "referrer", "dom", "screenshot", "fingerprint"],
url="http://127.0.0.1",
)
b64_payload = base64.b64encode(str.encode(",".join(["r", client_name1.uid, "", ""]))).decode()
expected_response = f';}};var js=document.createElement("script");js.src="http://127.0.0.1/static/collector.min.js";js.setAttribute("data", "{b64_payload}");document.body.appendChild(js);'
assert rv.get_json()["detail"] == expected_response
rv = generate_payload(client, access_header, client_id=1)
assert b"Missing url" in rv.data
rv = generate_payload(client, access_header, client_id=1, url="http://127.0.0.1")
assert b"Missing xss_type" in rv.data
rv = generate_payload(client, access_header, client_id=1, url="http://127.0.0.1", xss_type="s")
assert b"Missing code_type" in rv.data
rv = generate_payload(client, access_header, url="http://127.0.0.1")
assert b"Missing client_id" in rv.data
rv = generate_payload(client, access_header, client_id=1, url="http://127.0.0.1", code_type="js", xss_type="r")
expected_response = ';};new Image().src="http://127.0.0.1/api/x/r/' + client_name1.uid + '";'
assert rv.get_json()["detail"] == expected_response
rv = generate_payload(
client,
access_header,
client_id=1,
url="http://127.0.0.1",
code_type="html",
xss_type="r",
to_gather=["cookies", "local_storage", "session_storage", "origin_url", "referrer", "dom", "screenshot", "fingerprint"],
)
b64_payload = base64.b64encode(str.encode(",".join(["r", client_name1.uid, "", ""]))).decode()
expected_response = f'\'>"><script src=http://127.0.0.1/static/collector.min.js data="{b64_payload}"></script>'
assert rv.get_json()["detail"] == expected_response
rv = generate_payload(client, access_header, client_id=1, url="http://127.0.0.1", code_type="html", xss_type="r")
expected_response = '\'>"><img src="http://127.0.0.1/api/x/r/{}" />'.format(client_name1.uid)
assert rv.get_json()["detail"] == expected_response
rv = generate_payload(
client,
access_header,
client_id=1,
url="http://127.0.0.1",
code_type="js",
xss_type="r",
tags=["tag1", "tag2"],
to_gather=["origin_url"],
)
expected_response = f';}};new Image().src="http://127.0.0.1/api/x/r/{client_name1.uid}?tags=tag1,tag2&origin_url="+encodeURIComponent(location.href);'
assert rv.get_json()["detail"] == expected_response
rv = generate_payload(client, access_header, client_id=1, url="http://127.0.0.1", code_type="js", xss_type="r", tags=["tag1", "tag2"])
expected_response = f';}};new Image().src="http://127.0.0.1/api/x/r/{client_name1.uid}?tags=tag1,tag2;'
assert rv.get_json()["detail"] == expected_response
rv = generate_payload(
client,
access_header,
client_id=1,
url="http://127.0.0.1",
code_type="js",
xss_type="r",
to_gather=["origin_url"],
)
expected_response = f';}};new Image().src="http://127.0.0.1/api/x/r/{client_name1.uid}?origin_url="+encodeURIComponent(location.href);'
assert rv.get_json()["detail"] == expected_response
def test_delete_xss(client):
access_header, _ = login_get_headers(client, "admin", "xss")
create_client(client, access_header, name="name1", description="desc1")
client_name1 = Client.query.first()
get_x(client, access_header, "s", client_name1.uid)
assert XSS.query.count() == 1
delete_xss(client, access_header, 1)
assert XSS.query.count() == 0
def test_get_loot(client):
access_header, _ = login_get_headers(client, "admin", "xss")
create_client(client, access_header, name="name1", description="desc1")
client_name1 = Client.query.first()
get_x(client, access_header, "s", client_name1.uid, cookies="cookie=good")
rv = get_loot_type(client, access_header, 1, "cookies")
assert json.loads(rv.data)["data"] == {"cookie": "good"}
def test_delete_loot(client):
access_header, _ = login_get_headers(client, "admin", "xss")
create_client(client, access_header, name="name1", description="desc1")
client_name1 = Client.query.first()
get_x(client, access_header, "s", client_name1.uid, cookies="cookie=good")
delete_loot_type(client, access_header, 1, "cookies")
assert json.loads(XSS.query.first().data) == {}
def test_get_xss_all(client):
access_header, _ = login_get_headers(client, "admin", "xss")
create_client(client, access_header, name="name1", description="desc1")
client1 = Client.query.filter_by(id=1).first()
get_x(client, access_header, "r", client1.uid)
rv = get_xss_all(client, access_header, client_id=1, type="reflected")
assert len(json.loads(rv.data)) == 1
rv = get_xss_all(client, access_header, client_id=1, type="badtype")
assert b"Unknown XSS type" in rv.data
rv = get_xss_all(client, access_header, client_id="asd", type="reflected")
assert b"Bad client ID" in rv.data
def test_get_xss(client):
access_header, _ = login_get_headers(client, "admin", "xss")
create_client(client, access_header, name="name1", description="desc1")
client1 = Client.query.filter_by(id=1).first()
get_x(client, access_header, "r", client1.uid)
rv = get_xss(client, access_header, 1)
assert "ip_addr" in json.loads(rv.data).keys()
def test_get_all_loot(client):
access_header, _ = login_get_headers(client, "admin", "xss")
create_client(client, access_header, name="name1", description="desc1")
client1 = Client.query.filter_by(id=1).first()
get_x(client, access_header, "r", client1.uid, test_data="test", dom="<h1>test</h1>")
rv = get_loot(client, access_header, client_id=1)
for xss in json.loads(rv.data):
for element_name, element_value in xss["data"].items():
if element_name == "test_data":
assert element_value == "test"
if element_name == "dom":
assert element_value == ""
rv = get_loot(client, access_header, client_id="asd")
assert b"Bad client ID" in rv.data
| 47.22293
| 408
| 0.664823
| 1,040
| 7,414
| 4.519231
| 0.113462
| 0.104681
| 0.157021
| 0.036383
| 0.828298
| 0.793404
| 0.760426
| 0.760426
| 0.734468
| 0.711915
| 0
| 0.033043
| 0.163205
| 7,414
| 156
| 409
| 47.525641
| 0.724533
| 0
| 0
| 0.553957
| 1
| 0.035971
| 0.225519
| 0.029808
| 0
| 0
| 0
| 0
| 0.165468
| 1
| 0.05036
| false
| 0
| 0.035971
| 0
| 0.086331
| 0.014388
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
25aa59c221af7f6f424ff4d96716e5f732be58bc
| 118
|
py
|
Python
|
tests/__init__.py
|
jobvdl1997/llvm-lnt
|
f4838441d0ae6ecc3af0e8446e9e4a70f546371f
|
[
"Apache-2.0"
] | 12
|
2015-10-29T19:28:02.000Z
|
2020-02-04T21:25:32.000Z
|
tests/__init__.py
|
jobvdl1997/llvm-lnt
|
f4838441d0ae6ecc3af0e8446e9e4a70f546371f
|
[
"Apache-2.0"
] | 3
|
2017-03-04T14:23:14.000Z
|
2019-11-02T21:56:51.000Z
|
tests/__init__.py
|
jobvdl1997/llvm-lnt
|
f4838441d0ae6ecc3af0e8446e9e4a70f546371f
|
[
"Apache-2.0"
] | 14
|
2015-04-03T03:36:06.000Z
|
2019-10-23T14:09:08.000Z
|
import os
import lit.discovery
def test_all():
return lit.discovery.load_test_suite([os.path.dirname(__file__)])
| 19.666667
| 69
| 0.771186
| 18
| 118
| 4.666667
| 0.722222
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110169
| 118
| 5
| 70
| 23.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
25aa85866010ce822b07ba3aaef048cf131442ee
| 114
|
py
|
Python
|
the_pile/__init__.py
|
gunjan-bhattarai/the-pile
|
a21d15c2602258ca4ce2531dba192501aa56d6b6
|
[
"MIT"
] | 380
|
2020-12-27T03:13:39.000Z
|
2022-03-29T06:25:15.000Z
|
the_pile/__init__.py
|
gunjan-bhattarai/the-pile
|
a21d15c2602258ca4ce2531dba192501aa56d6b6
|
[
"MIT"
] | 66
|
2020-09-06T14:38:38.000Z
|
2020-12-12T20:07:18.000Z
|
the_pile/__init__.py
|
gunjan-bhattarai/the-pile
|
a21d15c2602258ca4ce2531dba192501aa56d6b6
|
[
"MIT"
] | 29
|
2021-01-02T08:31:48.000Z
|
2022-02-17T20:44:06.000Z
|
from the_pile.pile import ThePile
from the_pile.datasets import *
import hashlib
def pile():
return ThePile()
| 19
| 33
| 0.77193
| 17
| 114
| 5.058824
| 0.529412
| 0.162791
| 0.255814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 114
| 6
| 34
| 19
| 0.895833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.6
| 0.2
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
25e4651e72501f7e41461d6f31629f3a87c07cf8
| 45,785
|
py
|
Python
|
whats_fresh/whats_fresh_api/tests/views/test_vendors.py
|
osu-cass/whats-fresh-api
|
0ace76c3d7d423e95d5e3b3c7cd0f74abcf975bd
|
[
"Apache-2.0"
] | 4
|
2015-08-20T19:38:03.000Z
|
2016-01-20T18:52:24.000Z
|
whats_fresh/whats_fresh_api/tests/views/test_vendors.py
|
osu-cass/whats-fresh-api
|
0ace76c3d7d423e95d5e3b3c7cd0f74abcf975bd
|
[
"Apache-2.0"
] | 39
|
2015-01-08T23:50:47.000Z
|
2021-01-05T20:19:15.000Z
|
whats_fresh/whats_fresh_api/tests/views/test_vendors.py
|
osu-cass/whats-fresh-api
|
0ace76c3d7d423e95d5e3b3c7cd0f74abcf975bd
|
[
"Apache-2.0"
] | 8
|
2015-03-07T23:52:30.000Z
|
2015-12-25T04:25:23.000Z
|
from django.test import TestCase
from django.test.client import Client
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
import json
class VendorsTestCase(TestCase):
fixtures = ['test_fixtures']
def setUp(self):
user = User.objects.create_user(username='test', password='pass')
admin_group = Group(name='Administration Users')
admin_group.save()
user.groups.add(admin_group)
self.client.post(
reverse('login'), {'username': 'test', 'password': 'pass'})
self.maxDiff = None
self.expected_list = """
{
"error": {
"status": false,
"name": null,
"text": null,
"debug": null,
"level": null
},
"vendors": [
{
"id": 1,
"name": "No Optional Null Fields Are Null",
"status": true,
"description": "This is a vendor shop.",
"lat": 37.833688,
"lng": -122.478002,
"street": "1633 Sommerville Rd",
"city": "Sausalito",
"state": "CA",
"zip": "94965",
"hours": "Open Tuesday, 10am to 5pm",
"location_description": "Location description",
"contact_name": "A. Persson",
"phone": "+15417377627",
"website": "http://example.com",
"email": "a@perr.com",
"story": 1,
"ext": {
},
"created": "2014-08-08T23:27:05.568Z",
"modified": "2014-08-08T23:27:05.568Z",
"products": [
{
"product_id": 2,
"name": "Starfish Voyager",
"preparation": "Live",
"preparation_id": 1
},
{
"product_id": 1,
"name": "Ezri Dax",
"preparation": "Live",
"preparation_id": 1
}
]
},
{
"id": 2,
"name": "All Optional Null Fields Are Null",
"status": null,
"description": "Ceci n'est pas un magasin.",
"lat": 37.833688,
"lng": -122.478002,
"street": "501 Isabelle Rd",
"city": "North Bend",
"state": "OR",
"zip": "97459",
"location_description": "",
"contact_name": "Isabelle",
"phone": null,
"hours": "",
"website": "",
"email": "",
"story": null,
"ext": {},
"created": "2014-08-08T23:27:05.568Z",
"modified": "2014-08-08T23:27:05.568Z",
"products": [
{
"product_id": 1,
"name": "Ezri Dax",
"preparation": "Live",
"preparation_id": 1
}
]
}
]
}
"""
self.expected_limited_error = """
{
"error": {
"status": false,
"name": null,
"text": null,
"debug": null,
"level": null
}
}"""
def test_url_endpoint(self):
url = reverse('vendors-list')
self.assertEqual(url, '/1/vendors')
def test_no_parameters(self):
response = self.client.get(reverse('vendors-list')).content
parsed_answer = json.loads(response)
expected_answer = json.loads(self.expected_list)
self.maxDiff = None
self.assertEqual(parsed_answer, expected_answer)
def test_limited_vendors(self):
response = self.client.get(
"%s?limit=1" % reverse('vendors-list')).content
parsed_answer = json.loads(response)
expected_answer = json.loads(self.expected_limited_error)
self.assertEqual(parsed_answer['error'], expected_answer['error'])
self.assertEqual(len(parsed_answer['vendors']), 1)
class NoVendorViewTestCase(TestCase):
def setUp(self):
user = User.objects.create_user(username='test', password='pass')
admin_group = Group(name='Administration Users')
admin_group.save()
user.groups.add(admin_group)
self.client.post(
reverse('login'), {'username': 'test', 'password': 'pass'})
self.expected_no_vendors = """
{
"error": {
"status": true,
"text": "No Vendors found",
"name": "No Vendors",
"debug": "",
"level": "Information"
},
"vendors": []
}"""
def test_no_products(self):
response = self.client.get(reverse('vendors-list'))
parsed_answer = json.loads(response.content)
        expected_answer = json.loads(self.expected_no_vendors)
        self.assertEqual(response.status_code, 200)
        self.maxDiff = None
self.assertEqual(parsed_answer, expected_answer)
class VendorsLocationTestCase(TestCase):
"""
Test whether the /vendors/ view returns the correct results when given a
coordinate to center on.
    For future test-writers: the location_fixtures tests have eight vendors
    in them -- two each in Newport, Waldport, Portland, and Pacific City.
    Each vendor has one product, and each product is sold at one of the two
    vendors in the city.
This means you can easily test the proximity limit by limiting yourself
to one city, or just the coast, etc.
"""
fixtures = ['location_fixtures']
# These tests are made assuming a proximity of 20. If this default value
# is changed, then the tests would break without overriding it.
@override_settings(DEFAULT_PROXIMITY='20')
def setUp(self):
user = User.objects.create_user(username='test', password='pass')
admin_group = Group(name='Administration Users')
admin_group.save()
user.groups.add(admin_group)
self.client.post(
reverse('login'), {'username': 'test', 'password': 'pass'})
self.maxDiff = None
# No vendors. This is the return for location queries from
# the middle of nowhere.
self.expected_no_vendors = """
{
"error": {
"debug": "",
"status": true,
"level": "Information",
"text": "No Vendors found",
"name": "No Vendors"
},
"vendors": []
}"""
# All fish around Newport
# This JSON contains the four stores in Newport and Waldport,
# but not the Portland ones.
self.expected_nearby_all_vendors = """
{
"error": {
"level": null,
"status": false,
"name": null,
"debug": null,
"text": null
},
"vendors": [{
"id": 3,
"website": "",
"street": "146 SE Bay Blvd",
"contact_name": "Newport Tuna Contact",
"city": "Newport",
"story": 2,
"zip": "97365",
"location_description": "Located on Bay Blvd in Newport",
"lng": -124.050122,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Tuna",
"phone": null,
"lat": 44.631592,
"name": "Newport Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 4,
"website": "",
"street": "1226 Oregon Coast Hwy",
"contact_name": "Newpotr Halibut Contact",
"city": "Newport",
"story": 1,
"zip": "97365",
"location_description": "Located on Oregon Coast Hwy in Newport",
"lng": -124.052868,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Halibut",
"phone": null,
"lat": 44.646006,
"name": "Newport Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation": "Frozen",
"preparation_id": 1,
"name": "Halibut"
}
]
},
{
"id": 5,
"website": "",
"street": "522 NW Spring St",
"contact_name": "Waldport Tuna Contact",
"city": "Waldport",
"story": 2,
"zip": "97394",
"location_description": "Located on Spring St in Waldport",
"lng": -124.066166,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Tuna",
"phone": null,
"lat": 44.427761,
"name": "Waldport Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 6,
"website": "",
"street": "190 SW Maple St",
"contact_name": "Waldport Halibut Contact",
"city": "Waldport",
"story": 1,
"zip": "97364",
"location_description": "Located on SW Maple St in Waldport",
"lng": -124.069126,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Halibut",
"phone": null,
"lat": 44.425188,
"name": "Waldport Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
]
}]
}"""
# All fish around Newport, with extended proximity.
        # This JSON contains the six stores in Newport, Waldport, and Pacific
        # City, but not the Portland ones.
self.expected_nearby_extended = """
{
"error": {
"debug": null,
"status": false,
"text": null,
"name": null,
"level": null
},
"vendors": [
{
"status": true,
"city": "Newport",
"website": "",
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Tuna",
"zip": "97365",
"created": "2014-08-08T23:27:05.568Z",
"story": 2,
"ext": {
},
"location_description": "Located on Bay Blvd in Newport",
"lng": -124.050122,
"email": "",
"hours": "",
"phone": null,
"state": "OR",
"street": "146 SE Bay Blvd",
"products": [
{
"preparation": "Frozen",
"preparation_id": 1,
"product_id": 2,
"name": "Tuna"
}
],
"lat": 44.631592,
"contact_name": "Newport Tuna Contact",
"id": 3,
"name": "Newport Tuna"
},
{
"status": true,
"city": "Newport",
"website": "",
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Halibut",
"zip": "97365",
"created": "2014-08-08T23:27:05.568Z",
"story": 1,
"ext": {
},
"location_description": "Located on Oregon Coast Hwy in Newport",
"lng": -124.052868,
"email": "",
"hours": "",
"phone": null,
"state": "OR",
"street": "1226 Oregon Coast Hwy",
"products": [
{
"preparation": "Frozen",
"preparation_id": 1,
"product_id": 1,
"name": "Halibut"
}
],
"lat": 44.646006,
"contact_name": "Newpotr Halibut Contact",
"id": 4,
"name": "Newport Halibut"
},
{
"status": true,
"city": "Waldport",
"website": "",
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Tuna",
"zip": "97394",
"created": "2014-08-08T23:27:05.568Z",
"story": 2,
"ext": {
},
"location_description": "Located on Spring St in Waldport",
"lng": -124.066166,
"email": "",
"hours": "",
"phone": null,
"state": "OR",
"street": "522 NW Spring St",
"products": [
{
"preparation": "Frozen",
"preparation_id": 1,
"product_id": 2,
"name": "Tuna"
}
],
"lat": 44.427761,
"contact_name": "Waldport Tuna Contact",
"id": 5,
"name": "Waldport Tuna"
},
{
"status": true,
"city": "Waldport",
"website": "",
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Halibut",
"zip": "97364",
"created": "2014-08-08T23:27:05.568Z",
"story": 1,
"ext": {
},
"location_description": "Located on SW Maple St in Waldport",
"lng": -124.069126,
"email": "",
"hours": "",
"phone": null,
"state": "OR",
"street": "190 SW Maple St",
"products": [
{
"preparation": "Frozen",
"preparation_id": 1,
"product_id": 1,
"name": "Halibut"
}
],
"lat": 44.425188,
"contact_name": "Waldport Halibut Contact",
"id": 6,
"name": "Waldport Halibut"
},
{
"status": true,
"city": "Cloverdale",
"website": "",
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Pacific City Tuna",
"zip": "97112",
"created": "2014-08-08T23:27:05.568Z",
"story": 2,
"ext": {
},
"location_description": "Located on Roger Ave in Pacific City",
"lng": -123.958093,
"email": "",
"hours": "",
"phone": null,
"state": "OR",
"street": "35650 Roger Ave",
"products": [
{
"preparation": "Frozen",
"preparation_id": 1,
"product_id": 2,
"name": "Tuna"
}
],
"lat": 45.197105,
"contact_name": "Pacific City Tuna Contact",
"id": 7,
"name": "Pacific City Tuna"
},
{
"status": true,
"city": "Cloverdale",
"website": "",
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Pacific City Halibut",
"zip": "97112",
"created": "2014-08-08T23:27:05.568Z",
"story": 1,
"ext": {
},
"location_description": "Located on Brooten Rd in Pacific City",
"lng": -123.959418,
"email": "",
"hours": "",
"phone": null,
"state": "OR",
"street": "34455 Brooten Rd",
"products": [
{
"preparation": "Frozen",
"preparation_id": 1,
"product_id": 1,
"name": "Halibut"
}
],
"lat": 45.207253,
"contact_name": "Pacific City Halibut Contact",
"id": 8,
"name": "Pacific City Halibut"
}
]
}"""
        # All vendors for all products
        # This JSON contains all eight fish stores in Newport, Waldport,
        # Portland, and Pacific City. This is the return for bad coordinates.
self.expected_error_result = """
{
"error": {
"level": "Warning",
"status": true,
"text": "There was an error with the given coordinates \
not_a_latitude, not_a_longitude",
"name": "Bad location",
"debug": "ValueError: String or unicode input unrecognized as \
WKT EWKT, and HEXEWKB."
},
"vendors": [
{
"id": 1,
"website": "",
"street": "720 SW Broadway",
"contact_name": "Portland Tuna Contact",
"city": "Portland",
"story": 2,
"zip": "97204",
"location_description": "Located on Broadway in Portland",
"lng": -122.67963,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Portland Tuna",
"phone": null,
"lat": 45.518962,
"name": "Portland Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 3,
"website": "",
"street": "146 SE Bay Blvd",
"contact_name": "Newport Tuna Contact",
"city": "Newport",
"story": 2,
"zip": "97365",
"location_description": "Located on Bay Blvd in Newport",
"lng": -124.050122,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Tuna",
"phone": null,
"lat": 44.631592,
"name": "Newport Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 2,
"website": "",
"street": "1 SW Pine St",
"contact_name": "Portland Halibut Contact",
"city": "Portland",
"story": 1,
"zip": "97204",
"location_description": "Located on Pine in Portland",
"lng": -122.670619,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Portland Halibut",
"phone": null,
"lat": 45.520988,
"name": "Portland Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
]
},
{
"id": 5,
"website": "",
"street": "522 NW Spring St",
"contact_name": "Waldport Tuna Contact",
"city": "Waldport",
"story": 2,
"zip": "97394",
"location_description": "Located on Spring St in Waldport",
"lng": -124.066166,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Tuna",
"phone": null,
"lat": 44.427761,
"name": "Waldport Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 4,
"website": "",
"street": "1226 Oregon Coast Hwy",
"contact_name": "Newpotr Halibut Contact",
"city": "Newport",
"story": 1,
"zip": "97365",
"location_description": "Located on Oregon Coast Hwy in Newport",
"lng": -124.052868,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Halibut",
"phone": null,
"lat": 44.646006,
"name": "Newport Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
]
},
{
"id": 6,
"website": "",
"street": "190 SW Maple St",
"contact_name": "Waldport Halibut Contact",
"city": "Waldport",
"story": 1,
"zip": "97364",
"location_description": "Located on SW Maple St in Waldport",
"lng": -124.069126,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Halibut",
"phone": null,
"lat": 44.425188,
"name": "Waldport Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
]
},
{
"status": true,
"city": "Cloverdale",
"website": "",
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Pacific City Tuna",
"zip": "97112",
"created": "2014-08-08T23:27:05.568Z",
"story": 2,
"ext": {
},
"location_description": "Located on Roger Ave in Pacific City",
"lng": -123.958093,
"email": "",
"hours": "",
"phone": null,
"state": "OR",
"street": "35650 Roger Ave",
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
],
"lat": 45.197105,
"contact_name": "Pacific City Tuna Contact",
"id": 7,
"name": "Pacific City Tuna"
},
{
"status": true,
"city": "Cloverdale",
"website": "",
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Pacific City Halibut",
"zip": "97112",
"created": "2014-08-08T23:27:05.568Z",
"story": 1,
"ext": {
},
"location_description": "Located on Brooten Rd in Pacific City",
"lng": -123.959418,
"email": "",
"hours": "",
"phone": null,
"state": "OR",
"street": "34455 Brooten Rd",
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
],
"lat": 45.207253,
"contact_name": "Pacific City Halibut Contact",
"id": 8,
"name": "Pacific City Halibut"
}
]
}"""
        # All vendors for all products
        # This JSON contains all eight fish stores in Newport, Waldport,
        # Portland, and Pacific City. This is the return for bad coordinates.
self.expected_error_missing_long = """
{
"error": {
"level": "Warning",
"status": true,
"name": "Bad location",
"text": "There was an error with the given coordinates -45.232, None",
"debug": "GEOSException: Error encountered checking \
Geometry returned from GEOS C function \\"GEOSWKTReader_read_r\\"."
},
"vendors": [
{
"id": 1,
"website": "",
"street": "720 SW Broadway",
"contact_name": "Portland Tuna Contact",
"city": "Portland",
"story": 2,
"zip": "97204",
"location_description": "Located on Broadway in Portland",
"lng": -122.67963,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Portland Tuna",
"phone": null,
"lat": 45.518962,
"name": "Portland Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 3,
"website": "",
"street": "146 SE Bay Blvd",
"contact_name": "Newport Tuna Contact",
"city": "Newport",
"story": 2,
"zip": "97365",
"location_description": "Located on Bay Blvd in Newport",
"lng": -124.050122,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Tuna",
"phone": null,
"lat": 44.631592,
"name": "Newport Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 2,
"website": "",
"street": "1 SW Pine St",
"contact_name": "Portland Halibut Contact",
"city": "Portland",
"story": 1,
"zip": "97204",
"location_description": "Located on Pine in Portland",
"lng": -122.670619,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Portland Halibut",
"phone": null,
"lat": 45.520988,
"name": "Portland Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
]
},
{
"id": 5,
"website": "",
"street": "522 NW Spring St",
"contact_name": "Waldport Tuna Contact",
"city": "Waldport",
"story": 2,
"zip": "97394",
"location_description": "Located on Spring St in Waldport",
"lng": -124.066166,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Tuna",
"phone": null,
"lat": 44.427761,
"name": "Waldport Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 4,
"website": "",
"street": "1226 Oregon Coast Hwy",
"contact_name": "Newpotr Halibut Contact",
"city": "Newport",
"story": 1,
"zip": "97365",
"location_description": "Located on Oregon Coast Hwy in Newport",
"lng": -124.052868,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Halibut",
"phone": null,
"lat": 44.646006,
"name": "Newport Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
]
},
{
"id": 6,
"website": "",
"street": "190 SW Maple St",
"contact_name": "Waldport Halibut Contact",
"city": "Waldport",
"story": 1,
"zip": "97364",
"location_description": "Located on SW Maple St in Waldport",
"lng": -124.069126,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Halibut",
"phone": null,
"lat": 44.425188,
"name": "Waldport Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
]
},
{
"status": true,
"city": "Cloverdale",
"website": "",
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Pacific City Tuna",
"zip": "97112",
"created": "2014-08-08T23:27:05.568Z",
"story": 2,
"ext": {
},
"location_description": "Located on Roger Ave in Pacific City",
"lng": -123.958093,
"email": "",
"hours": "",
"phone": null,
"state": "OR",
"street": "35650 Roger Ave",
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
],
"lat": 45.197105,
"contact_name": "Pacific City Tuna Contact",
"id": 7,
"name": "Pacific City Tuna"
},
{
"status": true,
"city": "Cloverdale",
"website": "",
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Pacific City Halibut",
"zip": "97112",
"created": "2014-08-08T23:27:05.568Z",
"story": 1,
"ext": {
},
"location_description": "Located on Brooten Rd in Pacific City",
"lng": -123.959418,
"email": "",
"hours": "",
"phone": null,
"state": "OR",
"street": "34455 Brooten Rd",
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
],
"lat": 45.207253,
"contact_name": "Pacific City Halibut Contact",
"id": 8,
"name": "Pacific City Halibut"
}
]
}"""
        # All vendors for all products
        # This JSON contains all eight fish stores in Newport, Waldport,
        # Portland, and Pacific City. This is the return for bad coordinates.
self.expected_error_missing_lat = """
{
"error": {
"level": "Warning",
"status": true,
"name": "Bad location",
"text": "There was an error with the given coordinates None, -45.232",
"debug": "GEOSException: Error encountered checking Geometry \
returned from GEOS C function \\"GEOSWKTReader_read_r\\"."
},
"vendors": [
{
"id": 1,
"website": "",
"street": "720 SW Broadway",
"contact_name": "Portland Tuna Contact",
"city": "Portland",
"story": 2,
"zip": "97204",
"location_description": "Located on Broadway in Portland",
"lng": -122.67963,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Portland Tuna",
"phone": null,
"lat": 45.518962,
"name": "Portland Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 3,
"website": "",
"street": "146 SE Bay Blvd",
"contact_name": "Newport Tuna Contact",
"city": "Newport",
"story": 2,
"zip": "97365",
"location_description": "Located on Bay Blvd in Newport",
"lng": -124.050122,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Tuna",
"phone": null,
"lat": 44.631592,
"name": "Newport Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 2,
"website": "",
"street": "1 SW Pine St",
"contact_name": "Portland Halibut Contact",
"city": "Portland",
"story": 1,
"zip": "97204",
"location_description": "Located on Pine in Portland",
"lng": -122.670619,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Portland Halibut",
"phone": null,
"lat": 45.520988,
"name": "Portland Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
]
},
{
"id": 5,
"website": "",
"street": "522 NW Spring St",
"contact_name": "Waldport Tuna Contact",
"city": "Waldport",
"story": 2,
"zip": "97394",
"location_description": "Located on Spring St in Waldport",
"lng": -124.066166,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Tuna",
"phone": null,
"lat": 44.427761,
"name": "Waldport Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 4,
"website": "",
"street": "1226 Oregon Coast Hwy",
"contact_name": "Newpotr Halibut Contact",
"city": "Newport",
"story": 1,
"zip": "97365",
"location_description": "Located on Oregon Coast Hwy in Newport",
"lng": -124.052868,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Halibut",
"phone": null,
"lat": 44.646006,
"name": "Newport Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
]
},
{
"id": 6,
"website": "",
"street": "190 SW Maple St",
"contact_name": "Waldport Halibut Contact",
"city": "Waldport",
"story": 1,
"zip": "97364",
"location_description": "Located on SW Maple St in Waldport",
"lng": -124.069126,
"state": "OR",
"hours": "",
"email": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Halibut",
"phone": null,
"lat": 44.425188,
"name": "Waldport Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
]
},
{
"status": true,
"city": "Cloverdale",
"website": "",
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Pacific City Tuna",
"zip": "97112",
"created": "2014-08-08T23:27:05.568Z",
"story": 2,
"ext": {
},
"location_description": "Located on Roger Ave in Pacific City",
"lng": -123.958093,
"email": "",
"hours": "",
"phone": null,
"state": "OR",
"street": "35650 Roger Ave",
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
],
"lat": 45.197105,
"contact_name": "Pacific City Tuna Contact",
"id": 7,
"name": "Pacific City Tuna"
},
{
"status": true,
"city": "Cloverdale",
"website": "",
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Pacific City Halibut",
"zip": "97112",
"created": "2014-08-08T23:27:05.568Z",
"story": 1,
"ext": {
},
"location_description": "Located on Brooten Rd in Pacific City",
"lng": -123.959418,
"email": "",
"hours": "",
"phone": null,
"state": "OR",
"street": "34455 Brooten Rd",
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
],
"lat": 45.207253,
"contact_name": "Pacific City Halibut Contact",
"id": 8,
"name": "Pacific City Halibut"
}
]
}"""
# Nearest 3 fish stores, with a proximity of 20 miles.
self.expected_nearby_limit_3 = """
{
"error": {
"level": null,
"status": false,
"name": null,
"debug": null,
"text": null
},
"vendors": [{
"id": 3,
"website": "",
"street": "146 SE Bay Blvd",
"contact_name": "Newport Tuna Contact",
"city": "Newport",
"story": 2,
"zip": "97365",
"location_description": "Located on Bay Blvd in Newport",
"lng": -124.050122,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Tuna",
"phone": null,
"lat": 44.631592,
"name": "Newport Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 4,
"website": "",
"street": "1226 Oregon Coast Hwy",
"contact_name": "Newpotr Halibut Contact",
"city": "Newport",
"story": 1,
"zip": "97365",
"location_description": "Located on Oregon Coast Hwy in Newport",
"lng": -124.052868,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Halibut",
"phone": null,
"lat": 44.646006,
"name": "Newport Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation": "Frozen",
"preparation_id": 1,
"name": "Halibut"
}
]
},
{
"id": 5,
"website": "",
"street": "522 NW Spring St",
"contact_name": "Waldport Tuna Contact",
"city": "Waldport",
"story": 2,
"zip": "97394",
"location_description": "Located on Spring St in Waldport",
"lng": -124.066166,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Tuna",
"phone": null,
"lat": 44.427761,
"name": "Waldport Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
}]
}"""
# When a bad limit is given, warn and ignore it.
# This JSON contains the four stores in Newport and Waldport,
# but not the Portland ones.
self.expected_nearby_bad_limit = """
{
"error": {
"level": "Warning",
"status": true,
"name": "Bad Limit",
"debug": "ValueError: invalid literal for int() with base 10: 'cat'",
"text": "Invalid limit. Returning all results."
},
"vendors": [{
"id": 3,
"website": "",
"street": "146 SE Bay Blvd",
"contact_name": "Newport Tuna Contact",
"city": "Newport",
"story": 2,
"zip": "97365",
"location_description": "Located on Bay Blvd in Newport",
"lng": -124.050122,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Tuna",
"phone": null,
"lat": 44.631592,
"name": "Newport Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 4,
"website": "",
"street": "1226 Oregon Coast Hwy",
"contact_name": "Newpotr Halibut Contact",
"city": "Newport",
"story": 1,
"zip": "97365",
"location_description": "Located on Oregon Coast Hwy in Newport",
"lng": -124.052868,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Newport Halibut",
"phone": null,
"lat": 44.646006,
"name": "Newport Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation": "Frozen",
"preparation_id": 1,
"name": "Halibut"
}
]
},
{
"id": 5,
"website": "",
"street": "522 NW Spring St",
"contact_name": "Waldport Tuna Contact",
"city": "Waldport",
"story": 2,
"zip": "97394",
"location_description": "Located on Spring St in Waldport",
"lng": -124.066166,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Tuna",
"phone": null,
"lat": 44.427761,
"name": "Waldport Tuna",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 2,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Tuna"
}
]
},
{
"id": 6,
"website": "",
"street": "190 SW Maple St",
"contact_name": "Waldport Halibut Contact",
"city": "Waldport",
"story": 1,
"zip": "97364",
"location_description": "Located on SW Maple St in Waldport",
"lng": -124.069126,
"state": "OR",
"email": "",
"hours": "",
"status": true,
"modified": "2014-08-08T23:27:05.568Z",
"description": "Fake Waldport Halibut",
"phone": null,
"lat": 44.425188,
"name": "Waldport Halibut",
"created": "2014-08-08T23:27:05.568Z",
"ext": {
},
"products": [
{
"product_id": 1,
"preparation_id": 1,
"preparation": "Frozen",
"name": "Halibut"
}
]
}]
}"""
def test_successful_location_all_products(self):
"""
Test that good parameters return all vendors ordered by location.
There will also be a default limit of 20 miles.
"""
all_vendors_data = json.loads(self.client.get(
'%s?lat=44.609079&lng=-124.052538' % reverse('vendors-list')
).content)
expected_answer = json.loads(self.expected_nearby_all_vendors)
self.assertEqual(all_vendors_data, expected_answer)
def test_good_proximity_all_products(self):
"""
Test that good parameters return all vendors ordered by location.
Extending the proximity to 50 miles adds two stores.
"""
extended_proximity = json.loads(self.client.get(
'%s?lat=44.609079&lng=-124.052538&'
'proximity=50' % reverse('vendors-list')).content)
expected_answer = json.loads(self.expected_nearby_extended)
self.assertEqual(extended_proximity, expected_answer)
def test_bad_location_with_proximity_parameters(self):
"""
Test that a bad location returns an error with good proximity.
"""
all_vendors_data = json.loads(self.client.get(
'%s?lat=not_a_latitude&lng=not_a_longitude&'
'proximity=50' % reverse('vendors-list')).content)
expected_answer = json.loads(self.expected_error_result)
all_vendors_data['vendors'] = sorted(
all_vendors_data['vendors'], key=lambda k: k['id'])
expected_answer['vendors'] = sorted(
expected_answer['vendors'], key=lambda k: k['id'])
self.assertEqual(all_vendors_data, expected_answer)
def test_bad_location_parameters(self):
"""
Test that only one parameter (only lat/only long) returns a Warning,
and that bad parameter values (text) return Warning.
"""
# Coordinates are not numbers
all_vendors_data = json.loads(self.client.get(
'%s?lat=not_a_latitude&lng=not_a_longitude' % reverse(
'vendors-list')).content)
expected_answer = json.loads(self.expected_error_result)
all_vendors_data['vendors'] = sorted(
all_vendors_data['vendors'], key=lambda k: k['id'])
expected_answer['vendors'] = sorted(
expected_answer['vendors'], key=lambda k: k['id'])
for vendor in all_vendors_data['vendors']:
for product in vendor['products']:
self.assertTrue('product_id' in product)
for vendor in expected_answer['vendors']:
vendor['products'] = sorted(
vendor['products'], key=lambda k: k['product_id'])
for vendor in all_vendors_data['vendors']:
vendor['products'] = sorted(
vendor['products'], key=lambda k: k['product_id'])
self.assertEqual(all_vendors_data, expected_answer)
# Lat is missing
expected_answer = json.loads(self.expected_error_missing_lat)
all_vendors_data = json.loads(self.client.get(
'%s?lng=-45.232' % reverse('vendors-list')).content)
all_vendors_data['vendors'] = sorted(
all_vendors_data['vendors'], key=lambda k: k['id'])
expected_answer['vendors'] = sorted(
expected_answer['vendors'], key=lambda k: k['id'])
for vendor in all_vendors_data['vendors']:
for product in vendor['products']:
self.assertTrue('product_id' in product)
for vendor in expected_answer['vendors']:
vendor['products'] = sorted(
vendor['products'], key=lambda k: k['product_id'])
for vendor in all_vendors_data['vendors']:
vendor['products'] = sorted(
vendor['products'], key=lambda k: k['product_id'])
self.assertEqual(all_vendors_data, expected_answer)
# Long is missing
expected_answer = json.loads(self.expected_error_missing_long)
all_vendors_data = json.loads(self.client.get(
'%s?lat=-45.232' % reverse('vendors-list')).content)
all_vendors_data['vendors'] = sorted(
all_vendors_data['vendors'], key=lambda k: k['id'])
expected_answer['vendors'] = sorted(
expected_answer['vendors'], key=lambda k: k['id'])
for vendor in expected_answer['vendors']:
vendor['products'] = sorted(
vendor['products'], key=lambda k: k['product_id'])
for vendor in all_vendors_data['vendors']:
vendor['products'] = sorted(
vendor['products'], key=lambda k: k['product_id'])
self.assertEqual(all_vendors_data, expected_answer)
def test_no_vendors_nearby(self):
"""
Test that, when there are no vendors, we get an empty list back.
"""
all_vendors_data = json.loads(self.client.get(
'%s?lat=44.015225&lng=-123.016873' % reverse('vendors-list')
).content)
expected_answer = json.loads(self.expected_no_vendors)
self.assertEqual(all_vendors_data, expected_answer)
def test_limit_with_location_all_products(self):
"""
Test that the limit parameter limits the number of vendors with the
location parameters. There will also be a default proximity of
20 miles.
"""
all_vendors_data = json.loads(self.client.get(
'%s?lat=44.609079&lng=-124.052538&limit=3' % reverse(
'vendors-list')
).content)
expected_answer = json.loads(self.expected_nearby_limit_3)
self.assertEqual(all_vendors_data, expected_answer)
def test_bad_limit_with_location_all_products(self):
"""
Test that invalid limit parameters return an error.
There will also be a default proximity of 20 miles.
"""
all_vendors_data = json.loads(self.client.get(
'%s?lat=44.609079&lng=-124.052538&limit=cat' % reverse(
'vendors-list')).content)
expected_answer = json.loads(self.expected_nearby_bad_limit)
self.assertEqual(all_vendors_data, expected_answer)
def test_limit_larger_than_length_all_products(self):
"""
Test that a limit larger than the length of the list does not
affect the list.
"""
all_vendors_data = json.loads(self.client.get(
'%s?lat=44.609079&lng=-124.052538&'
'limit=200' % reverse('vendors-list')).content)
expected_answer = json.loads(self.expected_nearby_all_vendors)
self.assertEqual(all_vendors_data, expected_answer)
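    # Sketch: the sort-then-compare steps repeated in the tests above could be
    # factored into a small helper (illustrative only, not part of the
    # original suite):
    @staticmethod
    def _normalized(payload):
        """Return payload with vendors and their products in a stable order."""
        payload['vendors'] = sorted(payload['vendors'], key=lambda k: k['id'])
        for vendor in payload['vendors']:
            vendor['products'] = sorted(
                vendor['products'], key=lambda k: k['product_id'])
        return payload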
| 25.323562
| 78
| 0.526199
| 4,887
| 45,785
| 4.84203
| 0.072846
| 0.021806
| 0.039978
| 0.047247
| 0.881165
| 0.861894
| 0.853484
| 0.83844
| 0.833622
| 0.823522
| 0
| 0.08975
| 0.29573
| 45,785
| 1,807
| 79
| 25.337576
| 0.6441
| 0.052375
| 0
| 0.767098
| 0
| 0
| 0.818887
| 0.085009
| 0
| 0
| 0
| 0
| 0.011091
| 1
| 0.009242
| false
| 0.003697
| 0.004313
| 0
| 0.016636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
25f8ae6b04bf1bc7404a0ea2020e701bd59a3f17
| 1,595
|
py
|
Python
|
hash_a_file.py
|
RichardEllicott/PythonSnippets
|
9c66e1e4e33569525e282587d083109f0109e9f7
|
[
"CC0-1.0"
] | 1
|
2020-04-30T22:42:23.000Z
|
2020-04-30T22:42:23.000Z
|
hash_a_file.py
|
RichardEllicott/PythonSnippets
|
9c66e1e4e33569525e282587d083109f0109e9f7
|
[
"CC0-1.0"
] | null | null | null |
hash_a_file.py
|
RichardEllicott/PythonSnippets
|
9c66e1e4e33569525e282587d083109f0109e9f7
|
[
"CC0-1.0"
] | null | null | null |
from __future__ import absolute_import, division, print_function  # makes code Python 2 and 3 compatible mostly
import hashlib
import logging

log = logging.getLogger(__name__)


def get_file_md5(filename):
    """Feed the file to an MD5 hasher in fixed-size chunks and return the
    hasher object (call .hexdigest() on it for the hex string)."""
    # Binary mode so arbitrary files hash correctly (and bytes are passed to
    # the hasher on Python 3).
    with open(filename, 'rb') as f:
        chunk_size = 1024
        hasher = hashlib.md5()
        while True:
            try:
                data = f.read(chunk_size)
            except IOError as e:
                log.error('error hashing %s: %s' % (filename, e))
                return {'error': '%s' % e}
            if not data:
                break
            hasher.update(data)
        return hasher


def get_file_sha1(filename):
    """Same chunked read as get_file_md5, but with a SHA-1 hasher."""
    with open(filename, 'rb') as f:
        chunk_size = 1024
        hasher = hashlib.sha1()
        while True:
            try:
                data = f.read(chunk_size)
            except IOError as e:
                log.error('error hashing %s: %s' % (filename, e))
                return {'error': '%s' % e}
            if not data:
                break
            hasher.update(data)
        return hasher


def get_file_md5_and_sha1(filename):
    """Single pass over the file updating both hashers; unlike the helpers
    above, this returns the two hex digests directly."""
    with open(filename, 'rb') as f:
        chunk_size = 1024
        hasher = hashlib.md5()
        hasher_sha = hashlib.sha1()
        while True:
            try:
                data = f.read(chunk_size)
            except IOError as e:
                log.error('error hashing %s: %s' % (filename, e))
                return {'error': '%s' % e}
            if not data:
                break
            hasher.update(data)
            hasher_sha.update(data)
        return (hasher.hexdigest(), hasher_sha.hexdigest())
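# A minimal usage sketch (not in the original snippet); 'example.bin' below is
# a hypothetical file name.
if __name__ == '__main__':
    md5_digest, sha1_digest = get_file_md5_and_sha1('example.bin')
    print('md5:  %s' % md5_digest)
    print('sha1: %s' % sha1_digest)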
| 30.09434
| 110
| 0.512853
| 186
| 1,595
| 4.274194
| 0.290323
| 0.067925
| 0.037736
| 0.090566
| 0.78239
| 0.78239
| 0.78239
| 0.78239
| 0.78239
| 0.78239
| 0
| 0.022495
| 0.386834
| 1,595
| 53
| 111
| 30.09434
| 0.790389
| 0.026959
| 0
| 0.822222
| 0
| 0
| 0.069722
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.022222
| null | null | 0.022222
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d30bb2c959cec06c7560bbcf07e29734fba81a81
| 19,055
|
py
|
Python
|
pimdm/tree/upstream_prune.py
|
leoplo/pim_dm
|
e097fb8e247b14f142b6aa97d8ee34440aeba806
|
[
"MIT"
] | 6
|
2020-02-04T20:59:59.000Z
|
2021-11-24T09:56:07.000Z
|
pimdm/tree/upstream_prune.py
|
leoplo/pim_dm
|
e097fb8e247b14f142b6aa97d8ee34440aeba806
|
[
"MIT"
] | 4
|
2020-04-10T14:51:39.000Z
|
2022-02-14T00:59:21.000Z
|
pimdm/tree/upstream_prune.py
|
leoplo/pim_dm
|
e097fb8e247b14f142b6aa97d8ee34440aeba806
|
[
"MIT"
] | 3
|
2020-08-13T17:56:35.000Z
|
2021-11-24T11:03:12.000Z
|
from abc import ABCMeta, abstractmethod
from pimdm.utils import TYPE_CHECKING
if TYPE_CHECKING:
from .tree_if_upstream import TreeInterfaceUpstream
class UpstreamStateABC(metaclass=ABCMeta):
@staticmethod
@abstractmethod
def dataArrivesRPFinterface_OListNull_PLTstoped(interface: "TreeInterfaceUpstream"):
"""
Data arrives on RPF_Interface(S) AND
olist(S, G) == NULL AND
PLT(S, G) not running
@type interface: Upstream
"""
raise NotImplementedError()
@staticmethod
@abstractmethod
def stateRefreshArrivesRPFnbr_pruneIs1(interface: "TreeInterfaceUpstream"):
"""
State Refresh(S,G) received from RPF‘(S) AND
Prune Indicator == 1
@type interface: Upstream
"""
raise NotImplementedError()
@staticmethod
@abstractmethod
def stateRefreshArrivesRPFnbr_pruneIs0_PLTstoped(interface: "TreeInterfaceUpstream"):
"""
State Refresh(S,G) received from RPF‘(S) AND
Prune Indicator == 0 AND
PLT(S, G) not running
@type interface: Upstream
"""
raise NotImplementedError()
@staticmethod
@abstractmethod
def seeJoinToRPFnbr(interface: "TreeInterfaceUpstream"):
"""
See Join(S,G) to RPF’(S)
@type interface: Upstream
"""
raise NotImplementedError()
@staticmethod
@abstractmethod
def seePrune(interface: "TreeInterfaceUpstream"):
"""
See Prune(S,G)
@type interface: Upstream
"""
raise NotImplementedError()
@staticmethod
@abstractmethod
def OTexpires(interface: "TreeInterfaceUpstream"):
"""
OT(S,G) Expires
@type interface: Upstream
"""
raise NotImplementedError()
@staticmethod
@abstractmethod
def olistIsNowNull(interface: "TreeInterfaceUpstream"):
"""
olist(S,G)->NULL
@type interface: Upstream
"""
raise NotImplementedError()
@staticmethod
@abstractmethod
def olistIsNowNotNull(interface: "TreeInterfaceUpstream"):
"""
olist(S,G)->non-NULL
@type interface: Upstream
"""
raise NotImplementedError()
@staticmethod
@abstractmethod
def RPFnbrChanges_olistIsNotNull(interface: "TreeInterfaceUpstream"):
"""
RPF’(S) Changes AND
olist(S,G) != NULL AND
S not directly connected
@type interface: Upstream
"""
raise NotImplementedError()
@staticmethod
@abstractmethod
def RPFnbrChanges_olistIsNull(interface: "TreeInterfaceUpstream"):
"""
RPF’(S) Changes AND
olist(S,G) == NULL
@type interface: Upstream
"""
raise NotImplementedError()
@staticmethod
@abstractmethod
def sourceIsNowDirectConnect(interface: "TreeInterfaceUpstream"):
"""
S becomes directly connected
@type interface: Upstream
"""
raise NotImplementedError()
@staticmethod
@abstractmethod
def GRTexpires(interface: "TreeInterfaceUpstream"):
"""
GRT(S,G) Expires
@type interface: Upstream
"""
raise NotImplementedError()
@staticmethod
@abstractmethod
def recvGraftAckFromRPFnbr(interface: "TreeInterfaceUpstream"):
"""
Receive GraftAck(S,G) from RPF’(S)
@type interface: Upstream
"""
raise NotImplementedError()
class Forward(UpstreamStateABC):
"""
Forwarding (F)
This is the starting state of the Upstream(S,G) state machine.
The state machine is in this state if it just started or if
olist(S,G) != NULL.
"""
@staticmethod
def dataArrivesRPFinterface_OListNull_PLTstoped(interface: "TreeInterfaceUpstream"):
"""
Data arrives on RPF_Interface(S) AND
olist(S, G) == NULL AND
PLT(S, G) not running
@type interface: TreeInterfaceUpstream
"""
if not interface.is_S_directly_conn():
interface.join_prune_logger.debug("dataArrivesRPFinterface_OListNull_PLTstoped, F -> P")
interface.set_state(UpstreamState.Pruned)
interface.send_prune()
interface.set_prune_limit_timer()
@staticmethod
def stateRefreshArrivesRPFnbr_pruneIs1(interface: "TreeInterfaceUpstream"):
"""
State Refresh(S,G) received from RPF‘(S) AND
Prune Indicator == 1
@type interface: TreeInterfaceUpstream
"""
# if OT is not running the router must set OT to t_override seconds
interface.join_prune_logger.debug('stateRefreshArrivesRPFnbr_pruneIs1, F -> F')
if not interface.is_override_timer_running():
interface.set_override_timer()
@staticmethod
def stateRefreshArrivesRPFnbr_pruneIs0_PLTstoped(interface: "TreeInterfaceUpstream"):
"""
State Refresh(S,G) received from RPF‘(S) AND
Prune Indicator == 0 AND
PLT(S, G) not running
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('stateRefreshArrivesRPFnbr_pruneIs0_PLTstoped, F -> F')
@staticmethod
def seeJoinToRPFnbr(interface: "TreeInterfaceUpstream"):
"""
See Join(S,G) to RPF’(S)
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('seeJoinToRPFnbr, F -> F')
interface.clear_override_timer()
@staticmethod
def seePrune(interface: "TreeInterfaceUpstream"):
"""
See Prune(S,G)
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('seePrune, F -> F')
if not interface.is_S_directly_conn() and not interface.is_override_timer_running():
interface.set_override_timer()
@staticmethod
def OTexpires(interface: "TreeInterfaceUpstream"):
"""
OT(S,G) Expires
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('OTexpires, F -> F')
if not interface.is_S_directly_conn():
interface.send_join()
@staticmethod
def olistIsNowNull(interface: "TreeInterfaceUpstream"):
"""
olist(S,G)->NULL
@type interface: TreeInterfaceUpstream
"""
if not interface.is_S_directly_conn():
interface.join_prune_logger.debug("olistIsNowNull, F -> P")
interface.set_state(UpstreamState.Pruned)
interface.send_prune()
interface.set_prune_limit_timer()
@staticmethod
def olistIsNowNotNull(interface: "TreeInterfaceUpstream"):
"""
olist(S,G)->non-NULL
@type interface: TreeInterfaceUpstream
"""
#assert False, "olistIsNowNotNull (in state F)"
return
@staticmethod
def RPFnbrChanges_olistIsNotNull(interface: "TreeInterfaceUpstream"):
"""
RPF’(S) Changes AND
olist(S,G) != NULL AND
S not directly connected
@type interface: TreeInterfaceUpstream
"""
if not interface.is_S_directly_conn():
interface.join_prune_logger.debug('RPFnbrChanges_olistIsNotNull, F -> AP')
interface.set_state(UpstreamState.AckPending)
interface.send_graft()
interface.set_graft_retry_timer()
@staticmethod
def RPFnbrChanges_olistIsNull(interface: "TreeInterfaceUpstream"):
"""
RPF’(S) Changes AND
olist(S,G) == NULL
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('RPFnbrChanges_olistIsNull, F -> P')
interface.set_state(UpstreamState.Pruned)
@staticmethod
def sourceIsNowDirectConnect(interface: "TreeInterfaceUpstream"):
"""
S becomes directly connected
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug("sourceIsNowDirectConnect, F -> F")
@staticmethod
def GRTexpires(interface: "TreeInterfaceUpstream"):
"""
GRT(S,G) Expires
@type interface: TreeInterfaceUpstream
"""
#assert False, "GRTexpires (in state F)"
return
@staticmethod
def recvGraftAckFromRPFnbr(interface: "TreeInterfaceUpstream"):
"""
Receive GraftAck(S,G) from RPF’(S)
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug("recvGraftAckFromRPFnbr, F -> F")
def __str__(self):
return "Forwarding"
class Pruned(UpstreamStateABC):
'''
Pruned (P)
The set, olist(S,G), is empty.
The router will not forward data from S addressed to group G.
'''
@staticmethod
def dataArrivesRPFinterface_OListNull_PLTstoped(interface: "TreeInterfaceUpstream"):
"""
Data arrives on RPF_Interface(S) AND
olist(S, G) == NULL AND
PLT(S, G) not running
@type interface: TreeInterfaceUpstream
"""
if not interface.is_S_directly_conn():
interface.join_prune_logger.debug("dataArrivesRPFinterface_OListNull_PLTstoped, P -> P")
interface.set_prune_limit_timer()
interface.send_prune()
@staticmethod
def stateRefreshArrivesRPFnbr_pruneIs1(interface: "TreeInterfaceUpstream"):
"""
State Refresh(S,G) received from RPF‘(S) AND
Prune Indicator == 1
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('stateRefreshArrivesRPFnbr_pruneIs1, P -> P')
interface.set_prune_limit_timer()
@staticmethod
def stateRefreshArrivesRPFnbr_pruneIs0_PLTstoped(interface: "TreeInterfaceUpstream"):
"""
State Refresh(S,G) received from RPF‘(S) AND
Prune Indicator == 0 AND
PLT(S, G) not running
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('stateRefreshArrivesRPFnbr_pruneIs0_PLTstoped, P -> P')
interface.send_prune()
interface.set_prune_limit_timer()
@staticmethod
def seeJoinToRPFnbr(interface: "TreeInterfaceUpstream"):
"""
See Join(S,G) to RPF’(S)
@type interface: TreeInterfaceUpstream
"""
# Do nothing
interface.join_prune_logger.debug('seeJoinToRPFnbr, P -> P')
@staticmethod
def seePrune(interface: "TreeInterfaceUpstream"):
"""
See Prune(S,G)
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('seePrune, P -> P')
if interface.get_received_prune_holdtime() > interface.remaining_prune_limit_timer():
interface.set_prune_limit_timer(time=interface.get_received_prune_holdtime())
@staticmethod
def OTexpires(interface: "TreeInterfaceUpstream"):
"""
OT(S,G) Expires
@type interface: TreeInterfaceUpstream
"""
#assert False, "OTexpires in state Pruned"
return
@staticmethod
def olistIsNowNull(interface: "TreeInterfaceUpstream"):
"""
olist(S,G)->NULL
@type interface: TreeInterfaceUpstream
"""
#assert False, "olistIsNowNull in state Pruned"
return
@staticmethod
def olistIsNowNotNull(interface: "TreeInterfaceUpstream"):
"""
olist(S,G)->non-NULL
@type interface: TreeInterfaceUpstream
"""
if not interface.is_S_directly_conn():
interface.join_prune_logger.debug('olistIsNowNotNull, P -> AP')
interface.clear_prune_limit_timer()
interface.set_state(UpstreamState.AckPending)
interface.send_graft()
interface.set_graft_retry_timer()
@staticmethod
def RPFnbrChanges_olistIsNotNull(interface: "TreeInterfaceUpstream"):
"""
RPF’(S) Changes AND
olist(S,G) != NULL AND
S not directly connected
@type interface: TreeInterfaceUpstream
"""
if not interface.is_S_directly_conn():
interface.join_prune_logger.debug('RPFnbrChanges_olistIsNotNull, P -> AP')
interface.clear_prune_limit_timer()
interface.set_state(UpstreamState.AckPending)
interface.send_graft()
interface.set_graft_retry_timer()
@staticmethod
def RPFnbrChanges_olistIsNull(interface: "TreeInterfaceUpstream"):
"""
RPF’(S) Changes AND
olist(S,G) == NULL
@type interface: TreeInterfaceUpstream
"""
if not interface.is_S_directly_conn():
interface.join_prune_logger.debug('RPFnbrChanges_olistIsNull, P -> P')
interface.clear_prune_limit_timer()
@staticmethod
def sourceIsNowDirectConnect(interface: "TreeInterfaceUpstream"):
"""
S becomes directly connected
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('sourceIsNowDirectConnect, P -> P')
@staticmethod
def GRTexpires(interface: "TreeInterfaceUpstream"):
"""
GRT(S,G) Expires
@type interface: TreeInterfaceUpstream
"""
#assert False, "GRTexpires in state Pruned"
return
@staticmethod
def recvGraftAckFromRPFnbr(interface: "TreeInterfaceUpstream"):
"""
Receive GraftAck(S,G) from RPF’(S)
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('recvGraftAckFromRPFnbr, P -> P')
def __str__(self):
return "Pruned"
class AckPending(UpstreamStateABC):
"""
AckPending (AP)
The router was in the Pruned(P) state, but a transition has
occurred in the Downstream(S,G) state machine for one of this
(S,G) entry’s outgoing interfaces, indicating that traffic from S
addressed to G should again be forwarded. A Graft message has
been sent to RPF’(S), but a Graft Ack message has not yet been
received.
"""
@staticmethod
def dataArrivesRPFinterface_OListNull_PLTstoped(interface: "TreeInterfaceUpstream"):
"""
Data arrives on RPF_Interface(S) AND
olist(S, G) == NULL AND
PLT(S, G) not running
@type interface: TreeInterfaceUpstream
"""
#assert False, "dataArrivesRPFinterface_OListNull_PLTstoped in state AP"
return
@staticmethod
def stateRefreshArrivesRPFnbr_pruneIs1(interface: "TreeInterfaceUpstream"):
"""
State Refresh(S,G) received from RPF‘(S) AND
Prune Indicator == 1
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('stateRefreshArrivesRPFnbr_pruneIs1, AP -> AP')
if not interface.is_override_timer_running():
interface.set_override_timer()
@staticmethod
def stateRefreshArrivesRPFnbr_pruneIs0_PLTstoped(interface: "TreeInterfaceUpstream"):
"""
State Refresh(S,G) received from RPF‘(S) AND
Prune Indicator == 0 AND
PLT(S, G) not running
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('stateRefreshArrivesRPFnbr_pruneIs0_PLTstoped, AP -> F')
interface.clear_graft_retry_timer()
interface.set_state(UpstreamState.Forward)
@staticmethod
def seeJoinToRPFnbr(interface: "TreeInterfaceUpstream"):
"""
See Join(S,G) to RPF’(S)
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('seeJoinToRPFnbr, AP -> AP')
interface.clear_override_timer()
@staticmethod
def seePrune(interface: "TreeInterfaceUpstream"):
"""
See Prune(S,G)
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('seePrune, AP -> AP')
if not interface.is_override_timer_running():
interface.set_override_timer()
@staticmethod
def OTexpires(interface: "TreeInterfaceUpstream"):
"""
OT(S,G) Expires
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('OTexpires, AP -> AP')
interface.send_join()
@staticmethod
def olistIsNowNull(interface: "TreeInterfaceUpstream"):
"""
olist(S,G)->NULL
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('olistIsNowNull, AP -> P')
interface.set_state(UpstreamState.Pruned)
interface.send_prune()
interface.clear_graft_retry_timer()
interface.set_prune_limit_timer()
@staticmethod
def olistIsNowNotNull(interface: "TreeInterfaceUpstream"):
"""
olist(S,G)->non-NULL
@type interface: TreeInterfaceUpstream
"""
#assert False, "olistIsNowNotNull in state AP"
return
@staticmethod
def RPFnbrChanges_olistIsNotNull(interface: "TreeInterfaceUpstream"):
"""
RPF’(S) Changes AND
olist(S,G) != NULL AND
S not directly connected
@type interface: TreeInterfaceUpstream
"""
if not interface.is_S_directly_conn():
interface.join_prune_logger.debug('RPFnbrChanges_olistIsNotNull, AP -> AP')
interface.send_graft()
interface.set_graft_retry_timer()
@staticmethod
def RPFnbrChanges_olistIsNull(interface: "TreeInterfaceUpstream"):
"""
RPF’(S) Changes AND
olist(S,G) == NULL
@type interface: TreeInterfaceUpstream
"""
if not interface.is_S_directly_conn():
interface.join_prune_logger.debug('RPFnbrChanges_olistIsNull, AP -> P')
interface.clear_graft_retry_timer()
interface.set_state(UpstreamState.Pruned)
@staticmethod
def sourceIsNowDirectConnect(interface: "TreeInterfaceUpstream"):
"""
S becomes directly connected
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('sourceIsNowDirectConnect, AP -> F')
interface.set_state(UpstreamState.Forward)
interface.clear_graft_retry_timer()
@staticmethod
def GRTexpires(interface: "TreeInterfaceUpstream"):
"""
GRT(S,G) Expires
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('GRTexpires, AP -> AP')
interface.set_graft_retry_timer()
interface.send_graft()
@staticmethod
def recvGraftAckFromRPFnbr(interface: "TreeInterfaceUpstream"):
"""
Receive GraftAck(S,G) from RPF’(S)
@type interface: TreeInterfaceUpstream
"""
interface.join_prune_logger.debug('recvGraftAckFromRPFnbr, AP -> F')
interface.clear_graft_retry_timer()
interface.set_state(UpstreamState.Forward)
def __str__(self):
return "AckPending"
class UpstreamState():
Forward = Forward()
Pruned = Pruned()
AckPending = AckPending()
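# A minimal sketch (not part of the original module) exercising one transition
# with a mocked interface: in state Forward, olist(S,G)->NULL with S not
# directly connected should move the machine to Pruned and send a Prune.
if __name__ == '__main__':
    from unittest import mock
    iface = mock.MagicMock()
    iface.is_S_directly_conn.return_value = False
    UpstreamState.Forward.olistIsNowNull(iface)
    iface.set_state.assert_called_once_with(UpstreamState.Pruned)
    iface.send_prune.assert_called_once()
    iface.set_prune_limit_timer.assert_called_once()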
| 29.136086
| 100
| 0.640619
| 1,761
| 19,055
| 6.771153
| 0.08234
| 0.22895
| 0.111204
| 0.064408
| 0.905065
| 0.887538
| 0.872442
| 0.858101
| 0.845606
| 0.828246
| 0
| 0.001573
| 0.265967
| 19,055
| 653
| 101
| 29.180704
| 0.850933
| 0.263028
| 0
| 0.800797
| 0
| 0
| 0.178273
| 0.143165
| 0
| 0
| 0
| 0
| 0
| 1
| 0.219124
| false
| 0
| 0.011952
| 0.011952
| 0.302789
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d375c4a19820f879932d9869e4bf2a34ac3e86fe
| 44,869
|
py
|
Python
|
tests/simulations/plant_loop/test_plant_loop_mixed_water_loop.py
|
john-grando/pyExpandObjects
|
c08b1d1bc45684bc71c0f49b4d2f22c707cd4aa4
|
[
"BSD-3-Clause"
] | null | null | null |
tests/simulations/plant_loop/test_plant_loop_mixed_water_loop.py
|
john-grando/pyExpandObjects
|
c08b1d1bc45684bc71c0f49b4d2f22c707cd4aa4
|
[
"BSD-3-Clause"
] | 1
|
2021-02-03T01:56:56.000Z
|
2021-02-03T01:56:56.000Z
|
tests/simulations/plant_loop/test_plant_loop_mixed_water_loop.py
|
john-grando/pyExpandObjects
|
c08b1d1bc45684bc71c0f49b4d2f22c707cd4aa4
|
[
"BSD-3-Clause"
] | 1
|
2022-01-11T18:31:05.000Z
|
2022-01-11T18:31:05.000Z
|
from pathlib import Path
from tests.simulations import BaseSimulationTest
from src.epjson_handler import EPJSON
test_dir = Path(__file__).parent.parent.parent
schedule_objects = {
"Schedule:Compact": {
"Always21": {
"data": [
{
"field": "Through: 12/31"
},
{
"field": "For: AllDays"
},
{
"field": "Until: 24:00"
},
{
"field": 21
}
],
"schedule_type_limits_name": "Any Number"
},
"Always33": {
"data": [
{
"field": "Through: 12/31"
},
{
"field": "For: AllDays"
},
{
"field": "Until: 24:00"
},
{
"field": 33
}
],
"schedule_type_limits_name": "Any Number"
}
}
}
class TestSimulationsPlantLoopMixedWaterLoop(BaseSimulationTest):
def setUp(self):
self.ej = EPJSON()
base_idf_file_path = test_dir.joinpath('..', 'simulation', 'ExampleFiles',
'HVACTemplate-5ZoneWaterToAirHeatPumpTowerBoiler.idf')
base_copy_file_path = self._copy_to_test_directory(base_idf_file_path)
# read in base file, then edit inputs for alternate tests
self.base_epjson = self.get_epjson_object_from_idf_file(base_copy_file_path)
return
def tearDown(self):
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantEquipment:MixedWaterLoop:test_minimum_inputs")
def test_minimum_inputs(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop'].pop('Only Water Loop')
self.ej.merge_epjson(
super_dictionary=self.base_epjson,
object_dictionary={
'HVACTemplate:Plant:MixedWaterLoop': {
'Only Water Loop': {}
}
}
)
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:pump_schedule_name")
def test_pump_schedule_name(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop']['pump_schedule_name'] = 'OCCUPY-1'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'OCCUPY-1',
epjson_output['Pump:ConstantSpeed']['Only Water Loop Supply Pump']['pump_flow_rate_schedule_name'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:pump_control_type_intermittent")
def test_pump_control_type_intermittent(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop']['pump_control_type'] = 'Intermittent'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'Intermittent',
epjson_output['Pump:ConstantSpeed']['Only Water Loop Supply Pump']['pump_control_type'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:pump_control_type_continuous")
def test_pump_control_type_continuous(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop']['pump_control_type'] = 'Continuous'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'Continuous',
epjson_output['Pump:ConstantSpeed']['Only Water Loop Supply Pump']['pump_control_type'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"hot_water_plant_operation_scheme_type")
def test_operation_scheme_type(self):
# todo_eo: legacy fails with message: PlantEquipmentOperationSchemes = "HOT WATER LOOP OPERATION CUSTOM",
# could not find PlantEquipmentOperation:HeatingLoad = "HOT WATER LOOP OPERATION ALL HOURS".
self.ej.merge_epjson(
super_dictionary=self.base_epjson,
object_dictionary={
"Schedule:Compact": {
"HVACTemplate-Always1": {
"data": [
{
"field": "Through 12/31"
},
{
"field": "For AllDays"
},
{
"field": "Until 24:00"
},
{
"field": 1.0
}
],
"schedule_type_limits_name": "Any Number"
}
},
"PlantEquipmentOperationSchemes": {
"Only Water Loop Operation Custom": {
"control_scheme_1_name": "Only Water Loop Heat Operation All Hours",
"control_scheme_1_object_type": "PlantEquipmentOperation:HeatingLoad",
"control_scheme_1_schedule_name": "HVACTemplate-Always1",
"control_scheme_2_name": "Only Water Loop Cool Operation All Hours",
"control_scheme_2_object_type": "PlantEquipmentOperation:CoolingLoad",
"control_scheme_2_schedule_name": "HVACTemplate-Always1"
}
},
"PlantEquipmentOperation:CoolingLoad": {
"Only Water Loop Cool Operation All Hours": {
"load_range_1_lower_limit": 0,
"load_range_1_upper_limit": 1000000000000000,
"range_1_equipment_list_name": "Only Water Loop Cooling All Equipment"
}
},
"PlantEquipmentOperation:HeatingLoad": {
"Only Water Loop Heat Operation All Hours": {
"load_range_1_lower_limit": 0,
"load_range_1_upper_limit": 1000000000000000,
"range_1_equipment_list_name": "Only Water Loop Heating All Equipment"
}
}
}
)
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'operation_scheme_type'] = 'UserDefined'
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'equipment_operation_schemes_name'] = 'Only Water Loop Operation Custom'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(epjson_output['PlantEquipmentOperationSchemes'].get('Only Water Loop Operation Custom'))
return
def test_setpoint_schedule_name(self):
self.ej.merge_epjson(
super_dictionary=self.base_epjson,
object_dictionary=schedule_objects)
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'high_temperature_setpoint_schedule_name'] = 'Always33'
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'low_temperature_setpoint_schedule_name'] = 'Always21'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'Always21',
epjson_output['SetpointManager:Scheduled:DualSetpoint']['Only Water Loop Temp Manager'][
'low_setpoint_schedule_name'])
self.assertEqual(
'Always33',
epjson_output['SetpointManager:Scheduled:DualSetpoint']['Only Water Loop Temp Manager'][
'high_setpoint_schedule_name'])
return
def test_design_setpoint(self):
self.ej.merge_epjson(
super_dictionary=self.base_epjson,
object_dictionary=schedule_objects)
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'high_temperature_design_setpoint'] = 33
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'low_temperature_design_setpoint'] = 21
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'HVACTemplate-Always21.0',
epjson_output['SetpointManager:Scheduled:DualSetpoint']['Only Water Loop Temp Manager'][
'low_setpoint_schedule_name'])
self.assertEqual(
'HVACTemplate-Always33.0',
epjson_output['SetpointManager:Scheduled:DualSetpoint']['Only Water Loop Temp Manager'][
'high_setpoint_schedule_name'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_configuration_variable_flow")
def test_water_pump_configuration_variable_flow(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_configuration'] = 'VariableFlow'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(epjson_output['Pump:VariableSpeed'].get('Only Water Loop Supply Pump'))
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_configuration_constant_flow")
def test_water_pump_configuration_constant_flow(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_configuration'] = 'ConstantFlow'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(epjson_output['Pump:ConstantSpeed'].get('Only Water Loop Supply Pump'))
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_rated_head")
def test_water_pump_rated_head(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_rated_head'] = 19000
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
19000,
epjson_output['Pump:ConstantSpeed']['Only Water Loop Supply Pump']['design_pump_head'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_type_single_pump")
def test_water_pump_type_single_pump(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_type'] = 'SinglePump'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(
epjson_output['Pump:ConstantSpeed'].get('Only Water Loop Supply Pump'))
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_type_single_pump_variable")
def test_water_pump_type_single_pump_variable(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_configuration'] = 'VariableFlow'
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_type'] = 'SinglePump'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(
epjson_output['Pump:VariableSpeed'].get('Only Water Loop Supply Pump'))
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_type_pump_per_tower_or_boiler")
def test_water_pump_type_pump_per_tower_or_boiler(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_configuration'] = 'ConstantFlow'
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_type'] = 'PumpPerTowerOrBoiler'
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_rated_head'] = 19000
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'Main Boiler MW Branch Pump',
epjson_output['Branch']['Main Boiler MW Branch']['components'][0]['component_name'])
self.assertEqual(
'Main Boiler',
epjson_output['Branch']['Main Boiler MW Branch']['components'][1]['component_name'])
self.assertEqual(
'Main Tower Branch Pump',
epjson_output['Branch']['Main Tower Branch']['components'][0]['component_name'])
self.assertEqual(
'Main Tower',
epjson_output['Branch']['Main Tower Branch']['components'][1]['component_name'])
self.assertEqual(
19000,
epjson_output['Pump:ConstantSpeed']['Main Tower Branch Pump']['design_pump_head'])
self.assertEqual(
19000,
epjson_output['Pump:ConstantSpeed']['Main Boiler MW Branch Pump']['design_pump_head'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_type_pump_per_tower_or_boiler_variable")
def test_water_pump_type_pump_per_tower_or_boiler_variable(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_configuration'] = 'VariableFlow'
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_type'] = 'PumpPerTowerOrBoiler'
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_rated_head'] = 19000
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'Main Boiler MW Branch Pump',
epjson_output['Branch']['Main Boiler MW Branch']['components'][0]['component_name'])
self.assertEqual(
'Main Boiler',
epjson_output['Branch']['Main Boiler MW Branch']['components'][1]['component_name'])
self.assertEqual(
'Main Tower Branch Pump',
epjson_output['Branch']['Main Tower Branch']['components'][0]['component_name'])
self.assertEqual(
'Main Tower',
epjson_output['Branch']['Main Tower Branch']['components'][1]['component_name'])
self.assertEqual(
19000,
epjson_output['Pump:ConstantSpeed']['Main Tower Branch Pump']['design_pump_head'])
self.assertEqual(
19000,
epjson_output['Pump:VariableSpeed']['Main Boiler MW Branch Pump']['design_pump_head'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_type_two_headered_pumps")
def test_water_pump_type_two_headered_pumps(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_type'] = 'TwoHeaderedPumps'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(
epjson_output['HeaderedPumps:ConstantSpeed'].get('Only Water Loop Supply Pump'))
self.assertEqual(
2,
epjson_output['HeaderedPumps:ConstantSpeed']['Only Water Loop Supply Pump']['number_of_pumps_in_bank'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_type_two_headered_pumps_variable")
def test_water_pump_type_two_headered_pumps_variable(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_configuration'] = 'VariableFlow'
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_type'] = 'TwoHeaderedPumps'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(
epjson_output['HeaderedPumps:VariableSpeed'].get('Only Water Loop Supply Pump'))
self.assertEqual(
2,
epjson_output['HeaderedPumps:VariableSpeed']['Only Water Loop Supply Pump']['number_of_pumps_in_bank'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_type_three_headered_pumps")
def test_water_pump_type_three_headered_pumps(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_type'] = 'ThreeHeaderedPumps'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(
epjson_output['HeaderedPumps:ConstantSpeed'].get('Only Water Loop Supply Pump'))
self.assertEqual(
3,
epjson_output['HeaderedPumps:ConstantSpeed']['Only Water Loop Supply Pump']['number_of_pumps_in_bank'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_type_three_headered_pumps_variable")
def test_water_pump_type_three_headered_pumps_variable(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_configuration'] = 'VariableFlow'
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_type'] = 'ThreeHeaderedPumps'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(
epjson_output['HeaderedPumps:VariableSpeed'].get('Only Water Loop Supply Pump'))
self.assertEqual(
3,
epjson_output['HeaderedPumps:VariableSpeed']['Only Water Loop Supply Pump']['number_of_pumps_in_bank'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_type_four_headered_pumps")
def test_water_pump_type_four_headered_pumps(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_type'] = 'FourHeaderedPumps'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(
epjson_output['HeaderedPumps:ConstantSpeed'].get('Only Water Loop Supply Pump'))
self.assertEqual(
4,
epjson_output['HeaderedPumps:ConstantSpeed']['Only Water Loop Supply Pump']['number_of_pumps_in_bank'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_type_four_headered_pumps_variable")
def test_water_pump_type_four_headered_pumps_variable(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_configuration'] = 'VariableFlow'
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_type'] = 'FourHeaderedPumps'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(
epjson_output['HeaderedPumps:VariableSpeed'].get('Only Water Loop Supply Pump'))
self.assertEqual(
4,
epjson_output['HeaderedPumps:VariableSpeed']['Only Water Loop Supply Pump']['number_of_pumps_in_bank'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_type_five_headered_pumps")
def test_water_pump_type_five_headered_pumps(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_type'] = 'FiveHeaderedPumps'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(
epjson_output['HeaderedPumps:ConstantSpeed'].get('Only Water Loop Supply Pump'))
self.assertEqual(
5,
epjson_output['HeaderedPumps:ConstantSpeed']['Only Water Loop Supply Pump']['number_of_pumps_in_bank'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"water_pump_type_five_headered_pumps_variable")
def test_water_pump_type_five_headered_pumps_variable(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_configuration'] = 'VariableFlow'
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'water_pump_type'] = 'FiveHeaderedPumps'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(
epjson_output['HeaderedPumps:VariableSpeed'].get('Only Water Loop Supply Pump'))
self.assertEqual(
5,
epjson_output['HeaderedPumps:VariableSpeed']['Only Water Loop Supply Pump']['number_of_pumps_in_bank'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"supply_side_bypass_pipe_yes")
def test_supply_side_bypass_pipe_yes(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'supply_side_bypass_pipe'] = 'Yes'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(
epjson_output['Pipe:Adiabatic'].get('Only Water Loop Supply Bypass Pipe'))
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"supply_side_bypass_pipe_no")
def test_supply_side_bypass_pipe_no(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'supply_side_bypass_pipe'] = 'No'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNone(
epjson_output['Pipe:Adiabatic'].get('Only Water Loop Supply Bypass Pipe'))
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"demand_side_bypass_pipe_yes")
def test_demand_side_bypass_pipe_yes(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'demand_side_bypass_pipe'] = 'Yes'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNotNone(
epjson_output['Pipe:Adiabatic'].get('Only Water Loop Demand Bypass Pipe'))
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"demand_side_bypass_pipe_no")
def test_demand_side_bypass_pipe_no(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'demand_side_bypass_pipe'] = 'No'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNone(
epjson_output['Pipe:Adiabatic'].get('Only Water Loop Demand Bypass Pipe'))
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"supply_side_bypass_pipe_no_demand_side_bypass_pipe_no")
def test_supply_side_bypass_pipe_no_demand_side_bypass_pipe_no(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'supply_side_bypass_pipe'] = 'No'
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'demand_side_bypass_pipe'] = 'No'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNone(
epjson_output['Pipe:Adiabatic'].get('Only Water Loop Supply Bypass Pipe'))
self.assertIsNone(
epjson_output['Pipe:Adiabatic'].get('Only Water Loop Demand Bypass Pipe'))
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:fluid_type_water")
def test_fluid_type_water(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'fluid_type'] = 'Water'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertIsNone(
epjson_output['PlantLoop']['Only Water Loop PlantLoop'].get('fluid_type'))
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:fluid_type_ethylene_glycol_30")
def test_fluid_type_ethylene_glycol_30(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'fluid_type'] = 'EthyleneGlycol30'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'UserDefinedFluidType',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['fluid_type'])
self.assertEqual(
0.3,
epjson_output['FluidProperties:GlycolConcentration']['Only Water Loop Fluid']['glycol_concentration'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:fluid_type_ethylene_glycol_40")
def test_fluid_type_ethylene_glycol_40(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'fluid_type'] = 'EthyleneGlycol40'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'UserDefinedFluidType',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['fluid_type'])
self.assertEqual(
0.4,
epjson_output['FluidProperties:GlycolConcentration']['Only Water Loop Fluid']['glycol_concentration'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:fluid_type_ethylene_glycol_50")
def test_fluid_type_ethylene_glycol_50(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'fluid_type'] = 'EthyleneGlycol50'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'UserDefinedFluidType',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['fluid_type'])
self.assertEqual(
0.5,
epjson_output['FluidProperties:GlycolConcentration']['Only Water Loop Fluid']['glycol_concentration'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:fluid_type_ethylene_glycol_60")
def test_fluid_type_ethylene_glycol_60(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'fluid_type'] = 'EthyleneGlycol60'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'UserDefinedFluidType',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['fluid_type'])
self.assertEqual(
0.6,
epjson_output['FluidProperties:GlycolConcentration']['Only Water Loop Fluid']['glycol_concentration'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:fluid_type_propylene_glycol_30")
def test_fluid_type_propylene_glycol_30(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'fluid_type'] = 'PropyleneGlycol30'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'UserDefinedFluidType',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['fluid_type'])
self.assertEqual(
0.3,
epjson_output['FluidProperties:GlycolConcentration']['Only Water Loop Fluid']['glycol_concentration'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:fluid_type_propylene_glycol_40")
def test_fluid_type_propylene_glycol_40(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'fluid_type'] = 'PropyleneGlycol40'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'UserDefinedFluidType',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['fluid_type'])
self.assertEqual(
0.4,
epjson_output['FluidProperties:GlycolConcentration']['Only Water Loop Fluid']['glycol_concentration'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:fluid_type_propylene_glycol_50")
def test_fluid_type_propylene_glycol_50(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'fluid_type'] = 'PropyleneGlycol50'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'UserDefinedFluidType',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['fluid_type'])
self.assertEqual(
0.5,
epjson_output['FluidProperties:GlycolConcentration']['Only Water Loop Fluid']['glycol_concentration'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:fluid_type_propylene_glycol_60")
def test_fluid_type_propylene_glycol_60(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'fluid_type'] = 'PropyleneGlycol60'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'UserDefinedFluidType',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['fluid_type'])
self.assertEqual(
0.6,
epjson_output['FluidProperties:GlycolConcentration']['Only Water Loop Fluid']['glycol_concentration'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:loop_design_delta_temperature")
def test_loop_design_delta_temperature(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'loop_design_delta_temperature'] = 5.4
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
5.4,
epjson_output['Sizing:Plant']['Only Water Loop Sizing Plant']['loop_design_temperature_difference'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"load_distribution_scheme_none")
def test_load_distribution_scheme_none(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'].pop('load_distribution_scheme', None)
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'SequentialLoad',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['load_distribution_scheme'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"load_distribution_scheme_sequential_load")
def test_load_distribution_scheme_sequential_load(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'load_distribution_scheme'] = 'SequentialLoad'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'SequentialLoad',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['load_distribution_scheme'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"load_distribution_scheme_optimal")
def test_load_distribution_scheme_optimal(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'load_distribution_scheme'] = 'Optimal'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'Optimal',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['load_distribution_scheme'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"load_distribution_scheme_uniform_load")
def test_load_distribution_scheme_uniform_load(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'load_distribution_scheme'] = 'UniformLoad'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'UniformLoad',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['load_distribution_scheme'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"load_distribution_scheme_uniform_plr")
def test_load_distribution_scheme_uniform_plr(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'load_distribution_scheme'] = 'UniformPLR'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'UniformPLR',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['load_distribution_scheme'])
return
@BaseSimulationTest._test_logger(doc_text="Simulation:PlantLoop:MixedWaterLoop:"
"load_distribution_scheme_sequential_uniform_plr")
def test_load_distribution_scheme_sequential_uniform_plr(self):
self.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
'load_distribution_scheme'] = 'SequentialUniformPLR'
base_file_path = self.create_idf_file_from_epjson(epjson=self.base_epjson, file_name='base_pre_input.epJSON')
self.perform_full_comparison(base_idf_file_path=base_file_path)
epjson_output = self.ej._get_json_file(test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
self.assertEqual(
'SequentialUniformPLR',
epjson_output['PlantLoop']['Only Water Loop PlantLoop']['load_distribution_scheme'])
return
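# A possible consolidation sketch (not in the original file): the
# load-distribution tests above all follow the same set-field / rebuild /
# assert pattern and could be driven from one helper; only the field and
# scheme names are taken from the tests, the rest is an assumption.
def _run_load_distribution_case(test, scheme):
    test.base_epjson['HVACTemplate:Plant:MixedWaterLoop']['Only Water Loop'][
        'load_distribution_scheme'] = scheme
    base_file_path = test.create_idf_file_from_epjson(
        epjson=test.base_epjson, file_name='base_pre_input.epJSON')
    test.perform_full_comparison(base_idf_file_path=base_file_path)
    epjson_output = test.ej._get_json_file(
        test_dir.joinpath('..', 'simulation', 'test', 'test_input_epjson.epJSON'))
    test.assertEqual(
        scheme,
        epjson_output['PlantLoop']['Only Water Loop PlantLoop']['load_distribution_scheme'])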
| 59.825333
| 121
| 0.677194
| 4,993
| 44,869
| 5.686361
| 0.044462
| 0.037475
| 0.056777
| 0.051282
| 0.921457
| 0.913708
| 0.896731
| 0.883735
| 0.875387
| 0.869611
| 0
| 0.006487
| 0.216698
| 44,869
| 749
| 122
| 59.905207
| 0.801343
| 0.005594
| 0
| 0.701578
| 0
| 0
| 0.353476
| 0.2111
| 0
| 0
| 0
| 0.001335
| 0.101865
| 1
| 0.064562
| false
| 0.031564
| 0.004304
| 0.001435
| 0.134864
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d38150f04fe6399173966fb19aba74a1ce62616f
| 3,473
|
py
|
Python
|
Storage/q_pack/q_datafeeds/bt_datafeed_postgres.py
|
Alba-Intelligence/Microservices-Based-Algorithmic-Trading-System
|
78a2b8d485a799fe5759f024b202355e94aeeb10
|
[
"BSD-3-Clause"
] | 207
|
2020-01-09T14:07:47.000Z
|
2022-03-24T00:04:37.000Z
|
Storage/q_pack/q_datafeeds/bt_datafeed_postgres.py
|
snorics/Microservices-Based-Algorithmic-Trading-System
|
454b6fb679c1b59a2f7e3aac7b2167901c9b4f2d
|
[
"BSD-3-Clause"
] | 9
|
2020-02-11T14:10:27.000Z
|
2021-11-10T11:42:04.000Z
|
Storage/q_pack/q_datafeeds/bt_datafeed_postgres.py
|
snorics/Microservices-Based-Algorithmic-Trading-System
|
454b6fb679c1b59a2f7e3aac7b2167901c9b4f2d
|
[
"BSD-3-Clause"
] | 92
|
2020-01-10T01:23:37.000Z
|
2022-03-21T19:23:10.000Z
|
# from __future__ import (absolute_import, division, print_function,
# unicode_literals)
import datetime
from backtrader.feed import DataBase
from backtrader import date2num
from sqlalchemy import create_engine
class PostgreSQL_Daily(DataBase):
params = (
('dbHost', None),
('dbUser', None),
('dbPWD', None),
('dbName', None),
('ticker', 'EUR_USD'),
('fromdate', datetime.datetime.min),
('todate', datetime.datetime.max),
('name', ''),
)
def __init__(self):
self.engine = create_engine('postgresql+psycopg2://'+self.p.dbUser+':'+ self.p.dbPWD +'@'+ self.p.dbHost +'/'+ self.p.dbName)
# self.engine = psycopg2.connect(host=self.p.dbHost, database=self.p.dbName, user=self.p.dbUser, password=self.p.dbPWD)
def start(self):
self.conn = self.engine.connect()
sql = "select a.date_price date, a.close_price as close, a.high_price high, a.open_price open, a.low_price low from daily_data a inner join symbol b on a.stock_id = b.id where b.ticker='"+ self.p.ticker + "' and a.date_price between '"+self.p.fromdate.strftime("%Y-%m-%d")+"' and '"+self.p.todate.strftime("%Y-%m-%d")+"' order by date ASC"
self.result = self.conn.execute(sql)
def stop(self):
#self.conn.close()
self.engine.dispose()
def _load(self):
one_row = self.result.fetchone()
if one_row is None:
return False
        self.lines.datetime[0] = date2num(one_row[0])
        # The query selects columns in the order: date, close, high, open, low
        self.lines.close[0] = float(one_row[1])
        self.lines.high[0] = float(one_row[2])
        self.lines.open[0] = float(one_row[3])
        self.lines.low[0] = float(one_row[4])
# self.lines.volume[0] = int(one_row[5])
self.lines.openinterest[0] = -1
return True
class PostgreSQL_Minute(DataBase):
params = (
('dbHost', None),
('dbUser', None),
('dbPWD', None),
('dbName', None),
('ticker', 'EUR_USD'),
('fromdate', datetime.datetime.min),
('todate', datetime.datetime.max),
('name', ''),
)
def __init__(self):
self.engine = create_engine('postgresql+psycopg2://'+self.p.dbUser+':'+ self.p.dbPWD +'@'+ self.p.dbHost +'/'+ self.p.dbName)
print(self.engine)
# self.engine = psycopg2.connect(host=self.p.dbHost, database=self.p.dbName, user=self.p.dbUser, password=self.p.dbPWD)
def start(self):
self.conn = self.engine.connect()
sql = "select a.date_price date, a.close_price as close, a.high_price high, a.open_price open, a.low_price low from minute_data a inner join symbol b on a.stock_id = b.id where b.ticker='"+ self.p.ticker + "' and a.date_price between '"+self.p.fromdate.strftime("%Y-%m-%d")+"' and '"+self.p.todate.strftime("%Y-%m-%d")+"' order by date ASC"
self.result = self.conn.execute(sql)
def stop(self):
#self.conn.close()
self.engine.dispose()
def _load(self):
one_row = self.result.fetchone()
if one_row is None:
return False
        self.lines.datetime[0] = date2num(one_row[0])
        # The query selects columns in the order: date, close, high, open, low
        self.lines.close[0] = float(one_row[1])
        self.lines.high[0] = float(one_row[2])
        self.lines.open[0] = float(one_row[3])
        self.lines.low[0] = float(one_row[4])
# self.lines.volume[0] = int(one_row[5])
self.lines.openinterest[0] = -1
return True
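# A minimal parameterized-query sketch (not part of the original file): the
# same daily query with SQLAlchemy bound parameters instead of concatenation.
# `fetch_daily_rows` is a hypothetical helper name for illustration only.
from sqlalchemy import text

def fetch_daily_rows(engine, ticker, fromdate, todate):
    query = text(
        "select a.date_price, a.close_price, a.high_price, a.open_price, a.low_price "
        "from daily_data a inner join symbol b on a.stock_id = b.id "
        "where b.ticker = :ticker and a.date_price between :start and :end "
        "order by a.date_price asc"
    )
    with engine.connect() as conn:
        # Values are bound by the driver, so they cannot alter the SQL text.
        return conn.execute(query, {"ticker": ticker,
                                    "start": fromdate.strftime("%Y-%m-%d"),
                                    "end": todate.strftime("%Y-%m-%d")}).fetchall()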
| 39.91954
| 348
| 0.59977
| 481
| 3,473
| 4.214137
| 0.203742
| 0.054267
| 0.03552
| 0.047361
| 0.882092
| 0.882092
| 0.882092
| 0.882092
| 0.882092
| 0.882092
| 0
| 0.013198
| 0.236395
| 3,473
| 87
| 349
| 39.91954
| 0.751131
| 0.140513
| 0
| 0.830769
| 0
| 0.030769
| 0.220692
| 0.01478
| 0
| 0
| 0
| 0
| 0
| 1
| 0.123077
| false
| 0
| 0.061538
| 0
| 0.307692
| 0.015385
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d39daf3387a623616f51a9a28d4a408cf09d2f60
| 201
|
py
|
Python
|
identity.py
|
kingRovo/PythonCodingChalenge
|
b62938592df10ccafec9930b69c14c778e19ad37
|
[
"bzip2-1.0.6"
] | 1
|
2021-08-02T16:52:55.000Z
|
2021-08-02T16:52:55.000Z
|
identity.py
|
kingRovo/PythonCodingChalenge
|
b62938592df10ccafec9930b69c14c778e19ad37
|
[
"bzip2-1.0.6"
] | null | null | null |
identity.py
|
kingRovo/PythonCodingChalenge
|
b62938592df10ccafec9930b69c14c778e19ad37
|
[
"bzip2-1.0.6"
] | null | null | null |
# identity operator: `is` tests whether two names refer to the same object,
# not whether their values are equal (that is what == does)
x = 12
y = 13
if x is y:
    print("x is same as y")
else:
    print("x is not same as y")
x = 13
# True in CPython only because small integers (-5..256) are cached objects
if x is y:
    print("x is same as y")
else:
    print("x is not same as y")
| 12.5625
| 31
| 0.552239
| 44
| 201
| 2.522727
| 0.295455
| 0.162162
| 0.288288
| 0.126126
| 0.810811
| 0.810811
| 0.810811
| 0.810811
| 0.810811
| 0.810811
| 0
| 0.043165
| 0.308458
| 201
| 15
| 32
| 13.4
| 0.755396
| 0.084577
| 0
| 0.727273
| 0
| 0
| 0.349727
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.363636
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6cbadb0604f029295bcf4f345118340012afe75f
| 37
|
py
|
Python
|
src/utils/__init__.py
|
takedarts/BandaiNamco-DSChallenge-3rdSolution
|
5dd909333d3fd27c237dabea11679c0f92f50142
|
[
"MIT"
] | 3
|
2020-05-23T10:39:08.000Z
|
2022-02-04T12:29:20.000Z
|
src/utils/__init__.py
|
takedarts/BandaiNamco-DSChallenge-3rdSolution
|
5dd909333d3fd27c237dabea11679c0f92f50142
|
[
"MIT"
] | null | null | null |
src/utils/__init__.py
|
takedarts/BandaiNamco-DSChallenge-3rdSolution
|
5dd909333d3fd27c237dabea11679c0f92f50142
|
[
"MIT"
] | null | null | null |
from .data import Dataset, generate
| 18.5
| 36
| 0.783784
| 5
| 37
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 37
| 1
| 37
| 37
| 0.935484
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9f19a229d53a8292dbd3467e9bdfbe60e5b5c5cb
| 6,648
|
py
|
Python
|
bluebottle/cms/migrations/0050_auto_20171024_1623.py
|
terrameijar/bluebottle
|
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
|
[
"BSD-3-Clause"
] | 10
|
2015-05-28T18:26:40.000Z
|
2021-09-06T10:07:03.000Z
|
bluebottle/cms/migrations/0050_auto_20171024_1623.py
|
terrameijar/bluebottle
|
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
|
[
"BSD-3-Clause"
] | 762
|
2015-01-15T10:00:59.000Z
|
2022-03-31T15:35:14.000Z
|
bluebottle/cms/migrations/0050_auto_20171024_1623.py
|
terrameijar/bluebottle
|
b4f5ba9c4f03e678fdd36091b29240307ea69ffd
|
[
"BSD-3-Clause"
] | 9
|
2015-02-20T13:19:30.000Z
|
2022-03-08T14:09:17.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2017-10-24 14:23
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.manager
class Migration(migrations.Migration):
dependencies = [
('cms', '0049_auto_20171024_1601'),
]
operations = [
migrations.AlterModelManagers(
name='categoriescontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='locationscontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='logoscontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='projectimagescontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='projectscontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='projectsmapcontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='quotescontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='shareresultscontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='slide',
managers=[
],
),
migrations.AlterModelManagers(
name='slidescontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='statscontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='stepscontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='supportertotalcontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterModelManagers(
name='surveycontent',
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.AlterField(
model_name='categoriescontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='linkscontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='locationscontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='logoscontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='projectimagescontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='projectscontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='projectsmapcontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='quotescontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='shareresultscontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='slidescontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='statscontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='stepscontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='supportertotalcontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='surveycontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
migrations.AlterField(
model_name='taskscontent',
name='sub_title',
field=models.CharField(blank=True, max_length=180, null=True),
),
]
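# For orientation, a hypothetical model-side sketch (not from this project):
# each AlterModelManagers operation above records manager declarations such as
# the following, and each AlterField records a nullable CharField:
#
#     class CategoriesContent(models.Model):
#         objects = models.Manager()
#         base_objects = models.Manager()
#         sub_title = models.CharField(blank=True, max_length=180, null=True)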
| 35.361702
| 74
| 0.53911
| 551
| 6,648
| 6.384755
| 0.116152
| 0.063673
| 0.107447
| 0.161171
| 0.790506
| 0.790506
| 0.790506
| 0.790506
| 0.790506
| 0.790506
| 0
| 0.017599
| 0.333333
| 6,648
| 187
| 75
| 35.550802
| 0.776173
| 0.010229
| 0
| 0.788889
| 1
| 0
| 0.127566
| 0.009883
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016667
| 0
| 0.033333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4cffc3962cc9934f5a3a4154eb434f601b0109b8
| 1,747
|
py
|
Python
|
app/apimodels/distribution.py
|
z-zroud/bamboo_test_server
|
a2b4de5643aaa570cda2729a0934b61296e21025
|
[
"MIT"
] | null | null | null |
app/apimodels/distribution.py
|
z-zroud/bamboo_test_server
|
a2b4de5643aaa570cda2729a0934b61296e21025
|
[
"MIT"
] | null | null | null |
app/apimodels/distribution.py
|
z-zroud/bamboo_test_server
|
a2b4de5643aaa570cda2729a0934b61296e21025
|
[
"MIT"
] | null | null | null |
class LightDefectDistributionReq:
def __init__(self, classifier_id, real_light_defect_count, heavy_defect_to_light_defect_count, ok_to_light_defect_count):
self.classifier_id = classifier_id
self.real_light_defect_count = real_light_defect_count
self.heavy_defect_to_light_defect_count = heavy_defect_to_light_defect_count
self.ok_to_light_defect_count = ok_to_light_defect_count
class LightDefectDistributionResp:
def __init__(self, real_light_defect_count, heavy_defect_to_light_defect_count, ok_to_light_defect_count):
self.real_light_defect_count = real_light_defect_count
self.heavy_defect_to_light_defect_count = heavy_defect_to_light_defect_count
self.ok_to_light_defect_count = ok_to_light_defect_count
class UndefinedDistributionReq:
    def __init__(self, classifier_id, real_undefined_count, light_defect_to_undefined_count, light_defect_to_ok_to_undefined_count, ok_to_undefined_count):
self.classifier_id = classifier_id
self.real_undefined_count = real_undefined_count
self.light_defect_to_undefined_count = light_defect_to_undefined_count
self.light_defect_to_ok_to_undefined_count = light_defect_to_ok_to_undefined_count
self.ok_to_undefined_count = ok_to_undefined_count
class UndefinedDistributionResp:
    def __init__(self, real_undefined_count, light_defect_to_undefined_count, light_defect_to_ok_to_undefined_count, ok_to_undefined_count):
self.real_undefined_count = real_undefined_count
self.light_defect_to_undefined_count = light_defect_to_undefined_count
self.light_defect_to_ok_to_undefined_count = light_defect_to_ok_to_undefined_count
self.ok_to_undefined_count = ok_to_undefined_count
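# A minimal sketch: these request/response holders could be written as
# dataclasses, which generate __init__/__repr__/__eq__ automatically.
# `LightDefectDistributionReqDC` is a hypothetical name for illustration.
from dataclasses import dataclass

@dataclass
class LightDefectDistributionReqDC:
    classifier_id: int
    real_light_defect_count: int
    heavy_defect_to_light_defect_count: int
    ok_to_light_defect_count: int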
| 60.241379
| 150
| 0.838008
| 252
| 1,747
| 5.079365
| 0.071429
| 0.257813
| 0.225
| 0.16875
| 0.898438
| 0.895313
| 0.859375
| 0.859375
| 0.821875
| 0.821875
| 0
| 0
| 0.125358
| 1,747
| 29
| 151
| 60.241379
| 0.837696
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e29936f839a67fed1fe661e39741769f26ecfc1a
| 14,036
|
py
|
Python
|
pysmt/smtlib/script_rint.py
|
dsksh/pysmt
|
873d4ea556c6198916be7a63dfafeef193e1263c
|
[
"Apache-2.0"
] | null | null | null |
pysmt/smtlib/script_rint.py
|
dsksh/pysmt
|
873d4ea556c6198916be7a63dfafeef193e1263c
|
[
"Apache-2.0"
] | null | null | null |
pysmt/smtlib/script_rint.py
|
dsksh/pysmt
|
873d4ea556c6198916be7a63dfafeef193e1263c
|
[
"Apache-2.0"
] | null | null | null |
rint_param_decls = '''
(declare-const ri.max_value Real)
;(declare-const ri.normal_bound Real)
(declare-const ri.err_denom Real)
(declare-const ri.err_min Real)
(declare-const ri.large_value Real)
'''
def assert_rint_param_values(os, eb=11, sb=53):
emax = 2**(eb-1)-1
os.write('\n(assert (= ri.max_value %s))\n' % str((2**sb-1) * 2**(emax-sb+1)))
normal_bound = 2**(emax-1)
#os.write('(assert (= ri.normal_bound (/ 1 %d.)))\n' % normal_bound)
err_denom = 2**(sb-1)
os.write('(assert (= ri.err_denom %d))\n' % err_denom)
err_min_1 = 2**(sb+emax-2)
#os.write('(assert (= ri.err_min (/ %d %d)))\n' % (err_denom-1, normal_bound*err_denom))
os.write('(assert (= ri.err_min (/ 1 %d)))\n' % err_min_1)
def define_rint_rnd_funs(os, prec, prpr=True):
if prpr and len(prec) > 0 and prec[0][1] > 0:
sb = prec[0][1]
err_denom = str(2**(sb-1))
else:
err_denom = 'ri.err_denom'
os.write('\n(define-fun ri.r_dn ((v Real)) Real\n')
os.write(' (let ((w (- v (/ (ite (>= v 0) v (- v)) %s) ri.err_min)))\n' % err_denom)
os.write(' (ite (>= w (* (- 1) ri.max_value)) w\n')
os.write(' (* (- 1) ri.large_value) ))\n')
os.write(')\n\n')
os.write('(define-fun ri.r_up ((v Real)) Real\n')
os.write(' (let ((w (+ v (/ (ite (>= v 0) v (- v)) %s) ri.err_min)))\n' % err_denom)
os.write(' (ite (<= w ri.max_value) w\n')
os.write(' ri.large_value ))\n')
os.write(')\n\n')
rint_prologue = '''
(assert (> ri.large_value (* 2 ri.max_value)))
(declare-datatype RInt ((tpl (ri.l Real) (ri.u Real) (p_nan Bool) )))
(define-fun is_ninf ((v RInt)) Bool (< (ri.l v) (- ri.max_value)))
(define-fun is_pinf ((v RInt)) Bool (> (ri.u v) ri.max_value))
(define-const ri.zero RInt (tpl 0 0 false))
(define-const ri.zero_nan RInt (tpl 0 0 true))
(define-const ri.pinf RInt (tpl ri.large_value ri.large_value false))
(define-const ri.ninf RInt (tpl (- ri.large_value) (- ri.large_value) false))
(define-const ri.ninf_nan RInt (tpl (- ri.large_value) (- ri.large_value) true))
(define-const ri.entire RInt (tpl (- ri.large_value) ri.large_value true))
(define-fun ri_to_ri ((v RInt)) RInt
(let ( (l (ri.r_dn (ri.l v)))
(u (ri.r_up (ri.u v))) )
(tpl l u (p_nan v)) )
)
(define-fun real_to_ri ((v Real)) RInt
(let ( (l (ri.r_dn v))
(u (ri.r_up v)) )
(tpl l u false) )
)
(define-fun ri.exact ((v Real)) RInt
(tpl v v false)
)
(define-fun ri.abs ((x RInt)) RInt
(let ( (l (ite (>= (ri.l x) 0) (ri.u x) (ite (> (ri.u x) (- (ri.l x))) (ri.u x) (- (ri.l x)))))
(u (ite (>= (ri.l x) 0) (ri.l x) (ite (< (ri.u x) 0) (- (ri.u x)) 0))) )
(tpl l u (p_nan x) ) ) )
(define-fun ri.neg ((x RInt)) RInt
(tpl (- (ri.u x)) (- (ri.l x)) (p_nan x) ) )
(define-fun ri.add ((x RInt) (y RInt)) RInt
(let ( (l (ri.r_dn (+ (ri.l x) (ri.l y))))
(u (ri.r_up (+ (ri.u x) (ri.u y)))) )
(tpl l u
(or (p_nan x) (p_nan y) (and (is_ninf x) (is_pinf y)) (and (is_pinf x) (is_ninf y))) ) ) )
(define-fun ri.sub ((x RInt) (y RInt)) RInt
(let ( (l (ri.r_dn (- (ri.l x) (ri.u y))))
(u (ri.r_up (- (ri.u x) (ri.l y)))) )
(tpl l u
(or (p_nan x) (p_nan y) (and (is_ninf x) (is_ninf y)) (and (is_pinf x) (is_pinf y))) ) ) )
(define-fun ri.sub_exact ((x RInt) (y RInt)) RInt
(let ( (l (- (ri.l x) (ri.u y)))
(u (- (ri.u x) (ri.l y))) )
(tpl l u
(or (p_nan x) (p_nan y) (and (is_ninf x) (is_ninf y)) (and (is_pinf x) (is_pinf y))) ) ) )
(define-fun ri.mul ((x RInt) (y RInt)) RInt
(ite (>= (ri.l x) 0)
(ite (= (ri.u x) 0)
(ite (and (not (is_ninf y)) (not (is_pinf y)) (not (p_nan x)) (not (p_nan y)))
;; [x] = [0]
ri.zero
;; [x] = [0] and (-inf = [y] or [y] = +inf)
ri.zero_nan )
(ite (>= (ri.l y) 0)
(ite (= (ri.u y) 0)
;; 0 <= [x] and [x] != [0] and [y] = [0]
(ite (and (not (is_pinf x)) (not (p_nan x)) (not (p_nan y)))
ri.zero
ri.zero_nan )
;; 0 <= [x] and [x] != [0] and 0 <= [y] and [y] != [0]
(let ( (l (ri.r_dn (* (ri.l x) (ri.l y))))
(u (ite (or (is_pinf x) (is_pinf y)) ri.large_value
(ri.r_up (* (ri.u x) (ri.u y))) )))
(tpl l u
(or (p_nan x) (p_nan y) (and (= (ri.l x) 0) (is_pinf y))
(and (is_pinf x) (= (ri.l y) 0)) ))))
(ite (<= (ri.u y) 0)
;; 0 <= [x] and [x] != [0] and [y] <= 0 and [y] != [0]
(let ( (l (ite (or (is_pinf x) (is_ninf y)) (- ri.large_value)
(ri.r_dn (* (ri.u x) (ri.l y))) ))
(u (ri.r_up (* (ri.l x) (ri.u y)))) )
(tpl l u
(or (p_nan x) (p_nan y) (and (= (ri.l x) 0) (is_ninf y))
(and (is_pinf x) (= (ri.u y) 0) ))))
;; 0 <= [x] and [x] != [0] and [y] strictly contains 0
(let ( (l (ite (or (is_pinf x) (is_ninf y)) (- ri.large_value)
(ri.r_dn (* (ri.u x) (ri.l y))) ) )
(u (ite (or (is_pinf x) (is_pinf y)) ri.large_value
(ri.r_up (* (ri.u x) (ri.u y))) )))
(tpl l u
(or (p_nan x) (p_nan y) (is_pinf x)
(and (= (ri.l x) 0) (or (is_ninf y) (is_pinf y))) ))))))
(ite (<= (ri.u x) 0)
(ite (>= (ri.l y) 0)
(ite (and (not (is_pinf y)) (= (ri.u y) 0))
;; [x] <= 0 and [x] != [0] and [y] = [0]
(ite (and (not (is_ninf x)) (not (p_nan x)) (not (p_nan y)))
ri.zero
ri.zero_nan )
;; [x] <= 0 and [x] != [0] and 0 <= [y] and [y] != [0]
(let ( (l (ite (or (is_ninf x) (is_pinf y)) (- ri.large_value)
(ri.r_dn (* (ri.l x) (ri.u y))) ))
(u (ri.r_up (* (ri.u x) (ri.l y)))) )
(tpl l u
(or (p_nan x) (p_nan y) (and (= (ri.u x) 0) (is_pinf y))
(and (is_ninf x) (= (ri.l y) 0))) ) ) )
(ite (<= (ri.u y) 0)
;; [x] <= 0 and [x] != [0] and [y] <= 0 and [y] != [0]
(let ( (l (ri.r_dn (* (ri.u x) (ri.u y))))
(u (ite (or (is_ninf x) (is_ninf y)) ri.large_value
(ri.r_up (* (ri.l x) (ri.l y))) )))
(tpl l u
(or (p_nan x) (p_nan y) (and (= (ri.u x) 0) (is_ninf y))
(and (is_ninf x) (= (ri.u y) 0)) )))
;; [x] <= 0 and [x] != [0] and [y] strictly contains 0
(let ( (l (ite (or (is_ninf x) (is_pinf y)) (- ri.large_value)
(ri.r_dn (* (ri.l x) (ri.u y))) ))
(u (ite (or (is_ninf x) (is_ninf y)) ri.large_value
(ri.r_up (* (ri.l x) (ri.l y))) )))
(tpl l u
(or (p_nan x) (p_nan y) (is_ninf x) (and (= (ri.u x) 0)
(or (is_ninf y) (is_pinf y))) )))))
(ite (>= (ri.l y) 0)
(ite (= (ri.u y) 0)
;; [x] strictly contains 0 and [y] = [0]
(ite (and (not (is_ninf x)) (not (is_pinf x)) (not (p_nan x)) (not (p_nan y)))
ri.zero
ri.zero_nan )
;; [x] strictly contains 0 and 0 <= [y]
(let ( (l (ite (or (is_ninf x) (is_pinf y)) (- ri.large_value)
(ri.r_dn (* (ri.l x) (ri.u y))) ))
(u (ite (or (is_pinf x) (is_pinf y)) ri.large_value
(ri.r_up (* (ri.u x) (ri.u y))) )))
(tpl l u
(or (p_nan x) (p_nan y) (is_pinf y)
(and (or (is_ninf x) (is_pinf x)) (= (ri.l y) 0)) ))))
(ite (<= (ri.u y) 0)
;; [x] strictly contains 0 and [y] <= 0
(let ( (l (ite (or (is_pinf x) (is_ninf y)) (- ri.large_value)
(ri.r_dn (* (ri.u x) (ri.l y))) ))
(u (ite (or (is_ninf x) (is_ninf y)) ri.large_value
(ri.r_up (* (ri.l x) (ri.l y))) )))
(tpl l u
(or (p_nan x) (p_nan y) (is_ninf y)
(and (or (is_ninf x) (is_pinf x)) (= (ri.u y) 0)) )))
;; [x] and [y] strictly contains 0
(let ( (l1 (ite (or (is_ninf x) (is_pinf y)) (- ri.large_value)
(ri.r_dn (* (ri.l x) (ri.u y))) ))
(l2 (ite (or (is_pinf x) (is_ninf y)) (- ri.large_value)
(ri.r_dn (* (ri.u x) (ri.l y))) ))
(u1 (ite (or (is_ninf x) (is_ninf y)) ri.large_value
(ri.r_up (* (ri.l x) (ri.l y))) ))
(u2 (ite (or (is_pinf x) (is_pinf y)) ri.large_value
(ri.r_up (* (ri.u x) (ri.u y))) )))
(ite (< l1 l2)
(ite (> u1 u2)
(tpl l1 u1 (or (p_nan x) (p_nan y) (is_ninf x)))
(tpl l1 u2 (or (p_nan x) (p_nan y) (is_ninf x) (is_pinf y))))
(ite (> u1 u2)
(tpl l2 u1 (or (p_nan x) (p_nan y) (is_ninf x) (is_ninf y)))
(tpl l2 u2
(or (p_nan x) (p_nan y)
(is_ninf x) (is_pinf x) (is_ninf y) (is_pinf y)) )))))))))
(define-fun ri.div ((x RInt) (y RInt)) RInt
(ite (> (ri.l y) 0)
(ite (>= (ri.l x) 0)
;; 0 <= [x] and 0 < [y]
(let ( (l (ite (is_pinf y) 0 (ri.r_dn (/ (ri.l x) (ri.u y)))))
(u (ite (is_pinf x) ri.large_value (ri.r_up (/ (ri.u x) (ri.l y))))) )
(tpl l u (or (p_nan x) (p_nan y) (and (is_pinf x) (is_pinf y)))) )
(ite (<= (ri.u x) 0)
;; [x] <= 0 and 0 < [y]
(let ( (l (ite (is_ninf x) (- ri.large_value) (ri.r_dn (/ (ri.l x) (ri.l y)))))
(u (ite (is_pinf y) 0 (ri.r_up (/ (ri.u x) (ri.u y))))) )
(tpl l u (or (p_nan x) (p_nan y) (and (is_ninf x) (is_pinf y)))) )
;; [x] strictly contains 0 and 0 < [y]
(let ( (l (ite (is_ninf x) (- ri.large_value) (ri.r_dn (/ (ri.l x) (ri.l y)))))
(u (ite (is_pinf x) ri.large_value (ri.r_up (/ (ri.u x) (ri.l y))))) )
(tpl l u (or (p_nan x) (p_nan y) (and (or (is_ninf x) (is_pinf x)) (is_pinf y)))) )))
(ite (< (ri.u y) 0)
(ite (>= (ri.l x) 0)
;; 0 <= [x] and [y] < 0
(let ( (l (ite (is_pinf x) (- ri.large_value) (ri.r_dn (/ (ri.u x) (ri.u y)))))
(u (ite (is_ninf y) 0 (ri.r_up (/ (ri.l x) (ri.l y))))) )
(tpl l u (or (p_nan x) (p_nan y) (and (is_pinf x) (is_ninf y)))))
(ite (<= (ri.u x) 0)
;; [x] <= 0 and [y] < 0
(let ( (l (ite (is_ninf y) 0 (ri.r_dn (/ (ri.u x) (ri.l y)))))
(u (ite (is_ninf x) ri.large_value (ri.r_up (/ (ri.l x) (ri.u y))))) )
      (tpl l u (or (p_nan x) (p_nan y) (and (is_ninf x) (is_ninf y)))) )
;; [x] strictly contains 0 and [y] < 0
(let ( (l (ite (is_pinf x) (- ri.large_value) (ri.r_dn (/ (ri.u x) (ri.u y)))))
(u (ite (is_ninf x) ri.large_value (ri.r_up (/ (ri.l x) (ri.u y))))) )
      (tpl l u (or (p_nan x) (p_nan y) (and (or (is_ninf x) (is_pinf x)) (is_ninf y)))) )))
;; [y] contains 0; results in Entire
(tpl (- ri.large_value) ri.large_value
(or (p_nan x) (p_nan y) (and (or (is_ninf x) (is_pinf x)) (or (is_ninf y) (is_pinf y)))
(and (<= (ri.l x) 0) (<= 0 (ri.u x))) )))))
;;
;; Assignment for non-NaN values.
(define-fun ri.fpis ((x RInt) (y RInt)) Bool
(and (not (p_nan x)) (not (p_nan y))
(= x y) )
)
;; Assignment.
(define-fun ri.is ((x RInt) (y RInt)) Bool
(= x y)
)
'''
rint_prologue_m = '''
;; For positive literals.
(define-fun ri.gt0 ((x RInt)) Bool
(or (is_pinf x) (> (ri.u x) 0))
)
(define-fun ri.geq0 ((x RInt)) Bool
(or (is_pinf x) (>= (ri.u x) 0))
)
(define-fun ri.gt ((x RInt) (y RInt)) Bool
(or (is_pinf x) (is_ninf y) (ri.gt0 (ri.sub_exact x y)))
)
(define-fun ri.geq ((x RInt) (y RInt)) Bool
(or (is_pinf x) (is_ninf y) (ri.geq0 (ri.sub_exact x y)))
)
(define-fun ri.fpeq ((x RInt) (y RInt)) Bool
(and (ri.geq x y) (ri.geq y x))
)
;; For negative literals.
(define-fun ri.lt0 ((x RInt)) Bool
(or (p_nan x) (is_ninf x) (< (ri.l x) 0))
)
(define-fun ri.leq0 ((x RInt)) Bool
(or (p_nan x) (is_ninf x) (<= (ri.l x) 0))
)
(define-fun ri.lt ((x RInt) (y RInt)) Bool
(or (p_nan x) (p_nan y) (ri.lt0 (ri.sub_exact x y)))
)
(define-fun ri.leq ((x RInt) (y RInt)) Bool
(or (p_nan x) (p_nan y) (ri.leq0 (ri.sub_exact x y)))
)
(define-fun ri.fpneq ((x RInt) (y RInt)) Bool
(or (ri.lt x y) (ri.lt y x))
)
;; Syntactic equality.
(define-fun ri.eq ((x RInt) (y RInt)) Bool
(or (and (p_nan x) (p_nan y)) (ri.fpeq x y))
)
(define-fun ri.neq ((x RInt) (y RInt)) Bool
(ri.fpneq x y)
)
'''
rint_prologue_p = '''
;; For positive literals.
(define-fun ri.gt0 ((x RInt)) Bool
(and (not (p_nan x)) (not (is_ninf x)) (> (ri.l x) 0))
)
(define-fun ri.geq0 ((x RInt)) Bool
(and (not (p_nan x)) (not (is_ninf x)) (>= (ri.l x) 0))
)
(define-fun ri.gt ((x RInt) (y RInt)) Bool
(and (not (p_nan x)) (not (p_nan y)) (ri.gt0 (ri.sub_exact x y)))
)
(define-fun ri.geq ((x RInt) (y RInt)) Bool
(and (not (p_nan x)) (not (p_nan y)) (ri.geq0 (ri.sub_exact x y)))
)
(define-fun ri.fpeq ((x RInt) (y RInt)) Bool
(and (not (p_nan x)) (not (p_nan y))
(= (ri.l x) (ri.u x) (ri.l y) (ri.u y)) )
)
;; For negative literals.
(define-fun ri.lt0 ((x RInt)) Bool
(and (not (is_pinf x)) (< (ri.u x) 0))
)
(define-fun ri.leq0 ((x RInt)) Bool
(and (not (is_pinf x)) (<= (ri.u x) 0))
)
(define-fun ri.lt ((x RInt) (y RInt)) Bool
(and (not (is_pinf x)) (not (is_ninf y))
(ri.lt0 (ri.sub_exact x y)) )
)
(define-fun ri.leq ((x RInt) (y RInt)) Bool
(and (not (is_pinf x)) (not (is_ninf y))
(ri.leq0 (ri.sub_exact x y)) )
)
(define-fun ri.fpneq((x RInt) (y RInt)) Bool
(or (ri.lt x y) (ri.lt y x))
)
;; Syntactic equality.
(define-fun ri.eq ((x RInt) (y RInt)) Bool
(ri.fpeq x y)
)
(define-fun ri.neq((x RInt) (y RInt)) Bool
(and (or (not (p_nan x)) (not (p_nan y))) (ri.fpneq x y))
)
'''
# eof
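# A minimal usage sketch (assuming only the helpers defined above): emit the
# double-precision (eb=11, sb=53) preamble into an in-memory buffer instead of
# a solver's stdin. The __main__ guard keeps imports side-effect free.
if __name__ == '__main__':
    import io
    buf = io.StringIO()
    buf.write(rint_param_decls)
    assert_rint_param_values(buf, eb=11, sb=53)
    define_rint_rnd_funs(buf, prec=[])  # empty prec keeps the symbolic ri.err_denom
    print(buf.getvalue())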
| 35.09
| 99
| 0.461812
| 2,589
| 14,036
| 2.369255
| 0.041715
| 0.04956
| 0.034235
| 0.066188
| 0.824421
| 0.791816
| 0.753505
| 0.721715
| 0.694653
| 0.671177
| 0
| 0.017853
| 0.317612
| 14,036
| 399
| 100
| 35.177945
| 0.622573
| 0.011257
| 0
| 0.361905
| 0
| 0.24127
| 0.944284
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 1
| 0.006349
| false
| 0
| 0
| 0
| 0.006349
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e2c182b71f7daef230b3c7a2a85816335df7f974
| 110
|
py
|
Python
|
tests/test_queue_with_stacks.py
|
jpchato/data-structures-and-algorithms-python
|
5615d3637deea1b876760d981682f7fea0c4988f
|
[
"MIT"
] | null | null | null |
tests/test_queue_with_stacks.py
|
jpchato/data-structures-and-algorithms-python
|
5615d3637deea1b876760d981682f7fea0c4988f
|
[
"MIT"
] | null | null | null |
tests/test_queue_with_stacks.py
|
jpchato/data-structures-and-algorithms-python
|
5615d3637deea1b876760d981682f7fea0c4988f
|
[
"MIT"
] | null | null | null |
from challenges.queue_with_stacks.queue_with_stacks import PseudoQueue
def is_alive():
assert PseudoQueue
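# The smoke test above only asserts that the import resolved. For orientation,
# a classic two-stack queue (a plausible but hypothetical shape for
# PseudoQueue; the real one lives in challenges/queue_with_stacks):
class TwoStackQueue:
    def __init__(self):
        self._in, self._out = [], []

    def enqueue(self, value):
        self._in.append(value)

    def dequeue(self):
        if not self._out:
            while self._in:            # reverse once; amortized O(1) per op
                self._out.append(self._in.pop())
        if not self._out:
            raise IndexError("dequeue from empty queue")
        return self._out.pop()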
| 27.5
| 70
| 0.845455
| 15
| 110
| 5.866667
| 0.733333
| 0.204545
| 0.340909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109091
| 110
| 4
| 71
| 27.5
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
e2c2ef69ef5f972007e14ba2a1585475c6c58b23
| 5,665
|
py
|
Python
|
experiments/approxminmax_quail/approxminmax.py
|
kvgarimella/sisyphus-ppml
|
bffacd778f2a39337897932c2468e108560e1f53
|
[
"MIT"
] | 2
|
2022-03-08T01:45:39.000Z
|
2022-03-14T18:59:49.000Z
|
experiments/approxminmax_quail/approxminmax.py
|
kvgarimella/sisyphus-ppml
|
bffacd778f2a39337897932c2468e108560e1f53
|
[
"MIT"
] | null | null | null |
experiments/approxminmax_quail/approxminmax.py
|
kvgarimella/sisyphus-ppml
|
bffacd778f2a39337897932c2468e108560e1f53
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
device = 'cuda' if torch.cuda.is_available() else 'cpu'
class ApproxMinMaxNorm2d(nn.Module):
def __init__(self, num_features, a=2., b=1., mode=1):
super(ApproxMinMaxNorm2d, self).__init__()
self.register_buffer('running_max', torch.ones(1,num_features,1,1))
self.register_buffer('running_min', torch.ones(1,num_features,1,1))
self.register_buffer('num_batches_tracked', torch.tensor(0, dtype=torch.long))
        self.register_buffer('mode', torch.tensor(mode, dtype=torch.long))  # 0 -> biggest, 1 -> average, 2 -> real val
self.a = a
self.b = b
def forward(self, input):
if self.training:
self.num_batches_tracked += 1
if self.num_batches_tracked == 1:
exponential_average_factor = 1.0
else:
exponential_average_factor = 0.1
### Min block ###
mins = input.amin(dim=(0,2,3), keepdim=True)
if self.mode == 0:
with torch.no_grad():
cond_min = mins < self.running_min
self.running_min = torch.where(cond_min, mins, self.running_min)
elif self.mode == 1:
with torch.no_grad():
self.running_min[:] = exponential_average_factor * mins + (1 - exponential_average_factor) * self.running_min
input = input.sub(mins)
### Min block ###
### Max block ###
maxs = input.amax(dim=(0,2,3), keepdim=True)
if self.mode == 0:
with torch.no_grad():
cond_max = maxs > self.running_max
self.running_max = torch.where(cond_max, maxs, self.running_max)
elif self.mode == 1:
with torch.no_grad():
self.running_max[:] = exponential_average_factor * maxs + (1 - exponential_average_factor) * self.running_max
input = input.div(maxs)
### Max Block ###
if not self.training:
if self.mode != 2:
input = input.sub(self.running_min)
input = input.div(self.running_max)
else:
mins = input.amin(dim=(0,2,3), keepdim=True)
input = input.sub(mins)
maxs = input.amax(dim=(0,2,3), keepdim=True)
input = input.div(maxs)
return self.a * input - self.b
class ApproxMinMaxNorm1d(nn.Module):
def __init__(self, num_features, a=2., b=1., mode=1):
super(ApproxMinMaxNorm1d, self).__init__()
self.register_buffer('running_max', torch.ones(1,num_features))
self.register_buffer('running_min', torch.ones(1,num_features))
self.register_buffer('num_batches_tracked', torch.tensor(0, dtype=torch.long))
        self.register_buffer('mode', torch.tensor(mode, dtype=torch.long))  # 0 -> biggest, 1 -> average, 2 -> real val
self.a = a
self.b = b
def forward(self, input):
if self.training:
self.num_batches_tracked += 1
if self.num_batches_tracked == 1:
exponential_average_factor = 1.0
else:
exponential_average_factor = 0.1
### Min block ###
mins = input.amin(dim=(0), keepdim=True)
if self.mode == 0:
with torch.no_grad():
cond_min = mins < self.running_min
self.running_min = torch.where(cond_min, mins, self.running_min)
elif self.mode == 1:
with torch.no_grad():
self.running_min[:] = exponential_average_factor * mins + (1 - exponential_average_factor) * self.running_min
input = input.sub(mins)
### Min block ###
### Max block ###
maxs = input.amax(dim=(0), keepdim=True)
if self.mode == 0:
with torch.no_grad():
cond_max = maxs > self.running_max
self.running_max = torch.where(cond_max, maxs, self.running_max)
elif self.mode == 1:
with torch.no_grad():
self.running_max[:] = exponential_average_factor * maxs + (1 - exponential_average_factor) * self.running_max
input = input.div(maxs)
### Max Block ###
if not self.training:
if self.mode != 2:
input = input.sub(self.running_min)
input = input.div(self.running_max)
else:
mins = input.amin(dim=(0), keepdim=True)
input = input.sub(mins)
maxs = input.amax(dim=(0), keepdim=True)
input = input.div(maxs)
return self.a * input - self.b
def test1d():
x = torch.randn(256,200)
approx1d = ApproxMinMaxNorm1d(num_features=200, mode=2)
x = approx1d(x)
print("before")
print(x.min(), x.max())
print("after")
approx1d.eval()
x = approx1d(x)
print("before")
print(x.min(), x.max())
print("after")
def test2d():
x = torch.randn(256,3,32,32)
print(x.amin(dim=(0,2,3), keepdim=True))
print(x.amax(dim=(0,2,3), keepdim=True) - x.amin(dim=(0,2,3), keepdim=True))
approx2d = ApproxMinMaxNorm2d(num_features=3, mode=2)
print(approx2d.num_batches_tracked == 0)
approx2d.train()
x = approx2d(x)
x = approx2d(x)
print(x.min(), x.max())
approx2d.eval()
x = approx2d(x)
print(x.min(), x.max())
#test1d()
#test2d()
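# A minimal drop-in sketch (layer sizes are illustrative, not taken from the
# original experiments): ApproxMinMaxNorm2d replacing BatchNorm2d in a block.
def _demo_block():
    block = nn.Sequential(
        nn.Conv2d(3, 16, kernel_size=3, padding=1),
        ApproxMinMaxNorm2d(num_features=16, mode=1),  # EMA-tracked min/max
        nn.ReLU(),
    )
    x = torch.randn(8, 3, 32, 32)
    y = block(x)                      # training-mode pass updates the buffers
    block.eval()
    return block(x), y                # eval-mode pass uses the running stats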
| 34.754601
| 129
| 0.539806
| 693
| 5,665
| 4.246753
| 0.11544
| 0.089704
| 0.097859
| 0.040775
| 0.866803
| 0.866803
| 0.866803
| 0.859667
| 0.829086
| 0.827727
| 0
| 0.030456
| 0.333451
| 5,665
| 162
| 130
| 34.969136
| 0.748941
| 0.03301
| 0
| 0.801724
| 0
| 0
| 0.021935
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051724
| false
| 0
| 0.017241
| 0
| 0.103448
| 0.094828
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e2d4711b7a6d01b9ff9375c3091ac7b787cd26c7
| 181,203
|
py
|
Python
|
app/tests/test_endpoints_insert_data.py
|
martamatos/kinetics_db
|
2fd152a46be0b28d56d504a528b8a8b4f543dedd
|
[
"MIT"
] | null | null | null |
app/tests/test_endpoints_insert_data.py
|
martamatos/kinetics_db
|
2fd152a46be0b28d56d504a528b8a8b4f543dedd
|
[
"MIT"
] | 5
|
2019-02-26T15:02:27.000Z
|
2020-04-02T22:00:47.000Z
|
app/tests/test_endpoints_insert_data.py
|
martamatos/kinetics_db
|
2fd152a46be0b28d56d504a528b8a8b4f543dedd
|
[
"MIT"
] | null | null | null |
import re
import unittest
from app import create_app, db
from app.models import Compartment, Enzyme, EnzymeOrganism, EnzymeReactionOrganism, EnzymeStructure, \
EvidenceLevel, Gene, GibbsEnergy, GibbsEnergyReactionModel, Mechanism, Metabolite, Model, Organism, Reaction, \
ReactionMetabolite, Reference, EnzymeGeneOrganism, \
ReferenceType, EnzymeReactionInhibition, EnzymeReactionActivation, EnzymeReactionEffector, EnzymeReactionMiscInfo, \
ModelAssumptions
from app.utils.parsers import parse_input_list, ReactionParser
from app.utils.populate_db import add_models, add_mechanisms, add_reaction, add_reference_types, add_enzymes, \
add_compartments, add_evidence_levels, add_organisms, add_references
from config import Config
from werkzeug.datastructures import FileStorage
class TestConfig(Config):
TESTING = True
#SQLALCHEMY_DATABASE_URI = 'sqlite://'
POSTGRES_DB = 'kinetics_db_test'
LOGIN_DISABLED = True
WTF_CSRF_ENABLED = False
UPLOAD_FOLDER = '../../uploaded_models'
def populate_db(test_case, client=None):
if test_case == 'reaction':
add_compartments()
add_evidence_levels()
add_mechanisms()
add_organisms()
add_enzymes(client)
add_models()
add_reference_types()
add_references()
elif test_case == 'model':
add_compartments()
add_evidence_levels()
add_mechanisms()
add_organisms()
add_enzymes(client)
add_models()
add_reference_types()
add_references()
add_reaction(client)
elif test_case == 'upload_model':
add_compartments()
add_evidence_levels()
add_mechanisms()
add_organisms()
add_enzymes(client)
add_models()
add_reference_types()
add_references()
add_reaction(client)
else:
add_compartments()
add_evidence_levels()
add_mechanisms()
add_organisms()
add_enzymes(client)
add_models()
add_reference_types()
add_references()
add_reaction(client)
class TestAddEnzyme(unittest.TestCase):
def setUp(self):
self.app = create_app(TestConfig)
self.client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_add_first_enzyme(self):
enzyme_name = 'Phosphofructokinase'
enzyme_acronym = 'PFK'
isoenzyme = 'PFK1'
ec_number = '1.2.1.31'
organism_name = 'E. coli'
number_of_active_sites = 4
gene_names = 'b001 b003'
uniprot_ids = 'PC3W1, P34D'
pdb_structure_ids = '3H8A, 1E9I'
strain = 'WT'
gene_name_list = parse_input_list(gene_names)
uniprot_id_list = parse_input_list(uniprot_ids)
pdb_structure_id_list = parse_input_list(pdb_structure_ids)
strain_list = parse_input_list(strain)
organism = Organism(name=organism_name)
db.session.add(organism)
self.assertEqual(Organism().query.count(), 1)
self.assertEqual(Organism().query.first().name, organism_name)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
response = self.client.post('/add_enzyme', data=dict(
name=enzyme_name,
acronym=enzyme_acronym,
isoenzyme=isoenzyme,
ec_number=ec_number,
organism_name='1', # querySelectField
number_of_active_sites=number_of_active_sites,
gene_names=gene_names,
uniprot_id_list=uniprot_ids,
pdb_structure_ids=pdb_structure_ids,
strain=strain), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzymes - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme is now live!' in response.data)
self.assertEqual(Enzyme().query.first().name, enzyme_name)
self.assertEqual(Enzyme().query.first().acronym, enzyme_acronym)
self.assertEqual(Enzyme().query.first().isoenzyme, isoenzyme)
self.assertEqual(Enzyme().query.first().ec_number, ec_number)
self.assertEqual(Enzyme().query.first().enzyme_structures.count(), 2)
self.assertEqual(Enzyme().query.first().enzyme_structures[0].pdb_id, pdb_structure_id_list[0])
self.assertEqual(Enzyme().query.first().enzyme_structures[1].pdb_id, pdb_structure_id_list[1])
self.assertEqual(Enzyme().query.first().enzyme_organisms.count(), 2)
self.assertEqual(Enzyme().query.first().enzyme_organisms[0].id, 1)
self.assertEqual(Enzyme().query.first().enzyme_organisms[1].id, 2)
self.assertEqual(Gene().query.count(), 2)
self.assertEqual(Gene().query.all()[0].name, gene_name_list[0])
self.assertEqual(Gene().query.all()[1].name, gene_name_list[1])
self.assertEqual(Gene().query.all()[0].enzyme_gene_organisms.count(), 1)
self.assertEqual(Gene().query.all()[1].enzyme_gene_organisms.count(), 1)
self.assertEqual(EnzymeOrganism().query.count(), 2)
self.assertEqual(EnzymeOrganism().query.all()[0].enzyme.name, enzyme_name)
self.assertEqual(EnzymeOrganism().query.all()[1].enzyme.name, enzyme_name)
self.assertEqual(EnzymeOrganism().query.all()[0].organism.name, organism_name)
self.assertEqual(EnzymeOrganism().query.all()[1].organism.name, organism_name)
self.assertEqual(EnzymeOrganism().query.all()[0].uniprot_id, uniprot_id_list[0])
self.assertEqual(EnzymeOrganism().query.all()[1].uniprot_id, uniprot_id_list[1])
self.assertEqual(EnzymeOrganism().query.all()[0].n_active_sites, number_of_active_sites)
self.assertEqual(EnzymeOrganism().query.all()[1].n_active_sites, number_of_active_sites)
self.assertEqual(EnzymeGeneOrganism().query.all()[0].gene.name, gene_name_list[0])
self.assertEqual(EnzymeGeneOrganism().query.all()[0].enzyme.isoenzyme, isoenzyme)
self.assertEqual(EnzymeGeneOrganism().query.all()[0].organism.name, organism_name)
self.assertEqual(EnzymeGeneOrganism().query.all()[1].gene.name, gene_name_list[1])
self.assertEqual(EnzymeGeneOrganism().query.all()[1].enzyme.isoenzyme, isoenzyme)
self.assertEqual(EnzymeGeneOrganism().query.all()[1].organism.name, organism_name)
self.assertEqual(EnzymeStructure().query.all()[0].enzyme.name, enzyme_name)
self.assertEqual(EnzymeStructure().query.all()[1].enzyme.name, enzyme_name)
self.assertEqual(EnzymeStructure().query.all()[0].organism.name, organism_name)
self.assertEqual(EnzymeStructure().query.all()[1].organism.name, organism_name)
self.assertEqual(EnzymeStructure().query.all()[0].pdb_id, pdb_structure_id_list[0])
self.assertEqual(EnzymeStructure().query.all()[1].pdb_id, pdb_structure_id_list[1])
self.assertEqual(EnzymeStructure().query.all()[0].strain, strain_list[0])
self.assertEqual(EnzymeStructure().query.all()[1].strain, strain_list[0])
def test_add_first_enzyme_basic(self):
enzyme_name = 'Phosphofructokinase'
enzyme_acronym = 'PFK'
isoenzyme = 'PFK1'
ec_number = '1.2.1.31'
number_of_active_sites = ''
gene_names = ''
uniprot_ids = ''
pdb_structure_ids = ''
strain = ''
self.assertEqual(Organism().query.count(), 0)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
response = self.client.post('/add_enzyme', data=dict(
name=enzyme_name,
acronym=enzyme_acronym,
isoenzyme=isoenzyme,
ec_number=ec_number,
organism_name='__None', # querySelectField
number_of_active_sites=number_of_active_sites,
gene_names=gene_names,
uniprot_id_list=uniprot_ids,
pdb_structure_ids=pdb_structure_ids,
strain=strain), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzymes - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme is now live!' in response.data)
self.assertEqual(Enzyme().query.count(), 1)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
self.assertEqual(Enzyme().query.first().name, enzyme_name)
self.assertEqual(Enzyme().query.first().acronym, enzyme_acronym)
self.assertEqual(Enzyme().query.first().isoenzyme, isoenzyme)
self.assertEqual(Enzyme().query.first().ec_number, ec_number)
self.assertEqual(Enzyme().query.first().enzyme_structures.count(), 0)
self.assertEqual(Enzyme().query.first().enzyme_organisms.count(), 0)
def test_add_repeated_isoenzyme(self):
enzyme_name = 'Phosphofructokinase'
enzyme_acronym = 'PFK'
isoenzyme = 'PFK1'
ec_number = '1.2.1.31'
number_of_active_sites = ''
gene_names = ''
uniprot_ids = ''
pdb_structure_ids = ''
strain = ''
enzyme = Enzyme(name=enzyme_name,
acronym=enzyme_acronym,
isoenzyme=isoenzyme,
ec_number=ec_number)
db.session.add(enzyme)
self.assertEqual(Organism().query.count(), 0)
self.assertEqual(Enzyme().query.count(), 1)
self.assertEqual(Enzyme().query.first().name, enzyme_name)
self.assertEqual(Enzyme().query.first().acronym, enzyme_acronym)
self.assertEqual(Enzyme().query.first().isoenzyme, isoenzyme)
self.assertEqual(Enzyme().query.first().ec_number, ec_number)
self.assertEqual(Enzyme().query.first().enzyme_structures.count(), 0)
self.assertEqual(Enzyme().query.first().enzyme_organisms.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
response = self.client.post('/add_enzyme', data=dict(
name=enzyme_name,
acronym=enzyme_acronym,
isoenzyme=isoenzyme,
ec_number=ec_number,
organism_name='__None', # querySelectField
number_of_active_sites=number_of_active_sites,
gene_names=gene_names,
uniprot_id_list=uniprot_ids,
pdb_structure_ids=pdb_structure_ids,
strain=strain), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add enzyme - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'The isoenzyme you specified already exists. Please choose a different name.' in response.data)
self.assertEqual(Enzyme().query.count(), 1)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
def test_add_enzyme_number_of_active_sites_without_organism(self):
enzyme_name = 'Phosphofructokinase'
enzyme_acronym = 'PFK'
isoenzyme = 'PFK1'
ec_number = '1.2.1.31'
number_of_active_sites = 4
gene_names = ''
uniprot_ids = ''
pdb_structure_ids = ''
strain = ''
self.assertEqual(Organism().query.count(), 0)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
response = self.client.post('/add_enzyme', data=dict(
name=enzyme_name,
acronym=enzyme_acronym,
isoenzyme=isoenzyme,
ec_number=ec_number,
organism_name='__None', # querySelectField
number_of_active_sites=number_of_active_sites,
gene_names=gene_names,
uniprot_id_list=uniprot_ids,
pdb_structure_ids=pdb_structure_ids,
strain=strain), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add enzyme - Kinetics DB \n</title>' in response.data)
self.assertTrue(
b'If you specify the number of active sites you must also specify the organism name.' in response.data)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(Organism().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
def test_add_enzyme_gene_names_without_organism(self):
enzyme_name = 'Phosphofructokinase'
enzyme_acronym = 'PFK'
isoenzyme = 'PFK1'
ec_number = '1.2.1.31'
number_of_active_sites = ''
gene_names = 'b001'
uniprot_ids = ''
pdb_structure_ids = ''
strain = ''
self.assertEqual(Organism().query.count(), 0)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
response = self.client.post('/add_enzyme', data=dict(
name=enzyme_name,
acronym=enzyme_acronym,
isoenzyme=isoenzyme,
ec_number=ec_number,
organism_name='__None', # querySelectField
number_of_active_sites=number_of_active_sites,
gene_names=gene_names,
uniprot_id_list=uniprot_ids,
pdb_structure_ids=pdb_structure_ids,
strain=strain), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add enzyme - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'If you specify encoding genes you must also specify the organism name.' in response.data)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(Organism().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
def test_add_enzyme_uniprot_id_list_without_organism(self):
enzyme_name = 'Phosphofructokinase'
enzyme_acronym = 'PFK'
isoenzyme = 'PFK1'
ec_number = '1.2.1.31'
number_of_active_sites = ''
gene_names = ''
uniprot_ids = 'PC1R3'
pdb_structure_ids = ''
strain = ''
self.assertEqual(Organism().query.count(), 0)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
response = self.client.post('/add_enzyme', data=dict(
name=enzyme_name,
acronym=enzyme_acronym,
isoenzyme=isoenzyme,
ec_number=ec_number,
organism_name='__None', # querySelectField
number_of_active_sites=number_of_active_sites,
gene_names=gene_names,
uniprot_id_list=uniprot_ids,
pdb_structure_ids=pdb_structure_ids,
strain=strain), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add enzyme - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'If you specify uniprot IDs you must also specify the organism name' in response.data)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(Organism().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
def test_add_enzyme_pdb_structure_ids_without_organism(self):
enzyme_name = 'Phosphofructokinase'
enzyme_acronym = 'PFK'
isoenzyme = 'PFK1'
ec_number = '1.2.1.31'
number_of_active_sites = ''
gene_names = ''
uniprot_ids = ''
pdb_structure_ids = '1E9I'
strain = ''
self.assertEqual(Organism().query.count(), 0)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
response = self.client.post('/add_enzyme', data=dict(
name=enzyme_name,
acronym=enzyme_acronym,
isoenzyme=isoenzyme,
ec_number=ec_number,
organism_name='__None', # querySelectField
number_of_active_sites=number_of_active_sites,
gene_names=gene_names,
uniprot_id_list=uniprot_ids,
pdb_structure_ids=pdb_structure_ids,
strain=strain), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add enzyme - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'If you specify PDB structures you must also specify the organism name' in response.data)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(Organism().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
def test_add_enzyme_mismatched_strain(self):
enzyme_name = 'Phosphofructokinase'
enzyme_acronym = 'PFK'
isoenzyme = 'PFK1'
ec_number = '1.2.1.31'
number_of_active_sites = ''
gene_names = ''
uniprot_ids = ''
pdb_structure_ids = '1E9I, 38HA, UCW8'
strain = 'WT, knockout'
self.assertEqual(Organism().query.count(), 0)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
response = self.client.post('/add_enzyme', data=dict(
name=enzyme_name,
acronym=enzyme_acronym,
isoenzyme=isoenzyme,
ec_number=ec_number,
organism_name='__None', # querySelectField
number_of_active_sites=number_of_active_sites,
gene_names=gene_names,
uniprot_id_list=uniprot_ids,
pdb_structure_ids=pdb_structure_ids,
strain=strain), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add enzyme - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'When providing PDB IDs either provide:' in response.data)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(Organism().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
def test_add_enzyme_mismatched_strain_2(self):
enzyme_name = 'Phosphofructokinase'
enzyme_acronym = 'PFK'
isoenzyme = 'PFK1'
ec_number = '1.2.1.31'
organism_name = 'E. coli'
number_of_active_sites = ''
gene_names = ''
uniprot_ids = ''
pdb_structure_ids = '1E9I, 38HA'
strain = 'WT, knockout'
pdb_structure_id_list = parse_input_list(pdb_structure_ids)
strain_list = parse_input_list(strain)
organism = Organism(name=organism_name)
db.session.add(organism)
self.assertEqual(Organism().query.count(), 1)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
response = self.client.post('/add_enzyme', data=dict(
name=enzyme_name,
acronym=enzyme_acronym,
isoenzyme=isoenzyme,
ec_number=ec_number,
organism_name='1', # querySelectField
number_of_active_sites=number_of_active_sites,
gene_names=gene_names,
uniprot_id_list=uniprot_ids,
pdb_structure_ids=pdb_structure_ids,
strain=strain), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzymes - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme is now live!' in response.data)
self.assertEqual(Enzyme().query.first().name, enzyme_name)
self.assertEqual(Enzyme().query.first().acronym, enzyme_acronym)
self.assertEqual(Enzyme().query.first().isoenzyme, isoenzyme)
self.assertEqual(Enzyme().query.first().ec_number, ec_number)
self.assertEqual(Enzyme().query.first().enzyme_structures.count(), 2)
self.assertEqual(Enzyme().query.first().enzyme_structures[0].pdb_id, pdb_structure_id_list[0])
self.assertEqual(Enzyme().query.first().enzyme_structures[1].pdb_id, pdb_structure_id_list[1])
self.assertEqual(Enzyme().query.first().enzyme_organisms.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.all()[0].enzyme.name, enzyme_name)
self.assertEqual(EnzymeStructure().query.all()[1].enzyme.name, enzyme_name)
self.assertEqual(EnzymeStructure().query.all()[0].organism.name, organism_name)
self.assertEqual(EnzymeStructure().query.all()[1].organism.name, organism_name)
self.assertEqual(EnzymeStructure().query.all()[0].pdb_id, pdb_structure_id_list[0])
self.assertEqual(EnzymeStructure().query.all()[1].pdb_id, pdb_structure_id_list[1])
self.assertEqual(EnzymeStructure().query.all()[0].strain, strain_list[0])
self.assertEqual(EnzymeStructure().query.all()[1].strain, strain_list[1])
def test_add_enzyme_mismatched_strain_3(self):
enzyme_name = 'Phosphofructokinase'
enzyme_acronym = 'PFK'
isoenzyme = 'PFK1'
ec_number = '1.2.1.31'
organism_name = 'E. coli'
number_of_active_sites = ''
gene_names = ''
uniprot_ids = ''
pdb_structure_ids = '1E9I, 38HA, UCW8'
strain = 'WT'
pdb_structure_id_list = parse_input_list(pdb_structure_ids)
strain_list = parse_input_list(strain)
organism = Organism(name=organism_name)
db.session.add(organism)
self.assertEqual(Organism().query.count(), 1)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
response = self.client.post('/add_enzyme', data=dict(
name=enzyme_name,
acronym=enzyme_acronym,
isoenzyme=isoenzyme,
ec_number=ec_number,
organism_name='1', # querySelectField
number_of_active_sites=number_of_active_sites,
gene_names=gene_names,
uniprot_id_list=uniprot_ids,
pdb_structure_ids=pdb_structure_ids,
strain=strain), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzymes - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme is now live!' in response.data)
self.assertEqual(Enzyme().query.first().name, enzyme_name)
self.assertEqual(Enzyme().query.first().acronym, enzyme_acronym)
self.assertEqual(Enzyme().query.first().isoenzyme, isoenzyme)
self.assertEqual(Enzyme().query.first().ec_number, ec_number)
self.assertEqual(Enzyme().query.first().enzyme_structures.count(), 3)
self.assertEqual(Enzyme().query.first().enzyme_structures[0].pdb_id, pdb_structure_id_list[0])
self.assertEqual(Enzyme().query.first().enzyme_structures[1].pdb_id, pdb_structure_id_list[1])
self.assertEqual(Enzyme().query.first().enzyme_structures[2].pdb_id, pdb_structure_id_list[2])
self.assertEqual(Enzyme().query.first().enzyme_organisms.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.all()[0].enzyme.name, enzyme_name)
self.assertEqual(EnzymeStructure().query.all()[1].enzyme.name, enzyme_name)
self.assertEqual(EnzymeStructure().query.all()[2].enzyme.name, enzyme_name)
self.assertEqual(EnzymeStructure().query.all()[0].organism.name, organism_name)
self.assertEqual(EnzymeStructure().query.all()[1].organism.name, organism_name)
self.assertEqual(EnzymeStructure().query.all()[2].organism.name, organism_name)
self.assertEqual(EnzymeStructure().query.all()[0].pdb_id, pdb_structure_id_list[0])
self.assertEqual(EnzymeStructure().query.all()[1].pdb_id, pdb_structure_id_list[1])
self.assertEqual(EnzymeStructure().query.all()[2].pdb_id, pdb_structure_id_list[2])
self.assertEqual(EnzymeStructure().query.all()[0].strain, strain_list[0])
self.assertEqual(EnzymeStructure().query.all()[1].strain, strain_list[0])
self.assertEqual(EnzymeStructure().query.all()[2].strain, strain_list[0])
def test_add_enzyme_mismatched_strain_4(self):
enzyme_name = 'Phosphofructokinase'
enzyme_acronym = 'PFK'
isoenzyme = 'PFK1'
ec_number = '1.2.1.31'
organism_name = 'E. coli'
number_of_active_sites = ''
gene_names = ''
uniprot_ids = ''
pdb_structure_ids = '1E9I, 38HA, UCW8'
strain = ''
pdb_structure_id_list = parse_input_list(pdb_structure_ids)
strain_list = parse_input_list(strain)
organism = Organism(name=organism_name)
db.session.add(organism)
self.assertEqual(Organism().query.count(), 1)
self.assertEqual(Enzyme().query.count(), 0)
self.assertEqual(EnzymeOrganism().query.count(), 0)
self.assertEqual(EnzymeStructure().query.count(), 0)
response = self.client.post('/add_enzyme', data=dict(
name=enzyme_name,
acronym=enzyme_acronym,
isoenzyme=isoenzyme,
ec_number=ec_number,
organism_name='1', # querySelectField
number_of_active_sites=number_of_active_sites,
gene_names=gene_names,
uniprot_id_list=uniprot_ids,
pdb_structure_ids=pdb_structure_ids,
strain=strain), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzymes - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme is now live!' in response.data)
        enzyme = Enzyme.query.first()
        self.assertEqual(enzyme.name, enzyme_name)
        self.assertEqual(enzyme.acronym, enzyme_acronym)
        self.assertEqual(enzyme.isoenzyme, isoenzyme)
        self.assertEqual(enzyme.ec_number, ec_number)
        self.assertEqual(enzyme.enzyme_structures.count(), 3)
        for i, structure in enumerate(enzyme.enzyme_structures):
            self.assertEqual(structure.pdb_id, pdb_structure_id_list[i])
        self.assertEqual(enzyme.enzyme_organisms.count(), 0)
        self.assertEqual(EnzymeOrganism.query.count(), 0)
        # With a blank strain field, every structure's strain stays empty.
        for i, structure in enumerate(EnzymeStructure.query.all()):
            self.assertEqual(structure.enzyme.name, enzyme_name)
            self.assertEqual(structure.organism.name, organism_name)
            self.assertEqual(structure.pdb_id, pdb_structure_id_list[i])
            self.assertEqual(structure.strain, strain)
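# /add_enzyme_inhibition: a posted inhibition should be stored exactly once and
# linked to the inhibitor/affected metabolites, the evidence level, both
# reference DOIs, and the selected model(s).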
class TestAddEnzymeInhibition(unittest.TestCase):
def setUp(self):
self.app = create_app(TestConfig)
self.client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
populate_db('enzyme_inhibition', self.client)
self.enzyme = '1'
self.reaction = '1'
self.organism = '1'
self.models = '1'
self.inhibitor_met = 'adp'
self.affected_met = 'atp'
self.inhibition_type = 'Competitive'
        self.inhibition_constant = 1.3e-4
self.evidence_level = '1'
self.references = 'https://doi.org/10.1093/bioinformatics/bty942, https://doi.org/10.1093/bioinformatics/bty943'
self.comments = ''
self.reference_list = parse_input_list(self.references)
self.grasp_id = 'PFK1'
self.subs_binding_order = 'adp_c, pep_c'
self.prod_release_order = 'pyr_c, atp_c'
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_add_first_inhibition(self):
response = self.client.post('/add_enzyme_inhibition', data=dict(
enzyme=self.enzyme,
reaction=self.reaction,
organism=self.organism,
models=self.models,
inhibitor_met=self.inhibitor_met,
affected_met=self.affected_met,
inhibition_type=self.inhibition_type,
inhibition_constant=self.inhibition_constant,
inhibition_evidence_level=self.evidence_level,
references=self.references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzyme inhibitor - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme inhibition is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
self.assertEqual(EnzymeReactionInhibition.query.count(), 1)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 3)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().grasp_id, self.grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.first().subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.first().prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction, Reaction.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme, Enzyme.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism, Mechanism.query.first())
self.assertEqual(Reference.query.all()[0].title, 'eQuilibrator')
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.reference_list[0])
self.assertEqual(Reference.query.all()[2].doi, self.reference_list[1])
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(EnzymeReactionInhibition.query.count(), 1)
self.assertEqual(EnzymeReactionInhibition.query.first().inhibitor_met,
Metabolite.query.filter_by(bigg_id=self.inhibitor_met).first())
self.assertEqual(EnzymeReactionInhibition.query.first().affected_met,
Metabolite.query.filter_by(bigg_id=self.affected_met).first())
self.assertAlmostEqual(EnzymeReactionInhibition.query.first().inhibition_constant, self.inhibition_constant, 6)
self.assertEqual(EnzymeReactionInhibition.query.first().evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionInhibition.query.first().comments, self.comments)
self.assertEqual(EnzymeReactionInhibition.query.first().references[0].doi, self.reference_list[0])
self.assertEqual(EnzymeReactionInhibition.query.first().references[1].doi, self.reference_list[1])
self.assertEqual(EnzymeReactionInhibition.query.first().models.count(), 1)
self.assertEqual(EnzymeReactionInhibition.query.first().models[0], Model.query.first())
self.assertEqual(Model.query.first().enzyme_reaction_inhibitions.count(), 1)
self.assertEqual(Model.query.first().enzyme_reaction_inhibitions[0].id,
EnzymeReactionInhibition.query.first().id)
self.assertEqual(Model.query.first().enzyme_reaction_organisms.count(), 2)
def test_add_inhibition_two_models(self):
self.models = ['1', '2']
response = self.client.post('/add_enzyme_inhibition', data=dict(
enzyme=self.enzyme,
reaction=self.reaction,
organism=self.organism,
models=self.models,
inhibitor_met=self.inhibitor_met,
affected_met=self.affected_met,
inhibition_type=self.inhibition_type,
inhibition_constant=self.inhibition_constant,
inhibition_evidence_level=self.evidence_level,
references=self.references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzyme inhibitor - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme inhibition is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
self.assertEqual(EnzymeReactionInhibition.query.count(), 1)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 3)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().grasp_id, self.grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.first().subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.first().prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction, Reaction.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme, Enzyme.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism, Mechanism.query.first())
self.assertEqual(Reference.query.all()[0].title, 'eQuilibrator')
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.reference_list[0])
self.assertEqual(Reference.query.all()[2].doi, self.reference_list[1])
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(EnzymeReactionInhibition.query.count(), 1)
self.assertEqual(EnzymeReactionInhibition.query.first().inhibitor_met,
Metabolite.query.filter_by(bigg_id=self.inhibitor_met).first())
self.assertEqual(EnzymeReactionInhibition.query.first().affected_met,
Metabolite.query.filter_by(bigg_id=self.affected_met).first())
self.assertAlmostEqual(EnzymeReactionInhibition.query.first().inhibition_constant, self.inhibition_constant, 6)
self.assertEqual(EnzymeReactionInhibition.query.first().evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionInhibition.query.first().comments, self.comments)
self.assertEqual(EnzymeReactionInhibition.query.first().references[0].doi, self.reference_list[0])
self.assertEqual(EnzymeReactionInhibition.query.first().references[1].doi, self.reference_list[1])
self.assertEqual(EnzymeReactionInhibition.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionInhibition.query.first().models[0], Model.query.first())
self.assertEqual(EnzymeReactionInhibition.query.first().models[1], Model.query.all()[1])
self.assertEqual(Model.query.first().enzyme_reaction_inhibitions.count(), 1)
self.assertEqual(Model.query.all()[0].enzyme_reaction_inhibitions[0].id,
EnzymeReactionInhibition.query.first().id)
self.assertEqual(Model.query.all()[1].enzyme_reaction_inhibitions[0].id,
EnzymeReactionInhibition.query.first().id)
self.assertEqual(Model.query.first().enzyme_reaction_organisms.count(), 2)
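# /add_enzyme_activation: same setup as the inhibition tests, but for an
# activator metabolite and its activation constant.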
class TestAddEnzymeActivation(unittest.TestCase):
def setUp(self):
self.app = create_app(TestConfig)
self.client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
populate_db('enzyme_activation', self.client)
self.enzyme = '1'
self.reaction = '1'
self.organism = '1'
self.models = '1'
self.activator_met = 'adp'
        self.activation_constant = 1.3e-4
self.evidence_level = '1'
self.references = 'https://doi.org/10.1093/bioinformatics/bty942, https://doi.org/10.1093/bioinformatics/bty943'
self.comments = ''
self.reference_list = parse_input_list(self.references)
self.grasp_id = 'PFK1'
self.subs_binding_order = 'adp_c, pep_c'
self.prod_release_order = 'pyr_c, atp_c'
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_add_first_activation(self):
response = self.client.post('/add_enzyme_activation', data=dict(
enzyme=self.enzyme,
reaction=self.reaction,
organism=self.organism,
models=self.models,
activator_met=self.activator_met,
activation_constant=self.activation_constant,
activation_evidence_level=self.evidence_level,
references=self.references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzyme activator - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme activation is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
self.assertEqual(EnzymeReactionActivation.query.count(), 1)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 3)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().grasp_id, self.grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.first().subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.first().prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction, Reaction.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme, Enzyme.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism, Mechanism.query.first())
self.assertEqual(Reference.query.all()[0].title, 'eQuilibrator')
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.reference_list[0])
self.assertEqual(Reference.query.all()[2].doi, self.reference_list[1])
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(EnzymeReactionActivation.query.count(), 1)
self.assertEqual(EnzymeReactionActivation.query.first().activator_met,
Metabolite.query.filter_by(bigg_id=self.activator_met).first())
self.assertAlmostEqual(EnzymeReactionActivation.query.first().activation_constant, self.activation_constant, 6)
self.assertEqual(EnzymeReactionActivation.query.first().evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionActivation.query.first().comments, self.comments)
self.assertEqual(EnzymeReactionActivation.query.first().references[0].doi, self.reference_list[0])
self.assertEqual(EnzymeReactionActivation.query.first().references[1].doi, self.reference_list[1])
self.assertEqual(EnzymeReactionActivation.query.first().models.count(), 1)
self.assertEqual(EnzymeReactionActivation.query.first().models[0], Model.query.first())
self.assertEqual(Model.query.first().enzyme_reaction_activations.count(), 1)
self.assertEqual(Model.query.first().enzyme_reaction_activations[0].id,
EnzymeReactionActivation.query.first().id)
self.assertEqual(Model.query.first().enzyme_reaction_organisms.count(), 2)
def test_add_activation_two_models(self):
self.models = ['1', '2']
response = self.client.post('/add_enzyme_activation', data=dict(
enzyme=self.enzyme,
reaction=self.reaction,
organism=self.organism,
models=self.models,
activator_met=self.activator_met,
activation_constant=self.activation_constant,
activation_evidence_level=self.evidence_level,
references=self.references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzyme activator - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme activation is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
self.assertEqual(EnzymeReactionActivation.query.count(), 1)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 3)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().grasp_id, self.grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.first().subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.first().prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction, Reaction.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme, Enzyme.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism, Mechanism.query.first())
self.assertEqual(Reference.query.all()[0].title, 'eQuilibrator')
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.reference_list[0])
self.assertEqual(Reference.query.all()[2].doi, self.reference_list[1])
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(EnzymeReactionActivation.query.count(), 1)
self.assertEqual(EnzymeReactionActivation.query.first().activator_met,
Metabolite.query.filter_by(bigg_id=self.activator_met).first())
self.assertAlmostEqual(EnzymeReactionActivation.query.first().activation_constant, self.activation_constant, 6)
self.assertEqual(EnzymeReactionActivation.query.first().evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionActivation.query.first().comments, self.comments)
self.assertEqual(EnzymeReactionActivation.query.first().references[0].doi, self.reference_list[0])
self.assertEqual(EnzymeReactionActivation.query.first().references[1].doi, self.reference_list[1])
self.assertEqual(EnzymeReactionActivation.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionActivation.query.first().models[0], Model.query.first())
self.assertEqual(EnzymeReactionActivation.query.first().models[1], Model.query.all()[1])
self.assertEqual(Model.query.first().enzyme_reaction_activations.count(), 1)
self.assertEqual(Model.query.all()[0].enzyme_reaction_activations[0].id,
EnzymeReactionActivation.query.first().id)
self.assertEqual(Model.query.all()[1].enzyme_reaction_activations[0].id,
EnzymeReactionActivation.query.first().id)
self.assertEqual(Model.query.first().enzyme_reaction_organisms.count(), 2)
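# /add_enzyme_effector: stores an effector metabolite together with its
# effector type (e.g. 'Inhibiting').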
class TestAddEnzymeEffector(unittest.TestCase):
def setUp(self):
self.app = create_app(TestConfig)
self.client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
populate_db('enzyme_effector', self.client)
self.enzyme = '1'
self.reaction = '1'
self.organism = '1'
self.models = '1'
self.effector_met = 'adp'
self.effector_type = 'Inhibiting'
self.evidence_level = '1'
self.references = 'https://doi.org/10.1093/bioinformatics/bty942, https://doi.org/10.1093/bioinformatics/bty943'
self.comments = ''
self.reference_list = parse_input_list(self.references)
self.grasp_id = 'PFK1'
self.subs_binding_order = 'adp_c, pep_c'
self.prod_release_order = 'pyr_c, atp_c'
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_add_first_effector(self):
response = self.client.post('/add_enzyme_effector', data=dict(
enzyme=self.enzyme,
reaction=self.reaction,
organism=self.organism,
models=self.models,
effector_met=self.effector_met,
effector_type=self.effector_type,
effector_evidence_level=self.evidence_level,
references=self.references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzyme effector - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme effector is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
self.assertEqual(EnzymeReactionEffector.query.count(), 1)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 3)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().grasp_id, self.grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.first().subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.first().prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction, Reaction.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme, Enzyme.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism, Mechanism.query.first())
self.assertEqual(Reference.query.all()[0].title, 'eQuilibrator')
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.reference_list[0])
self.assertEqual(Reference.query.all()[2].doi, self.reference_list[1])
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(EnzymeReactionEffector.query.count(), 1)
self.assertEqual(EnzymeReactionEffector.query.first().effector_met,
Metabolite.query.filter_by(bigg_id=self.effector_met).first())
self.assertEqual(EnzymeReactionEffector.query.first().effector_type, self.effector_type)
self.assertEqual(EnzymeReactionEffector.query.first().evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionEffector.query.first().comments, self.comments)
self.assertEqual(EnzymeReactionEffector.query.first().references[0].doi, self.reference_list[0])
self.assertEqual(EnzymeReactionEffector.query.first().references[1].doi, self.reference_list[1])
self.assertEqual(EnzymeReactionEffector.query.first().models.count(), 1)
self.assertEqual(EnzymeReactionEffector.query.first().models[0], Model.query.first())
self.assertEqual(Model.query.first().enzyme_reaction_effectors.count(), 1)
self.assertEqual(Model.query.first().enzyme_reaction_effectors[0].id, EnzymeReactionEffector.query.first().id)
self.assertEqual(Model.query.first().enzyme_reaction_organisms.count(), 2)
def test_add_effector_two_models(self):
self.models = ['1', '2']
response = self.client.post('/add_enzyme_effector', data=dict(
enzyme=self.enzyme,
reaction=self.reaction,
organism=self.organism,
models=self.models,
effector_met=self.effector_met,
effector_type=self.effector_type,
effector_evidence_level=self.evidence_level,
references=self.references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzyme effector - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme effector is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
self.assertEqual(EnzymeReactionEffector.query.count(), 1)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 3)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().grasp_id, self.grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.first().subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.first().prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction, Reaction.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme, Enzyme.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism, Mechanism.query.first())
self.assertEqual(Reference.query.all()[0].title, 'eQuilibrator')
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.reference_list[0])
self.assertEqual(Reference.query.all()[2].doi, self.reference_list[1])
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(EnzymeReactionEffector.query.count(), 1)
self.assertEqual(EnzymeReactionEffector.query.first().effector_met,
Metabolite.query.filter_by(bigg_id=self.effector_met).first())
self.assertEqual(EnzymeReactionEffector.query.first().effector_type, self.effector_type)
self.assertEqual(EnzymeReactionEffector.query.first().evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionEffector.query.first().comments, self.comments)
self.assertEqual(EnzymeReactionEffector.query.first().references[0].doi, self.reference_list[0])
self.assertEqual(EnzymeReactionEffector.query.first().references[1].doi, self.reference_list[1])
self.assertEqual(EnzymeReactionEffector.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionEffector.query.first().models[0], Model.query.first())
self.assertEqual(EnzymeReactionEffector.query.first().models[1], Model.query.all()[1])
self.assertEqual(Model.query.first().enzyme_reaction_effectors.count(), 1)
self.assertEqual(Model.query.all()[0].enzyme_reaction_effectors[0].id, EnzymeReactionEffector.query.first().id)
self.assertEqual(Model.query.all()[1].enzyme_reaction_effectors[0].id, EnzymeReactionEffector.query.first().id)
self.assertEqual(Model.query.first().enzyme_reaction_organisms.count(), 2)
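# /add_enzyme_misc_info: free-text topic/description entries attached to an
# enzyme-reaction pair, with evidence level and references.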
class TestAddEnzymeMiscInfo(unittest.TestCase):
def setUp(self):
self.app = create_app(TestConfig)
self.client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
populate_db('enzyme_misc_info', self.client)
self.enzyme = '1'
self.reaction = '1'
self.organism = '1'
self.models = '1'
self.topic = 'allostery'
self.description = 'looks like this met is an allosteric inhibitor for that enzyme'
self.evidence_level = '1'
self.references = 'https://doi.org/10.1093/bioinformatics/bty942, https://doi.org/10.1093/bioinformatics/bty943'
self.comments = ''
self.reference_list = parse_input_list(self.references)
self.grasp_id = 'PFK1'
self.subs_binding_order = 'adp_c, pep_c'
self.prod_release_order = 'pyr_c, atp_c'
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_add_first_misc_info(self):
response = self.client.post('/add_enzyme_misc_info', data=dict(
enzyme=self.enzyme,
reaction=self.reaction,
organism=self.organism,
models=self.models,
topic=self.topic,
description=self.description,
evidence_level=self.evidence_level,
references=self.references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzyme misc info - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme misc info is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
self.assertEqual(EnzymeReactionMiscInfo.query.count(), 1)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 3)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().grasp_id, self.grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.first().subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.first().prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction, Reaction.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme, Enzyme.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism, Mechanism.query.first())
self.assertEqual(Reference.query.all()[0].title, 'eQuilibrator')
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.reference_list[0])
self.assertEqual(Reference.query.all()[2].doi, self.reference_list[1])
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(EnzymeReactionMiscInfo.query.count(), 1)
self.assertEqual(EnzymeReactionMiscInfo.query.first().topic, self.topic)
self.assertEqual(EnzymeReactionMiscInfo.query.first().description, self.description)
self.assertEqual(EnzymeReactionMiscInfo.query.first().evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionMiscInfo.query.first().comments, self.comments)
self.assertEqual(EnzymeReactionMiscInfo.query.first().references[0].doi, self.reference_list[0])
self.assertEqual(EnzymeReactionMiscInfo.query.first().references[1].doi, self.reference_list[1])
self.assertEqual(EnzymeReactionMiscInfo.query.first().models.count(), 1)
self.assertEqual(EnzymeReactionMiscInfo.query.first().models[0], Model.query.first())
self.assertEqual(Model.query.first().enzyme_reaction_misc_infos.count(), 1)
self.assertEqual(Model.query.first().enzyme_reaction_misc_infos[0].id, EnzymeReactionMiscInfo.query.first().id)
self.assertEqual(Model.query.first().enzyme_reaction_organisms.count(), 2)
def test_add_misc_info_two_models(self):
self.models = ['1', '2']
response = self.client.post('/add_enzyme_misc_info', data=dict(
enzyme=self.enzyme,
reaction=self.reaction,
organism=self.organism,
models=self.models,
topic=self.topic,
description=self.description,
evidence_level=self.evidence_level,
references=self.references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See enzyme misc info - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your enzyme misc info is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
self.assertEqual(EnzymeReactionMiscInfo.query.count(), 1)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 3)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().grasp_id, self.grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.first().subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.first().prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction, Reaction.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme, Enzyme.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism, Mechanism.query.first())
self.assertEqual(Reference.query.all()[0].title, 'eQuilibrator')
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.reference_list[0])
self.assertEqual(Reference.query.all()[2].doi, self.reference_list[1])
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(EnzymeReactionMiscInfo.query.count(), 1)
self.assertEqual(EnzymeReactionMiscInfo.query.first().topic, self.topic)
self.assertEqual(EnzymeReactionMiscInfo.query.first().description, self.description)
self.assertEqual(EnzymeReactionMiscInfo.query.first().evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionMiscInfo.query.first().comments, self.comments)
self.assertEqual(EnzymeReactionMiscInfo.query.first().references[0].doi, self.reference_list[0])
self.assertEqual(EnzymeReactionMiscInfo.query.first().references[1].doi, self.reference_list[1])
self.assertEqual(EnzymeReactionMiscInfo.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionMiscInfo.query.first().models[0], Model.query.first())
self.assertEqual(EnzymeReactionMiscInfo.query.first().models[1], Model.query.all()[1])
self.assertEqual(Model.query.first().enzyme_reaction_misc_infos.count(), 1)
self.assertEqual(Model.query.all()[0].enzyme_reaction_misc_infos[0].id, EnzymeReactionMiscInfo.query.first().id)
self.assertEqual(Model.query.all()[1].enzyme_reaction_misc_infos[0].id, EnzymeReactionMiscInfo.query.first().id)
self.assertEqual(Model.query.first().enzyme_reaction_organisms.count(), 2)
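# The four classes above repeat the same baseline assertions on
# EnzymeReactionOrganism and Reference after every POST. A minimal sketch of
# how that duplication could be factored out, assuming the same module-level
# imports; `BaselineAssertionsMixin` and `assert_baseline_state` are
# hypothetical names, not part of the original suite:
class BaselineAssertionsMixin:
    def assert_baseline_state(self):
        # Shared checks: the first enzyme-reaction-organism entry, plus the
        # reference list seeded by populate_db followed by the posted DOIs.
        enz_rxn_org = EnzymeReactionOrganism.query.first()
        self.assertEqual(enz_rxn_org.grasp_id, self.grasp_id)
        self.assertEqual(enz_rxn_org.subs_binding_order, self.subs_binding_order)
        self.assertEqual(enz_rxn_org.prod_release_order, self.prod_release_order)
        self.assertEqual(enz_rxn_org.reaction, Reaction.query.first())
        self.assertEqual(enz_rxn_org.enzyme, Enzyme.query.first())
        references = Reference.query.all()
        self.assertEqual(references[0].title, 'eQuilibrator')
        self.assertEqual(references[1].doi, self.reference_list[0])
        self.assertEqual(references[2].doi, self.reference_list[1])
# The TestAddGene class below is disabled: it is wrapped in a module-level
# string literal, and its body looks like a stale copy of the add-model test.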
"""
class TestAddGene(unittest.TestCase):
def setUp(self):
self.app = create_app(TestConfig)
self.client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
        self.name = 'gene_bla'
        # NOTE: the next line rebinds self.name and clobbers 'gene_bla'; a
        # second field (e.g. a bigg/locus id) was probably intended.
        self.name = 'b0001'
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_add_first_model(self):
model_name = 'E. coli - iteration 1'
organism_name = 'E. coli'
strain = 'MG16555'
comments = 'Just testing...'
self.assertEqual(Model.query.count(), 0)
self.assertEqual(Organism.query.count(), 0)
response = self.client.post('/add_model', data=dict(
name=model_name,
bigg_id=organism_name), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See models - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your model is now live!' in response.data)
self.assertEqual(Model().query.first().name, model_name)
self.assertEqual(Model().query.first().organism_name, organism_name)
self.assertEqual(Model().query.first().strain, strain)
self.assertEqual(Model().query.first().comments, comments)
self.assertEqual(Organism().query.first().name, organism_name)
self.assertEqual(Organism().query.first().models.count(), 1)
self.assertEqual(Organism().query.first().models[0].name, model_name)
"""
class TestAddMetabolite(unittest.TestCase):
def setUp(self):
self.app = create_app(TestConfig)
self.client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
populate_db('enzyme_inhibition', self.client)
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_add_first_metabolite(self):
grasp_id = '2pg'
name = '2-phosphoglycerate'
bigg_id = '2pg'
metanetx_id = 'MNXM23'
compartments = ['1', '2']
chebi_ids = 'CHEBI:86354, CHEBI:8685'
inchis = 'InChI=1S/C3H4O3/c1-2(4)3(5)6/h4H,1H2,(H,5,6), InChI=1S/C3H4O4/c1-2(4)3(5)6/h4H,1H2,(H,5,6)'
self.assertEqual(Metabolite.query.count(), 4)
response = self.client.post('/add_metabolite', data=dict(
grasp_id=grasp_id,
name=name,
bigg_id=bigg_id,
metanetx_id=metanetx_id,
compartments=compartments,
chebi_ids=chebi_ids,
inchis=inchis), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See metabolite - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your metabolite is now live!' in response.data)
self.assertEqual(Metabolite.query.count(), 5)
metabolite = Metabolite.query.filter_by(grasp_id=grasp_id).first()
self.assertEqual(metabolite.grasp_id, grasp_id)
self.assertEqual(metabolite.name, name)
self.assertEqual(metabolite.bigg_id, bigg_id)
self.assertEqual(metabolite.metanetx_id, metanetx_id)
self.assertEqual(metabolite.compartments.count(), 2)
self.assertEqual(metabolite.chebis.count(), 2)
self.assertEqual(metabolite.chebis[0].chebi_id, 'CHEBI:86354')
self.assertEqual(metabolite.chebis[1].chebi_id, 'CHEBI:8685')
self.assertEqual(metabolite.chebis[0].inchi, 'InChI=1S/C3H4O3/c1-2(4)3(5)6/h4H,1H2,(H,5,6)')
self.assertEqual(metabolite.chebis[1].inchi, 'InChI=1S/C3H4O4/c1-2(4)3(5)6/h4H,1H2,(H,5,6)')
def test_add_existing_metabolite_grasp_id(self):
grasp_id = 'pep'
name = '2-phosphoglycerate'
bigg_id = '2pg'
metanetx_id = 'MNXM23'
compartments = ['1', '2']
chebi_ids = 'CHEBI:86354, CHEBI:8685'
inchis = 'InChI=1S/C3H4O3/c1-2(4)3(5)6/h4H,1H2,(H,5,6), InChI=1S/C3H4O4/c1-2(4)3(5)6/h4H,1H2,(H,5,6)'
self.assertEqual(Metabolite.query.count(), 4)
response = self.client.post('/add_metabolite', data=dict(
grasp_id=grasp_id,
name=name,
bigg_id=bigg_id,
metanetx_id=metanetx_id,
compartments=compartments,
chebi_ids=chebi_ids,
inchis=inchis), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add metabolite - Kinetics DB \n</title>' in response.data)
self.assertTrue(
b'The metabolite grasp id you specified already exists. Please choose a different one.' in response.data)
self.assertEqual(Metabolite.query.count(), 4)
def test_add_existing_metabolite_bigg_id(self):
grasp_id = '2pg'
name = '2-phosphoglycerate'
bigg_id = 'pep'
metanetx_id = 'MNXM23'
compartments = ['1', '2']
chebi_ids = 'CHEBI:86354, CHEBI:8685'
inchis = 'InChI=1S/C3H4O3/c1-2(4)3(5)6/h4H,1H2,(H,5,6), InChI=1S/C3H4O4/c1-2(4)3(5)6/h4H,1H2,(H,5,6)'
self.assertEqual(Metabolite.query.count(), 4)
response = self.client.post('/add_metabolite', data=dict(
grasp_id=grasp_id,
name=name,
bigg_id=bigg_id,
metanetx_id=metanetx_id,
compartments=compartments,
chebi_ids=chebi_ids,
inchis=inchis), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add metabolite - Kinetics DB \n</title>' in response.data)
self.assertTrue(
b'The metabolite bigg id you specified already exists. Please choose a different one.' in response.data)
self.assertEqual(Metabolite.query.count(), 4)
def test_add_metabolite_diff_chebi_inchi_size(self):
grasp_id = '2pg'
name = '2-phosphoglycerate'
bigg_id = 'pep'
metanetx_id = 'MNXM23'
compartments = ['1', '2']
chebi_ids = 'CHEBI:86354, CHEBI:8685'
inchis = 'InChI=1S/C3H4O3/c1-2(4)3(5)6/h4H,1H2,(H,5,6)'
self.assertEqual(Metabolite.query.count(), 4)
response = self.client.post('/add_metabolite', data=dict(
grasp_id=grasp_id,
name=name,
bigg_id=bigg_id,
metanetx_id=metanetx_id,
compartments=compartments,
chebi_ids=chebi_ids,
inchis=inchis), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add metabolite - Kinetics DB \n</title>' in response.data)
self.assertTrue(
b'The list of ChEBI ids and InChIs should have the same length. Also make sure you separated each value with a comma' in response.data)
self.assertEqual(Metabolite.query.count(), 4)
def test_add_metabolite_diff_chebi_inchi_size2(self):
grasp_id = '2pg'
name = '2-phosphoglycerate'
bigg_id = 'pep'
metanetx_id = 'MNXM23'
compartments = ['1', '2']
chebi_ids = 'CHEBI:86354'
inchis = 'InChI=1S/C3H4O3/c1-2(4)3(5)6/h4H,1H2,(H,5,6), InChI=1S/C3H4O4/c1-2(4)3(5)6/h4H,1H2,(H,5,6)'
self.assertEqual(Metabolite.query.count(), 4)
response = self.client.post('/add_metabolite', data=dict(
grasp_id=grasp_id,
name=name,
bigg_id=bigg_id,
metanetx_id=metanetx_id,
compartments=compartments,
chebi_ids=chebi_ids,
inchis=inchis), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add metabolite - Kinetics DB \n</title>' in response.data)
self.assertTrue(
b'The list of ChEBI ids and InChIs should have the same length. Also make sure you separated each value with a comma' in response.data)
self.assertEqual(Metabolite.query.count(), 4)
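# /add_model: adding a model creates the organism on demand, requires an
# organism name, and rejects duplicate model names.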
class TestAddModel(unittest.TestCase):
def setUp(self):
self.app = create_app(TestConfig)
self.client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_add_first_model(self):
model_name = 'E. coli - iteration x'
organism_name = 'E. coli2'
strain = 'MG16555'
comments = 'Just testing...'
self.assertEqual(Model.query.count(), 0)
self.assertEqual(Organism.query.count(), 0)
response = self.client.post('/add_model', data=dict(
name=model_name,
organism_name=organism_name,
strain=strain,
comments=comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See models - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your model is now live!' in response.data)
self.assertEqual(Model().query.count(), 1)
self.assertEqual(Model().query.first().name, model_name)
self.assertEqual(Model().query.first().organism_name, organism_name)
self.assertEqual(Model().query.first().strain, strain)
self.assertEqual(Model().query.first().comments, comments)
self.assertEqual(Organism().query.count(), 1)
self.assertEqual(Organism().query.first().name, organism_name)
self.assertEqual(Organism().query.first().models.count(), 1)
self.assertEqual(Organism().query.first().models[0].name, model_name)
    # TODO: updated
def test_add_model_for_existing_organism(self):
populate_db('model', self.client)
model_name = 'E. coli - iteration x'
organism_name = 'E. coli'
strain = 'MG16555'
        enz_rxn_org = EnzymeReactionOrganism.query.all()[0]  # fetched but not used by the assertions below
comments = 'Just testing...'
self.assertEqual(Organism().query.first().name, organism_name)
self.assertEqual(Organism().query.first().models.count(), 2)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
response = self.client.post('/add_model', data=dict(
name=model_name,
organism_name=organism_name,
strain=strain,
comments=comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See models - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your model is now live!' in response.data)
self.assertEqual(Model().query.all()[2].name, model_name)
self.assertEqual(Model().query.all()[2].strain, strain)
self.assertEqual(Model().query.all()[2].enzyme_reaction_organisms.count(), 0)
self.assertEqual(Model().query.all()[2].comments, comments)
self.assertEqual(Organism().query.all()[0].models.count(), 3)
self.assertEqual(Organism().query.all()[0].models[2].name, model_name)
def test_add_model_empty_organism_name(self):
model_name = 'E. coli - iteration 1'
organism_name = 'E. coli'
strain = 'MG16555'
comments = 'Just testing...'
organism = Organism(name=organism_name)
db.session.add(organism)
self.assertEqual(Organism().query.first().name, organism_name)
self.assertEqual(Organism().query.first().models.count(), 0)
self.assertEqual(Model.query.count(), 0)
self.assertEqual(Organism.query.count(), 1)
response = self.client.post('/add_model', data=dict(
name=model_name,
strain=strain,
comments=comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add model - Kinetics DB \n</title>' in response.data)
self.assertEqual(Model.query.count(), 0)
self.assertEqual(Organism().query.first().models.count(), 0)
def test_add_existing_model_name(self):
populate_db('model', self.client)
model_name = 'E. coli - iteration 3'
organism_name = 'E. coli'
strain = 'MG16555'
comments = 'Just testing...'
model = Model(name=model_name,
organism_name=organism_name,
strain=strain)
db.session.add(model)
self.assertEqual(Model().query.all()[-1].name, model_name)
self.assertEqual(Model().query.all()[-1].organism_name, organism_name)
self.assertEqual(Model().query.all()[-1].strain, strain)
self.assertEqual(Model.query.count(), 3)
self.assertEqual(Organism.query.count(), 2)
response = self.client.post('/add_model', data=dict(
name=model_name,
organism_name=organism_name,
strain=strain,
comments=comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add model - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'A model with that name already exists, please use another name' in response.data)
self.assertEqual(Model.query.count(), 3)
self.assertEqual(Organism().query.count(), 2)
def test_add_second_model_name(self):
model_name = 'E. coli - iteration 1'
organism_name = 'E. coli'
strain = 'MG16555'
comments = 'Just testing...'
organism = Organism(name=organism_name)
db.session.add(organism)
model = Model(name=model_name,
organism_name=organism_name,
strain=strain)
db.session.add(model)
self.assertEqual(Model().query.first().name, model_name)
self.assertEqual(Model().query.first().organism_name, organism_name)
self.assertEqual(Model().query.first().strain, strain)
self.assertEqual(Model.query.count(), 1)
self.assertEqual(Organism.query.count(), 1)
model_name = 'E. coli - iteration 2'
response = self.client.post('/add_model', data=dict(
name=model_name,
organism_name=organism_name,
strain=strain,
comments=comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See models - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your model is now live!' in response.data)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism().query.count(), 1)
self.assertEqual(Model().query.filter_by(name=model_name).first().name, model_name)
self.assertEqual(Model().query.filter_by(name=model_name).first().strain, strain)
self.assertEqual(Model().query.filter_by(name=model_name).first().comments, comments)
self.assertEqual(Organism().query.first().models.count(), 2)
self.assertEqual(Organism().query.first().models[0].name, 'E. coli - iteration 1')
self.assertEqual(Organism().query.first().models[1].name, model_name)
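# /add_model_assumption: an assumption is attached to the selected model
# along with its evidence level and references.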
class TestAddModelAssumption(unittest.TestCase):
def setUp(self):
self.app = create_app(TestConfig)
self.client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
populate_db('model_assumption', self.client)
self.model = '1'
self.assumption = 'allostery'
self.description = 'looks like this met is an allosteric inhibitor for that enzyme'
self.included_in_model = "True"
self.evidence_level = '1'
self.references = 'https://doi.org/10.1093/bioinformatics/bty942, https://doi.org/10.1093/bioinformatics/bty943'
self.comments = ''
self.reference_list = parse_input_list(self.references)
self.grasp_id = 'PFK1'
self.subs_binding_order = 'adp_c, pep_c'
self.prod_release_order = 'pyr_c, atp_c'
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_add_first_model_assumption(self):
response = self.client.post('/add_model_assumption', data=dict(
model=self.model,
assumption=self.assumption,
description=self.description,
evidence_level=self.evidence_level,
included_in_model=self.included_in_model,
references=self.references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See models - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your model assumption is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
self.assertEqual(ModelAssumptions.query.count(), 1)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 3)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().grasp_id, self.grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.first().subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.first().prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction, Reaction.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme, Enzyme.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().models.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism, Mechanism.query.first())
self.assertEqual(Reference.query.all()[0].title, 'eQuilibrator')
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.reference_list[0])
self.assertEqual(Reference.query.all()[2].doi, self.reference_list[1])
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(ModelAssumptions.query.count(), 1)
self.assertEqual(ModelAssumptions.query.first().assumption, self.assumption)
self.assertEqual(ModelAssumptions.query.first().description, self.description)
self.assertEqual(ModelAssumptions.query.first().included_in_model, True)
self.assertEqual(ModelAssumptions.query.first().evidence, EvidenceLevel.query.first())
self.assertEqual(ModelAssumptions.query.first().comments, self.comments)
self.assertEqual(ModelAssumptions.query.first().references[0].doi, self.reference_list[0])
self.assertEqual(ModelAssumptions.query.first().references[1].doi, self.reference_list[1])
self.assertEqual(Model.query.first().model_assumptions.count(), 1)
self.assertEqual(Model.query.first().model_assumptions[0].id, ModelAssumptions.query.first().id)
self.assertEqual(Model.query.first().enzyme_reaction_organisms.count(), 2)
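# /add_organism: adding an organism succeeds once; a duplicate name is
# rejected with a form error.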
class TestAddOrganism(unittest.TestCase):
def setUp(self):
self.app = create_app(TestConfig)
self.client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_add_organism(self):
organism_name = 'E. coli'
self.assertEqual(Organism.query.count(), 0)
response = self.client.post('/add_organism', data=dict(
name=organism_name), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See organisms - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your organism is now live!' in response.data)
self.assertEqual(Organism().query.first().name, organism_name)
self.assertEqual(Organism().query.first().models.count(), 0)
def test_add_model_empty_organism_name(self):
organism_name = 'E. coli'
organism = Organism(name=organism_name)
db.session.add(organism)
self.assertEqual(Organism.query.count(), 1)
self.assertEqual(Organism().query.first().name, organism_name)
self.assertEqual(Organism().query.first().models.count(), 0)
response = self.client.post('/add_organism', data=dict(
name=organism_name), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add organism - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'An organism with that name already exists, please use another name' in response.data)
self.assertEqual(Organism().query.first().name, organism_name)
self.assertEqual(Organism().query.first().models.count(), 0)
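# /add_reaction: the reaction string ('1 pep_c + 1.5 adp_c <-> pyr_c +
# 2.0 atp_m') is parsed into metabolites, compartments, and stoichiometric
# coefficients, and one EnzymeReactionOrganism entry is created per selected
# isoenzyme.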
class TestAddReaction(unittest.TestCase):
def setUp(self):
self.app = create_app(TestConfig)
self.client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
populate_db('reaction', self.client)
self.reaction_name = 'phosphofructokinase'
self.reaction_acronym = 'PFK'
self.reaction_grasp_id = 'PFK1'
self.reaction_string = '1 pep_c + 1.5 adp_c <-> pyr_c + 2.0 atp_m'
self.metanetx_id = ''
self.name = ''
self.kegg_id = ''
self.compartment = '1'
self.organism = '1'
self.models = ['1', '2']
self.enzymes = ['1', '2']
self.mechanism = '1'
self.mechanism_references = 'https://doi.org/10.1093/bioinformatics/bty942, https://doi.org/10.1093/bioinformatics/bty943'
self.mechanism_evidence_level = '1'
self.subs_binding_order = 'adp_c, pep_c'
self.prod_release_order = 'atp_m, pyr_c'
self.std_gibbs_energy = 2.1
self.std_gibbs_energy_std = 0.2
self.std_gibbs_energy_ph = 7
self.std_gibbs_energy_ionic_strength = 0.2
self.std_gibbs_energy_references = 'equilibrator'
self.comments = ''
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_add_first_reaction(self):
true_isoenzyme_acronym = 'PFK1'
true_gibbs_energy_ref = 'eQuilibrator'
self.models = '1'
self.enzymes = '1'
self.mechanism_references = 'https://doi.org/10.1093/bioinformatics/bty942'
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See reactions - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your reaction is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 1)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 2)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(Model.query.first().enzyme_reaction_organisms.count(), 1)
self.assertEqual(Reaction.query.count(), 1)
self.assertEqual(Reaction.query.first().name, self.reaction_name)
self.assertEqual(Reaction.query.first().compartment_name, Compartment.query.first().name)
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().organism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence_level_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().grasp_id, self.reaction_grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.first().subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.first().prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction.name, self.reaction_name)
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme.isoenzyme, true_isoenzyme_acronym)
self.assertEqual(EnzymeReactionOrganism.query.first().models[0], Model.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism, Mechanism.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references[0].doi, self.mechanism_references)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 1)
self.assertEqual(GibbsEnergyReactionModel.query.first().reaction_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.first().model_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.first().gibbs_energy_id, 1)
self.assertEqual(GibbsEnergy.query.first().standard_dg, self.std_gibbs_energy)
self.assertEqual(GibbsEnergy.query.first().standard_dg_std, self.std_gibbs_energy_std)
self.assertEqual(GibbsEnergy.query.first().ph, self.std_gibbs_energy_ph)
self.assertEqual(GibbsEnergy.query.first().ionic_strength, self.std_gibbs_energy_ionic_strength)
self.assertEqual(GibbsEnergy.query.first().references[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.all()[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.mechanism_references)
self.assertEqual(Metabolite.query.count(), 4)
        expected_mets = [('pep', 'c'), ('adp', 'c'), ('pyr', 'c'), ('atp', 'm')]
        for metabolite, (met_id, compartment) in zip(Metabolite.query.all(), expected_mets):
            self.assertEqual(metabolite.bigg_id, met_id)
            self.assertEqual(metabolite.grasp_id, met_id)
            self.assertEqual(metabolite.compartments[0].bigg_id, compartment)
        self.assertEqual(ReactionMetabolite.query.count(), 4)
        # Stoichiometric coefficients as parsed from the reaction string:
        # substrates negative, products positive.
        expected_stoich = [('pep', 'c', -1), ('adp', 'c', -1.5), ('pyr', 'c', 1), ('atp', 'm', 2)]
        for rxn_met, (met_id, compartment, coef) in zip(ReactionMetabolite.query.all(), expected_stoich):
            self.assertEqual(rxn_met.metabolite.bigg_id, met_id)
            self.assertEqual(rxn_met.compartment.bigg_id, compartment)
            self.assertEqual(rxn_met.stoich_coef, coef)
            self.assertEqual(rxn_met.reaction.acronym, self.reaction_acronym)
def test_add_reaction_two_isoenzymes(self):
true_isoenzyme_acronym1 = 'PFK1'
true_isoenzyme_acronym2 = 'PFK2'
true_gibbs_energy_ref = 'eQuilibrator'
self.models = '1'
self.mechanism_references = 'https://doi.org/10.1093/bioinformatics/bty942'
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See reactions - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your reaction is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 2)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(Model.query.first().enzyme_reaction_organisms.count(), 2)
self.assertEqual(Reaction.query.count(), 1)
self.assertEqual(Reaction.query.first().name, self.reaction_name)
self.assertEqual(Reaction.query.first().compartment_name, Compartment.query.first().name)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].enzyme_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].enzyme_id, 2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].reaction_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].reaction_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].organism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].organism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mechanism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mech_evidence_level_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mech_evidence_level_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].grasp_id, self.reaction_grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].grasp_id, self.reaction_grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].reaction.name, self.reaction_name)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].reaction.name, self.reaction_name)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].enzyme.isoenzyme, true_isoenzyme_acronym1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].enzyme.isoenzyme, true_isoenzyme_acronym2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].models[0], Model.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[1].models[0], Model.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism, Mechanism.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mechanism, Mechanism.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references[0].doi, self.mechanism_references)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mechanism_references[0].doi, self.mechanism_references)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(GibbsEnergy.query.first().standard_dg, self.std_gibbs_energy)
self.assertEqual(GibbsEnergy.query.first().standard_dg_std, self.std_gibbs_energy_std)
self.assertEqual(GibbsEnergy.query.first().ph, self.std_gibbs_energy_ph)
self.assertEqual(GibbsEnergy.query.first().ionic_strength, self.std_gibbs_energy_ionic_strength)
self.assertEqual(GibbsEnergy.query.first().references[0].title, true_gibbs_energy_ref)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 1)
self.assertEqual(GibbsEnergyReactionModel.query.first().reaction_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.first().model_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.first().gibbs_energy_id, 1)
self.assertEqual(Reference.query.all()[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.mechanism_references)
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(Metabolite.query.all()[0].bigg_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].grasp_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[1].bigg_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].grasp_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[2].bigg_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].grasp_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[3].bigg_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].grasp_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].compartments[0].bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.all()[0].metabolite.bigg_id, 'pep')
self.assertEqual(ReactionMetabolite.query.all()[0].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[0].stoich_coef, -1)
self.assertEqual(ReactionMetabolite.query.all()[0].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[1].metabolite.bigg_id, 'adp')
self.assertEqual(ReactionMetabolite.query.all()[1].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[1].stoich_coef, -1.5)
self.assertEqual(ReactionMetabolite.query.all()[1].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[2].metabolite.bigg_id, 'pyr')
self.assertEqual(ReactionMetabolite.query.all()[2].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[2].stoich_coef, 1)
self.assertEqual(ReactionMetabolite.query.all()[2].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[3].metabolite.bigg_id, 'atp')
self.assertEqual(ReactionMetabolite.query.all()[3].compartment.bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.all()[3].stoich_coef, 2)
self.assertEqual(ReactionMetabolite.query.all()[3].reaction.acronym, self.reaction_acronym)
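
# Posting a reaction associated with two models should succeed: both models are
# linked to the EnzymeReactionOrganism entries and share one GibbsEnergy row
# through two GibbsEnergyReactionModel associations.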
def test_add_reaction_two_models(self):
true_isoenzyme_acronym1 = 'PFK1'
true_isoenzyme_acronym2 = 'PFK2'
true_gibbs_energy_ref = 'eQuilibrator'
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See reactions - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your reaction is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 3)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(Model.query.all()[0].enzyme_reaction_organisms.count(), 2)
self.assertEqual(Model.query.all()[1].enzyme_reaction_organisms.count(), 2)
self.assertEqual(Reaction.query.count(), 1)
self.assertEqual(Reaction.query.first().name, self.reaction_name)
self.assertEqual(Reaction.query.first().compartment_name, Compartment.query.first().name)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].enzyme_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].enzyme_id, 2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].reaction_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].reaction_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].organism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].organism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mechanism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mech_evidence_level_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mech_evidence_level_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].grasp_id, self.reaction_grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].grasp_id, self.reaction_grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].reaction.name, self.reaction_name)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].reaction.name, self.reaction_name)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].enzyme.isoenzyme, true_isoenzyme_acronym1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].enzyme.isoenzyme, true_isoenzyme_acronym2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].models.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].models[0], Model.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[1].models[0], Model.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism, Mechanism.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mechanism, Mechanism.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references[0].doi,
self.mechanism_references.split(', ')[0])
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references[1].doi,
self.mechanism_references.split(', ')[1])
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mechanism_references[0].doi,
self.mechanism_references.split(', ')[0])
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mechanism_references[1].doi,
self.mechanism_references.split(', ')[1])
self.assertEqual(GibbsEnergyReactionModel.query.count(), 2)
self.assertEqual(GibbsEnergyReactionModel.query.all()[0].reaction_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.all()[1].reaction_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.all()[0].model_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.all()[1].model_id, 2)
self.assertEqual(GibbsEnergyReactionModel.query.all()[0].gibbs_energy_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.all()[1].gibbs_energy_id, 1)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(GibbsEnergy.query.first().standard_dg, self.std_gibbs_energy)
self.assertEqual(GibbsEnergy.query.first().standard_dg_std, self.std_gibbs_energy_std)
self.assertEqual(GibbsEnergy.query.first().ph, self.std_gibbs_energy_ph)
self.assertEqual(GibbsEnergy.query.first().ionic_strength, self.std_gibbs_energy_ionic_strength)
self.assertEqual(GibbsEnergy.query.first().references[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.all()[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.mechanism_references.split(', ')[0])
self.assertEqual(Reference.query.all()[2].doi, self.mechanism_references.split(', ')[1])
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(Metabolite.query.all()[0].bigg_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].grasp_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[1].bigg_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].grasp_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[2].bigg_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].grasp_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[3].bigg_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].grasp_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].compartments[0].bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.all()[0].metabolite.bigg_id, 'pep')
self.assertEqual(ReactionMetabolite.query.all()[0].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[0].stoich_coef, -1)
self.assertEqual(ReactionMetabolite.query.all()[0].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[1].metabolite.bigg_id, 'adp')
self.assertEqual(ReactionMetabolite.query.all()[1].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[1].stoich_coef, -1.5)
self.assertEqual(ReactionMetabolite.query.all()[1].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[2].metabolite.bigg_id, 'pyr')
self.assertEqual(ReactionMetabolite.query.all()[2].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[2].stoich_coef, 1)
self.assertEqual(ReactionMetabolite.query.all()[2].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[3].metabolite.bigg_id, 'atp')
self.assertEqual(ReactionMetabolite.query.all()[3].compartment.bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.all()[3].stoich_coef, 2)
self.assertEqual(ReactionMetabolite.query.all()[3].reaction.acronym, self.reaction_acronym)
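
# A mechanism_references field holding two comma-separated DOIs should be split
# into two Reference rows, both attached to each EnzymeReactionOrganism entry.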
def test_add_reaction_two_mechanism_references(self):
true_isoenzyme_acronym1 = 'PFK1'
true_isoenzyme_acronym2 = 'PFK2'
true_gibbs_energy_ref = 'eQuilibrator'
true_mechanism_references = self.mechanism_references.split(', ')
self.models = '1'
self.enzymes = ['1', '2']
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See reactions - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your reaction is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(Reaction.query.count(), 1)
self.assertEqual(Reaction.query.first().name, self.reaction_name)
self.assertEqual(Reaction.query.first().compartment_name, Compartment.query.first().name)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].enzyme_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].enzyme_id, 2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].reaction_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].reaction_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].organism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].organism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mechanism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mech_evidence_level_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mech_evidence_level_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].grasp_id, self.reaction_grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].grasp_id, self.reaction_grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].reaction.name, self.reaction_name)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].reaction.name, self.reaction_name)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].enzyme.isoenzyme, true_isoenzyme_acronym1)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].enzyme.isoenzyme, true_isoenzyme_acronym2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].models[0], Model.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[1].models[0], Model.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism, Mechanism.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mechanism, Mechanism.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references[0].doi,
true_mechanism_references[0])
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references[1].doi,
true_mechanism_references[1])
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mechanism_references.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mechanism_references[0].doi,
true_mechanism_references[0])
self.assertEqual(EnzymeReactionOrganism.query.all()[1].mechanism_references[1].doi,
true_mechanism_references[1])
self.assertEqual(GibbsEnergyReactionModel.query.count(), 1)
self.assertEqual(GibbsEnergyReactionModel.query.all()[0].reaction_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.all()[0].model_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.all()[0].gibbs_energy_id, 1)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(GibbsEnergy.query.first().standard_dg, self.std_gibbs_energy)
self.assertEqual(GibbsEnergy.query.first().standard_dg_std, self.std_gibbs_energy_std)
self.assertEqual(GibbsEnergy.query.first().ph, self.std_gibbs_energy_ph)
self.assertEqual(GibbsEnergy.query.first().ionic_strength, self.std_gibbs_energy_ionic_strength)
self.assertEqual(GibbsEnergy.query.first().references[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.count(), 3)
self.assertEqual(Reference.query.all()[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, true_mechanism_references[0])
self.assertEqual(Reference.query.all()[2].doi, true_mechanism_references[1])
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(Metabolite.query.all()[0].bigg_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].grasp_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[1].bigg_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].grasp_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[2].bigg_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].grasp_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[3].bigg_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].grasp_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].compartments[0].bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.all()[0].metabolite.bigg_id, 'pep')
self.assertEqual(ReactionMetabolite.query.all()[0].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[0].stoich_coef, -1)
self.assertEqual(ReactionMetabolite.query.all()[0].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[1].metabolite.bigg_id, 'adp')
self.assertEqual(ReactionMetabolite.query.all()[1].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[1].stoich_coef, -1.5)
self.assertEqual(ReactionMetabolite.query.all()[1].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[2].metabolite.bigg_id, 'pyr')
self.assertEqual(ReactionMetabolite.query.all()[2].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[2].stoich_coef, 1)
self.assertEqual(ReactionMetabolite.query.all()[2].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[3].metabolite.bigg_id, 'atp')
self.assertEqual(ReactionMetabolite.query.all()[3].compartment.bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.all()[3].stoich_coef, 2)
self.assertEqual(ReactionMetabolite.query.all()[3].reaction.acronym, self.reaction_acronym)
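
# Submitting with a model but no catalyzing isoenzyme should be rejected: the
# add reaction form is re-rendered and no reaction-related rows are stored.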
def test_add_reaction_no_isoenzyme(self):
self.models = '1'
self.enzymes = ''
self.mechanism = ''
self.mechanism_references = ''
self.mechanism_evidence_level = ''
self.subs_binding_order = ''
self.prod_release_order = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
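
# A metabolite missing its '_<compartment>' suffix ('adpc') should trigger the
# 'Please specify the metabolite' validation error and persist nothing.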
def test_add_reaction_met_format(self):
self.reaction_string = '1 pep_c + 1.5 adpc <-> pyr_c + 2.0 atp_m'
self.mechanism = ''
self.mechanism_references = ''
self.mechanism_evidence_level = ''
self.subs_binding_order = ''
self.prod_release_order = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Please specify the metabolite' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
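
# A metabolite placed in an unknown compartment acronym ('adp_x') should
# trigger the compartment validation error and persist nothing.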
def test_add_reaction_met_compartment(self):
self.reaction_string = '1 pep_c + 1.5 adp_x <-> pyr_c + 2.0 atp_m'
self.mechanism = ''
self.mechanism_references = ''
self.mechanism_evidence_level = ''
self.subs_binding_order = ''
self.prod_release_order = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'The specified compartment bigg_acronym' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
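
# A substrate binding order naming a metabolite absent from the reaction
# string ('adx_c') should be rejected with a 'does not match any metabolite'
# error.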
def test_add_reaction_met_subs_binding(self):
self.subs_binding_order = 'adx_c, pep_c'
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'does not match any metabolite in' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
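
# A product release order naming an unknown metabolite ('pyr2_c') should be
# rejected with the same 'does not match any metabolite' error.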
def test_add_reaction_met_prod_release(self):
self.prod_release_order = 'atp_m, pyr2_c'
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'does not match any metabolite in' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
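
# Specifying a mechanism without any catalyzing isoenzyme should be rejected
# with an explicit error message and persist nothing.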
def test_add_reaction_mechanism_and_no_isoenzyme(self):
self.models = '1'
self.enzymes = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(
b'If you add a reaction mechanism, you need to specify the catalyzing isoenzyme(s).' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
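
# Specifying a mechanism evidence level without a mechanism should be rejected.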
def test_add_reaction_mechanism_evidence_level(self):
self.models = '1'
self.mechanism = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(
b'You cannot specify evidence level for the mechanism without specifying a mechanism.' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
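
# Specifying a substrate binding order without catalyzing isoenzymes should be
# rejected.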
def test_add_reaction_subs_binding_order(self):
self.models = '1'
self.enzymes = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(
b'If you add substrate binding order without specifying the catalyzing isoenzyme(s)' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
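
# Specifying a product release order without catalyzing isoenzymes should be
# rejected.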
def test_add_reaction_prod_release_order(self):
self.models = '1'
self.enzymes = ''
self.subs_binding_order = ''
self.prod_release_order = 'pyr_c, atp_m'
self.std_gibbs_energy = ''
self.std_gibbs_energy_std = ''
self.std_gibbs_energy_ph = ''
self.std_gibbs_energy_ionic_strength = ''
self.std_gibbs_energy_references = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(
b'If you add product release order without specifying the catalyzing isoenzyme(s)' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
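
# Gibbs energies require an associated model: submitting them without one
# should be rejected with the corresponding error message.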
def test_add_reaction_std_gibbs_energy_std_no_model(self):
self.models = ''
self.enzymes = '1'
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(
b'Gibbs energies cannot be added to reactions alone, a model must be associated as well. Please add model name.' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
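
# A standard Gibbs energy std. deviation given without the Gibbs energy value
# itself should be rejected.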
def test_add_reaction_std_gibbs_energy_std(self):
self.models = '1'
self.enzymes = '1'
self.std_gibbs_energy = ''
self.std_gibbs_energy_std = 0.2
self.std_gibbs_energy_ph = ''
self.std_gibbs_energy_ionic_strength = ''
self.std_gibbs_energy_references = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Please specify the standard Gibbs energy as well.' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
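
# A Gibbs energy pH given without the Gibbs energy value itself should be
# rejected.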
def test_add_reaction_std_gibbs_energy_ph(self):
self.models = '1'
self.enzymes = '1'
self.std_gibbs_energy = ''
self.std_gibbs_energy_std = ''
self.std_gibbs_energy_ph = 7
self.std_gibbs_energy_ionic_strength = ''
self.std_gibbs_energy_references = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Please specify the standard Gibbs energy as well.' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
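
# A Gibbs energy ionic strength given without the Gibbs energy value itself
# should be rejected.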
def test_add_reaction_std_gibbs_energy_is(self):
self.models = '1'
self.enzymes = '1'
self.std_gibbs_energy = ''
self.std_gibbs_energy_std = ''
self.std_gibbs_energy_ph = ''
self.std_gibbs_energy_ionic_strength = 0.1
self.std_gibbs_energy_references = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Please specify the standard Gibbs energy as well.' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
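
# Gibbs energy references given without the Gibbs energy value itself should
# be rejected.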
def test_add_reaction_std_gibbs_energy_refs(self):
self.models = '1'
self.enzymes = '1'
self.std_gibbs_energy = ''
self.std_gibbs_energy_std = ''
self.std_gibbs_energy_ph = ''
self.std_gibbs_energy_ionic_strength = ''
self.std_gibbs_energy_references = 'equilibrator'
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Please specify the standard Gibbs energy as well.' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
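
# A standard Gibbs energy without a supporting reference should be rejected.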
def test_add_reaction_std_gibbs_energy_no_refs(self):
self.models = '1'
self.enzymes = '1'
self.std_gibbs_energy = 7.1
self.std_gibbs_energy_std = ''
self.std_gibbs_energy_ph = ''
self.std_gibbs_energy_ionic_strength = ''
self.std_gibbs_energy_references = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n Add reaction - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Please specify the reference for the above standard Gibbs energy' in response.data)
self.assertEqual(Reaction.query.count(), 0)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
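
# A minimal submission (reaction data plus one isoenzyme, everything else
# blank) should succeed: the reaction is stored with no compartment, model,
# mechanism, or Gibbs energy links.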
def test_add_reaction_only(self):
true_gibbs_energy_ref = 'eQuilibrator'
self.compartment = ''
self.models = ''
self.enzymes = '1'
self.mechanism = ''
self.mechanism_references = ''
self.mechanism_evidence_level = ''
self.subs_binding_order = ''
self.prod_release_order = ''
self.std_gibbs_energy = ''
self.std_gibbs_energy_std = ''
self.std_gibbs_energy_ph = ''
self.std_gibbs_energy_ionic_strength = ''
self.std_gibbs_energy_references = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See reactions - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your reaction is now live!' in response.data)
self.assertEqual(Reaction.query.count(), 1)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().organism_id, 1)
self.assertEqual(Reaction.query.first().name, self.reaction_name)
self.assertEqual(Reaction.query.first().compartment_name, None)
self.assertEqual(Reference.query.all()[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(Metabolite.query.all()[0].bigg_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].grasp_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[1].bigg_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].grasp_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[2].bigg_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].grasp_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[3].bigg_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].grasp_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].compartments[0].bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.all()[0].metabolite.bigg_id, 'pep')
self.assertEqual(ReactionMetabolite.query.all()[0].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[0].stoich_coef, -1)
self.assertEqual(ReactionMetabolite.query.all()[0].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[1].metabolite.bigg_id, 'adp')
self.assertEqual(ReactionMetabolite.query.all()[1].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[1].stoich_coef, -1.5)
self.assertEqual(ReactionMetabolite.query.all()[1].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[2].metabolite.bigg_id, 'pyr')
self.assertEqual(ReactionMetabolite.query.all()[2].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[2].stoich_coef, 1)
self.assertEqual(ReactionMetabolite.query.all()[2].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[3].metabolite.bigg_id, 'atp')
self.assertEqual(ReactionMetabolite.query.all()[3].compartment.bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.all()[3].stoich_coef, 2)
self.assertEqual(ReactionMetabolite.query.all()[3].reaction.acronym, self.reaction_acronym)
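
# Same minimal submission as above, but with a compartment selected: the
# stored reaction should carry that compartment's name.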
def test_add_reaction_compartment(self):
true_gibbs_energy_ref = 'eQuilibrator'
self.compartment = '1'
self.models = ''
self.enzymes = '1'
self.mechanism = ''
self.mechanism_references = ''
self.mechanism_evidence_level = ''
self.subs_binding_order = ''
self.prod_release_order = ''
self.std_gibbs_energy = ''
self.std_gibbs_energy_std = ''
self.std_gibbs_energy_ph = ''
self.std_gibbs_energy_ionic_strength = ''
self.std_gibbs_energy_references = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See reactions - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your reaction is now live!' in response.data)
self.assertEqual(Reaction.query.count(), 1)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(Reaction.query.first().name, self.reaction_name)
self.assertEqual(Reaction.query.first().compartment_name, Compartment.query.first().name)
self.assertEqual(EnzymeReactionOrganism.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().organism_id, 1)
self.assertEqual(Reference.query.all()[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(Metabolite.query.all()[0].bigg_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].grasp_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[1].bigg_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].grasp_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[2].bigg_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].grasp_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[3].bigg_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].grasp_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].compartments[0].bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.all()[0].metabolite.bigg_id, 'pep')
self.assertEqual(ReactionMetabolite.query.all()[0].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[0].stoich_coef, -1)
self.assertEqual(ReactionMetabolite.query.all()[0].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[1].metabolite.bigg_id, 'adp')
self.assertEqual(ReactionMetabolite.query.all()[1].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[1].stoich_coef, -1.5)
self.assertEqual(ReactionMetabolite.query.all()[1].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[2].metabolite.bigg_id, 'pyr')
self.assertEqual(ReactionMetabolite.query.all()[2].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[2].stoich_coef, 1)
self.assertEqual(ReactionMetabolite.query.all()[2].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[3].metabolite.bigg_id, 'atp')
self.assertEqual(ReactionMetabolite.query.all()[3].compartment.bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.all()[3].stoich_coef, 2)
self.assertEqual(ReactionMetabolite.query.all()[3].reaction.acronym, self.reaction_acronym)

def test_add_reaction_mechanism(self):
true_gibbs_energy_ref = 'eQuilibrator'
self.models = ''
self.enzymes = '1'
self.std_gibbs_energy = ''
self.std_gibbs_energy_std = ''
self.std_gibbs_energy_ph = ''
self.std_gibbs_energy_ionic_strength = ''
self.std_gibbs_energy_references = ''
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See reactions - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your reaction is now live!' in response.data)
self.assertEqual(Reaction.query.count(), 1)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(GibbsEnergy.query.count(), 0)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 0)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(Reaction.query.count(), 1)
self.assertEqual(Reaction.query.first().name, self.reaction_name)
self.assertEqual(Reaction.query.first().compartment_name, Compartment.query.first().name)
self.assertEqual(Reference.query.count(), 3)
self.assertEqual(Reference.query.all()[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.mechanism_references.split(', ')[0])
self.assertEqual(Reference.query.all()[2].doi, self.mechanism_references.split(', ')[1])
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(Metabolite.query.all()[0].bigg_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].grasp_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[1].bigg_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].grasp_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[2].bigg_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].grasp_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[3].bigg_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].grasp_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].compartments[0].bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.all()[0].metabolite.bigg_id, 'pep')
self.assertEqual(ReactionMetabolite.query.all()[0].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[0].stoich_coef, -1)
self.assertEqual(ReactionMetabolite.query.all()[0].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[1].metabolite.bigg_id, 'adp')
self.assertEqual(ReactionMetabolite.query.all()[1].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[1].stoich_coef, -1.5)
self.assertEqual(ReactionMetabolite.query.all()[1].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[2].metabolite.bigg_id, 'pyr')
self.assertEqual(ReactionMetabolite.query.all()[2].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[2].stoich_coef, 1)
self.assertEqual(ReactionMetabolite.query.all()[2].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[3].metabolite.bigg_id, 'atp')
self.assertEqual(ReactionMetabolite.query.all()[3].compartment.bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.all()[3].stoich_coef, 2)
self.assertEqual(ReactionMetabolite.query.all()[3].reaction.acronym, self.reaction_acronym)
self.assertEqual(EnzymeReactionOrganism.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().organism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].grasp_id, self.reaction_grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].reaction.name, self.reaction_name)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].enzyme.isoenzyme, 'PFK1')
self.assertEqual(EnzymeReactionOrganism.query.all()[0].models.count(), 0)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism, Mechanism.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references.count(), 2)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references[0].doi,
self.mechanism_references.split(', ')[0])
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references[1].doi,
self.mechanism_references.split(', ')[1])

def test_add_reaction_gibbs_energy(self):
true_gibbs_energy_ref = 'eQuilibrator'
self.models = '1'
self.enzymes = '1'
self.mechanism = ''
self.mechanism_references = ''
self.mechanism_evidence_level = ''
self.subs_binding_order = ''
self.prod_release_order = ''
self.assertEqual(Enzyme.query.count(), 3)
response = self.client.post('/add_reaction', data=dict(
name=self.reaction_name,
acronym=self.reaction_acronym,
grasp_id=self.reaction_grasp_id,
reaction_string=self.reaction_string,
bigg_id=self.name,
kegg_id=self.kegg_id,
metanetx_id=self.metanetx_id,
compartment=self.compartment,
organism=self.organism,
models=self.models,
enzymes=self.enzymes,
mechanism=self.mechanism,
mechanism_references=self.mechanism_references,
mechanism_evidence_level=self.mechanism_evidence_level,
subs_binding_order=self.subs_binding_order,
prod_release_order=self.prod_release_order,
std_gibbs_energy=self.std_gibbs_energy,
std_gibbs_energy_std=self.std_gibbs_energy_std,
std_gibbs_energy_ph=self.std_gibbs_energy_ph,
std_gibbs_energy_ionic_strength=self.std_gibbs_energy_ionic_strength,
std_gibbs_energy_references=self.std_gibbs_energy_references,
comments=self.comments), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(b'<title>\n See reactions - Kinetics DB \n</title>' in response.data)
self.assertTrue(b'Your reaction is now live!' in response.data)
self.assertEqual(Enzyme.query.count(), 3)
self.assertEqual(Mechanism.query.count(), 7)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(Reaction.query.count(), 1)
self.assertEqual(Reaction.query.first().name, self.reaction_name)
self.assertEqual(Reaction.query.first().compartment_name, Compartment.query.first().name)
self.assertEqual(EnzymeReactionOrganism.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().organism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].grasp_id, self.reaction_grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].reaction.name, self.reaction_name)
self.assertEqual(EnzymeReactionOrganism.query.all()[0].enzyme.isoenzyme, 'PFK1')
self.assertEqual(EnzymeReactionOrganism.query.all()[0].models[0], Model.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism, None)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 1)
self.assertEqual(GibbsEnergyReactionModel.query.all()[0].reaction_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.all()[0].model_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.all()[0].gibbs_energy_id, 1)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(GibbsEnergy.query.first().standard_dg, self.std_gibbs_energy)
self.assertEqual(GibbsEnergy.query.first().standard_dg_std, self.std_gibbs_energy_std)
self.assertEqual(GibbsEnergy.query.first().ph, self.std_gibbs_energy_ph)
self.assertEqual(GibbsEnergy.query.first().ionic_strength, self.std_gibbs_energy_ionic_strength)
self.assertEqual(GibbsEnergy.query.first().references[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.count(), 1)
self.assertEqual(Reference.query.all()[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(Metabolite.query.all()[0].bigg_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].grasp_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[1].bigg_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].grasp_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[2].bigg_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].grasp_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[3].bigg_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].grasp_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].compartments[0].bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.all()[0].metabolite.bigg_id, 'pep')
self.assertEqual(ReactionMetabolite.query.all()[0].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[0].stoich_coef, -1)
self.assertEqual(ReactionMetabolite.query.all()[0].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[1].metabolite.bigg_id, 'adp')
self.assertEqual(ReactionMetabolite.query.all()[1].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[1].stoich_coef, -1.5)
self.assertEqual(ReactionMetabolite.query.all()[1].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[2].metabolite.bigg_id, 'pyr')
self.assertEqual(ReactionMetabolite.query.all()[2].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[2].stoich_coef, 1)
self.assertEqual(ReactionMetabolite.query.all()[2].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[3].metabolite.bigg_id, 'atp')
self.assertEqual(ReactionMetabolite.query.all()[3].compartment.bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.all()[3].stoich_coef, 2)
self.assertEqual(ReactionMetabolite.query.all()[3].reaction.acronym, self.reaction_acronym)
"""
self.assertEqual(Enzyme.query.count(), 2)
self.assertEqual(GibbsEnergy.query.count(), 1)
self.assertEqual(EnzymeReactionOrganism.query.count(), 1)
self.assertEqual(Mechanism.query.count(), 2)
self.assertEqual(Reference.query.count(), 2)
self.assertEqual(Model.query.count(), 2)
self.assertEqual(Organism.query.count(), 2)
self.assertEqual(Model.query.first().enzyme_reaction_organisms.count(), 1)
self.assertEqual(Reaction.query.count(), 1)
self.assertEqual(Reaction.query.first().name, self.reaction_name)
self.assertEqual(Reaction.query.first().compartment_name, Compartment.query.first().name)
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().organism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence_level_id, 1)
self.assertEqual(EnzymeReactionOrganism.query.first().grasp_id, self.reaction_grasp_id)
self.assertEqual(EnzymeReactionOrganism.query.first().subs_binding_order, self.subs_binding_order)
self.assertEqual(EnzymeReactionOrganism.query.first().prod_release_order, self.prod_release_order)
self.assertEqual(EnzymeReactionOrganism.query.first().reaction.name, self.reaction_name)
self.assertEqual(EnzymeReactionOrganism.query.first().enzyme.isoenzyme, true_isoenzyme_acronym)
self.assertEqual(EnzymeReactionOrganism.query.first().models[0], Model.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mech_evidence, EvidenceLevel.query.first())
self.assertEqual(EnzymeReactionOrganism.query.first().mechanism, Mechanism.query.first())
self.assertEqual(EnzymeReactionOrganism.query.all()[0].mechanism_references[0].doi, self.mechanism_references)
self.assertEqual(GibbsEnergyReactionModel.query.count(), 1)
self.assertEqual(GibbsEnergyReactionModel.query.first().reaction_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.first().model_id, 1)
self.assertEqual(GibbsEnergyReactionModel.query.first().gibbs_energy_id, 1)
self.assertEqual(GibbsEnergy.query.first().standard_dg, self.std_gibbs_energy)
self.assertEqual(GibbsEnergy.query.first().standard_dg_std, self.std_gibbs_energy_std)
self.assertEqual(GibbsEnergy.query.first().ph, self.std_gibbs_energy_ph)
self.assertEqual(GibbsEnergy.query.first().ionic_strength, self.std_gibbs_energy_ionic_strength)
self.assertEqual(GibbsEnergy.query.first().references[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.all()[0].title, true_gibbs_energy_ref)
self.assertEqual(Reference.query.all()[0].type.type, 'Online database')
self.assertEqual(Reference.query.all()[1].doi, self.mechanism_references)
self.assertEqual(Metabolite.query.count(), 4)
self.assertEqual(Metabolite.query.all()[0].bigg_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].grasp_id, 'pep')
self.assertEqual(Metabolite.query.all()[0].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[1].bigg_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].grasp_id, 'adp')
self.assertEqual(Metabolite.query.all()[1].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[2].bigg_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].grasp_id, 'pyr')
self.assertEqual(Metabolite.query.all()[2].compartments[0].bigg_id, 'c')
self.assertEqual(Metabolite.query.all()[3].bigg_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].grasp_id, 'atp')
self.assertEqual(Metabolite.query.all()[3].compartments[0].bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.count(), 4)
self.assertEqual(ReactionMetabolite.query.all()[0].metabolite.bigg_id, 'pep')
self.assertEqual(ReactionMetabolite.query.all()[0].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[0].stoich_coef, -1)
self.assertEqual(ReactionMetabolite.query.all()[0].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[1].metabolite.bigg_id, 'adp')
self.assertEqual(ReactionMetabolite.query.all()[1].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[1].stoich_coef, -1.5)
self.assertEqual(ReactionMetabolite.query.all()[1].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[2].metabolite.bigg_id, 'pyr')
self.assertEqual(ReactionMetabolite.query.all()[2].compartment.bigg_id, 'c')
self.assertEqual(ReactionMetabolite.query.all()[2].stoich_coef, 1)
self.assertEqual(ReactionMetabolite.query.all()[2].reaction.acronym, self.reaction_acronym)
self.assertEqual(ReactionMetabolite.query.all()[3].metabolite.bigg_id, 'atp')
self.assertEqual(ReactionMetabolite.query.all()[3].compartment.bigg_id, 'm')
self.assertEqual(ReactionMetabolite.query.all()[3].stoich_coef, 2)
self.assertEqual(ReactionMetabolite.query.all()[3].reaction.acronym, self.reaction_acronym)
"""

if __name__ == '__main__':
unittest.main(verbosity=2)
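
The tests above drive a form endpoint through Flask's test client; self.client, the form fixtures, and the database models are defined elsewhere in the suite. Below is a minimal, self-contained sketch of the same pattern with an invented stub app: the route names mirror the file, but the handlers and the test are illustrative, not project code.

from flask import Flask, redirect, request, url_for

app = Flask(__name__)

@app.route('/add_reaction', methods=['POST'])
def add_reaction():
    # Form fields posted via data=dict(...) arrive in request.form.
    assert request.form['name']
    return redirect(url_for('see_reactions'))

@app.route('/see_reactions')
def see_reactions():
    return 'Your reaction is now live!'

def test_post_follows_redirect():
    client = app.test_client()
    response = client.post('/add_reaction', data=dict(name='PFK'),
                           follow_redirects=True)
    # follow_redirects=True returns the final page, exactly as asserted above.
    assert response.status_code == 200
    assert b'Your reaction is now live!' in response.data
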
| 51.100677
| 147
| 0.684972
| 21,093
| 181,203
| 5.693405
| 0.016356
| 0.17162
| 0.036139
| 0.089882
| 0.972779
| 0.967058
| 0.96048
| 0.953602
| 0.943027
| 0.936715
| 0
| 0.01507
| 0.191779
| 181,203
| 3,545
| 148
| 51.115092
| 0.804933
| 0.001308
| 0
| 0.900172
| 0
| 0.005489
| 0.057742
| 0.003795
| 0
| 0
| 0
| 0.000282
| 0.483362
| 1
| 0.026072
| false
| 0
| 0.002744
| 0
| 0.034305
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| e2efabdce3385006a06c69b854384747c61d96a9
| 2,867
| py
| Python
| test/strings/escaping1.py
| kylebarron/MagicPython
| da6fa0793e2c85d3bf7709ff1d4f65ccf468db11
| ["MIT"]
| 1,482
| 2015-10-16T21:59:32.000Z
| 2022-03-30T11:44:40.000Z
| test/strings/escaping1.py
| kylebarron/MagicPython
| da6fa0793e2c85d3bf7709ff1d4f65ccf468db11
| ["MIT"]
| 226
| 2015-10-15T15:53:44.000Z
| 2022-03-25T03:08:27.000Z
| test/strings/escaping1.py
| kylebarron/MagicPython
| da6fa0793e2c85d3bf7709ff1d4f65ccf468db11
| ["MIT"]
| 129
| 2015-10-20T02:41:49.000Z
| 2022-03-22T01:44:36.000Z
|
a = "simple \\ string \
foo \' \" \a \b \c \f \n \r \t \v \5 \55 \555 \05 \005"
a : source.python
: source.python
= : keyword.operator.assignment.python, source.python
: source.python
" : punctuation.definition.string.begin.python, source.python, string.quoted.single.python
simple : source.python, string.quoted.single.python
\\ : constant.character.escape.python, source.python, string.quoted.single.python
string : source.python, string.quoted.single.python
\ : constant.language.python, source.python, string.quoted.single.python
foo : source.python, string.quoted.single.python
\' : constant.character.escape.python, source.python, string.quoted.single.python
: source.python, string.quoted.single.python
\" : constant.character.escape.python, source.python, string.quoted.single.python
: source.python, string.quoted.single.python
\a : constant.character.escape.python, source.python, string.quoted.single.python
: source.python, string.quoted.single.python
\b : constant.character.escape.python, source.python, string.quoted.single.python
\c : source.python, string.quoted.single.python
\f : constant.character.escape.python, source.python, string.quoted.single.python
: source.python, string.quoted.single.python
\n : constant.character.escape.python, source.python, string.quoted.single.python
: source.python, string.quoted.single.python
\r : constant.character.escape.python, source.python, string.quoted.single.python
: source.python, string.quoted.single.python
\t : constant.character.escape.python, source.python, string.quoted.single.python
: source.python, string.quoted.single.python
\v : constant.character.escape.python, source.python, string.quoted.single.python
: source.python, string.quoted.single.python
\5 : constant.character.escape.python, source.python, string.quoted.single.python
: source.python, string.quoted.single.python
\55 : constant.character.escape.python, source.python, string.quoted.single.python
: source.python, string.quoted.single.python
\555 : constant.character.escape.python, source.python, string.quoted.single.python
: source.python, string.quoted.single.python
\05 : constant.character.escape.python, source.python, string.quoted.single.python
: source.python, string.quoted.single.python
\005 : constant.character.escape.python, source.python, string.quoted.single.python
" : punctuation.definition.string.end.python, source.python, string.quoted.single.python
| 65.159091
| 102
| 0.664109
| 323
| 2,867
| 5.894737
| 0.095975
| 0.239496
| 0.321429
| 0.428571
| 0.884979
| 0.884979
| 0.866071
| 0.776786
| 0.776786
| 0.776786
| 0
| 0.009826
| 0.219044
| 2,867
| 43
| 103
| 66.674419
| 0.840554
| 0
| 0
| 0.35
| 0
| 0.3
| 0.015696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 11
| e2fab55c2d4e2c98710fd2bdc890ed698ea7130f
| 10,299
| py
| Python
| locan/tests/locan_io/test_rapidstorm.py
| super-resolution/Locan
| 94ed7759f7d7ceddee7c7feaabff80010cfedf30
| ["BSD-3-Clause"]
| 8
| 2021-11-25T20:05:49.000Z
| 2022-03-27T17:45:00.000Z
| locan/tests/locan_io/test_rapidstorm.py
| super-resolution/Locan
| 94ed7759f7d7ceddee7c7feaabff80010cfedf30
| ["BSD-3-Clause"]
| 4
| 2021-12-15T22:39:20.000Z
| 2022-03-11T17:35:34.000Z
| locan/tests/locan_io/test_rapidstorm.py
| super-resolution/Locan
| 94ed7759f7d7ceddee7c7feaabff80010cfedf30
| ["BSD-3-Clause"]
| 1
| 2022-03-22T19:53:13.000Z
| 2022-03-22T19:53:13.000Z
|
from io import StringIO
import numpy as np
import locan.constants
from locan.locan_io import load_rapidSTORM_file, load_rapidSTORM_track_file
from locan.locan_io.locdata.rapidstorm_io import (
load_rapidSTORM_header,
load_rapidSTORM_track_header,
)

def test_get_correct_column_names_from_rapidSTORM_header():
columns = load_rapidSTORM_header(
path=locan.ROOT_DIR / "tests/test_data/rapidSTORM_dstorm_data.txt"
)
assert columns == [
"position_x",
"position_y",
"frame",
"intensity",
"chi_square",
"local_background",
]
file_like = StringIO(
'# <localizations insequence="true" repetitions="variable"><field identifier="Position-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position in sample space in X" unit="nanometer" min="0 m" max="3.27165e-005 m" /><field identifier="Position-1-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position in sample space in Y" unit="nanometer" min="0 m" max="3.27165e-005 m" /><field identifier="ImageNumber-0-0" syntax="integer" semantic="frame number" unit="frame" min="0 fr" /><field identifier="Amplitude-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="emission strength" unit="A/D count" /><field identifier="FitResidues-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="fit residue chi square value" unit="dimensionless" /><field identifier="LocalBackground-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="local background" unit="A/D count" /></localizations>\n'
"9657.4 24533.5 0 33290.1 1.19225e+006 767.733"
)
columns = load_rapidSTORM_header(path=file_like)
assert columns == [
"position_x",
"position_y",
"frame",
"intensity",
"chi_square",
"local_background",
]

def test_loading_rapidSTORM_file():
dat = load_rapidSTORM_file(
path=locan.ROOT_DIR / "tests/test_data/rapidSTORM_dstorm_data.txt", nrows=10
)
# print(dat.data.head())
# dat.print_meta()
assert len(dat) == 10
file_like = StringIO(
'# <localizations insequence="true" repetitions="variable"><field identifier="Position-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position in sample space in X" unit="nanometer" min="0 m" max="3.27165e-005 m" /><field identifier="Position-1-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position in sample space in Y" unit="nanometer" min="0 m" max="3.27165e-005 m" /><field identifier="ImageNumber-0-0" syntax="integer" semantic="frame number" unit="frame" min="0 fr" /><field identifier="Amplitude-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="emission strength" unit="A/D count" /><field identifier="FitResidues-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="fit residue chi square value" unit="dimensionless" /><field identifier="LocalBackground-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="local background" unit="A/D count" /></localizations>\n'
"9657.4 24533.5 0 33290.1 1.19225e+006 767.733"
)
dat = load_rapidSTORM_file(path=file_like, nrows=1)
assert len(dat) == 1

def test_get_correct_column_names_from_rapidSTORM_track_header():
columns = load_rapidSTORM_track_header(
path=locan.ROOT_DIR / "tests/test_data/rapidSTORM_dstorm_track_data.txt"
)
assert columns == (
["position_x", "position_y", "frame", "intensity"],
[
"position_x",
"uncertainty_x",
"position_y",
"uncertainty_y",
"frame",
"intensity",
"chi_square",
"local_background",
],
)
file_like = StringIO(
'# <localizations insequence="true" repetitions="variable"><field identifier="Position-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position in sample space in X" unit="nanometer" min="0 m" max="8.442e-006 m" /><field identifier="Position-1-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position in sample space in Y" unit="nanometer" min="0 m" max="8.442e-006 m" /><field identifier="ImageNumber-0-0" syntax="integer" semantic="frame number" unit="frame" min="0 fr" /><field identifier="Amplitude-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="emission strength" unit="A/D count" /><localizations insequence="true" repetitions="variable"><field identifier="Position-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position in sample space in X" unit="nanometer" min="0 m" max="8.442e-006 m" /><field identifier="Position-0-0-uncertainty" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position uncertainty in sample space in X" unit="nanometer" /><field identifier="Position-1-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position in sample space in Y" unit="nanometer" min="0 m" max="8.442e-006 m" /><field identifier="Position-1-0-uncertainty" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position uncertainty in sample space in Y" unit="nanometer" /><field identifier="ImageNumber-0-0" syntax="integer" semantic="frame number" unit="frame" min="0 fr" /><field identifier="Amplitude-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="emission strength" unit="A/D count" /><field identifier="FitResidues-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="fit residue chi square value" unit="dimensionless" /><field identifier="LocalBackground-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="local background" unit="A/D count" /></localizations></localizations>\n'
"5417.67 8439.85 1 4339.29 2 5421.22 25 8440.4 25 0 2292.39 48696.1 149.967 5414.13 25 8439.3 25 1 2046.9 65491.4 142.521"
)
columns = load_rapidSTORM_track_header(path=file_like)
assert columns == (
["position_x", "position_y", "frame", "intensity"],
[
"position_x",
"uncertainty_x",
"position_y",
"uncertainty_y",
"frame",
"intensity",
"chi_square",
"local_background",
],
)

def test_loading_rapidSTORM_track_file():
dat = load_rapidSTORM_track_file(
path=locan.ROOT_DIR / "tests/test_data/rapidSTORM_dstorm_track_data.txt",
min_localization_count=2,
nrows=10,
)
# print(dat.data.head())
# print(dat.data.columns)
# dat.print_meta()
assert np.array_equal(
dat.data.columns,
[
"localization_count",
"position_x",
"position_y",
"region_measure_bb",
"localization_density_bb",
"subregion_measure_bb",
],
)
assert (
len(dat) == 9
) # len(dat) is 9 and not 10 since one row is filtered out by min_localization_count=2
dat = load_rapidSTORM_track_file(
path=locan.ROOT_DIR / "tests/test_data/rapidSTORM_dstorm_track_data.txt",
collection=False,
nrows=10,
)
assert np.array_equal(
dat.data.columns, ["position_x", "position_y", "frame", "intensity"]
)
assert len(dat) == 10
file_like = StringIO(
'# <localizations insequence="true" repetitions="variable"><field identifier="Position-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position in sample space in X" unit="nanometer" min="0 m" max="8.442e-006 m" /><field identifier="Position-1-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position in sample space in Y" unit="nanometer" min="0 m" max="8.442e-006 m" /><field identifier="ImageNumber-0-0" syntax="integer" semantic="frame number" unit="frame" min="0 fr" /><field identifier="Amplitude-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="emission strength" unit="A/D count" /><localizations insequence="true" repetitions="variable"><field identifier="Position-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position in sample space in X" unit="nanometer" min="0 m" max="8.442e-006 m" /><field identifier="Position-0-0-uncertainty" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position uncertainty in sample space in X" unit="nanometer" /><field identifier="Position-1-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position in sample space in Y" unit="nanometer" min="0 m" max="8.442e-006 m" /><field identifier="Position-1-0-uncertainty" syntax="floating point with . for decimals and optional scientific e-notation" semantic="position uncertainty in sample space in Y" unit="nanometer" /><field identifier="ImageNumber-0-0" syntax="integer" semantic="frame number" unit="frame" min="0 fr" /><field identifier="Amplitude-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="emission strength" unit="A/D count" /><field identifier="FitResidues-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="fit residue chi square value" unit="dimensionless" /><field identifier="LocalBackground-0-0" syntax="floating point with . for decimals and optional scientific e-notation" semantic="local background" unit="A/D count" /></localizations></localizations>\n'
"5417.67 8439.85 1 4339.29 2 5421.22 25 8440.4 25 0 2292.39 48696.1 149.967 5414.13 25 8439.3 25 1 2046.9 65491.4 142.521"
)
dat = load_rapidSTORM_file(path=file_like, nrows=1)
assert len(dat) == 1
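
The StringIO trick above lets the header and file loaders run against an in-memory buffer instead of the on-disk test data. A self-contained sketch of the technique (load_header is a hypothetical stand-in, not locan's API):

from io import StringIO

def load_header(path):
    # Accept a filesystem path or any object with .readline().
    file = open(path) if isinstance(path, str) else path
    try:
        return file.readline().lstrip('#').split()
    finally:
        if isinstance(path, str):
            file.close()

def test_load_header_from_string():
    file_like = StringIO('# position_x position_y frame\n9657.4 24533.5 0')
    assert load_header(file_like) == ['position_x', 'position_y', 'frame']
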
| 76.288889
| 2,226
| 0.70803
| 1,417
| 10,299
| 5.053634
| 0.105152
| 0.075408
| 0.079598
| 0.096355
| 0.932272
| 0.924033
| 0.910348
| 0.896942
| 0.885212
| 0.885212
| 0
| 0.051551
| 0.167492
| 10,299
| 134
| 2,227
| 76.858209
| 0.783648
| 0.017963
| 0
| 0.570175
| 0
| 0.052632
| 0.758088
| 0.149302
| 0
| 0
| 0
| 0
| 0.096491
| 1
| 0.035088
| false
| 0
| 0.04386
| 0
| 0.078947
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 1a773319addfc34eeebb57a0b3fa1b873d2311ab
| 7,967
| py
| Python
| django/stock/repositories.py
| nah990/StockF
| 58f719728f4072186459d0ca5651624eab820c5e
| ["MIT"]
| null | null | null
| django/stock/repositories.py
| nah990/StockF
| 58f719728f4072186459d0ca5651624eab820c5e
| ["MIT"]
| 3
| 2021-12-27T02:05:58.000Z
| 2022-03-23T00:06:14.000Z
| django/stock/repositories.py
| nah990/StockF
| 58f719728f4072186459d0ca5651624eab820c5e
| ["MIT"]
| null | null | null
|
from .models import *
from django.db import transaction
from users.models import CustomUser
from .repository_pattern import *

class StockByDateRepository(Repository):
@classmethod
@transaction.atomic
def create(cls, client_user, model):
model.save(
using=cls.db_config_manager.get_connection(client_user))
@classmethod
def read_by_pk(cls, client_user, pk):
return StockByDate.objects.using(cls.db_config_manager.get_connection(client_user)).get(pk=pk)
@classmethod
def read_filtered(cls, client_user, filter_dict):
return StockByDate.objects.using(cls.db_config_manager.get_connection(client_user)).filter(**filter_dict)
@classmethod
def read_all(cls, client_user):
return StockByDate.objects.using(cls.db_config_manager.get_connection(client_user)).all()
@classmethod
def update_by_pk(cls, client_user, pk, update_dict):
StockByDate.objects.using(cls.db_config_manager.get_connection(client_user)).filter(
pk=pk).update(**update_dict)
@classmethod
def update_filtered(cls, client_user, filter_dict, update_dict):
StockByDate.objects.using(cls.db_config_manager.get_connection(
client_user)).filter(**filter_dict).update(**update_dict)
@classmethod
def update_all(cls, client_user, update_dict):
StockByDate.objects.using(cls.db_config_manager.get_connection(
client_user)).all().update(**update_dict)
@classmethod
def delete_by_pk(cls, client_user, pk):
StockByDate.objects.using(cls.db_config_manager.get_connection(
client_user)).get(pk=pk).delete()
@classmethod
def delete_filtered(cls, client_user, filter_dict):
StockByDate.objects.using(cls.db_config_manager.get_connection(
client_user)).filter(**filter_dict).delete()
@classmethod
def read_join_filtered(cls, client_user, join_field, filter_dict):
return StockByDate.objects.using(cls.db_config_manager.get_connection(client_user)).\
select_related(join_field).filter(**filter_dict)

class StockInfoRepository(Repository):
@classmethod
@transaction.atomic
def create(cls, client_user, model):
model.save(
using=cls.db_config_manager.get_connection(client_user))
@classmethod
def read_by_pk(cls, client_user, pk):
return StockInfo.objects.using(cls.db_config_manager.get_connection(client_user)).get(pk=pk)
@classmethod
def read_filtered(cls, client_user, filter_dict):
return StockInfo.objects.using(cls.db_config_manager.get_connection(client_user)).filter(**filter_dict)
@classmethod
def read_all(cls, client_user):
return StockInfo.objects.using(cls.db_config_manager.get_connection(client_user)).all()
@classmethod
def update_by_pk(cls, client_user, pk, update_dict):
StockInfo.objects.using(cls.db_config_manager.get_connection(client_user)).filter(
pk=pk).update(**update_dict)
@classmethod
def update_filtered(cls, client_user, filter_dict, update_dict):
StockInfo.objects.using(cls.db_config_manager.get_connection(
client_user)).filter(**filter_dict).update(**update_dict)
@classmethod
def update_all(cls, client_user, update_dict):
StockInfo.objects.using(cls.db_config_manager.get_connection(
client_user)).all().update(**update_dict)
@classmethod
def delete_by_pk(cls, client_user, pk):
StockInfo.objects.using(cls.db_config_manager.get_connection(
client_user)).get(pk=pk).delete()
@classmethod
def delete_filtered(cls, client_user, filter_dict):
StockInfo.objects.using(cls.db_config_manager.get_connection(
client_user)).filter(**filter_dict).delete()
@classmethod
def read_join_filtered(cls, client_user, join_field, filter_dict):
return StockInfo.objects.using(cls.db_config_manager.get_connection(client_user)).\
select_related(join_field).filter(**filter_dict)

class SourceInfoRepository(Repository):
@classmethod
@transaction.atomic
def create(cls, client_user, model):
model.save(
using=cls.db_config_manager.get_connection(client_user))
@classmethod
def read_by_pk(cls, client_user, pk):
return SourceInfo.objects.using(cls.db_config_manager.get_connection(client_user)).get(pk=pk)
@classmethod
def read_filtered(cls, client_user, filter_dict):
return SourceInfo.objects.using(cls.db_config_manager.get_connection(client_user)).filter(**filter_dict)
@classmethod
def read_all(cls, client_user):
return SourceInfo.objects.using(cls.db_config_manager.get_connection(client_user)).all()
@classmethod
def update_by_pk(cls, client_user, pk, update_dict):
SourceInfo.objects.using(cls.db_config_manager.get_connection(client_user)).filter(
pk=pk).update(**update_dict)
@classmethod
def update_filtered(cls, client_user, filter_dict, update_dict):
SourceInfo.objects.using(cls.db_config_manager.get_connection(
client_user)).filter(**filter_dict).update(**update_dict)
@classmethod
def update_all(cls, client_user, update_dict):
SourceInfo.objects.using(cls.db_config_manager.get_connection(
client_user)).all().update(**update_dict)
@classmethod
def delete_by_pk(cls, client_user, pk):
SourceInfo.objects.using(cls.db_config_manager.get_connection(
client_user)).get(pk=pk).delete()
@classmethod
def delete_filtered(cls, client_user, filter_dict):
SourceInfo.objects.using(cls.db_config_manager.get_connection(
client_user)).filter(**filter_dict).delete()
@classmethod
def read_join_filtered(cls, client_user, join_field, filter_dict):
return SourceInfo.objects.using(cls.db_config_manager.get_connection(client_user)).\
select_related(join_field).filter(**filter_dict)

class CustomUserRepository(Repository):
@classmethod
@transaction.atomic
def create(cls, client_user, user):
user.save(using=cls.db_config_manager.get_connection(client_user))
@classmethod
def read_by_pk(cls, client_user, pk):
return CustomUser.objects.using(cls.db_config_manager.get_connection(client_user)).get(pk=pk)
@classmethod
def read_filtered(cls, client_user, filter_dict):
return CustomUser.objects.using(cls.db_config_manager.get_connection(client_user)).filter(**filter_dict)
@classmethod
def read_all(cls, client_user):
return CustomUser.objects.using(cls.db_config_manager.get_connection(client_user)).all()
@classmethod
def update_by_pk(cls, client_user, pk, update_dict):
CustomUser.objects.using(cls.db_config_manager.get_connection(
client_user)).filter(pk=pk).update(**update_dict)
@classmethod
def update_filtered(cls, client_user, filter_dict, update_dict):
CustomUser.objects.using(cls.db_config_manager.get_connection(
client_user)).filter(**filter_dict).update(**update_dict)
@classmethod
def update_all(cls, client_user, update_dict):
CustomUser.objects.using(cls.db_config_manager.get_connection(
client_user)).all().update(**update_dict)
@classmethod
def delete_by_pk(cls, client_user, pk):
CustomUser.objects.using(cls.db_config_manager.get_connection(
client_user)).get(pk=pk).delete()
@classmethod
def delete_filtered(cls, client_user, filter_dict):
CustomUser.objects.using(cls.db_config_manager.get_connection(
client_user)).filter(**filter_dict).delete()
@classmethod
def read_join_filtered(cls, client_user, join_field, filter_dict):
return CustomUser.objects.using(cls.db_config_manager.get_connection(client_user)).\
select_related(join_field).filter(**filter_dict)
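
The four repository classes above are identical apart from the model they wrap. A sketch of how the same module could generate them once: make_repository is a hypothetical name, and the sketch reuses this module's Repository base, transaction import, and model classes rather than standing alone.

def make_repository(model_cls):
    class _ModelRepository(Repository):
        @classmethod
        def _queryset(cls, client_user):
            # Every query is routed through the per-client connection.
            return model_cls.objects.using(
                cls.db_config_manager.get_connection(client_user))

        @classmethod
        @transaction.atomic
        def create(cls, client_user, model):
            model.save(using=cls.db_config_manager.get_connection(client_user))

        @classmethod
        def read_by_pk(cls, client_user, pk):
            return cls._queryset(client_user).get(pk=pk)

        @classmethod
        def read_filtered(cls, client_user, filter_dict):
            return cls._queryset(client_user).filter(**filter_dict)

        @classmethod
        def delete_by_pk(cls, client_user, pk):
            cls._queryset(client_user).get(pk=pk).delete()

    _ModelRepository.__name__ = f'{model_cls.__name__}Repository'
    return _ModelRepository

# Hypothetical usage, equivalent to the hand-written class above:
# StockByDateRepository = make_repository(StockByDate)
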
| 39.835
| 113
| 0.725744
| 1,017
| 7,967
| 5.369715
| 0.046214
| 0.146493
| 0.095221
| 0.117195
| 0.964109
| 0.964109
| 0.964109
| 0.964109
| 0.964109
| 0.953122
| 0
| 0
| 0.168068
| 7,967
| 200
| 114
| 39.835
| 0.823929
| 0
| 0
| 0.811321
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.251572
| false
| 0
| 0.025157
| 0.100629
| 0.402516
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 9
| 1aa4e58fee1ad5271b760c3b34af29980da4ab08
| 377
| py
| Python
| tests/parser/bug.88.test.py
| veltri/DLV2
| 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
| ["Apache-2.0"]
| null | null | null
| tests/parser/bug.88.test.py
| veltri/DLV2
| 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
| ["Apache-2.0"]
| null | null | null
| tests/parser/bug.88.test.py
| veltri/DLV2
| 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
| ["Apache-2.0"]
| null | null | null
|
input = """
e(0). e(1).
p(X) | q(X) :- e(X).
p(1) :- q(1).
q(1) :- p(1).
r :- p(X1), q(X1), p(X2), q(X2), p(X3), q(X3), p(X4), q(X4), p(X5),
q(X5), p(X6), q(X6), p(X7), q(X7).
"""
output = """
e(0). e(1).
p(X) | q(X) :- e(X).
p(1) :- q(1).
q(1) :- p(1).
r :- p(X1), q(X1), p(X2), q(X2), p(X3), q(X3), p(X4), q(X4), p(X5),
q(X5), p(X6), q(X6), p(X7), q(X7).
"""
| 19.842105
| 68
| 0.352785
| 96
| 377
| 1.385417
| 0.166667
| 0.06015
| 0.090226
| 0.06015
| 0.917293
| 0.917293
| 0.917293
| 0.917293
| 0.917293
| 0.917293
| 0
| 0.135593
| 0.217507
| 377
| 18
| 69
| 20.944444
| 0.315254
| 0
| 0
| 0.875
| 0
| 0.125
| 0.914601
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 1ac4414a5d2e954d14767c8edbff4516ce66e2fe
| 9,431
| py
| Python
| tests/draw/svg/test_shapes.py
| MasonryHQ/WeasyPrint
| c191372e07e334010a283c59e7ebc354783120c9
| ["BSD-3-Clause"]
| null | null | null
| tests/draw/svg/test_shapes.py
| MasonryHQ/WeasyPrint
| c191372e07e334010a283c59e7ebc354783120c9
| ["BSD-3-Clause"]
| null | null | null
| tests/draw/svg/test_shapes.py
| MasonryHQ/WeasyPrint
| c191372e07e334010a283c59e7ebc354783120c9
| ["BSD-3-Clause"]
| null | null | null
|
"""
weasyprint.tests.test_draw.svg.test_shapes
------------------------------------------
Test how SVG simple shapes are drawn.
"""
from ...testing_utils import assert_no_logs
from .. import assert_pixels
@assert_no_logs
def test_rect_stroke():
assert_pixels('rect_stroke', 9, 9, '''
_________
_RRRRRRR_
_RRRRRRR_
_RR___RR_
_RR___RR_
_RR___RR_
_RRRRRRR_
_RRRRRRR_
_________
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<rect x="2" y="2" width="5" height="5"
stroke-width="2" stroke="red" fill="none" />
</svg>
''')
@assert_no_logs
def test_rect_fill():
assert_pixels('rect_fill', 9, 9, '''
_________
_________
__RRRRR__
__RRRRR__
__RRRRR__
__RRRRR__
__RRRRR__
_________
_________
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<rect x="2" y="2" width="5" height="5" fill="red" />
</svg>
''')
@assert_no_logs
def test_rect_stroke_fill():
assert_pixels('rect_stroke_fill', 9, 9, '''
_________
_RRRRRRR_
_RRRRRRR_
_RRBBBRR_
_RRBBBRR_
_RRBBBRR_
_RRRRRRR_
_RRRRRRR_
_________
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<rect x="2" y="2" width="5" height="5"
stroke-width="2" stroke="red" fill="blue" />
</svg>
''')
@assert_no_logs
def test_rect_round():
assert_pixels('rect_round', 9, 9, '''
_zzzzzzz_
zzzzzzzzz
zzRRRRRzz
zzRRRRRzz
zzRRRRRzz
zzRRRRRzz
zzRRRRRzz
zzzzzzzzz
_zzzzzzz_
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<rect width="9" height="9" fill="red" rx="4" ry="4" />
</svg>
''')
@assert_no_logs
def test_rect_round_zero():
assert_pixels('rect_round_zero', 9, 9, '''
RRRRRRRRR
RRRRRRRRR
RRRRRRRRR
RRRRRRRRR
RRRRRRRRR
RRRRRRRRR
RRRRRRRRR
RRRRRRRRR
RRRRRRRRR
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<rect width="9" height="9" fill="red" rx="0" ry="4" />
</svg>
''')
@assert_no_logs
def test_line():
assert_pixels('line', 9, 9, '''
_________
_________
_________
_________
RRRRRR___
RRRRRR___
_________
_________
_________
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<line x1="0" y1="5" x2="6" y2="5"
stroke="red" stroke-width="2"/>
</svg>
''')
@assert_no_logs
def test_polyline():
assert_pixels('polyline', 9, 9, '''
_________
RRRRRR___
RRRRRR___
RR__RR___
RR__RR___
RR__RR___
_________
_________
_________
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<polyline points="1,6, 1,2, 5,2, 5,6"
stroke="red" stroke-width="2" fill="none"/>
</svg>
''')
@assert_no_logs
def test_polyline_fill():
assert_pixels('polyline_fill', 9, 9, '''
_________
RRRRRR___
RRRRRR___
RRBBRR___
RRBBRR___
RRBBRR___
_________
_________
_________
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<polyline points="1,6, 1,2, 5,2, 5,6"
stroke="red" stroke-width="2" fill="blue"/>
</svg>
''')
@assert_no_logs
def test_polygon():
assert_pixels('polygon', 9, 9, '''
_________
RRRRRR___
RRRRRR___
RR__RR___
RR__RR___
RRRRRR___
RRRRRR___
_________
_________
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<polygon points="1,6, 1,2, 5,2, 5,6"
stroke="red" stroke-width="2" fill="none"/>
</svg>
''')
@assert_no_logs
def test_polygon_fill():
assert_pixels('polygon_fill', 9, 9, '''
_________
RRRRRR___
RRRRRR___
RRBBRR___
RRBBRR___
RRRRRR___
RRRRRR___
_________
_________
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<polygon points="1,6, 1,2, 5,2, 5,6"
stroke="red" stroke-width="2" fill="blue"/>
</svg>
''')
@assert_no_logs
def test_circle_stroke():
assert_pixels('circle_stroke', 10, 10, '''
__________
__RRRRRR__
_RRRRRRRR_
_RRRRRRRR_
_RRR__RRR_
_RRR__RRR_
_RRRRRRRR_
_RRRRRRRR_
__RRRRRR__
__________
''', '''
<style>
@page { size: 10px }
svg { display: block }
</style>
<svg width="10px" height="10px" xmlns="http://www.w3.org/2000/svg">
<circle cx="5" cy="5" r="3"
stroke="red" stroke-width="2" fill="none"/>
</svg>
''')
@assert_no_logs
def test_circle_fill():
assert_pixels('circle_fill', 10, 10, '''
__________
__RRRRRR__
_RRRRRRRR_
_RRRRRRRR_
_RRRBBRRR_
_RRRBBRRR_
_RRRRRRRR_
_RRRRRRRR_
__RRRRRR__
__________
''', '''
<style>
@page { size: 10px }
svg { display: block }
</style>
<svg width="10px" height="10px" xmlns="http://www.w3.org/2000/svg">
<circle cx="5" cy="5" r="3"
stroke="red" stroke-width="2" fill="blue"/>
</svg>
''')
@assert_no_logs
def test_ellipse_stroke():
assert_pixels('ellipse_stroke', 10, 10, '''
__________
__RRRRRR__
_RRRRRRRR_
_RRRRRRRR_
_RRR__RRR_
_RRR__RRR_
_RRRRRRRR_
_RRRRRRRR_
__RRRRRR__
__________
''', '''
<style>
@page { size: 10px }
svg { display: block }
</style>
<svg width="10px" height="10px" xmlns="http://www.w3.org/2000/svg">
<ellipse cx="5" cy="5" rx="3" ry="3"
stroke="red" stroke-width="2" fill="none"/>
</svg>
''')
@assert_no_logs
def test_ellipse_fill():
assert_pixels('ellipse_fill', 10, 10, '''
__________
__RRRRRR__
_RRRRRRRR_
_RRRRRRRR_
_RRRBBRRR_
_RRRBBRRR_
_RRRRRRRR_
_RRRRRRRR_
__RRRRRR__
__________
''', '''
<style>
@page { size: 10px }
svg { display: block }
</style>
<svg width="10px" height="10px" xmlns="http://www.w3.org/2000/svg">
<ellipse cx="5" cy="5" rx="3" ry="3"
stroke="red" stroke-width="2" fill="blue"/>
</svg>
''')
@assert_no_logs
def test_rect_in_g():
assert_pixels('rect_in_g', 9, 9, '''
RRRRR____
RRRRR____
RRRRR____
RRRRR____
RRRRR____
_________
_________
_________
_________
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<g x="5" y="5">
<rect width="5" height="5" fill="red" />
</g>
</svg>
''')
@assert_no_logs
def test_rect_x_y_in_g():
assert_pixels('rect_x_y_in_g', 9, 9, '''
_________
_________
__RRRRR__
__RRRRR__
__RRRRR__
__RRRRR__
__RRRRR__
_________
_________
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<g x="5" y="5">
<rect x="2" y="2" width="5" height="5" fill="red" />
</g>
</svg>
''')
@assert_no_logs
def test_rect_stroke_zero():
assert_pixels('rect_stroke_zero', 9, 9, '''
_________
_________
_________
_________
_________
_________
_________
_________
_________
''', '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<rect x="2" y="2" width="5" height="5"
stroke-width="0" stroke="red" fill="none" />
</svg>
''')
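
Each test above pairs an SVG document with an ASCII pixel map, one letter per expected pixel. Below is a self-contained sketch of the idea behind assert_pixels; COLOURS and check_pixels are hypothetical names, and reading 'z' as an anti-aliased don't-care pixel is an assumption, not WeasyPrint's documented behaviour.

COLOURS = {
    '_': (255, 255, 255),  # background
    'R': (255, 0, 0),      # red stroke/fill
    'B': (0, 0, 255),      # blue fill
    'z': None,             # assumed: anti-aliased pixel, accept any colour
}

def check_pixels(expected_map, pixels):
    # pixels is a row-major grid of RGB tuples from the rendered page.
    rows = expected_map.split()
    for y, row in enumerate(rows):
        for x, char in enumerate(row):
            expected = COLOURS[char]
            if expected is not None:
                assert pixels[y][x] == expected, f'mismatch at ({x}, {y})'

# Tiny usage example: a 2x2 render with one red pixel.
check_pixels('''
__
_R
''', [[(255, 255, 255), (255, 255, 255)],
      [(255, 255, 255), (255, 0, 0)]])
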
| 22.348341
| 73
| 0.503658
| 979
| 9,431
| 3.951992
| 0.08478
| 0.037219
| 0.055828
| 0.065909
| 0.834324
| 0.812872
| 0.808736
| 0.764539
| 0.699147
| 0.690359
| 0
| 0.047032
| 0.346199
| 9,431
| 421
| 74
| 22.401425
| 0.580441
| 0.013148
| 0
| 0.883905
| 0
| 0.055409
| 0.832974
| 0
| 0
| 0
| 0
| 0
| 0.094987
| 1
| 0.044855
| true
| 0
| 0.005277
| 0
| 0.050132
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 20133b8465ad3b3b9d27111ae240516d6bff4b15
| 129
| py
| Python
| soap/parser/__init__.py
| gitter-badger/soap
| 4f5eb7848e4dc516a6ff972db5c8c46ec9037c47
| ["MIT"]
| 22
| 2016-02-08T16:57:30.000Z
| 2021-03-12T20:32:06.000Z
| soap/parser/__init__.py
| gitter-badger/soap
| 4f5eb7848e4dc516a6ff972db5c8c46ec9037c47
| ["MIT"]
| 1
| 2018-07-11T21:21:27.000Z
| 2018-07-17T19:53:19.000Z
| soap/parser/__init__.py
| gitter-badger/soap
| 4f5eb7848e4dc516a6ff972db5c8c46ec9037c47
| ["MIT"]
| 6
| 2016-02-01T13:30:56.000Z
| 2018-11-28T04:35:27.000Z
|
from soap.parser.expression import expr_parse
from soap.parser.statement import stmt_parse
from soap.parser.program import parse
| 32.25
| 45
| 0.860465
| 20
| 129
| 5.45
| 0.5
| 0.220183
| 0.385321
| 0.348624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 129
| 3
| 46
| 43
| 0.931624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 8
| 203095752831fce02f84396b2a9c3a0925b34bc1
| 110,404
| py
| Python
| pkgs/ops-pkg/src/genie/libs/ops/routing/iosxe/tests/routing_output.py
| jbronikowski/genielibs
| 200a34e5fe4838a27b5a80d5973651b2e34ccafb
| ["Apache-2.0"]
| null | null | null
| pkgs/ops-pkg/src/genie/libs/ops/routing/iosxe/tests/routing_output.py
| jbronikowski/genielibs
| 200a34e5fe4838a27b5a80d5973651b2e34ccafb
| ["Apache-2.0"]
| null | null | null
| pkgs/ops-pkg/src/genie/libs/ops/routing/iosxe/tests/routing_output.py
| jbronikowski/genielibs
| 200a34e5fe4838a27b5a80d5973651b2e34ccafb
| ["Apache-2.0"]
| null | null | null
|
'''
Route Genie Ops Object Outputs for IOSXE.
'''

class RouteOutput(object):
"""show ip route output """
ShowVrfDetail = {
"Mgmt-vrf": {
"vrf_id": 1,
"interfaces": [
"GigabitEthernet0/0"
],
"address_family": {
"ipv4 unicast": {
"table_id": "0x1",
"flags": "0x0",
"vrf_label": {
'allocation_mode': 'per-prefix'
}
},
"ipv6 unicast": {
"table_id": "0x1E000001",
"flags": "0x0",
"vrf_label": {
'allocation_mode': 'per-prefix'
}
}
},
"flags": "0x1808"
},
"VRF1": {
"interfaces": [
"GigabitEthernet0/0"
],
"address_family": {
"ipv4 unicast": {
"export_to_global": {
"export_to_global_map": "export_to_global_map",
"prefix_limit": 1000
},
"import_from_global": {
"prefix_limit": 1000,
"import_from_global_map": "import_from_global_map"
},
"table_id": "0x1",
"routing_table_limit": {
"routing_table_limit_action": {
"enable_alert_limit_number": {
"alert_limit_number": 10000
}
}
},
"route_targets": {
"200:1": {
"rt_type": "both",
"route_target": "200:1"
},
"100:1": {
"rt_type": "both",
"route_target": "100:1"
}
},
"flags": "0x2100",
"vrf_label": {
'allocation_mode': 'per-prefix'
}
},
"ipv6 unicast": {
"export_to_global": {
"export_to_global_map": "export_to_global_map",
"prefix_limit": 1000
},
"table_id": "0x1E000001",
"routing_table_limit": {
"routing_table_limit_action": {
"enable_alert_percent": {
"alert_percent_value": 70
},
"enable_alert_limit_number": {
"alert_limit_number": 7000
}
},
"routing_table_limit_number": 10000
},
"route_targets": {
"200:1": {
"rt_type": "import",
"route_target": "200:1"
},
"400:1": {
"rt_type": "import",
"route_target": "400:1"
},
"300:1": {
"rt_type": "export",
"route_target": "300:1"
},
"100:1": {
"rt_type": "export",
"route_target": "100:1"
}
},
"flags": "0x100",
"vrf_label": {
'allocation_mode': 'per-prefix'
}
}
},
"flags": "0x180C",
"route_distinguisher": "100:1",
"vrf_id": 1
}
}
showIpRoute_default = '''\
genie_Router#show ip route
Codes: L - local, C - connected, S - static, R - RIP, M - mobile, B - BGP
D - EIGRP, EX - EIGRP external, O - OSPF, IA - OSPF inter area
N1 - OSPF NSSA external type 1, N2 - OSPF NSSA external type 2
E1 - OSPF external type 1, E2 - OSPF external type 2
i - IS-IS, su - IS-IS summary, L1 - IS-IS level-1, L2 - IS-IS level-2
ia - IS-IS inter area, * - candidate default, U - per-user static route
o - ODR, P - periodic downloaded static route, H - NHRP, l - LISP
a - application route
+ - replicated route, % - next hop override, p - overrides from PfR
Gateway of last resort is not set
10.1.0.0/32 is subnetted, 1 subnets
C 10.4.1.1 is directly connected, Loopback0
10.4.0.0/32 is subnetted, 1 subnets
D 10.16.2.2 [90/10752] via 10.12.90.2, 4d19h, GigabitEthernet2.90
10.9.0.0/32 is subnetted, 1 subnets
D 10.36.3.3 [90/2570240] via 10.13.90.3, 4d19h, GigabitEthernet3.90
10.0.0.0/8 is variably subnetted, 20 subnets, 2 masks
C 10.12.90.0/24 is directly connected, GigabitEthernet2.90
L 10.12.90.1/32 is directly connected, GigabitEthernet2.90
C 10.12.110.0/24 is directly connected, GigabitEthernet2.110
L 10.12.110.1/32 is directly connected, GigabitEthernet2.110
C 10.12.115.0/24 is directly connected, GigabitEthernet2.115
L 10.12.115.1/32 is directly connected, GigabitEthernet2.115
C 10.12.120.0/24 is directly connected, GigabitEthernet2.120
L 10.12.120.1/32 is directly connected, GigabitEthernet2.120
C 10.13.90.0/24 is directly connected, GigabitEthernet3.90
L 10.13.90.1/32 is directly connected, GigabitEthernet3.90
C 10.13.110.0/24 is directly connected, GigabitEthernet3.110
L 10.13.110.1/32 is directly connected, GigabitEthernet3.110
C 10.13.115.0/24 is directly connected, GigabitEthernet3.115
L 10.13.115.1/32 is directly connected, GigabitEthernet3.115
C 10.13.120.0/24 is directly connected, GigabitEthernet3.120
L 10.13.120.1/32 is directly connected, GigabitEthernet3.120
D 10.23.90.0/24 [90/15360] via 10.13.90.3, 4d19h, GigabitEthernet3.90
[90/15360] via 10.12.90.2, 4d19h, GigabitEthernet2.90
O 10.23.110.0/24 [110/2] via 10.12.110.2, 4d19h, GigabitEthernet2.110
i L1 10.23.115.0/24 [115/20] via 10.12.115.2, 4d19h, GigabitEthernet2.115
R 10.23.120.0/24
[120/1] via 10.13.120.3, 00:00:08, GigabitEthernet3.120
[120/1] via 10.12.120.2, 00:00:02, GigabitEthernet2.120
'''
showIpRoute_VRF1 = '''\
genie_Router#show ip route vrf VRF1
Routing Table: VRF1
Codes: L - local, C - connected, S - static, R - RIP, M - mobile, B - BGP
D - EIGRP, EX - EIGRP external, O - OSPF, IA - OSPF inter area
N1 - OSPF NSSA external type 1, N2 - OSPF NSSA external type 2
E1 - OSPF external type 1, E2 - OSPF external type 2
i - IS-IS, su - IS-IS summary, L1 - IS-IS level-1, L2 - IS-IS level-2
ia - IS-IS inter area, * - candidate default, U - per-user static route
o - ODR, P - periodic downloaded static route, H - NHRP, l - LISP
a - application route
+ - replicated route, % - next hop override, p - overrides from PfR
Gateway of last resort is not set
10.1.0.0/32 is subnetted, 1 subnets
C 10.4.1.1 is directly connected, Loopback300
10.4.0.0/32 is subnetted, 1 subnets
D 10.16.2.2 [90/10752] via 10.12.90.2, 4d19h, GigabitEthernet2.390
10.9.0.0/32 is subnetted, 1 subnets
D 10.36.3.3 [90/2570240] via 10.13.90.3, 4d19h, GigabitEthernet3.390
10.0.0.0/8 is variably subnetted, 20 subnets, 2 masks
C 10.12.90.0/24 is directly connected, GigabitEthernet2.390
L 10.12.90.1/32 is directly connected, GigabitEthernet2.390
C 10.12.110.0/24 is directly connected, GigabitEthernet2.410
L 10.12.110.1/32 is directly connected, GigabitEthernet2.410
C 10.12.115.0/24 is directly connected, GigabitEthernet2.415
L 10.12.115.1/32 is directly connected, GigabitEthernet2.415
C 10.12.120.0/24 is directly connected, GigabitEthernet2.420
L 10.12.120.1/32 is directly connected, GigabitEthernet2.420
C 10.13.90.0/24 is directly connected, GigabitEthernet3.390
L 10.13.90.1/32 is directly connected, GigabitEthernet3.390
C 10.13.110.0/24 is directly connected, GigabitEthernet3.410
L 10.13.110.1/32 is directly connected, GigabitEthernet3.410
C 10.13.115.0/24 is directly connected, GigabitEthernet3.415
L 10.13.115.1/32 is directly connected, GigabitEthernet3.415
C 10.13.120.0/24 is directly connected, GigabitEthernet3.420
L 10.13.120.1/32 is directly connected, GigabitEthernet3.420
D 10.23.90.0/24 [90/15360] via 10.13.90.3, 4d19h, GigabitEthernet3.390
[90/15360] via 10.12.90.2, 4d19h, GigabitEthernet2.390
O 10.23.110.0/24 [110/2] via 10.12.110.2, 4d19h, GigabitEthernet2.410
i L1 10.23.115.0/24 [115/50] via 10.13.115.3, 4d19h, GigabitEthernet3.415
R 10.23.120.0/24
[120/1] via 10.13.120.3, 00:00:20, GigabitEthernet3.420
[120/1] via 10.12.120.2, 00:00:17, GigabitEthernet2.420
'''
showIpv6RouteUpdated_default = '''\
genie_Router#show ipv6 route
IPv6 Routing Table - default - 24 entries
Codes: C - Connected, L - Local, S - Static, U - Per-user Static route
B - BGP, R - RIP, H - NHRP, I1 - ISIS L1
I2 - ISIS L2, IA - ISIS interarea, IS - ISIS summary, D - EIGRP
EX - EIGRP external, ND - ND Default, NDp - ND Prefix, DCE - Destination
NDr - Redirect, RL - RPL, O - OSPF Intra, OI - OSPF Inter
OE1 - OSPF ext 1, OE2 - OSPF ext 2, ON1 - OSPF NSSA ext 1
ON2 - OSPF NSSA ext 2, la - LISP alt, lr - LISP site-registrations
ld - LISP dyn-eid, lA - LISP away, le - LISP extranet-policy
a - Application
LC 2001:1:1:1::1/128 [0/0]
via Loopback0, receive
D 2001:2:2:2::2/128 [90/10752]
via FE80::F816:3EFF:FE21:73F6, GigabitEthernet2.90
D 2001:3:3:3::3/128 [90/2570240]
via FE80::5C00:80FF:FE02:7, GigabitEthernet3.90
C 2001:10:12:90::/64 [0/0]
via GigabitEthernet2.90, directly connected
L 2001:10:12:90::1/128 [0/0]
via GigabitEthernet2.90, receive
C 2001:10:12:110::/64 [0/0]
via GigabitEthernet2.110, directly connected
L 2001:10:12:110::1/128 [0/0]
via GigabitEthernet2.110, receive
C 2001:10:12:115::/64 [0/0]
via GigabitEthernet2.115, directly connected
L 2001:10:12:115::1/128 [0/0]
via GigabitEthernet2.115, receive
C 2001:10:12:120::/64 [0/0]
via GigabitEthernet2.120, directly connected
L 2001:10:12:120::1/128 [0/0]
via GigabitEthernet2.120, receive
C 2001:10:13:90::/64 [0/0]
via GigabitEthernet3.90, directly connected
L 2001:10:13:90::1/128 [0/0]
via GigabitEthernet3.90, receive
C 2001:10:13:110::/64 [0/0]
via GigabitEthernet3.110, directly connected
L 2001:10:13:110::1/128 [0/0]
via GigabitEthernet3.110, receive
C 2001:10:13:115::/64 [0/0]
via GigabitEthernet3.115, directly connected
L 2001:10:13:115::1/128 [0/0]
via GigabitEthernet3.115, receive
C 2001:10:13:120::/64 [0/0]
via GigabitEthernet3.120, directly connected
L 2001:10:13:120::1/128 [0/0]
via GigabitEthernet3.120, receive
D 2001:10:23:90::/64 [90/15360]
via FE80::F816:3EFF:FE21:73F6, GigabitEthernet2.90
via FE80::5C00:80FF:FE02:7, GigabitEthernet3.90
O 2001:10:23:110::/64 [110/2]
via FE80::F816:3EFF:FE21:73F6, GigabitEthernet2.110
I1 2001:10:23:115::/64 [115/20]
via FE80::F816:3EFF:FE21:73F6, GigabitEthernet2.115
R 2001:10:23:120::/64 [120/2]
via FE80::5C00:80FF:FE02:7, GigabitEthernet3.120
L FF00::/8 [0/0]
via Null0, receive
'''
showIpv6RouteUpdated_VRF1 = '''\
genie_Router#show ipv6 route vrf VRF1
IPv6 Routing Table - VRF1 - 23 entries
Codes: C - Connected, L - Local, S - Static, U - Per-user Static route
B - BGP, R - RIP, H - NHRP, I1 - ISIS L1
I2 - ISIS L2, IA - ISIS interarea, IS - ISIS summary, D - EIGRP
EX - EIGRP external, ND - ND Default, NDp - ND Prefix, DCE - Destination
NDr - Redirect, RL - RPL, O - OSPF Intra, OI - OSPF Inter
OE1 - OSPF ext 1, OE2 - OSPF ext 2, ON1 - OSPF NSSA ext 1
ON2 - OSPF NSSA ext 2, la - LISP alt, lr - LISP site-registrations
ld - LISP dyn-eid, lA - LISP away, le - LISP extranet-policy
a - Application
LC 2001:1:1:1::1/128 [0/0]
via Loopback300, receive
D 2001:2:2:2::2/128 [90/10752]
via FE80::F816:3EFF:FE21:73F6, GigabitEthernet2.390
D 2001:3:3:3::3/128 [90/2570240]
via FE80::5C00:80FF:FE02:7, GigabitEthernet3.390
C 2001:10:12:90::/64 [0/0]
via GigabitEthernet2.390, directly connected
L 2001:10:12:90::1/128 [0/0]
via GigabitEthernet2.390, receive
C 2001:10:12:110::/64 [0/0]
via GigabitEthernet2.410, directly connected
L 2001:10:12:110::1/128 [0/0]
via GigabitEthernet2.410, receive
C 2001:10:12:115::/64 [0/0]
via GigabitEthernet2.415, directly connected
L 2001:10:12:115::1/128 [0/0]
via GigabitEthernet2.415, receive
C 2001:10:12:120::/64 [0/0]
via GigabitEthernet2.420, directly connected
L 2001:10:12:120::1/128 [0/0]
via GigabitEthernet2.420, receive
C 2001:10:13:90::/64 [0/0]
via GigabitEthernet3.390, directly connected
L 2001:10:13:90::1/128 [0/0]
via GigabitEthernet3.390, receive
C 2001:10:13:110::/64 [0/0]
via GigabitEthernet3.410, directly connected
L 2001:10:13:110::1/128 [0/0]
via GigabitEthernet3.410, receive
C 2001:10:13:115::/64 [0/0]
via GigabitEthernet3.415, directly connected
L 2001:10:13:115::1/128 [0/0]
via GigabitEthernet3.415, receive
C 2001:10:13:120::/64 [0/0]
via GigabitEthernet3.420, directly connected
L 2001:10:13:120::1/128 [0/0]
via GigabitEthernet3.420, receive
D 2001:10:23:90::/64 [90/15360]
via FE80::F816:3EFF:FE21:73F6, GigabitEthernet2.390
via FE80::5C00:80FF:FE02:7, GigabitEthernet3.390
I1 2001:10:23:115::/64 [115/50]
via FE80::5C00:80FF:FE02:7, GigabitEthernet3.415
R 2001:10:23:120::/64 [120/2]
via FE80::5C00:80FF:FE02:7, GigabitEthernet3.420
L FF00::/8 [0/0]
via Null0, receive
'''
routeOpsOutput_vrf1 = {
"vrf": {
"VRF1": {
"address_family": {
"ipv4": {
"routes": {
"10.23.120.0/24": {
"route": "10.23.120.0/24",
"active": True,
"route_preference": 120,
"metric": 1,
"source_protocol": "rip",
"source_protocol_codes": "R",
"next_hop": {
"next_hop_list": {
2: {
"index": 2,
"next_hop": "10.12.120.2",
"outgoing_interface": "GigabitEthernet2.420",
"updated": "00:00:17",
},
1: {
"index": 1,
"next_hop": "10.13.120.3",
"outgoing_interface": "GigabitEthernet3.420",
"updated": "00:00:20",
},
}
},
},
"10.23.115.0/24": {
"route": "10.23.115.0/24",
"active": True,
"route_preference": 115,
"metric": 50,
"source_protocol": "isis",
"source_protocol_codes": "i L1",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.13.115.3",
"outgoing_interface": "GigabitEthernet3.415",
"updated": "4d19h",
}
}
},
},
"10.23.110.0/24": {
"route": "10.23.110.0/24",
"active": True,
"route_preference": 110,
"metric": 2,
"source_protocol": "ospf",
"source_protocol_codes": "O",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.12.110.2",
"outgoing_interface": "GigabitEthernet2.410",
"updated": "4d19h",
}
}
},
},
"10.23.90.0/24": {
"route": "10.23.90.0/24",
"active": True,
"route_preference": 90,
"metric": 15360,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
2: {
"index": 2,
"next_hop": "10.12.90.2",
"outgoing_interface": "GigabitEthernet2.390",
"updated": "4d19h",
},
1: {
"index": 1,
"next_hop": "10.13.90.3",
"outgoing_interface": "GigabitEthernet3.390",
"updated": "4d19h",
},
}
},
},
"10.13.120.1/32": {
"route": "10.13.120.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.420": {
"outgoing_interface": "GigabitEthernet3.420"
}
}
},
},
"10.13.120.0/24": {
"route": "10.13.120.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.420": {
"outgoing_interface": "GigabitEthernet3.420"
}
}
},
},
"10.13.115.1/32": {
"route": "10.13.115.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.415": {
"outgoing_interface": "GigabitEthernet3.415"
}
}
},
},
"10.13.115.0/24": {
"route": "10.13.115.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.415": {
"outgoing_interface": "GigabitEthernet3.415"
}
}
},
},
"10.13.110.1/32": {
"route": "10.13.110.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.410": {
"outgoing_interface": "GigabitEthernet3.410"
}
}
},
},
"10.13.110.0/24": {
"route": "10.13.110.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.410": {
"outgoing_interface": "GigabitEthernet3.410"
}
}
},
},
"10.13.90.1/32": {
"route": "10.13.90.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.390": {
"outgoing_interface": "GigabitEthernet3.390"
}
}
},
},
"10.13.90.0/24": {
"route": "10.13.90.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.390": {
"outgoing_interface": "GigabitEthernet3.390"
}
}
},
},
"10.12.120.1/32": {
"route": "10.12.120.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.420": {
"outgoing_interface": "GigabitEthernet2.420"
}
}
},
},
"10.12.120.0/24": {
"route": "10.12.120.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.420": {
"outgoing_interface": "GigabitEthernet2.420"
}
}
},
},
"10.12.115.1/32": {
"route": "10.12.115.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.415": {
"outgoing_interface": "GigabitEthernet2.415"
}
}
},
},
"10.12.115.0/24": {
"route": "10.12.115.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.415": {
"outgoing_interface": "GigabitEthernet2.415"
}
}
},
},
"10.12.110.1/32": {
"route": "10.12.110.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.410": {
"outgoing_interface": "GigabitEthernet2.410"
}
}
},
},
"10.12.110.0/24": {
"route": "10.12.110.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.410": {
"outgoing_interface": "GigabitEthernet2.410"
}
}
},
},
"10.12.90.1/32": {
"route": "10.12.90.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.390": {
"outgoing_interface": "GigabitEthernet2.390"
}
}
},
},
"10.12.90.0/24": {
"route": "10.12.90.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.390": {
"outgoing_interface": "GigabitEthernet2.390"
}
}
},
},
"10.36.3.3/32": {
"route": "10.36.3.3/32",
"active": True,
"route_preference": 90,
"metric": 2570240,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.13.90.3",
"outgoing_interface": "GigabitEthernet3.390",
"updated": "4d19h",
}
}
},
},
"10.16.2.2/32": {
"route": "10.16.2.2/32",
"active": True,
"route_preference": 90,
"metric": 10752,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.12.90.2",
"outgoing_interface": "GigabitEthernet2.390",
"updated": "4d19h",
}
}
},
},
"10.4.1.1/32": {
"route": "10.4.1.1/32",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"Loopback300": {"outgoing_interface": "Loopback300"}
}
},
},
}
},
"ipv6": {
"routes": {
"FF00::/8": {
"route": "FF00::/8",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"Null0": {"outgoing_interface": "Null0"}
}
},
},
"2001:10:23:120::/64": {
"route": "2001:10:23:120::/64",
"active": True,
"route_preference": 120,
"metric": 2,
"source_protocol": "rip",
"source_protocol_codes": "R",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "FE80::5C00:80FF:FE02:7",
"outgoing_interface": "GigabitEthernet3.420",
}
}
},
},
"2001:10:23:115::/64": {
"route": "2001:10:23:115::/64",
"active": True,
"route_preference": 115,
"metric": 50,
"source_protocol": "isis",
"source_protocol_codes": "I1",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "FE80::5C00:80FF:FE02:7",
"outgoing_interface": "GigabitEthernet3.415",
}
}
},
},
"2001:10:23:90::/64": {
"route": "2001:10:23:90::/64",
"active": True,
"route_preference": 90,
"metric": 15360,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
2: {
"index": 2,
"next_hop": "FE80::5C00:80FF:FE02:7",
"outgoing_interface": "GigabitEthernet3.390",
},
1: {
"index": 1,
"next_hop": "FE80::F816:3EFF:FE21:73F6",
"outgoing_interface": "GigabitEthernet2.390",
},
}
},
},
"2001:10:13:120::1/128": {
"route": "2001:10:13:120::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet3.420",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:13:120::/64": {
"route": "2001:10:13:120::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet3.420"}
}
},
},
"2001:10:13:115::1/128": {
"route": "2001:10:13:115::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet3.415",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:13:115::/64": {
"route": "2001:10:13:115::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet3.415"}
}
},
},
"2001:10:13:110::1/128": {
"route": "2001:10:13:110::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet3.410",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:13:110::/64": {
"route": "2001:10:13:110::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet3.410"}
}
},
},
"2001:10:13:90::1/128": {
"route": "2001:10:13:90::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet3.390",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:13:90::/64": {
"route": "2001:10:13:90::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet3.390"}
}
},
},
"2001:10:12:120::1/128": {
"route": "2001:10:12:120::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet2.420",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:12:120::/64": {
"route": "2001:10:12:120::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet2.420"}
}
},
},
"2001:10:12:115::1/128": {
"route": "2001:10:12:115::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet2.415",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:12:115::/64": {
"route": "2001:10:12:115::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet2.415"}
}
},
},
"2001:10:12:110::1/128": {
"route": "2001:10:12:110::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet2.410",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:12:110::/64": {
"route": "2001:10:12:110::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet2.410"}
}
},
},
"2001:10:12:90::1/128": {
"route": "2001:10:12:90::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet2.390",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:12:90::/64": {
"route": "2001:10:12:90::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet2.390"}
}
},
},
"2001:3:3:3::3/128": {
"route": "2001:3:3:3::3/128",
"active": True,
"route_preference": 90,
"metric": 2570240,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "FE80::5C00:80FF:FE02:7",
"outgoing_interface": "GigabitEthernet3.390",
}
}
},
},
"2001:2:2:2::2/128": {
"route": "2001:2:2:2::2/128",
"active": True,
"route_preference": 90,
"metric": 10752,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "FE80::F816:3EFF:FE21:73F6",
"outgoing_interface": "GigabitEthernet2.390",
}
}
},
},
"2001:1:1:1::1/128": {
"route": "2001:1:1:1::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local_connected",
"source_protocol_codes": "LC",
"next_hop": {
"outgoing_interface": {
"Loopback300": {"outgoing_interface": "Loopback300"}
}
},
},
}
},
}
}
}
}
routeOpsOutput = {
"vrf": {
"VRF1": {
"address_family": {
"ipv4": {
"routes": {
"10.23.120.0/24": {
"route": "10.23.120.0/24",
"active": True,
"route_preference": 120,
"metric": 1,
"source_protocol": "rip",
"source_protocol_codes": "R",
"next_hop": {
"next_hop_list": {
2: {
"index": 2,
"next_hop": "10.12.120.2",
"outgoing_interface": "GigabitEthernet2.420",
"updated": "00:00:17",
},
1: {
"index": 1,
"next_hop": "10.13.120.3",
"outgoing_interface": "GigabitEthernet3.420",
"updated": "00:00:20",
},
}
},
},
"10.23.115.0/24": {
"route": "10.23.115.0/24",
"active": True,
"route_preference": 115,
"metric": 50,
"source_protocol": "isis",
"source_protocol_codes": "i L1",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.13.115.3",
"outgoing_interface": "GigabitEthernet3.415",
"updated": "4d19h",
}
}
},
},
"10.23.110.0/24": {
"route": "10.23.110.0/24",
"active": True,
"route_preference": 110,
"metric": 2,
"source_protocol": "ospf",
"source_protocol_codes": "O",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.12.110.2",
"outgoing_interface": "GigabitEthernet2.410",
"updated": "4d19h",
}
}
},
},
"10.23.90.0/24": {
"route": "10.23.90.0/24",
"active": True,
"route_preference": 90,
"metric": 15360,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
2: {
"index": 2,
"next_hop": "10.12.90.2",
"outgoing_interface": "GigabitEthernet2.390",
"updated": "4d19h",
},
1: {
"index": 1,
"next_hop": "10.13.90.3",
"outgoing_interface": "GigabitEthernet3.390",
"updated": "4d19h",
},
}
},
},
"10.13.120.1/32": {
"route": "10.13.120.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.420": {
"outgoing_interface": "GigabitEthernet3.420"
}
}
},
},
"10.13.120.0/24": {
"route": "10.13.120.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.420": {
"outgoing_interface": "GigabitEthernet3.420"
}
}
},
},
"10.13.115.1/32": {
"route": "10.13.115.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.415": {
"outgoing_interface": "GigabitEthernet3.415"
}
}
},
},
"10.13.115.0/24": {
"route": "10.13.115.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.415": {
"outgoing_interface": "GigabitEthernet3.415"
}
}
},
},
"10.13.110.1/32": {
"route": "10.13.110.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.410": {
"outgoing_interface": "GigabitEthernet3.410"
}
}
},
},
"10.13.110.0/24": {
"route": "10.13.110.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.410": {
"outgoing_interface": "GigabitEthernet3.410"
}
}
},
},
"10.13.90.1/32": {
"route": "10.13.90.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.390": {
"outgoing_interface": "GigabitEthernet3.390"
}
}
},
},
"10.13.90.0/24": {
"route": "10.13.90.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.390": {
"outgoing_interface": "GigabitEthernet3.390"
}
}
},
},
"10.12.120.1/32": {
"route": "10.12.120.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.420": {
"outgoing_interface": "GigabitEthernet2.420"
}
}
},
},
"10.12.120.0/24": {
"route": "10.12.120.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.420": {
"outgoing_interface": "GigabitEthernet2.420"
}
}
},
},
"10.12.115.1/32": {
"route": "10.12.115.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.415": {
"outgoing_interface": "GigabitEthernet2.415"
}
}
},
},
"10.12.115.0/24": {
"route": "10.12.115.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.415": {
"outgoing_interface": "GigabitEthernet2.415"
}
}
},
},
"10.12.110.1/32": {
"route": "10.12.110.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.410": {
"outgoing_interface": "GigabitEthernet2.410"
}
}
},
},
"10.12.110.0/24": {
"route": "10.12.110.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.410": {
"outgoing_interface": "GigabitEthernet2.410"
}
}
},
},
"10.12.90.1/32": {
"route": "10.12.90.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.390": {
"outgoing_interface": "GigabitEthernet2.390"
}
}
},
},
"10.12.90.0/24": {
"route": "10.12.90.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.390": {
"outgoing_interface": "GigabitEthernet2.390"
}
}
},
},
"10.36.3.3/32": {
"route": "10.36.3.3/32",
"active": True,
"route_preference": 90,
"metric": 2570240,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.13.90.3",
"outgoing_interface": "GigabitEthernet3.390",
"updated": "4d19h",
}
}
},
},
"10.16.2.2/32": {
"route": "10.16.2.2/32",
"active": True,
"route_preference": 90,
"metric": 10752,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.12.90.2",
"outgoing_interface": "GigabitEthernet2.390",
"updated": "4d19h",
}
}
},
},
"10.4.1.1/32": {
"route": "10.4.1.1/32",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"Loopback300": {"outgoing_interface": "Loopback300"}
}
},
},
}
}
}
},
"default": {
"address_family": {
"ipv4": {
"routes": {
"10.23.120.0/24": {
"route": "10.23.120.0/24",
"active": True,
"route_preference": 120,
"metric": 1,
"source_protocol": "rip",
"source_protocol_codes": "R",
"next_hop": {
"next_hop_list": {
2: {
"index": 2,
"next_hop": "10.12.120.2",
"outgoing_interface": "GigabitEthernet2.120",
"updated": "00:00:02",
},
1: {
"index": 1,
"next_hop": "10.13.120.3",
"outgoing_interface": "GigabitEthernet3.120",
"updated": "00:00:08",
},
}
},
},
"10.23.115.0/24": {
"route": "10.23.115.0/24",
"active": True,
"route_preference": 115,
"metric": 20,
"source_protocol": "isis",
"source_protocol_codes": "i L1",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.12.115.2",
"outgoing_interface": "GigabitEthernet2.115",
"updated": "4d19h",
}
}
},
},
"10.23.110.0/24": {
"route": "10.23.110.0/24",
"active": True,
"route_preference": 110,
"metric": 2,
"source_protocol": "ospf",
"source_protocol_codes": "O",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.12.110.2",
"outgoing_interface": "GigabitEthernet2.110",
"updated": "4d19h",
}
}
},
},
"10.23.90.0/24": {
"route": "10.23.90.0/24",
"active": True,
"route_preference": 90,
"metric": 15360,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
2: {
"index": 2,
"next_hop": "10.12.90.2",
"outgoing_interface": "GigabitEthernet2.90",
"updated": "4d19h",
},
1: {
"index": 1,
"next_hop": "10.13.90.3",
"outgoing_interface": "GigabitEthernet3.90",
"updated": "4d19h",
},
}
},
},
"10.13.120.1/32": {
"route": "10.13.120.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.120": {
"outgoing_interface": "GigabitEthernet3.120"
}
}
},
},
"10.13.120.0/24": {
"route": "10.13.120.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.120": {
"outgoing_interface": "GigabitEthernet3.120"
}
}
},
},
"10.13.115.1/32": {
"route": "10.13.115.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.115": {
"outgoing_interface": "GigabitEthernet3.115"
}
}
},
},
"10.13.115.0/24": {
"route": "10.13.115.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.115": {
"outgoing_interface": "GigabitEthernet3.115"
}
}
},
},
"10.13.110.1/32": {
"route": "10.13.110.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.110": {
"outgoing_interface": "GigabitEthernet3.110"
}
}
},
},
"10.13.110.0/24": {
"route": "10.13.110.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.110": {
"outgoing_interface": "GigabitEthernet3.110"
}
}
},
},
"10.13.90.1/32": {
"route": "10.13.90.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.90": {
"outgoing_interface": "GigabitEthernet3.90"
}
}
},
},
"10.13.90.0/24": {
"route": "10.13.90.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet3.90": {
"outgoing_interface": "GigabitEthernet3.90"
}
}
},
},
"10.12.120.1/32": {
"route": "10.12.120.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.120": {
"outgoing_interface": "GigabitEthernet2.120"
}
}
},
},
"10.12.120.0/24": {
"route": "10.12.120.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.120": {
"outgoing_interface": "GigabitEthernet2.120"
}
}
},
},
"10.12.115.1/32": {
"route": "10.12.115.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.115": {
"outgoing_interface": "GigabitEthernet2.115"
}
}
},
},
"10.12.115.0/24": {
"route": "10.12.115.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.115": {
"outgoing_interface": "GigabitEthernet2.115"
}
}
},
},
"10.12.110.1/32": {
"route": "10.12.110.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.110": {
"outgoing_interface": "GigabitEthernet2.110"
}
}
},
},
"10.12.110.0/24": {
"route": "10.12.110.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.110": {
"outgoing_interface": "GigabitEthernet2.110"
}
}
},
},
"10.12.90.1/32": {
"route": "10.12.90.1/32",
"active": True,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.90": {
"outgoing_interface": "GigabitEthernet2.90"
}
}
},
},
"10.12.90.0/24": {
"route": "10.12.90.0/24",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"GigabitEthernet2.90": {
"outgoing_interface": "GigabitEthernet2.90"
}
}
},
},
"10.36.3.3/32": {
"route": "10.36.3.3/32",
"active": True,
"route_preference": 90,
"metric": 2570240,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.13.90.3",
"outgoing_interface": "GigabitEthernet3.90",
"updated": "4d19h",
}
}
},
},
"10.16.2.2/32": {
"route": "10.16.2.2/32",
"active": True,
"route_preference": 90,
"metric": 10752,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.12.90.2",
"outgoing_interface": "GigabitEthernet2.90",
"updated": "4d19h",
}
}
},
},
"10.4.1.1/32": {
"route": "10.4.1.1/32",
"active": True,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"outgoing_interface": {
"Loopback0": {"outgoing_interface": "Loopback0"}
}
},
},
}
},
"ipv6": {
"routes": {
"FF00::/8": {
"route": "FF00::/8",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"outgoing_interface": {
"Null0": {"outgoing_interface": "Null0"}
}
},
},
"2001:10:23:120::/64": {
"route": "2001:10:23:120::/64",
"active": True,
"route_preference": 120,
"metric": 2,
"source_protocol": "rip",
"source_protocol_codes": "R",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "FE80::5C00:80FF:FE02:7",
"outgoing_interface": "GigabitEthernet3.120",
}
}
},
},
"2001:10:23:115::/64": {
"route": "2001:10:23:115::/64",
"active": True,
"route_preference": 115,
"metric": 20,
"source_protocol": "isis",
"source_protocol_codes": "I1",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "FE80::F816:3EFF:FE21:73F6",
"outgoing_interface": "GigabitEthernet2.115",
}
}
},
},
"2001:10:23:110::/64": {
"route": "2001:10:23:110::/64",
"active": True,
"route_preference": 110,
"metric": 2,
"source_protocol": "ospf",
"source_protocol_codes": "O",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "FE80::F816:3EFF:FE21:73F6",
"outgoing_interface": "GigabitEthernet2.110",
}
}
},
},
"2001:10:23:90::/64": {
"route": "2001:10:23:90::/64",
"active": True,
"route_preference": 90,
"metric": 15360,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
2: {
"index": 2,
"next_hop": "FE80::5C00:80FF:FE02:7",
"outgoing_interface": "GigabitEthernet3.90",
},
1: {
"index": 1,
"next_hop": "FE80::F816:3EFF:FE21:73F6",
"outgoing_interface": "GigabitEthernet2.90",
},
}
},
},
"2001:10:13:120::1/128": {
"route": "2001:10:13:120::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet3.120",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:13:120::/64": {
"route": "2001:10:13:120::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet3.120"}
}
},
},
"2001:10:13:115::1/128": {
"route": "2001:10:13:115::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet3.115",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:13:115::/64": {
"route": "2001:10:13:115::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet3.115"}
}
},
},
"2001:10:13:110::1/128": {
"route": "2001:10:13:110::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet3.110",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:13:110::/64": {
"route": "2001:10:13:110::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet3.110"}
}
},
},
"2001:10:13:90::1/128": {
"route": "2001:10:13:90::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet3.90",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:13:90::/64": {
"route": "2001:10:13:90::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet3.90"}
}
},
},
"2001:10:12:120::1/128": {
"route": "2001:10:12:120::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet2.120",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:12:120::/64": {
"route": "2001:10:12:120::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet2.120"}
}
},
},
"2001:10:12:115::1/128": {
"route": "2001:10:12:115::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet2.115",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:12:115::/64": {
"route": "2001:10:12:115::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet2.115"}
}
},
},
"2001:10:12:110::1/128": {
"route": "2001:10:12:110::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet2.110",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:12:110::/64": {
"route": "2001:10:12:110::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet2.110"}
}
},
},
"2001:10:12:90::1/128": {
"route": "2001:10:12:90::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local",
"source_protocol_codes": "L",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "GigabitEthernet2.90",
"outgoing_interface": "receive",
}
}
},
},
"2001:10:12:90::/64": {
"route": "2001:10:12:90::/64",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "connected",
"source_protocol_codes": "C",
"next_hop": {
"next_hop_list": {
1: {"index": 1, "next_hop": "GigabitEthernet2.90"}
}
},
},
"2001:3:3:3::3/128": {
"route": "2001:3:3:3::3/128",
"active": True,
"route_preference": 90,
"metric": 2570240,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "FE80::5C00:80FF:FE02:7",
"outgoing_interface": "GigabitEthernet3.90",
}
}
},
},
"2001:2:2:2::2/128": {
"route": "2001:2:2:2::2/128",
"active": True,
"route_preference": 90,
"metric": 10752,
"source_protocol": "eigrp",
"source_protocol_codes": "D",
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "FE80::F816:3EFF:FE21:73F6",
"outgoing_interface": "GigabitEthernet2.90",
}
}
},
},
"2001:1:1:1::1/128": {
"route": "2001:1:1:1::1/128",
"active": True,
"route_preference": 0,
"metric": 0,
"source_protocol": "local_connected",
"source_protocol_codes": "LC",
"next_hop": {
"outgoing_interface": {
"Loopback0": {"outgoing_interface": "Loopback0"}
}
},
},
}
},
}
},
}
}
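# --- Illustrative consistency check (a sketch, not part of the original
# fixtures): both golden dicts above describe the same VRF1 IPv4 routing
# table, so their ipv4 branches should be identical. The test class below
# is an assumption added for illustration.
import unittest

class TestGoldenOutputsConsistent(unittest.TestCase):
    def test_vrf1_ipv4_branch_matches(self):
        self.assertEqual(
            routeOpsOutput_vrf1["vrf"]["VRF1"]["address_family"]["ipv4"],
            routeOpsOutput["vrf"]["VRF1"]["address_family"]["ipv4"],
        )

if __name__ == "__main__":
    unittest.main()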
| 52.052805
| 92
| 0.262047
| 6,386
| 110,404
| 4.376605
| 0.036329
| 0.062113
| 0.078858
| 0.058142
| 0.971019
| 0.960464
| 0.952449
| 0.940284
| 0.925149
| 0.914702
| 0
| 0.158234
| 0.645728
| 110,404
| 2,120
| 93
| 52.077358
| 0.556337
| 0.000571
| 0
| 0.68331
| 0
| 0.00951
| 0.314474
| 0.04145
| 0
| 0
| 0.000499
| 0
| 0
| 1
| 0
| false
| 0
| 0.001902
| 0
| 0.005706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 64b6651bba898b1226c9e67230f353011e16122e
| 173
| py
| Python
| all_feature_transform/__init__.py
| LuChungYing/yt8mtest
| bae523eda6bd7e88fc75632c7ee8c023d47cd9a9
| ["Apache-2.0"]
| 196
| 2017-06-16T12:06:56.000Z
| 2022-02-18T10:50:43.000Z
| all_feature_transform/__init__.py
| LuChungYing/yt8mtest
| bae523eda6bd7e88fc75632c7ee8c023d47cd9a9
| ["Apache-2.0"]
| 5
| 2017-08-04T02:37:34.000Z
| 2018-10-27T18:32:38.000Z
| youtube-8m-wangheda/all_feature_transform/__init__.py
| wangheda/youtube-8m
| 07e54b387ee027cb58b0c14f5eb7c88cfa516d58
| ["Apache-2.0"]
| 71
| 2017-06-20T15:04:13.000Z
| 2021-10-06T16:43:32.000Z
|
from default_transformer import *
from identical_transformer import *
from engineer_transformer import *
from avg_transformer import *
from resolution_transformer import *
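# These star-imports re-export every transformer implementation so a
# consumer can pull all transformer classes from the package in a single
# `from all_feature_transform import *`; the five modules are assumed to
# live alongside this __init__.py on the import path.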
| 24.714286
| 36
| 0.849711
| 20
| 173
| 7.1
| 0.4
| 0.598592
| 0.591549
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121387
| 173
| 6
| 37
| 28.833333
| 0.934211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| 64d1ddec7cbe1c73d1b72d55235c222ff4ee95e5
| 124
| py
| Python
| services/auth/api/v1/token/__init__.py
| amthorn/qutex
| 2bc441e63cba38d80aa9438b6278b732d44849a4
| ["MIT"]
| null
| null
| null
| services/auth/api/v1/token/__init__.py
| amthorn/qutex
| 2bc441e63cba38d80aa9438b6278b732d44849a4
| ["MIT"]
| 239
| 2021-05-12T03:54:32.000Z
| 2022-03-31T06:15:52.000Z
| services/auth/api/v1/token/__init__.py
| amthorn/qutex
| 2bc441e63cba38d80aa9438b6278b732d44849a4
| ["MIT"]
| 2
| 2022-02-17T23:13:12.000Z
| 2022-03-02T20:28:41.000Z
|
from api.v1.token import check # noqa
from api.v1.token import generate # noqa
from api.v1.token import invalidate # noqa
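# The `# noqa` markers suppress linter warnings about unused imports:
# the imports exist purely to re-export the check/generate/invalidate
# submodules at the package level, a common pattern for flat package APIs.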
| 41.333333
| 43
| 0.766129
| 21
| 124
| 4.52381
| 0.428571
| 0.221053
| 0.284211
| 0.442105
| 0.715789
| 0.505263
| 0
| 0
| 0
| 0
| 0
| 0.028846
| 0.16129
| 124
| 3
| 43
| 41.333333
| 0.884615
| 0.112903
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 8
| 64d658d4d4ddde958d3285d2a4bd9c16a7a4e2fd
| 1,836
| py
| Python
| tests/app/notify_client/test_email_branding_client.py
| karlchillmaid/notifications-admin
| 9ef6da4ef9e2fa97b7debb4b573cb035a5cb8880
| ["MIT"]
| null
| null
| null
| tests/app/notify_client/test_email_branding_client.py
| karlchillmaid/notifications-admin
| 9ef6da4ef9e2fa97b7debb4b573cb035a5cb8880
| ["MIT"]
| null
| null
| null
| tests/app/notify_client/test_email_branding_client.py
| karlchillmaid/notifications-admin
| 9ef6da4ef9e2fa97b7debb4b573cb035a5cb8880
| ["MIT"]
| null
| null
| null
from app.notify_client.email_branding_client import EmailBrandingClient
def test_get_email_branding(mocker, fake_uuid):
mock_get = mocker.patch('app.notify_client.email_branding_client.EmailBrandingClient.get')
EmailBrandingClient().get_email_branding(fake_uuid)
mock_get.assert_called_once_with(
url='/email-branding/{}'.format(fake_uuid)
)
def test_get_all_email_branding(mocker):
mock_get = mocker.patch('app.notify_client.email_branding_client.EmailBrandingClient.get')
EmailBrandingClient().get_all_email_branding()
mock_get.assert_called_once_with(
url='/email-branding'
)
def test_get_letter_email_branding(mocker):
mock_get = mocker.patch('app.notify_client.email_branding_client.EmailBrandingClient.get')
EmailBrandingClient().get_letter_email_branding()
mock_get.assert_called_once_with(
url='/dvla_organisations'
)
def test_create_email_branding(mocker):
org_data = {'logo': 'test.png', 'name': 'test name', 'colour': 'red'}
mock_post = mocker.patch('app.notify_client.email_branding_client.EmailBrandingClient.post')
EmailBrandingClient().create_email_branding(logo=org_data['logo'], name=org_data['name'], colour=org_data['colour'])
mock_post.assert_called_once_with(
url='/email-branding',
data=org_data
)
def test_update_email_branding(mocker, fake_uuid):
org_data = {'logo': 'test.png', 'name': 'test name', 'colour': 'red'}
mock_post = mocker.patch('app.notify_client.email_branding_client.EmailBrandingClient.post')
EmailBrandingClient().update_email_branding(
branding_id=fake_uuid, logo=org_data['logo'], name=org_data['name'], colour=org_data['colour'])
mock_post.assert_called_once_with(
url='/email-branding/{}'.format(fake_uuid),
data=org_data
)
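# --- Refactor sketch (an assumption, not from the original file): the
# org_data dict is duplicated across the create/update tests above and
# could be lifted into a shared pytest fixture, e.g.:
import pytest

@pytest.fixture
def org_data():
    return {'logo': 'test.png', 'name': 'test name', 'colour': 'red'}

def test_create_email_branding_with_fixture(mocker, org_data):
    mock_post = mocker.patch('app.notify_client.email_branding_client.EmailBrandingClient.post')
    EmailBrandingClient().create_email_branding(**org_data)
    mock_post.assert_called_once_with(url='/email-branding', data=org_data)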
| 36
| 120
| 0.744009
| 234
| 1,836
| 5.452991
| 0.15812
| 0.203762
| 0.070533
| 0.094044
| 0.81348
| 0.77116
| 0.744514
| 0.744514
| 0.744514
| 0.661442
| 0
| 0
| 0.132353
| 1,836
| 50
| 121
| 36.72
| 0.801004
| 0
| 0
| 0.388889
| 0
| 0
| 0.271242
| 0.172658
| 0
| 0
| 0
| 0
| 0.138889
| 1
| 0.138889
| false
| 0
| 0.027778
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| b3e03c06d5ca690f0ce68cc1d1f8c5160acf04ff
| 12,389
| py
| Python
| dict_key.py
| Magnetization/GPK-CHISRC
| 7a305b0f2e98aa58f53313d579ca46181ae67cfa
| ["MIT"]
| 8
| 2019-05-09T13:21:20.000Z
| 2019-06-08T01:56:52.000Z
| dict_key.py
| Magnetization/GPK
| 7a305b0f2e98aa58f53313d579ca46181ae67cfa
| ["MIT"]
| null
| null
| null
| dict_key.py
| Magnetization/GPK
| 7a305b0f2e98aa58f53313d579ca46181ae67cfa
| ["MIT"]
| null
| null
| null
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import numpy as np
import random
np.seterr(divide='ignore', invalid='ignore')
# Centroid coordinates (x, y) of the 12 key clusters on the keyboard layout.
clustered_centroids = np.array([
    [-77.8, 25], [-42.64516129, 25.32258065], [-12.53125, 25.34375],
    [17.53333333, 25.36666667], [50.05, 25.55], [-29.67741935, 0],
    [29.7, 0], [-49.77777778, -25.66666667], [-19.96428571, -25.64285714],
    [11.97368421, -25.47368421], [42.13888889, -25.44444444], [77.81818182, -25],
])
def decode(key):
'''
This function is to transform the key pressed into the
corresponding key number
'''
return {
"0" : 48,
"1" : 49,
"2" : 50,
"3" : 51,
"4" : 52,
"5" : 53,
"6" : 54,
"7" : 55,
"8" : 56,
"9" : 57,
"a" : 65,
"b" : 66,
"c" : 67,
"d" : 68,
"e" : 69,
"f" : 70,
"g" : 71,
"h" : 72,
"i" : 73,
"j" : 74,
"k" : 75,
"l" : 76,
"m" : 77,
"n" : 78,
"o" : 79,
"p" : 80,
"q" : 81,
"r" : 82,
"s" : 83,
"t" : 84,
"u" : 85,
"v" : 86,
"w" : 87,
"x" : 88,
"y" : 89,
"z" : 90,
"-" : 189,
"=" : 187,
"`" : 192,
"[" : 219,
"]" : 221,
"|" : 220,
";" : 186,
"\'" : 222,
"<" : 188,
"." : 190,
"/" : 191,
"undefined" : 32
}.get(key, 0)  # default return value for unknown keys; can be customized
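# Illustrative sanity checks (not in the original file): printable keys
# map to their JavaScript key codes, unknown keys fall back to 0.
assert decode("a") == 65
assert decode("0") == 48
assert decode("not-a-key") == 0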
def decode_keys(num):
''' decode the keys back into numbers'''
return {
# first row
0 : "`",
1 : "1",
2 : "2",
3 : "3",
4 : "4",
5 : "5",
6 : "6",
7 : "7",
8 : "8",
9 : "9",
10 : "0",
11 : "-",
12 : "=",
# second row
13 : "q",
14 : "w",
15 : "e",
16 : "r",
17 : "t",
18 : "y",
19 : "u",
20 : "i",
21 : "o",
22 : "p",
23 : "[",
24 : "]",
25 : "\\",
# third row
26 : "a",
27 : "s",
28 : "d",
29 : "f",
30 : "g",
31 : "h",
32 : "j",
33 : "k",
34 : "l",
35 : ";",
36 : "'",
# forth row
39 : "z",
40 : "x",
41 : "c",
42 : "v",
43 : "b",
44 : "n",
45 : "m",
46 : ",",
47 : ".",
48 : "/",
# space
53 : "undefined",
54 : "undefined1",
55 : "undefined2",
56 : "undefined3",
57 : "undefined4",
58 : "undefined5",
59 : "undefined6",
60 : "undefined_",
#"undefined7" : 60,
}.get(num,"null") #默认返回值,可自设置
def encode_keys(key):
''' encode the keys into numbers'''
return {
# first row
"Oem_3" : 0, # `
"1" : 1,
"2" : 2,
"3" : 3,
"4" : 4,
"5" : 5,
"6" : 6,
"7" : 7,
"8" : 8,
"9" : 9,
"0" : 10,
"Oem_Minus" : 11, # -
"Oem_Plus" : 12,
# second row
"Q" : 13,
"W" : 14,
"E" : 15,
"R" : 16,
"T" : 17,
"Y" : 18,
"U" : 19,
"I" : 20,
"O" : 21,
"P" : 22,
"Oem_4" : 23, # [
"Oem_6" : 24, # ]
"Oem_5" : 25, # \
# third row
"A" : 26,
"S" : 27,
"D" : 28,
"F" : 29,
"G" : 30,
"H" : 31,
"J" : 32,
"K" : 33,
"L" : 34,
"Oem_1" : 35, # ;
"Oem_7" : 36, # '
# forth row
"Z" : 39,
"X" : 40,
"C" : 41,
"V" : 42,
"B" : 43,
"N" : 44,
"M" : 45,
"Oem_Comma" : 46, # ,
"Oem_Period" : 47, # .
"Oem_2" : 48, # /
# space
# "undefined" :53,
# "undefined1":54,
# "undefined2":55,
# "undefined3":56,
# "undefined4":57,
# "undefined5":58,
# "undefined6":59,
# "undefined_" : 60,
"Space" :53,
"Space1" :54,
"Space2" :55,
"Space3" :56,
"Space4" :57,
"Space5" :58,
"Space6" :59,
"Space_" :60,
}.get(str,"-1") #默认返回值,可自设置
def get_vectors():
vector_list = [[]]
for i in range(0,11):
#vector_list.append([i])
if i != 0:
vector_list.append(get_vector(i,i+12))
vector_list.append(get_vector(i,i+13))
vector_list.append(get_vector(i,i+14))
for i in range(13,26):
if i == 13:
#vector_list.append([i])
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+13))
vector_list.append(get_vector(i,i+14))
elif i == 14:
#vector_list.append([i])
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+12))
vector_list.append(get_vector(i,i+13))
elif i == 23:
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+11))
vector_list.append(get_vector(i,i+12))
vector_list.append(get_vector(i,i+13))
elif i == 24:
#vector_list.append([i])
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+11))
vector_list.append(get_vector(i,i+12))
elif i == 25:
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+11))
else :
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+11))
vector_list.append(get_vector(i,i+12))
vector_list.append(get_vector(i,i+13))
vector_list.append(get_vector(i,i+14))
for i in range(26,37):
if i == 26:
#vector_list.append([i])
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+13))
vector_list.append(get_vector(i,i+14))
elif i == 27:
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+12))
vector_list.append(get_vector(i,i+13))
vector_list.append(get_vector(i,i+14))
elif i == 35:
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+11))
vector_list.append(get_vector(i,i+12))
vector_list.append(get_vector(i,i+13))
elif i == 36:
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+11))
vector_list.append(get_vector(i,i+12))
else :
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+11))
vector_list.append(get_vector(i,i+12))
vector_list.append(get_vector(i,i+13))
vector_list.append(get_vector(i,i+14))
for i in range(39,49):
if i == 39:
#vector_list.append([i])
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+14))
elif i == 40:
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+13))
vector_list.append(get_vector(i,i+14))
elif i == 41:
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+12))
vector_list.append(get_vector(i,i+13))
vector_list.append(get_vector(i,i+14))
elif i == 46:
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+11))
vector_list.append(get_vector(i,i+12))
vector_list.append(get_vector(i,i+13))
elif i == 47:
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+11))
vector_list.append(get_vector(i,i+12))
elif i == 48:
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+11))
else:
#vector_list.append([i])
vector_list.append(get_vector(i,i-14))
vector_list.append(get_vector(i,i-13))
vector_list.append(get_vector(i,i-12))
vector_list.append(get_vector(i,i-11))
vector_list.append(get_vector(i,i-1))
vector_list.append(get_vector(i,i+1))
vector_list.append(get_vector(i,i+11))
vector_list.append(get_vector(i,i+12))
vector_list.append(get_vector(i,i+13))
vector_list.append(get_vector(i,i+14))
result = vector_list[1:]
x = [x_[0] for x_ in result]
y = [y_[1] for y_ in result]
return x, y, result
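# Note: get_vector(i, j) is not shown in this excerpt; it is assumed to be
# defined elsewhere in the original file and to return an (x, y) pair for
# the displacement between keys i and j, which is why each result row is
# unpacked into x and y components above.
# A compact sketch of the ladder in get_vectors(), under the assumption
# that the same (i, offset) pairs are tabulated up front:
def get_vectors_from_offsets(neighbor_offsets):
    # neighbor_offsets: dict mapping a key index to the list of index
    # offsets of its physical neighbors on the keyboard.
    result = []
    for i, offsets in neighbor_offsets.items():
        for d in offsets:
            result.append(get_vector(i, i + d))
    x = [v[0] for v in result]
    y = [v[1] for v in result]
    return x, y, result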
| 31.930412
| 187
| 0.494713
| 1,724
| 12,389
| 3.37297
| 0.12761
| 0.268272
| 0.423732
| 0.441101
| 0.754944
| 0.745486
| 0.745486
| 0.745486
| 0.745486
| 0.745486
| 0
| 0.103782
| 0.340463
| 12,389
| 388
| 188
| 31.930412
| 0.607882
| 0.075712
| 0
| 0.41716
| 0
| 0
| 0.031313
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011834
| false
| 0
| 0.008876
| 0
| 0.032544
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| b3f46bd7f39860e7ee6f13512ddcc9d76b0c8a2e
| 223
| py
| Python
| ch25/change_search_path.py
| eroicaleo/LearningPython
| 297d46eddce6e43ce0c160d2660dff5f5d616800
| ["MIT"]
| 1
| 2020-10-12T13:33:29.000Z
| 2020-10-12T13:33:29.000Z
| ch25/change_search_path.py
| eroicaleo/LearningPython
| 297d46eddce6e43ce0c160d2660dff5f5d616800
| ["MIT"]
| null
| null
| null
| ch25/change_search_path.py
| eroicaleo/LearningPython
| 297d46eddce6e43ce0c160d2660dff5f5d616800
| ["MIT"]
| 1
| 2016-11-09T07:28:45.000Z
| 2016-11-09T07:28:45.000Z
|
#!/usr/bin/env python3
import sys
print(sys.path)
try:
import Recursion
except Exception as e:
print(e)
sys.path.append('../ch19')
print(sys.path)
try:
import Recursion
except Exception as e:
print(e)
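# Note: the second `import Recursion` can succeed after the sys.path
# append because a failed import leaves no entry in sys.modules, so
# Python re-runs the search over the (now extended) path on the retry.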
| 11.15
| 26
| 0.672646
| 34
| 223
| 4.411765
| 0.470588
| 0.14
| 0.16
| 0.2
| 0.72
| 0.72
| 0.72
| 0.72
| 0.72
| 0.72
| 0
| 0.016854
| 0.201794
| 223
| 19
| 27
| 11.736842
| 0.825843
| 0.09417
| 0
| 0.833333
| 0
| 0
| 0.034826
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| b6002c90bec5405a68953a21b4172dbfef85300e
| 31,635
| py
| Python
| arista/tag/v2/services/gen_pb2_grpc.py
| barryCrunch/cloudvision-python
| bafb55a57743141ef419ce8b6f3adda31a18ca42
| ["Apache-2.0"]
| 8
| 2020-10-22T13:19:00.000Z
| 2021-12-16T02:16:47.000Z
| arista/tag/v2/services/gen_pb2_grpc.py
| barryCrunch/cloudvision-python
| bafb55a57743141ef419ce8b6f3adda31a18ca42
| ["Apache-2.0"]
| 6
| 2020-12-16T11:31:03.000Z
| 2021-11-19T10:00:37.000Z
| arista/tag/v2/services/gen_pb2_grpc.py
| barryCrunch/cloudvision-python
| bafb55a57743141ef419ce8b6f3adda31a18ca42
| ["Apache-2.0"]
| 7
| 2020-12-04T01:30:34.000Z
| 2021-11-11T21:40:12.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from arista.tag.v2.services import gen_pb2 as arista_dot_tag_dot_v2_dot_services_dot_gen__pb2
class TagServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetOne = channel.unary_unary(
'/arista.tag.v2.TagService/GetOne',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagResponse.FromString,
)
self.GetAll = channel.unary_stream(
'/arista.tag.v2.TagService/GetAll',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamResponse.FromString,
)
self.Subscribe = channel.unary_stream(
'/arista.tag.v2.TagService/Subscribe',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamResponse.FromString,
)
class TagServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def GetOne(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetAll(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Subscribe(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_TagServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetOne': grpc.unary_unary_rpc_method_handler(
servicer.GetOne,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagResponse.SerializeToString,
),
'GetAll': grpc.unary_stream_rpc_method_handler(
servicer.GetAll,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamResponse.SerializeToString,
),
'Subscribe': grpc.unary_stream_rpc_method_handler(
servicer.Subscribe,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'arista.tag.v2.TagService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
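# --- Illustration only (not emitted by protoc): wiring a concrete servicer
# into a server with the helper above. The _Example* names, port, and thread
# count are hypothetical; the function is defined but never called here.
def _example_serve():
    from concurrent import futures

    class _ExampleTagService(TagServiceServicer):
        def GetOne(self, request, context):
            # Return an empty response instead of UNIMPLEMENTED.
            return arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagResponse()

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    add_TagServiceServicer_to_server(_ExampleTagService(), server)
    server.add_insecure_port('[::]:50051')
    server.start()
    server.wait_for_termination()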
# This class is part of an EXPERIMENTAL API.
class TagService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def GetOne(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/arista.tag.v2.TagService/GetOne',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetAll(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/arista.tag.v2.TagService/GetAll',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Subscribe(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/arista.tag.v2.TagService/Subscribe',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
class TagAssignmentServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetOne = channel.unary_unary(
'/arista.tag.v2.TagAssignmentService/GetOne',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentResponse.FromString,
)
self.GetAll = channel.unary_stream(
'/arista.tag.v2.TagAssignmentService/GetAll',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentStreamRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentStreamResponse.FromString,
)
self.Subscribe = channel.unary_stream(
'/arista.tag.v2.TagAssignmentService/Subscribe',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentStreamRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentStreamResponse.FromString,
)
class TagAssignmentServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def GetOne(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetAll(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Subscribe(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_TagAssignmentServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetOne': grpc.unary_unary_rpc_method_handler(
servicer.GetOne,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentResponse.SerializeToString,
),
'GetAll': grpc.unary_stream_rpc_method_handler(
servicer.GetAll,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentStreamRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentStreamResponse.SerializeToString,
),
'Subscribe': grpc.unary_stream_rpc_method_handler(
servicer.Subscribe,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentStreamRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentStreamResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'arista.tag.v2.TagAssignmentService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class TagAssignmentService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def GetOne(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/arista.tag.v2.TagAssignmentService/GetOne',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetAll(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/arista.tag.v2.TagAssignmentService/GetAll',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentStreamRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentStreamResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Subscribe(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/arista.tag.v2.TagAssignmentService/Subscribe',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentStreamRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentStreamResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
class TagAssignmentConfigServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetOne = channel.unary_unary(
'/arista.tag.v2.TagAssignmentConfigService/GetOne',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigResponse.FromString,
)
self.GetAll = channel.unary_stream(
'/arista.tag.v2.TagAssignmentConfigService/GetAll',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigStreamRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigStreamResponse.FromString,
)
self.Subscribe = channel.unary_stream(
'/arista.tag.v2.TagAssignmentConfigService/Subscribe',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigStreamRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigStreamResponse.FromString,
)
self.Set = channel.unary_unary(
'/arista.tag.v2.TagAssignmentConfigService/Set',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigSetRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigSetResponse.FromString,
)
self.Delete = channel.unary_unary(
'/arista.tag.v2.TagAssignmentConfigService/Delete',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigDeleteRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigDeleteResponse.FromString,
)
class TagAssignmentConfigServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def GetOne(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetAll(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Subscribe(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Set(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Delete(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_TagAssignmentConfigServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetOne': grpc.unary_unary_rpc_method_handler(
servicer.GetOne,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigResponse.SerializeToString,
),
'GetAll': grpc.unary_stream_rpc_method_handler(
servicer.GetAll,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigStreamRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigStreamResponse.SerializeToString,
),
'Subscribe': grpc.unary_stream_rpc_method_handler(
servicer.Subscribe,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigStreamRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigStreamResponse.SerializeToString,
),
'Set': grpc.unary_unary_rpc_method_handler(
servicer.Set,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigSetRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigSetResponse.SerializeToString,
),
'Delete': grpc.unary_unary_rpc_method_handler(
servicer.Delete,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigDeleteRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigDeleteResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'arista.tag.v2.TagAssignmentConfigService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class TagAssignmentConfigService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def GetOne(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/arista.tag.v2.TagAssignmentConfigService/GetOne',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetAll(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/arista.tag.v2.TagAssignmentConfigService/GetAll',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigStreamRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigStreamResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Subscribe(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/arista.tag.v2.TagAssignmentConfigService/Subscribe',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigStreamRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigStreamResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Set(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/arista.tag.v2.TagAssignmentConfigService/Set',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigSetRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigSetResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Delete(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/arista.tag.v2.TagAssignmentConfigService/Delete',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigDeleteRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagAssignmentConfigDeleteResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
class TagConfigServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetOne = channel.unary_unary(
'/arista.tag.v2.TagConfigService/GetOne',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigResponse.FromString,
)
self.GetAll = channel.unary_stream(
'/arista.tag.v2.TagConfigService/GetAll',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigStreamRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigStreamResponse.FromString,
)
self.Subscribe = channel.unary_stream(
'/arista.tag.v2.TagConfigService/Subscribe',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigStreamRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigStreamResponse.FromString,
)
self.Set = channel.unary_unary(
'/arista.tag.v2.TagConfigService/Set',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigSetRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigSetResponse.FromString,
)
self.Delete = channel.unary_unary(
'/arista.tag.v2.TagConfigService/Delete',
request_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigDeleteRequest.SerializeToString,
response_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigDeleteResponse.FromString,
)
class TagConfigServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def GetOne(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetAll(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Subscribe(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Set(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Delete(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_TagConfigServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetOne': grpc.unary_unary_rpc_method_handler(
servicer.GetOne,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigResponse.SerializeToString,
),
'GetAll': grpc.unary_stream_rpc_method_handler(
servicer.GetAll,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigStreamRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigStreamResponse.SerializeToString,
),
'Subscribe': grpc.unary_stream_rpc_method_handler(
servicer.Subscribe,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigStreamRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigStreamResponse.SerializeToString,
),
'Set': grpc.unary_unary_rpc_method_handler(
servicer.Set,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigSetRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigSetResponse.SerializeToString,
),
'Delete': grpc.unary_unary_rpc_method_handler(
servicer.Delete,
request_deserializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigDeleteRequest.FromString,
response_serializer=arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigDeleteResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'arista.tag.v2.TagConfigService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class TagConfigService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def GetOne(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/arista.tag.v2.TagConfigService/GetOne',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetAll(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/arista.tag.v2.TagConfigService/GetAll',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigStreamRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigStreamResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Subscribe(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/arista.tag.v2.TagConfigService/Subscribe',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigStreamRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigStreamResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Set(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/arista.tag.v2.TagConfigService/Set',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigSetRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigSetResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Delete(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/arista.tag.v2.TagConfigService/Delete',
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigDeleteRequest.SerializeToString,
arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagConfigDeleteResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
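# --- Illustration only (not emitted by protoc): a minimal client call
# against TagService. The endpoint address is a placeholder assumption; a
# real CloudVision deployment would normally use grpc.secure_channel with
# TLS credentials and auth metadata. Defined but never called here.
def _example_list_tags(address='localhost:443'):
    with grpc.insecure_channel(address) as channel:
        stub = TagServiceStub(channel)
        # GetAll is server-streaming: iterate the responses as they arrive.
        request = arista_dot_tag_dot_v2_dot_services_dot_gen__pb2.TagStreamRequest()
        for response in stub.GetAll(request):
            print(response)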
| 48.970588
| 140
| 0.69695
| 3,173
| 31,635
| 6.517806
| 0.041286
| 0.028432
| 0.056284
| 0.070354
| 0.969682
| 0.968232
| 0.966781
| 0.953726
| 0.953726
| 0.931773
| 0
| 0.009576
| 0.234203
| 31,635
| 645
| 141
| 49.046512
| 0.844093
| 0.066445
| 0
| 0.766284
| 1
| 0
| 0.077982
| 0.049346
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076628
| false
| 0
| 0.003831
| 0.030651
| 0.1341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
80a3e6ecf18950961d535d2aa49b10fc8338fbad
| 9,114
|
py
|
Python
|
evaluation_plot_script.py
|
mendoncagary/deep-summarization
|
85a625dce1bcaaa2597d6ad63bf869717852d026
|
[
"MIT"
] | 132
|
2016-05-13T20:33:30.000Z
|
2021-08-10T03:52:01.000Z
|
evaluation_plot_script.py
|
RobertMarton/deep-summarization
|
9b3bb1daae11a1db2386dbe4a71848714e6127f8
|
[
"MIT"
] | 5
|
2016-11-04T05:32:41.000Z
|
2018-06-26T03:54:29.000Z
|
evaluation_plot_script.py
|
RobertMarton/deep-summarization
|
9b3bb1daae11a1db2386dbe4a71848714e6127f8
|
[
"MIT"
] | 50
|
2016-05-21T12:38:14.000Z
|
2020-10-04T08:37:08.000Z
|
from helpers.plotter import Plotter
from helpers.metric import Calculator
import matplotlib.pyplot as plt
############## ALL GRU PLOTS ############################
result_file_1 = 'result/simple/gru/no_attention.csv'
result_file_2 = 'result/bidirectional/gru/no_attention.csv'
result_file_3 = 'result/stacked_simple/gru/no_attention.csv'
result_file_4 = 'result/stacked_bidirectional/gru/no_attention.csv'
result_file_description = ['gru_smpl', 'gru_bidr', 'gru_stack_smpl', 'gru_stack_bidr']
hypothesis_dir = 'metrics/hypothesis'
reference_dir = 'metrics/reference'
bleu_1 = []
bleu_2 = []
bleu_3 = []
bleu_4 = []
rouge = []
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_1)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_2)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_3)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_4)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
steps = calculator.get_steps()
plotter = Plotter()
plotter.set_metrics(bleu_1,bleu_2,bleu_3,bleu_4,rouge)
plotter.set_file_description(result_file_description)
plotter.set_steps(steps)
plotter.plot_all_metrics()
########## ALL LSTM PLOTS ####################
result_file_1 = 'result/simple/lstm/no_attention.csv'
result_file_2 = 'result/bidirectional/lstm/no_attention.csv'
result_file_3 = 'result/stacked_simple/lstm/no_attention.csv'
result_file_4 = 'result/stacked_bidirectional/lstm/no_attention.csv'
result_file_description = ['lstm_smpl','lstm_bidr','lstm_stack_smpl','lstm_stack_bidr']
hypothesis_dir = 'metrics/hypothesis'
reference_dir = 'metrics/reference'
bleu_1 = []
bleu_2 = []
bleu_3 = []
bleu_4 = []
rouge = []
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_1)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_2)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_3)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_4)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
steps = calculator.get_steps()
plotter = Plotter()
plotter.set_metrics(bleu_1,bleu_2,bleu_3,bleu_4,rouge)
plotter.set_file_description(result_file_description)
plotter.set_steps(steps)
plotter.plot_all_metrics()
#### GRU and LSTM Comparison plots #####
## SIMPLE
result_file_1 = 'result/simple/gru/no_attention.csv'
result_file_2 = 'result/simple/lstm/no_attention.csv'
result_file_description = ['gru_simple','lstm_simple']
bleu_1 = []
bleu_2 = []
bleu_3 = []
bleu_4 = []
rouge = []
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_1)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_2)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
steps = calculator.get_steps()
plotter = Plotter()
plotter.set_metrics(bleu_1,bleu_2,bleu_3,bleu_4,rouge)
plotter.set_file_description(result_file_description)
plotter.set_steps(steps)
plotter.plot_all_metrics()
## BIDIRECTIONAL
result_file_1 = 'result/bidirectional/gru/no_attention.csv'
result_file_2 = 'result/bidirectional/lstm/no_attention.csv'
result_file_description = ['gru_bidir','lstm_bidir']
bleu_1 = []
bleu_2 = []
bleu_3 = []
bleu_4 = []
rouge = []
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_1)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_2)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
steps = calculator.get_steps()
plotter = Plotter()
plotter.set_metrics(bleu_1,bleu_2,bleu_3,bleu_4,rouge)
plotter.set_file_description(result_file_description)
plotter.set_steps(steps)
plotter.plot_all_metrics()
## STACKED_SIMPLE
result_file_1 = 'result/stacked_simple/gru/no_attention.csv'
result_file_2 = 'result/stacked_simple/lstm/no_attention.csv'
result_file_description = ['gru_stacked','lstm_stacked']
bleu_1 = []
bleu_2 = []
bleu_3 = []
bleu_4 = []
rouge = []
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_1)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_2)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
steps = calculator.get_steps()
plotter = Plotter()
plotter.set_metrics(bleu_1,bleu_2,bleu_3,bleu_4,rouge)
plotter.set_file_description(result_file_description)
plotter.set_steps(steps)
plotter.plot_all_metrics()
## STACKED BIDIRECTIONAL
result_file_1 = 'result/stacked_bidirectional/gru/no_attention.csv'
result_file_2 = 'result/stacked_bidirectional/lstm/no_attention.csv'
result_file_description = ['gru_stack_bidir','lstm_stack_bidir']
bleu_1 = []
bleu_2 = []
bleu_3 = []
bleu_4 = []
rouge = []
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_1)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
calculator = Calculator(3,hypothesis_dir,reference_dir)
calculator.load_result(result_file_2)
calculator.evaluate_all_ref_hyp_pairs()
bleu_1_val,bleu_2_val,bleu_3_val,bleu_4_val,rouge_val = calculator.get_all_metrics()
bleu_1.append(bleu_1_val)
bleu_2.append(bleu_2_val)
bleu_3.append(bleu_3_val)
bleu_4.append(bleu_4_val)
rouge.append(rouge_val)
steps = calculator.get_steps()
plotter = Plotter()
plotter.set_metrics(bleu_1,bleu_2,bleu_3,bleu_4,rouge)
plotter.set_file_description(result_file_description)
plotter.set_steps(steps)
plotter.plot_all_metrics()
# SHOW ALL PLOTS
plt.show()
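# --- Refactor sketch (not in the original script): the blocks above repeat
# the same evaluate-and-collect steps for each result file. Assuming the
# Calculator and Plotter APIs behave exactly as used above, the same figures
# could be produced with one helper per group of result files.
def plot_result_group(result_files, descriptions,
                      hypothesis_dir='metrics/hypothesis',
                      reference_dir='metrics/reference'):
    metrics = ([], [], [], [], [])  # bleu_1..bleu_4, rouge
    calculator = None
    for result_file in result_files:
        calculator = Calculator(3, hypothesis_dir, reference_dir)
        calculator.load_result(result_file)
        calculator.evaluate_all_ref_hyp_pairs()
        for series, value in zip(metrics, calculator.get_all_metrics()):
            series.append(value)
    plotter = Plotter()
    plotter.set_metrics(*metrics)
    plotter.set_file_description(descriptions)
    plotter.set_steps(calculator.get_steps())
    plotter.plot_all_metrics()

# Usage, equivalent to the first section above:
# plot_result_group(
#     ['result/simple/gru/no_attention.csv',
#      'result/bidirectional/gru/no_attention.csv',
#      'result/stacked_simple/gru/no_attention.csv',
#      'result/stacked_bidirectional/gru/no_attention.csv'],
#     ['gru_smpl', 'gru_bidr', 'gru_stack_smpl', 'gru_stack_bidr'])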
| 29.784314
| 87
| 0.824995
| 1,553
| 9,114
| 4.374759
| 0.03284
| 0.098911
| 0.03768
| 0.05652
| 0.960406
| 0.949956
| 0.946865
| 0.944804
| 0.941125
| 0.877833
| 0
| 0.033586
| 0.05914
| 9,114
| 305
| 88
| 29.881967
| 0.758717
| 0.014703
| 0
| 0.908333
| 0
| 0
| 0.104399
| 0.075599
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.0125
| 0
| 0.0125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
80aa061bcbae09865fc78b4744def484b4d1d093
| 29,192
|
py
|
Python
|
pirates/effects/FireworkShow.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 81
|
2018-04-08T18:14:24.000Z
|
2022-01-11T07:22:15.000Z
|
pirates/effects/FireworkShow.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 4
|
2018-09-13T20:41:22.000Z
|
2022-01-08T06:57:00.000Z
|
pirates/effects/FireworkShow.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 26
|
2018-05-26T12:49:27.000Z
|
2021-09-11T09:11:59.000Z
|
from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from pirates.audio import SoundGlobals
from pirates.effects import FireworkGlobals
from pirates.effects.FireworkGlobals import *
from pirates.effects.Firework import Firework
from pirates.ai import HolidayGlobals
from pirates.piratesbase import TODDefs
import random
colors = [
Vec4(1, 1, 1, 1), Vec4(1, 0.1, 0.1, 1), Vec4(0.1, 1, 0.1, 1), Vec4(0.3, 1, 0.3, 1), Vec4(0.2, 0.2, 1, 1), Vec4(1, 1, 0.1, 1), Vec4(1, 0.5, 0.1, 1), Vec4(1, 0.1, 1, 1), Vec4(0.1, 1, 1, 1), Vec4(0.1, 0.5, 1, 1)]
class FireworkShow(NodePath):
def __init__(self, showType):
NodePath.__init__(self, 'FireworkShow')
self.showType = showType
self.sectionIvals = []
self.fireworks = []
        def r():
            # Random scale jitter in [0.8, 1.2].
            return random.randint(8, 12) / 10.0
        def rV():
            # Random launch velocity: lateral spread with a strong upward component.
            return Vec3(random.randint(-120, 120), random.randint(-120, 120), random.randint(400, 600))
        def rP():
            # Random ground-level launch position.
            return Point3(random.randint(-300, 300), random.randint(-50, 50), 0)
        def rS():
            # Random size in [0.75, 1.25).
            return 0.75 + random.random() / 2.0
        def rC():
            # Random color from the palette above.
            return random.choice(colors)
        def rT():
            # Random trail duration in [1.2, 2.0] seconds.
            return random.randint(12, 20) / 10.0
        def rD():
            # Random delay in [0.1, 2.0] seconds.
            return random.randint(1, 20) / 10.0
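        # Each show entry below is an 8-item list:
        # [firework type, launch velocity, launch position, scale,
        #  primary color, secondary color (-1 reuses the primary),
        #  trail duration, delay] -- beginSection() unpacks entries
        # in exactly this order.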
self.showData = {HolidayGlobals.FOURTHOFJULY: [[FireworkType.BasicPeony, Vec3(0, 0, 450), Point3(0, 0, 0), 1.0, Vec4(1, 1, 1, 1), Vec4(1, 1, 1, 1), 2.0, 3.0], [FireworkType.BasicPeony, Vec3(-60, 20, 550), Point3(-120, 0, 0), 0.8, Vec4(1, 1, 0, 1), -1, 1.8, 0.2], [FireworkType.BasicPeony, Vec3(30, -20, 470), Point3(120, 0, 0), 0.8, rC(), -1, 1.8, 2.5], [FireworkType.AdvancedPeony, Vec3(-120, 20, 500), Point3(-200, 0, 0), 1.0, Vec4(1, 0, 0, 1), -1, rT(), 0.25], [FireworkType.AdvancedPeony, Vec3(0, 0, 500), Point3(0, 0, 0), 1.0, Vec4(0, 1, 0, 1), -1, rT(), 0.25], [FireworkType.AdvancedPeony, Vec3(120, -20, 500), Point3(200, 0, 0), 1.0, Vec4(0.1, 0.1, 1, 1), -1, rT(), 2.5], [FireworkType.BasicPeony, Vec3(-50, 50, 450) * r(), Point3(0, 0, 0), 1.0, rC(), -1, rT(), rD()], [FireworkType.AdvancedPeony, Vec3(50, -50, 450) * r(), Point3(200, 0, 0), 1.0, rC(), -1, rT(), rD()], [FireworkType.BasicPeony, Vec3(-100, 0, 450) * r(), Point3(-200, 0, 0), 1.0, rC(), -1, rT(), rD()], [FireworkType.AdvancedPeony, Vec3(100, 50, 450) * r(), Point3(200, 0, 0), 1.0, rC(), -1, rT(), rD()], [FireworkType.BasicPeony, Vec3(100, -50, 450) * r(), Point3(-200, 0, 0), 1.0, rC(), -1, rT(), 1.5], [FireworkType.DiademPeony, Vec3(0, 0, 450) * r(), Point3(0, 0, 0), 1.1, rC(), -1, rT(), 3.0], [FireworkType.AmericanFlag, None, Point3(0, 0, 0), 1.0, None, None, None, 4.0], [FireworkType.GlowFlare, Vec3(-100, 0, 500), Point3(-400, 0, 0), 1.25, Vec4(1, 1, 1, 1), -1, 3.0, 0.0], [FireworkType.GlowFlare, Vec3(100, 0, 500), Point3(400, 0, 0), 1.25, Vec4(1, 1, 1, 1), -1, 3.0, 0.5], [FireworkType.GlowFlare, Vec3(-50, 0, 500), Point3(-250, 0, 0), 1.25, Vec4(0, 1, 0, 1), -1, 3.0, 0.0], [FireworkType.GlowFlare, Vec3(50, 0, 500), Point3(250, 0, 0), 1.25, Vec4(0, 1, 0, 1), -1, 3.0, 0.5], [FireworkType.GlowFlare, Vec3(-25, 0, 500), Point3(-100, 0, 0), 1.25, Vec4(1, 0, 0, 1), -1, 3.0, 0.0], [FireworkType.GlowFlare, Vec3(25, 0, 500), Point3(100, 0, 0), 1.25, Vec4(1, 0, 0, 1), -1, 3.0, 1.0], [FireworkType.DiademChrysanthemum, Vec3(0, 0, 550), Point3(0, 0, 0), 1.25, Vec4(1, 1, 1, 1), -1, 1.5, 2.0], [FireworkType.Ring, Vec3(-100, 50, 500) * r(), Point3(-200, 0, 0), 1.0, rC(), -1, rT(), 0.5], [FireworkType.Ring, Vec3(100, -50, 500) * r(), Point3(200, 0, 0), 1.0, rC(), -1, rT(), 1.5], [FireworkType.Ring, Vec3(0, 0, 550), Point3(0, 50, 0), 1.0, rC(), -1, rT(), 1.5], [FireworkType.Saturn, Vec3(-250, 50, 450), Point3(200, 0, 0), 1.0, rC(), rC(), 1.7, 0.5], [FireworkType.Saturn, Vec3(250, -50, 450), Point3(-200, 0, 0), 1.0, rC(), rC(), 1.7, 1.5], [FireworkType.BasicPeony, Vec3(-150, 100, 500) * r(), Point3(-200, 0, 0), 1.0, rC(), rC(), rT(), rD()], [FireworkType.AdvancedPeony, Vec3(-50, 100, 500) * r(), Point3(200, 0, 0), 1.0, rC(), -1, rT(), rD()], [FireworkType.BasicPeony, Vec3(-150, -100, 500) * r(), Point3(0, 50, 0), 1.0, rC(), rC(), rT(), rD()], [FireworkType.Chrysanthemum, Vec3(175, 100, 500) * r(), Point3(220, 0, 0), 1.0, rC(), -1, rT(), rD()], [FireworkType.Ring, Vec3(-75, -100, 500) * r(), Point3(-220, 0, 0), 1.0, rC(), rC(), rT(), rD()], [FireworkType.BasicPeony, Vec3(0, 100, 500) * r(), Point3(0, 0, 0), 1.0, rC(), rC(), rT(), rD()], [FireworkType.AdvancedPeony, Vec3(75, 100, 500) * r(), Point3(-200, 0, 0), 1.0, rC(), rC(), rT(), rD()], [FireworkType.BasicPeony, Vec3(150, 100, 500) * r(), Point3(0, 0, 0), 1.0, rC(), rC(), rT(), rD()], [FireworkType.Chrysanthemum, Vec3(-100, 100, 500) * r(), Point3(200, 0, 0), 1.0, rC(), -1, rT(), 3.0], [FireworkType.Mickey, None, Point3(0, 0, 0), 1.0, rC(), -1, None, 3.0], [FireworkType.Bees, Vec3(0, 0, 550), 
Point3(0, 0, 0), 1.2, rC(), -1, 1.7, 2.0], [FireworkType.Bees, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.Bees, Vec3(-100, 50, 500) * r(), Point3(-200, 50, 0), 1.0, rC(), -1, rT(), rD()], [FireworkType.Bees, Vec3(-50, 0, 500) * r(), Point3(0, 0, 0), 1.0, rC(), -1, rT(), rD()], [FireworkType.Bees, Vec3(100, -50, 500) * r(), Point3(200, 0, 0), 1.0, rC(), -1, rT(), rD()], [FireworkType.BasicPeony, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.AdvancedPeony, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.BasicPeony, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.Bees, rV(), rP(), 1.0, rC(), -1, rT(), rD()], [FireworkType.AdvancedPeony, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.Ring, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.BasicPeony, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.Ring, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.Bees, rV(), rP(), 1.0, rC(), -1, rT(), rD()], [FireworkType.DiademPeony, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.BasicPeony, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.Ring, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.BasicPeony, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.Bees, rV(), rP(), 1.0, rC(), -1, rT(), rD()], [FireworkType.Saturn, rV(), rP(), 1.0, rC(), rC(), rT(), rD()], [FireworkType.Chrysanthemum, rV(), rP(), 1.0, rC(), rC(), rT(), 3.5], [FireworkType.PalmTree, Vec3(-150, 50, 300), rP(), 1.0, Vec4(0, 1, 0, 1), rC(), 1.75, 2.0], [FireworkType.PalmTree, Vec3(160, 50, 320), rP(), 1.0, Vec4(0, 1, 0, 1), rC(), 1.75, 2.0], [FireworkType.Bees, rV(), rP(), 1.0, rC(), -1, rT(), 2.0], [FireworkType.PalmTree, Vec3(-150, -50, 350), Point3(-250, 0, 0), 1.2, Vec4(0, 1, 0, 1), rC(), 1.75, 2.0], [FireworkType.BasicPeony, rV(), rP(), 1.0, rC(), rC(), rT(), 0.75], [FireworkType.Bees, rV(), rP(), 1.0, rC(), -1, rT(), 0.5], [FireworkType.AdvancedPeony, rV(), rP(), 1.0, rC(), rC(), rT(), 0.4], [FireworkType.Ring, rV(), rP(), 1.0, rC(), rC(), rT(), 0.3], [FireworkType.BasicPeony, rV(), rP(), 1.0, rC(), rC(), rT(), 0.5], [FireworkType.Ring, rV(), rP(), 1.0, rC(), rC(), rT(), 0.25], [FireworkType.Bees, rV(), rP(), 1.0, rC(), -1, rT(), 0.5], [FireworkType.BasicPeony, rV(), rP(), 1.0, rC(), rC(), rT(), 0.4], [FireworkType.Ring, rV(), rP(), 1.0, rC(), rC(), rT(), 0.4], [FireworkType.AdvancedPeony, rV(), rP(), 1.0, rC(), rC(), rT(), 0.6], [FireworkType.DiademPeony, rV(), rP(), 1.0, rC(), rC(), rT(), 0.5], [FireworkType.AdvancedPeony, rV(), rP(), 1.0, rC(), rC(), rT(), 0.6], [FireworkType.Ring, rV(), rP(), 1.0, rC(), rC(), rT(), 0.3], [FireworkType.BasicPeony, rV(), rP(), 1.0, rC(), rC(), rT(), 0.3], [FireworkType.Bees, rV(), rP(), 1.0, rC(), -1, rT(), 0.6], [FireworkType.AdvancedPeony, rV(), rP(), 1.0, rC(), rC(), rT(), 0.5], [FireworkType.Ring, rV(), rP(), 1.0, rC(), rC(), rT(), 0.5], [FireworkType.BasicPeony, rV(), rP(), 1.0, rC(), rC(), rT(), 0.4], [FireworkType.Ring, rV(), rP(), 1.0, rC(), rC(), rT(), 0.3], [FireworkType.Bees, rV(), rP(), 1.0, rC(), -1, rT(), 0.4], [FireworkType.BasicPeony, rV(), rP(), 1.0, rC(), rC(), rT(), 0.4], [FireworkType.AdvancedPeony, rV(), rP(), 1.0, rC(), rC(), rT(), 0.5], [FireworkType.Saturn, rV(), rP(), 1.0, rC(), rC(), rT(), 0.6], [FireworkType.Chrysanthemum, rV(), rP(), 1.0, rC(), rC(), rT(), 2.5], [FireworkType.PirateSkull, None, Point3(0, 0, 0), 1.0, Vec4(1, 1, 1, 1), -1, None, 2.0]],HolidayGlobals.NEWYEARS: [[FireworkType.BasicPeony, Vec3(0, 0, 460), Point3(0, 0, 0), 1.0, Vec4(1, 1, 1, 1), Vec4(1, 1, 1, 
1), 2.5, 0.75], [FireworkType.BasicPeony, Vec3(-75, 0, 450), Point3(-250, 0, 0), 1.0, Vec4(1, 1, 1, 1), Vec4(1, 1, 1, 1), 2.3, 3.1], [FireworkType.BasicPeony, Vec3(120, 0, 600), Point3(100, 0, 0), 1.0, Vec4(1, 1, 1, 1), Vec4(1, 1, 1, 1), 1.25, 1.7], [FireworkType.BasicPeony, Vec3(-25, 0, 480), Point3(-350, 0, 0), 1.0, Vec4(0.2, 1, 0.2, 1), Vec4(1, 1, 1, 1), 1.5, 0.2], [FireworkType.BasicPeony, Vec3(25, 0, 500), Point3(350, 0, 0), 1.0, Vec4(1, 0.2, 0.2, 1), Vec4(1, 1, 1, 1), 1.5, 1.6], [FireworkType.BasicPeony, Vec3(-50, 0, 500), Point3(-150, 0, 0), 1.0, Vec4(1, 0.2, 0.2, 1), Vec4(1, 1, 1, 1), 1.25, 0.2], [FireworkType.BasicPeony, Vec3(50, 0, 550), Point3(150, 0, 0), 1.0, Vec4(0.2, 1, 0.2, 1), Vec4(1, 1, 1, 1), 1.25, 1.5], [FireworkType.AdvancedPeony, Vec3(0, 0, 700), Point3(0, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 1.0, 0.4], [FireworkType.BasicPeony, Vec3(100, 0, 520), Point3(300, 0, 0), 1.1, rC(), Vec4(1, 1, 1, 1), 1.2, 0.0], [FireworkType.BasicPeony, Vec3(-100, 0, 500), Point3(-300, 0, 0), 1.1, rC(), Vec4(1, 1, 1, 1), 1.2, 1.75], [FireworkType.PalmTree, Vec3(100, 0, 350), Point3(-300, 0, 0), 1.2, Vec4(0.1, 1, 0.1, 1), rC(), 1.8, 0.5], [FireworkType.PalmTree, Vec3(-150, 0, 350), Point3(-350, 0, 0), 1.1, Vec4(0.1, 1, 0.1, 1), rC(), 1.75, 2.8], [FireworkType.BasicPeony, Vec3(100, 0, 450), Point3(350, 0, 0), 1.0, rC(), Vec4(1, 1, 1, 1), 2.2, 0.25], [FireworkType.BasicPeony, Vec3(-100, 0, 500), Point3(50, 0, 0), 1.0, rC(), Vec4(1, 1, 1, 1), 2.2, 0.25], [FireworkType.AdvancedPeony, Vec3(0, 0, 400), Point3(200, 0, 0), 1.15, rC(), Vec4(1, 1, 1, 1), 2.8, 2.5], [FireworkType.Chrysanthemum, Vec3(-25, 0, 500), Point3(-150, 0, 0), 1.5, Vec4(1, 1, 0.1, 1), Vec4(1, 1, 1, 1), 1.6, 2.35], [FireworkType.GlowFlare, Vec3(150, 0, 620), Point3(-500, 0, 0), 1.0, Vec4(1, 0.1, 0.1, 1), Vec4(1, 0.1, 0.1, 1), 2.2, 0.5], [FireworkType.GlowFlare, Vec3(150, 0, 620), Point3(-300, 0, 0), 1.0, Vec4(0.1, 1, 0.1, 1), Vec4(0.1, 1, 0.1, 1), 2.2, 0.6], [FireworkType.GlowFlare, Vec3(150, 0, 620), Point3(-100, 0, 0), 1.0, Vec4(0.1, 0.1, 1, 1), Vec4(0.1, 0.1, 1, 1), 2.2, 0.5], [FireworkType.GlowFlare, Vec3(-150, 0, 620), Point3(100, 0, 0), 1.0, Vec4(1, 0.1, 0.1, 1), Vec4(1, 0.1, 0.1, 1), 2.2, 0.6], [FireworkType.GlowFlare, Vec3(-150, 0, 620), Point3(300, 0, 0), 1.0, Vec4(0.1, 1, 0.1, 1), Vec4(0.1, 1, 0.1, 1), 2.2, 0.5], [FireworkType.GlowFlare, Vec3(-150, 0, 620), Point3(500, 0, 0), 1.0, Vec4(0.1, 0.1, 1, 1), Vec4(0.1, 0.1, 1, 1), 2.2, 0.5], [FireworkType.Chrysanthemum, Vec3(-50, 0, 400), Point3(-200, 0, 0), 1.2, Vec4(1, 0.5, 0.2, 1), Vec4(1, 1, 1, 1), 2.5, 0.0], [FireworkType.GlowFlare, Vec3(250, 0, 350), Point3(-500, 0, 0), 1.0, Vec4(1, 1, 0.1, 1), Vec4(1, 1, 0.1, 1), 2.2, 0.0], [FireworkType.GlowFlare, Vec3(-250, 0, 350), Point3(500, 0, 0), 1.0, Vec4(1, 1, 0.1, 1), Vec4(1, 1, 0.1, 1), 2.2, 0.5], [FireworkType.Chrysanthemum, Vec3(50, 0, 440), Point3(200, 0, 0), 1.2, Vec4(1, 0.5, 0.2, 1), Vec4(1, 1, 1, 1), 2.0, 1.5], [FireworkType.BasicPeony, Vec3(50, 0, 500), Point3(200, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 1.5, 0.5], [FireworkType.BasicPeony, Vec3(-80, 0, 500), Point3(-200, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 1.5, 0.5], [FireworkType.BasicPeony, Vec3(50, 0, 550), Point3(350, 0, 0), 1.1, rC(), Vec4(1, 1, 1, 1), 1.5, 0.5], [FireworkType.DiademPeony, Vec3(0, 0, 600), Point3(0, 0, 0), 1.3, Vec4(0.1, 0.1, 1, 1), Vec4(1, 1, 0.1, 1), 1.3, 0.5], [FireworkType.BasicPeony, Vec3(-100, 0, 750), Point3(-350, 0, 0), 1.1, rC(), Vec4(1, 1, 1, 1), 1.0, 0.2], [FireworkType.BasicPeony, Vec3(40, 0, 550), Point3(400, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 
1), 1.25, 0.5], [FireworkType.BasicPeony, Vec3(0, 0, 550), Point3(-400, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 1.25, 0.5], [FireworkType.Chrysanthemum, Vec3(-100, 0, 550), Point3(-300, 0, 0), 1.0, rC(), Vec4(1, 1, 1, 1), 1.3, 0.0], [FireworkType.Chrysanthemum, Vec3(100, 0, 550), Point3(300, 0, 0), 1.0, rC(), Vec4(1, 1, 1, 1), 1.3, 0.5], [FireworkType.DiademChrysanthemum, Vec3(-10, 0, 600), Point3(0, 0, 0), 1.3, Vec4(1, 0.1, 0.1, 1), Vec4(1, 1, 0.1, 1), 1.3, 1.9], [FireworkType.Bees, Vec3(-100, 0, 650), Point3(-350, 0, 0), 1.3, rC(), Vec4(1, 1, 1, 1), 1.2, 2.2], [FireworkType.Bees, Vec3(100, 0, 600), Point3(-250, 0, 0), 1.3, rC(), Vec4(1, 1, 1, 1), 1.2, 2.2], [FireworkType.Chrysanthemum, Vec3(25, 0, 480), Point3(250, 0, 0), 1.3, rC(), Vec4(1, 1, 1, 1), 2.3, 0.3], [FireworkType.Bees, Vec3(-100, 0, 500), Point3(100, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 1.3, 0.0], [FireworkType.Bees, Vec3(150, 0, 500), Point3(350, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 1.3, 3.25], [FireworkType.GlowFlare, Vec3(-150, 0, 400), Point3(0, 0, 0), 0.5, Vec4(0.1, 1, 0.1, 1), Vec4(0.1, 1, 0.1, 1), 1.5, 0.0], [FireworkType.GlowFlare, Vec3(150, 0, 400), Point3(0, 0, 0), 0.5, Vec4(0.1, 1, 0.1, 1), Vec4(0.1, 1, 0.1, 1), 1.5, 0.75], [FireworkType.GlowFlare, Vec3(0, 0, 480), Point3(0, 0, 0), 0.75, Vec4(0.1, 1, 0.1, 1), Vec4(0.1, 1, 0.1, 1), 1.75, 0.0], [FireworkType.DiademChrysanthemum, Vec3(0, 0, 450), Point3(0, 0, 0), 1.25, rC(), Vec4(1, 1, 0.1, 1), 1.25, 2.5], [FireworkType.DiademPeony, Vec3(50, 0, 450), Point3(300, 0, 0), 1.2, rC(), rC(), 1.75, 0.75], [FireworkType.Ring, Vec3(75, 0, 500), Point3(150, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 1.75, 0.5], [FireworkType.DiademPeony, Vec3(-50, 0, 450), Point3(-300, 0, 0), 1.2, rC(), rC(), 1.75, 0.5], [FireworkType.Ring, Vec3(-75, 0, 500), Point3(-150, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 1.75, 1.25], [FireworkType.Saturn, Vec3(0, 0, 450), Point3(0, 0, 0), 1.2, rC(), rC(), 1.3, 3.3], [FireworkType.BasicPeony, Vec3(-25, 0, 300), Point3(-400, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 3.1, 0.2], [FireworkType.BasicPeony, Vec3(-10, 0, 400), Point3(-200, 0, 0), 1.15, rC(), Vec4(1, 1, 1, 1), 2.7, 0.2], [FireworkType.BasicPeony, Vec3(0, 0, 500), Point3(0, 0, 0), 1.1, rC(), Vec4(1, 1, 1, 1), 2.3, 0.2], [FireworkType.BasicPeony, Vec3(10, 0, 600), Point3(200, 0, 0), 1.05, rC(), Vec4(1, 1, 1, 1), 1.9, 0.2], [FireworkType.BasicPeony, Vec3(25, 0, 700), Point3(400, 0, 0), 1.0, rC(), Vec4(1, 1, 1, 1), 1.5, 0.6], [FireworkType.Saturn, Vec3(75, 0, 300), Point3(250, 0, 0), 1.25, Vec4(0.1, 1, 0.1, 1), Vec4(1, 1, 0.1, 1), 2.25, 2.5], [FireworkType.DiademChrysanthemum, Vec3(-25, 0, 550), Point3(0, 0, 0), 1.4, rC(), rC(), 1.3, 1.5], [FireworkType.BasicPeony, Vec3(-150, 0, 450), Point3(-400, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.AdvancedPeony, Vec3(-100, 0, 550), Point3(-300, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.Chrysanthemum, Vec3(-50, 0, 450), Point3(-200, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.BasicPeony, Vec3(-25, 0, 550), Point3(-100, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.AdvancedPeony, Vec3(0, 0, 450), Point3(0, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.Chrysanthemum, Vec3(25, 0, 550), Point3(100, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.BasicPeony, Vec3(50, 0, 450), Point3(200, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.AdvancedPeony, Vec3(100, 0, 550), Point3(300, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.Chrysanthemum, Vec3(150, 0, 
450), Point3(400, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.BasicPeony, Vec3(50, 0, 400), Point3(300, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.AdvancedPeony, Vec3(25, 0, 500), Point3(200, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.Chrysanthemum, Vec3(0, 0, 400), Point3(100, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.BasicPeony, Vec3(-25, 0, 500), Point3(0, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.AdvancedPeony, Vec3(-50, 0, 400), Point3(-100, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.Chrysanthemum, Vec3(-100, 0, 500), Point3(-200, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.BasicPeony, Vec3(-150, 0, 400), Point3(-300, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.AdvancedPeony, Vec3(-200, 0, 500), Point3(-400, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25]],HolidayGlobals.MARDIGRAS: [[FireworkType.BasicPeony, Vec3(0, 0, 460), Point3(0, 0, 0), 1.0, Vec4(1, 0.2, 1, 1), Vec4(1, 1, 1, 1), 2.5, 0.75], [FireworkType.BasicPeony, Vec3(-75, 0, 450), Point3(-250, 0, 0), 1.0, Vec4(0.2, 1, 0.2, 1), Vec4(1, 1, 1, 1), 2.3, 3.1], [FireworkType.BasicPeony, Vec3(120, 0, 600), Point3(100, 0, 0), 1.0, Vec4(1, 0.2, 0.2, 1), Vec4(1, 1, 1, 1), 1.25, 1.7], [FireworkType.BasicPeony, Vec3(-25, 0, 480), Point3(-350, 0, 0), 1.0, Vec4(0.2, 1, 0.2, 1), Vec4(1, 1, 1, 1), 1.5, 0.2], [FireworkType.BasicPeony, Vec3(25, 0, 500), Point3(350, 0, 0), 1.0, Vec4(1, 0.2, 0.2, 1), Vec4(1, 1, 1, 1), 1.5, 1.6], [FireworkType.BasicPeony, Vec3(-50, 0, 500), Point3(-150, 0, 0), 1.0, Vec4(1, 0.2, 0.2, 1), Vec4(1, 1, 1, 1), 1.25, 0.2], [FireworkType.BasicPeony, Vec3(50, 0, 550), Point3(150, 0, 0), 1.0, Vec4(0.2, 1, 0.2, 1), Vec4(1, 1, 1, 1), 1.25, 1.5], [FireworkType.AdvancedPeony, Vec3(0, 0, 700), Point3(0, 0, 0), 1.2, Vec4(1, 0.2, 1, 1), Vec4(1, 1, 1, 1), 1.0, 0.4], [FireworkType.BasicPeony, Vec3(100, 0, 520), Point3(300, 0, 0), 1.1, Vec4(0.2, 1, 0.2, 1), Vec4(1, 1, 1, 1), 1.2, 0.0], [FireworkType.BasicPeony, Vec3(-100, 0, 500), Point3(-300, 0, 0), 1.1, Vec4(1, 0.2, 0.2, 1), Vec4(1, 1, 1, 1), 1.2, 1.75], [FireworkType.PalmTree, Vec3(100, 0, 350), Point3(-300, 0, 0), 1.2, Vec4(0.1, 1, 0.1, 1), Vec4(1, 0.1, 1, 1), 1.8, 0.5], [FireworkType.PalmTree, Vec3(-150, 0, 350), Point3(-350, 0, 0), 1.1, Vec4(0.1, 1, 0.1, 1), Vec4(1, 0.1, 1, 1), 1.75, 2.8], [FireworkType.BasicPeony, Vec3(100, 0, 450), Point3(350, 0, 0), 1.0, rC(), Vec4(1, 1, 1, 1), 2.2, 0.25], [FireworkType.BasicPeony, Vec3(-100, 0, 500), Point3(50, 0, 0), 1.0, rC(), Vec4(1, 1, 1, 1), 2.2, 0.25], [FireworkType.AdvancedPeony, Vec3(0, 0, 400), Point3(200, 0, 0), 1.15, rC(), Vec4(1, 1, 1, 1), 2.8, 2.5], [FireworkType.Chrysanthemum, Vec3(-25, 0, 500), Point3(-150, 0, 0), 1.5, Vec4(1, 0.1, 1, 1), Vec4(1, 1, 1, 1), 1.6, 2.35], [FireworkType.GlowFlare, Vec3(150, 0, 620), Point3(-500, 0, 0), 1.0, Vec4(1, 0.1, 0.1, 1), Vec4(1, 0.1, 0.1, 1), 2.2, 0.5], [FireworkType.GlowFlare, Vec3(150, 0, 620), Point3(-300, 0, 0), 1.0, Vec4(0.1, 1, 0.1, 1), Vec4(0.1, 1, 0.1, 1), 2.2, 0.6], [FireworkType.GlowFlare, Vec3(150, 0, 620), Point3(-100, 0, 0), 1.0, Vec4(1, 0.1, 1, 1), Vec4(1, 0.1, 1, 1), 2.2, 0.5], [FireworkType.GlowFlare, Vec3(-150, 0, 620), Point3(100, 0, 0), 1.0, Vec4(1, 0.1, 0.1, 1), Vec4(1, 0.1, 0.1, 1), 2.2, 0.6], [FireworkType.GlowFlare, Vec3(-150, 0, 620), Point3(300, 0, 0), 1.0, Vec4(0.1, 1, 0.1, 1), Vec4(0.1, 1, 0.1, 1), 2.2, 0.5], [FireworkType.GlowFlare, Vec3(-150, 0, 620), Point3(500, 0, 0), 1.0, Vec4(1, 0.1, 1, 1), Vec4(1, 
0.1, 1, 1), 2.2, 0.5], [FireworkType.Chrysanthemum, Vec3(-50, 0, 400), Point3(-200, 0, 0), 1.2, Vec4(1, 0.5, 0.2, 1), Vec4(1, 1, 1, 1), 2.5, 0.0], [FireworkType.GlowFlare, Vec3(250, 0, 350), Point3(-500, 0, 0), 1.0, Vec4(1, 1, 0.1, 1), Vec4(1, 1, 0.1, 1), 2.2, 0.0], [FireworkType.GlowFlare, Vec3(-250, 0, 350), Point3(500, 0, 0), 1.0, Vec4(1, 1, 0.1, 1), Vec4(1, 1, 0.1, 1), 2.2, 0.5], [FireworkType.Chrysanthemum, Vec3(50, 0, 440), Point3(200, 0, 0), 1.2, Vec4(1, 0.5, 0.2, 1), Vec4(1, 1, 1, 1), 2.0, 1.5], [FireworkType.BasicPeony, Vec3(50, 0, 500), Point3(200, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 1.5, 0.5], [FireworkType.BasicPeony, Vec3(-80, 0, 500), Point3(-200, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 1.5, 0.5], [FireworkType.BasicPeony, Vec3(50, 0, 550), Point3(350, 0, 0), 1.1, rC(), Vec4(1, 1, 1, 1), 1.5, 0.5], [FireworkType.DiademPeony, Vec3(0, 0, 600), Point3(0, 0, 0), 1.3, Vec4(0.1, 0.1, 1, 1), Vec4(1, 1, 0.1, 1), 1.3, 0.5], [FireworkType.BasicPeony, Vec3(-100, 0, 750), Point3(-350, 0, 0), 1.1, rC(), Vec4(1, 1, 1, 1), 1.0, 0.2], [FireworkType.BasicPeony, Vec3(40, 0, 550), Point3(400, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 1.25, 0.5], [FireworkType.BasicPeony, Vec3(0, 0, 550), Point3(-400, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 1.25, 0.5], [FireworkType.Chrysanthemum, Vec3(-100, 0, 550), Point3(-300, 0, 0), 1.0, rC(), Vec4(1, 1, 1, 1), 1.3, 0.0], [FireworkType.Chrysanthemum, Vec3(100, 0, 550), Point3(300, 0, 0), 1.0, rC(), Vec4(1, 1, 1, 1), 1.3, 0.5], [FireworkType.DiademChrysanthemum, Vec3(-10, 0, 600), Point3(0, 0, 0), 1.3, Vec4(1, 0.1, 0.1, 1), Vec4(1, 1, 0.1, 1), 1.3, 1.9], [FireworkType.Bees, Vec3(-100, 0, 650), Point3(-350, 0, 0), 1.3, Vec4(1, 0.1, 1, 1), Vec4(1, 1, 1, 1), 1.2, 2.2], [FireworkType.Bees, Vec3(100, 0, 600), Point3(-250, 0, 0), 1.3, Vec4(1, 1, 0.1, 1), Vec4(1, 1, 1, 1), 1.2, 2.2], [FireworkType.Chrysanthemum, Vec3(25, 0, 480), Point3(250, 0, 0), 1.3, rC(), Vec4(1, 1, 1, 1), 2.3, 0.3], [FireworkType.Bees, Vec3(-100, 0, 500), Point3(100, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 1.3, 0.0], [FireworkType.Bees, Vec3(150, 0, 500), Point3(350, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 1.3, 3.25], [FireworkType.GlowFlare, Vec3(-150, 0, 400), Point3(0, 0, 0), 0.5, Vec4(0.1, 1, 0.1, 1), Vec4(0.1, 1, 0.1, 1), 1.5, 0.0], [FireworkType.GlowFlare, Vec3(150, 0, 400), Point3(0, 0, 0), 0.5, Vec4(0.1, 1, 0.1, 1), Vec4(0.1, 1, 0.1, 1), 1.5, 0.75], [FireworkType.GlowFlare, Vec3(0, 0, 480), Point3(0, 0, 0), 0.75, Vec4(0.1, 1, 0.1, 1), Vec4(0.1, 1, 0.1, 1), 1.75, 0.0], [FireworkType.DiademChrysanthemum, Vec3(0, 0, 450), Point3(0, 0, 0), 1.25, rC(), Vec4(1, 1, 0.1, 1), 1.25, 2.5], [FireworkType.DiademPeony, Vec3(50, 0, 450), Point3(300, 0, 0), 1.2, rC(), rC(), 1.75, 0.75], [FireworkType.Ring, Vec3(75, 0, 500), Point3(150, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 1.75, 0.5], [FireworkType.DiademPeony, Vec3(-50, 0, 450), Point3(-300, 0, 0), 1.2, rC(), rC(), 1.75, 0.5], [FireworkType.Ring, Vec3(-75, 0, 500), Point3(-150, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 1.75, 1.25], [FireworkType.Saturn, Vec3(0, 0, 450), Point3(0, 0, 0), 1.2, rC(), rC(), 1.3, 3.3], [FireworkType.BasicPeony, Vec3(-25, 0, 300), Point3(-400, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 3.1, 0.2], [FireworkType.BasicPeony, Vec3(-10, 0, 400), Point3(-200, 0, 0), 1.15, rC(), Vec4(1, 1, 1, 1), 2.7, 0.2], [FireworkType.BasicPeony, Vec3(0, 0, 500), Point3(0, 0, 0), 1.1, rC(), Vec4(1, 1, 1, 1), 2.3, 0.2], [FireworkType.BasicPeony, Vec3(10, 0, 600), Point3(200, 0, 0), 1.05, rC(), Vec4(1, 1, 1, 1), 1.9, 0.2], [FireworkType.BasicPeony, Vec3(25, 0, 700), Point3(400, 0, 
0), 1.0, rC(), Vec4(1, 1, 1, 1), 1.5, 0.6], [FireworkType.Saturn, Vec3(75, 0, 300), Point3(250, 0, 0), 1.25, Vec4(0.1, 1, 0.1, 1), Vec4(1, 1, 0.1, 1), 2.25, 2.5], [FireworkType.DiademChrysanthemum, Vec3(-25, 0, 550), Point3(0, 0, 0), 1.4, rC(), rC(), 1.3, 1.5], [FireworkType.BasicPeony, Vec3(-150, 0, 450), Point3(-400, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.AdvancedPeony, Vec3(-100, 0, 550), Point3(-300, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.Chrysanthemum, Vec3(-50, 0, 450), Point3(-200, 0, 0), 1.2, Vec4(1, 0.1, 1, 1), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.BasicPeony, Vec3(-25, 0, 550), Point3(-100, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.AdvancedPeony, Vec3(0, 0, 450), Point3(0, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.Chrysanthemum, Vec3(25, 0, 550), Point3(100, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.BasicPeony, Vec3(50, 0, 450), Point3(200, 0, 0), 1.2, Vec4(1, 0.1, 1, 1), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.AdvancedPeony, Vec3(100, 0, 550), Point3(300, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.Chrysanthemum, Vec3(150, 0, 450), Point3(400, 0, 0), 1.2, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.BasicPeony, Vec3(50, 0, 400), Point3(300, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.AdvancedPeony, Vec3(25, 0, 500), Point3(200, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.Chrysanthemum, Vec3(0, 0, 400), Point3(100, 0, 0), 1.25, Vec4(1, 0.2, 1, 1), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.BasicPeony, Vec3(-25, 0, 500), Point3(0, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.AdvancedPeony, Vec3(-50, 0, 400), Point3(-100, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.Chrysanthemum, Vec3(-100, 0, 500), Point3(-200, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.BasicPeony, Vec3(-150, 0, 400), Point3(-300, 0, 0), 1.25, rC(), Vec4(1, 1, 1, 1), 2.0, 0.25], [FireworkType.AdvancedPeony, Vec3(-200, 0, 500), Point3(-400, 0, 0), 1.25, Vec4(1, 0.2, 1, 1), Vec4(1, 1, 1, 1), 2.0, 0.25]]}
self.sectionData = {HolidayGlobals.FOURTHOFJULY: [(0, 34), (34, 95)],HolidayGlobals.NEWYEARS: [(0, 77)],HolidayGlobals.MARDIGRAS: [(0, 77)]}
self.showMusic = {HolidayGlobals.NEWYEARS: SoundGlobals.MUSIC_FIREWORKS,HolidayGlobals.MARDIGRAS: SoundGlobals.MUSIC_FIREWORKS}
del r
del rV
del rP
del rS
del rC
del rT
del rD
self.delaySectionStart = None
self.curSection = None
self.curOffset = 0.0
return None
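# Each entry in the show data above is a list of
# [fireworkType, launch velocity, launch position, scale,
#  primary color, secondary color (-1 means reuse the primary),
#  trail duration, delay in seconds before the next launch],
# matching the unpacking in beginSection() below.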
def beginSection(self, startIndex, endIndex, offset):
taskMgr.remove('beginSection' + str(startIndex) + str(endIndex))
sectionIval = Parallel()
time = 2.0
showMusic = self.showMusic.get(self.showType)
if showMusic:
base.musicMgr.load(showMusic, looping=False)
musicOffset = self.getDuration(0, startIndex) - self.getDuration(startIndex, startIndex) + offset
volume = 0.8
if not self.wantFireworkSounds():
volume = 0.0
sectionIval.append(Func(base.musicMgr.request, showMusic, priority=2, looping=False, volume=volume))
sectionIval.append(Func(base.musicMgr.offsetMusic, musicOffset))
sectionData = self.showData.get(self.showType)[startIndex:endIndex]
for fireworkInfo in sectionData:
typeId = fireworkInfo[0]
velocity = fireworkInfo[1]
pos = fireworkInfo[2]
scale = fireworkInfo[3]
color1 = fireworkInfo[4]
color2 = fireworkInfo[5]
if color2 == -1:
color2 = color1
trailDur = fireworkInfo[6]
delay = fireworkInfo[7]
firework = Firework(typeId, velocity, scale, color1, color2, trailDur)
firework.reparentTo(self)
firework.setPos(pos)
self.fireworks.append(firework)
sectionIval.append(Sequence(Wait(time), firework.generateFireworkIval()))
time += delay
if endIndex == len(self.showData.get(self.showType)):
sectionIval.append(Sequence(Wait(time), Func(self.cleanupShow)))
self.sectionIvals.append(sectionIval)
self.curSection = sectionIval
self.curOffset = offset
self.delaySectionStart = FrameDelayedCall('delaySectionStart', self.startCurSection, frames=24)
def startCurSection(self):
self.curSection.start(self.curOffset)
def begin(self, timestamp):
time = 0.0
for section in self.sectionData.get(self.showType):
startIndex = section[0]
endIndex = section[1]
sectionDur = self.getDuration(startIndex, endIndex)
if timestamp < sectionDur:
timestamp = max(0.0, timestamp)
taskMgr.doMethodLater(time, self.beginSection, 'beginSection' + str(startIndex) + str(endIndex), extraArgs=[startIndex, endIndex, timestamp])
time = time + sectionDur - timestamp
timestamp -= sectionDur
def getDuration(self, startIndex=0, endIndex=None):
duration = 0.0
if endIndex is None:
endIndex = len(self.showData.get(self.showType))
for firework in self.showData.get(self.showType)[startIndex:endIndex]:
duration += firework[7]
return duration
def isPlaying(self):
for ival in self.sectionIvals:
if ival.isPlaying():
return True
return False
def cleanupShow(self):
if self.delaySectionStart:
self.delaySectionStart.destroy()
showMusic = self.showMusic.get(self.showType)
if showMusic:
base.musicMgr.requestFadeOut(showMusic)
for section in self.sectionData.get(self.showType):
startIndex = section[0]
endIndex = section[1]
taskMgr.remove('beginSection' + str(startIndex) + str(endIndex))
for ival in self.sectionIvals:
ival.pause()
ival = None
self.sectionIvals = []
for firework in self.fireworks:
firework.cleanup()
firework = None
self.fireworks = []
return
def wantFireworkSounds(self):
return localAvatar.getGameState() != 'Cutscene' and base.cr.timeOfDayManager.environment not in [TODDefs.ENV_CAVE, TODDefs.ENV_LAVACAVE, TODDefs.ENV_INTERIOR]
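# A minimal, self-contained sketch (hypothetical data; no Panda3D needed) of the
# timing model behind getDuration() and begin() above: field [7] of each entry is
# the delay before the next launch, so a section's duration is the sum of its
# delays, and each section is scheduled after the remainder of the previous one.
delays = [2.2, 0.5, 0.0, 1.5, 0.5]  # field [7] of five hypothetical entries
sections = [(0, 2), (2, 5)]         # (startIndex, endIndex) pairs

def duration(start, end):
    return sum(delays[start:end])

time = 0.0
for start, end in sections:
    print(f"section {start}:{end} starts at t={time:.1f}s, runs {duration(start, end):.1f}s")
    time += duration(start, end)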
| 193.324503
| 23,474
| 0.571938
| 5,494
| 29,192
| 3.036585
| 0.035311
| 0.070851
| 0.058083
| 0.041959
| 0.828029
| 0.805431
| 0.793862
| 0.772463
| 0.768387
| 0.750584
| 0
| 0.215115
| 0.168265
| 29,192
| 151
| 23,475
| 193.324503
| 0.471993
| 0
| 0
| 0.140625
| 0
| 0
| 0.002501
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117188
| false
| 0
| 0.070313
| 0.0625
| 0.296875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
80aefb2333e1c8607d553038b29b555ce7f227fc
| 9,655
|
py
|
Python
|
platform/core/tests/test_auditor/test_auditor_pipeline.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/tests/test_auditor/test_auditor_pipeline.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
platform/core/tests/test_auditor/test_auditor_pipeline.py
|
hackerwins/polyaxon
|
ff56a098283ca872abfbaae6ba8abba479ffa394
|
[
"Apache-2.0"
] | null | null | null |
# pylint:disable=ungrouped-imports
from unittest.mock import patch
import pytest
import auditor
from events.registry import pipeline as pipeline_events
from factories.factory_pipelines import PipelineFactory
from tests.test_auditor.utils import AuditorBaseTest
@pytest.mark.auditor_mark
class AuditorPipelineTest(AuditorBaseTest):
"""Testing subscribed events"""
EVENTS = pipeline_events.EVENTS
def setUp(self):
super().setUp()
self.pipeline = PipelineFactory()
self.tested_events = {
pipeline_events.PIPELINE_CREATED,
pipeline_events.PIPELINE_UPDATED,
pipeline_events.PIPELINE_DELETED,
pipeline_events.PIPELINE_CLEANED_TRIGGERED,
pipeline_events.PIPELINE_VIEWED,
pipeline_events.PIPELINE_ARCHIVED,
pipeline_events.PIPELINE_RESTORED,
pipeline_events.PIPELINE_BOOKMARKED,
pipeline_events.PIPELINE_UNBOOKMARKED,
pipeline_events.PIPELINE_DELETED_TRIGGERED,
}
@patch('executor.executor_service.ExecutorService.record_event')
@patch('notifier.service.NotifierService.record_event')
@patch('tracker.service.TrackerService.record_event')
@patch('activitylogs.service.ActivityLogService.record_event')
def test_pipeline_created(self,
activitylogs_record,
tracker_record,
notifier_record,
executor_record):
auditor.record(event_type=pipeline_events.PIPELINE_CREATED,
instance=self.pipeline)
assert tracker_record.call_count == 1
assert activitylogs_record.call_count == 1
assert notifier_record.call_count == 0
assert executor_record.call_count == 0
@patch('executor.executor_service.ExecutorService.record_event')
@patch('notifier.service.NotifierService.record_event')
@patch('tracker.service.TrackerService.record_event')
@patch('activitylogs.service.ActivityLogService.record_event')
def test_pipeline_updated(self,
activitylogs_record,
tracker_record,
notifier_record,
executor_record):
auditor.record(event_type=pipeline_events.PIPELINE_UPDATED,
instance=self.pipeline,
actor_name='foo',
actor_id=1)
assert tracker_record.call_count == 1
assert activitylogs_record.call_count == 1
assert notifier_record.call_count == 0
assert executor_record.call_count == 0
@patch('executor.executor_service.ExecutorService.record_event')
@patch('notifier.service.NotifierService.record_event')
@patch('tracker.service.TrackerService.record_event')
@patch('activitylogs.service.ActivityLogService.record_event')
def test_pipeline_deleted(self,
activitylogs_record,
tracker_record,
notifier_record,
executor_record):
auditor.record(event_type=pipeline_events.PIPELINE_DELETED,
instance=self.pipeline)
assert tracker_record.call_count == 1
assert activitylogs_record.call_count == 0
assert notifier_record.call_count == 0
assert executor_record.call_count == 0
@patch('executor.executor_service.ExecutorService.record_event')
@patch('notifier.service.NotifierService.record_event')
@patch('tracker.service.TrackerService.record_event')
@patch('activitylogs.service.ActivityLogService.record_event')
def test_pipeline_cleaned_triggered(self,
activitylogs_record,
tracker_record,
notifier_record,
executor_record):
auditor.record(event_type=pipeline_events.PIPELINE_CLEANED_TRIGGERED,
instance=self.pipeline)
assert tracker_record.call_count == 1
assert activitylogs_record.call_count == 1
assert notifier_record.call_count == 0
assert executor_record.call_count == 0
@patch('executor.executor_service.ExecutorService.record_event')
@patch('notifier.service.NotifierService.record_event')
@patch('tracker.service.TrackerService.record_event')
@patch('activitylogs.service.ActivityLogService.record_event')
def test_pipeline_viewed(self,
activitylogs_record,
tracker_record,
notifier_record,
executor_record):
auditor.record(event_type=pipeline_events.PIPELINE_VIEWED,
instance=self.pipeline,
actor_name='foo',
actor_id=1)
assert tracker_record.call_count == 1
assert activitylogs_record.call_count == 1
assert notifier_record.call_count == 0
assert executor_record.call_count == 0
@patch('executor.executor_service.ExecutorService.record_event')
@patch('notifier.service.NotifierService.record_event')
@patch('tracker.service.TrackerService.record_event')
@patch('activitylogs.service.ActivityLogService.record_event')
def test_pipeline_archived(self,
activitylogs_record,
tracker_record,
notifier_record,
executor_record):
auditor.record(event_type=pipeline_events.PIPELINE_ARCHIVED,
instance=self.pipeline,
actor_name='foo',
actor_id=1)
assert tracker_record.call_count == 1
assert activitylogs_record.call_count == 1
assert notifier_record.call_count == 0
assert executor_record.call_count == 0
@patch('executor.executor_service.ExecutorService.record_event')
@patch('notifier.service.NotifierService.record_event')
@patch('tracker.service.TrackerService.record_event')
@patch('activitylogs.service.ActivityLogService.record_event')
def test_pipeline_restored(self,
activitylogs_record,
tracker_record,
notifier_record,
executor_record):
auditor.record(event_type=pipeline_events.PIPELINE_RESTORED,
instance=self.pipeline,
actor_name='foo',
actor_id=1)
assert tracker_record.call_count == 1
assert activitylogs_record.call_count == 1
assert notifier_record.call_count == 0
assert executor_record.call_count == 0
@patch('executor.executor_service.ExecutorService.record_event')
@patch('notifier.service.NotifierService.record_event')
@patch('tracker.service.TrackerService.record_event')
@patch('activitylogs.service.ActivityLogService.record_event')
def test_pipeline_bookmarked(self,
activitylogs_record,
tracker_record,
notifier_record,
executor_record):
auditor.record(event_type=pipeline_events.PIPELINE_BOOKMARKED,
instance=self.pipeline,
actor_name='foo',
actor_id=1)
assert tracker_record.call_count == 1
assert activitylogs_record.call_count == 1
assert notifier_record.call_count == 0
assert executor_record.call_count == 0
@patch('executor.executor_service.ExecutorService.record_event')
@patch('notifier.service.NotifierService.record_event')
@patch('tracker.service.TrackerService.record_event')
@patch('activitylogs.service.ActivityLogService.record_event')
def test_pipeline_unbookmarked(self,
activitylogs_record,
tracker_record,
notifier_record,
executor_record):
auditor.record(event_type=pipeline_events.PIPELINE_UNBOOKMARKED,
instance=self.pipeline,
actor_name='foo',
actor_id=1)
assert tracker_record.call_count == 1
assert activitylogs_record.call_count == 1
assert notifier_record.call_count == 0
assert executor_record.call_count == 0
@patch('executor.executor_service.ExecutorService.record_event')
@patch('notifier.service.NotifierService.record_event')
@patch('tracker.service.TrackerService.record_event')
@patch('activitylogs.service.ActivityLogService.record_event')
def test_pipeline_deleted_triggered(self,
activitylogs_record,
tracker_record,
notifier_record,
executor_record):
auditor.record(event_type=pipeline_events.PIPELINE_DELETED_TRIGGERED,
instance=self.pipeline,
actor_id=1,
actor_name='foo')
assert tracker_record.call_count == 1
assert activitylogs_record.call_count == 1
assert notifier_record.call_count == 0
assert executor_record.call_count == 0
del AuditorBaseTest
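# Illustrative alternative (an assumption, not code from this repo): the
# near-identical tests above differ only in event type, extra actor kwargs, and
# the expected activity-log count, so they could be collapsed with parametrize
# while reusing this module's imports (auditor, pipeline_events, PipelineFactory,
# patch, pytest).
EXPECTATIONS = [
    (pipeline_events.PIPELINE_CREATED, {}, 1),
    (pipeline_events.PIPELINE_DELETED, {}, 0),
    (pipeline_events.PIPELINE_VIEWED, {'actor_name': 'foo', 'actor_id': 1}, 1),
]

@pytest.mark.parametrize("event_type, extra, activity_calls", EXPECTATIONS)
@patch('activitylogs.service.ActivityLogService.record_event')
@patch('tracker.service.TrackerService.record_event')
def test_pipeline_event(tracker_record, activitylogs_record,
                        event_type, extra, activity_calls):
    # The bottom-most @patch supplies the first mock argument.
    auditor.record(event_type=event_type, instance=PipelineFactory(), **extra)
    assert tracker_record.call_count == 1
    assert activitylogs_record.call_count == activity_calls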
| 43.687783
| 77
| 0.616675
| 879
| 9,655
| 6.47099
| 0.075085
| 0.096695
| 0.105485
| 0.059072
| 0.852496
| 0.834564
| 0.834564
| 0.834564
| 0.834564
| 0.834564
| 0
| 0.007073
| 0.311756
| 9,655
| 220
| 78
| 43.886364
| 0.848909
| 0.006111
| 0
| 0.746032
| 0
| 0
| 0.204484
| 0.202294
| 0
| 0
| 0
| 0
| 0.21164
| 1
| 0.058201
| false
| 0
| 0.031746
| 0
| 0.100529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
80e06df01eafa784a4fdae668c9677eb149db97d
| 14,924
|
py
|
Python
|
tests/v3_api/test_ingress.py
|
adingilloRancher/validation
|
9e063c383840895bc7d973f02172aa5080e59eaa
|
[
"Apache-2.0"
] | null | null | null |
tests/v3_api/test_ingress.py
|
adingilloRancher/validation
|
9e063c383840895bc7d973f02172aa5080e59eaa
|
[
"Apache-2.0"
] | null | null | null |
tests/v3_api/test_ingress.py
|
adingilloRancher/validation
|
9e063c383840895bc7d973f02172aa5080e59eaa
|
[
"Apache-2.0"
] | null | null | null |
from common import * # NOQA
import pytest
namespace = {"p_client": None, "ns": None, "cluster": None, "project": None}
def test_ingress():
p_client = namespace["p_client"]
ns = namespace["ns"]
cluster = namespace["cluster"]
con = [{"name": "test1",
"image": TEST_TARGET_IMAGE}]
name = random_test_name("default")
workload = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
daemonSetConfig={})
validate_workload(p_client, workload, "daemonSet", ns.name,
len(get_schedulable_nodes(cluster)))
host = "test1.com"
path = "/name.html"
rule = {"host": host,
"paths":
{path: {"workloadIds": [workload.id], "targetPort": "80"}}}
p_client.create_ingress(name=name,
namespaceId=ns.id,
rules=[rule])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], host, path)
def test_ingress_with_same_rules_having_multiple_targets():
p_client = namespace["p_client"]
ns = namespace["ns"]
cluster = namespace["cluster"]
con = [{"name": "testm1",
"image": TEST_TARGET_IMAGE}]
name = random_test_name("default")
workload1 = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
daemonSetConfig={})
validate_workload(p_client, workload1, "daemonSet", ns.name,
len(get_schedulable_nodes(cluster)))
name = random_test_name("default")
workload2 = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
daemonSetConfig={})
validate_workload(p_client, workload2, "daemonSet", ns.name,
len(get_schedulable_nodes(cluster)))
host = "testm1.com"
path = "/name.html"
rule1 = {"host": host,
"paths": {
path: {"workloadIds": [workload1.id], "targetPort": "80"}}}
rule2 = {"host": host,
"paths": {
path:
{"workloadIds": [workload2.id], "targetPort": "80"}}}
p_client.create_ingress(name=name,
namespaceId=ns.id,
rules=[rule1, rule2])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload1, workload2], host, path)
def test_ingress_edit_target():
p_client = namespace["p_client"]
ns = namespace["ns"]
con = [{"name": "test1",
"image": TEST_TARGET_IMAGE}]
name = random_test_name("default")
workload1 = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
scale=2)
validate_workload(p_client, workload1, "deployment", ns.name, pod_count=2)
name = random_test_name("default")
workload2 = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
scale=2)
validate_workload(p_client, workload2, "deployment", ns.name, pod_count=2)
host = "test2.com"
path = "/name.html"
rule = {"host": host,
"paths":
{path: {"workloadIds": [workload1.id], "targetPort": "80"}}}
ingress = p_client.create_ingress(name=name,
namespaceId=ns.id,
rules=[rule])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload1], host, path)
rule = {"host": host,
"paths":
{path: {"workloadIds": [workload2.id], "targetPort": "80"}}}
ingress = p_client.update(ingress, rules=[rule])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload2], host, path)
def test_ingress_edit_host():
p_client = namespace["p_client"]
ns = namespace["ns"]
cluster = namespace["cluster"]
con = [{"name": "test1",
"image": TEST_TARGET_IMAGE}]
name = random_test_name("default")
workload = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
daemonSetConfig={})
validate_workload(p_client, workload, "daemonSet", ns.name,
len(get_schedulable_nodes(cluster)))
host = "test3.com"
path = "/name.html"
rule = {"host": host,
"paths":
{path: {"workloadIds": [workload.id], "targetPort": "80"}}}
ingress = p_client.create_ingress(name=name,
namespaceId=ns.id,
rules=[rule])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], host, path)
host = "test4.com"
rule = {"host": host,
"paths":
{path: {"workloadIds": [workload.id], "targetPort": "80"}}}
ingress = p_client.update(ingress, rules=[rule])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], host, path)
def test_ingress_edit_path():
p_client = namespace["p_client"]
ns = namespace["ns"]
cluster = namespace["cluster"]
con = [{"name": "test1",
"image": TEST_TARGET_IMAGE}]
name = random_test_name("default")
workload = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
daemonSetConfig={})
validate_workload(p_client, workload, "daemonSet", ns.name,
len(get_schedulable_nodes(cluster)))
host = "test5.com"
path = "/name.html"
rule = {"host": host,
"paths":
{path: {"workloadIds": [workload.id], "targetPort": "80"}}}
ingress = p_client.create_ingress(name=name,
namespaceId=ns.id,
rules=[rule])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], host, path)
path = "/service1.html"
rule = {"host": host,
"paths":
{path: {"workloadIds": [workload.id], "targetPort": "80"}}}
ingress = p_client.update(ingress, rules=[rule])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], host, path)
def test_ingress_edit_add_more_rules():
p_client = namespace["p_client"]
ns = namespace["ns"]
con = [{"name": "test1",
"image": TEST_TARGET_IMAGE}]
name = random_test_name("default")
workload1 = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
scale=2)
validate_workload(p_client, workload1, "deployment", ns.name, pod_count=2)
name = random_test_name("default")
workload2 = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
scale=2)
validate_workload(p_client, workload2, "deployment", ns.name, pod_count=2)
host1 = "test6.com"
path = "/name.html"
rule1 = {"host": host1,
"paths":
{path: {"workloadIds": [workload1.id], "targetPort": "80"}}}
ingress = p_client.create_ingress(name=name,
namespaceId=ns.id,
rules=[rule1])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload1], host1, path)
host2 = "test7.com"
rule2 = {"host": host2,
"paths":
{path: {"workloadIds": [workload2.id], "targetPort": "80"}}}
ingress = p_client.update(ingress, rules=[rule1, rule2])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload2], host2, path)
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload1], host1, path)
def test_ingress_scale_up_target():
p_client = namespace["p_client"]
ns = namespace["ns"]
con = [{"name": "test1",
"image": TEST_TARGET_IMAGE}]
name = random_test_name("default")
workload = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
scale=2)
validate_workload(p_client, workload, "deployment", ns.name, pod_count=2)
host = "test8.com"
path = "/name.html"
rule = {"host": host,
"paths":
{path: {"workloadIds": [workload.id], "targetPort": "80"}}}
p_client.create_ingress(name=name,
namespaceId=ns.id,
rules=[rule])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], host, path)
workload = p_client.update(workload, scale=4, containers=con)
validate_workload(p_client, workload, "deployment", ns.name, pod_count=4)
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], host, path)
def test_ingress_upgrade_target():
p_client = namespace["p_client"]
ns = namespace["ns"]
con = {"name": "test1",
"image": TEST_TARGET_IMAGE}
name = random_test_name("default")
workload = p_client.create_workload(name=name,
containers=[con],
namespaceId=ns.id,
scale=2)
validate_workload(p_client, workload, "deployment", ns.name, pod_count=2)
host = "test9.com"
path = "/name.html"
rule = {"host": host,
"paths":
{path: {"workloadIds": [workload.id], "targetPort": "80"}}}
p_client.create_ingress(name=name,
namespaceId=ns.id,
rules=[rule])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], host, path)
con["environment"] = {"test1": "value1"}
workload = p_client.update(workload, containers=[con])
wait_for_pods_in_workload(p_client, workload, pod_count=2)
validate_workload(p_client, workload, "deployment", ns.name, pod_count=2)
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], host, path)
def test_ingress_rule_with_only_path():
p_client = namespace["p_client"]
ns = namespace["ns"]
con = {"name": "test1",
"image": TEST_TARGET_IMAGE}
name = random_test_name("default")
workload = p_client.create_workload(name=name,
containers=[con],
namespaceId=ns.id,
scale=2)
validate_workload(p_client, workload, "deployment", ns.name, pod_count=2)
host = ""
path = "/service2.html"
rule = {"host": host,
"paths":
{path: {"workloadIds": [workload.id], "targetPort": "80"}}}
p_client.create_ingress(name=name,
namespaceId=ns.id,
rules=[rule])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], "", path, True)
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], "hello.com", path, True)
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], "hello2.com", path, True)
def test_ingress_rule_with_only_host():
p_client = namespace["p_client"]
ns = namespace["ns"]
con = {"name": "test1",
"image": TEST_TARGET_IMAGE}
name = random_test_name("default")
workload = p_client.create_workload(name=name,
containers=[con],
namespaceId=ns.id,
scale=2)
validate_workload(p_client, workload, "deployment", ns.name, pod_count=2)
host = "test10.com"
path = ""
rule = {"host": host,
"paths":
{path:
{"workloadIds": [workload.id], "targetPort": "80"}}}
p_client.create_ingress(name=name,
namespaceId=ns.id,
rules=[rule])
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], host, "/name.html")
validate_ingress(namespace["p_client"], namespace["cluster"],
[workload], host, "/service1.html")
def test_ingress_xip_io():
p_client = namespace["p_client"]
ns = namespace["ns"]
cluster = namespace["cluster"]
con = [{"name": "test1",
"image": TEST_TARGET_IMAGE}]
name = random_test_name("default")
workload = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
daemonSetConfig={})
validate_workload(p_client, workload, "daemonSet", ns.name,
len(get_schedulable_nodes(cluster)))
path = "/name.html"
rule = {"host": "xip.io",
"paths":
{path: {"workloadIds": [workload.id], "targetPort": "80"}}}
ingress = p_client.create_ingress(name=name,
namespaceId=ns.id,
rules=[rule])
validate_ingress_using_endpoint(namespace["p_client"], ingress, [workload])
@pytest.fixture(scope='module', autouse=True)
def create_project_client(request):
client = get_admin_client()
clusters = client.list_cluster()
assert len(clusters) >= 1
cluster = clusters[0]
create_kubeconfig(cluster)
p, ns = create_project_and_ns(ADMIN_TOKEN, cluster)
p_client = get_project_client_for_token(p, ADMIN_TOKEN)
namespace["p_client"] = p_client
namespace["ns"] = ns
namespace["cluster"] = cluster
namespace["project"] = p
def fin():
client = get_admin_client()
client.delete(namespace["project"])
request.addfinalizer(fin)
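# Possible refactor (a sketch, not code from this suite): every test above builds
# the same rule dict by hand; a small helper would remove that duplication.
def make_rule(host, path, workload_ids, target_port="80"):
    """Build the ingress rule dict used throughout these tests."""
    return {"host": host,
            "paths": {path: {"workloadIds": workload_ids,
                             "targetPort": target_port}}}
# e.g. rule = make_rule("test1.com", "/name.html", [workload.id])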
| 41.112948
| 79
| 0.522715
| 1,415
| 14,924
| 5.304594
| 0.077739
| 0.088596
| 0.072475
| 0.066613
| 0.876366
| 0.863576
| 0.852651
| 0.842393
| 0.831735
| 0.802558
| 0
| 0.012745
| 0.348097
| 14,924
| 362
| 80
| 41.226519
| 0.758762
| 0.000268
| 0
| 0.771341
| 0
| 0
| 0.115766
| 0
| 0
| 0
| 0
| 0
| 0.003049
| 1
| 0.039634
| false
| 0
| 0.006098
| 0
| 0.045732
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
03b1480c2f580ac122d736947d3faf181a0aec92
| 110
|
py
|
Python
|
Python3/TuplesAndSets/slicing_tuple.py
|
norbertosanchezdichi/TIL
|
2e9719ddd288022f53b094a42679e849bdbcc625
|
[
"MIT"
] | null | null | null |
Python3/TuplesAndSets/slicing_tuple.py
|
norbertosanchezdichi/TIL
|
2e9719ddd288022f53b094a42679e849bdbcc625
|
[
"MIT"
] | null | null | null |
Python3/TuplesAndSets/slicing_tuple.py
|
norbertosanchezdichi/TIL
|
2e9719ddd288022f53b094a42679e849bdbcc625
|
[
"MIT"
] | null | null | null |
numbers = (1, 2, 3, (4, 5), 6, 7)
print(f'{numbers =}')
print(f'{numbers[2:3] =}')
print(f'{numbers[2::2] =}')
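# Hypothetical additions (not in the original file) showing two more slice forms:
print(f'{numbers[::-1] =}')  # negative step reverses: (7, 6, (4, 5), 3, 2, 1)
print(f'{numbers[3][0] =}')  # reach into the nested tuple (4, 5): prints 4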
| 27.5
| 33
| 0.518182
| 21
| 110
| 2.714286
| 0.47619
| 0.315789
| 0.684211
| 0.491228
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114583
| 0.127273
| 110
| 4
| 34
| 27.5
| 0.479167
| 0
| 0
| 0
| 0
| 0
| 0.396396
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
20aa27e1ddeaeb56343cc4f30e96aafd82c9bf66
| 8,857
|
py
|
Python
|
booklable.py
|
suddharshan/creating-book-cover-using-python
|
69a052898e84033986a79c8e0771a48d16cbae12
|
[
"MIT"
] | null | null | null |
booklable.py
|
suddharshan/creating-book-cover-using-python
|
69a052898e84033986a79c8e0771a48d16cbae12
|
[
"MIT"
] | 2
|
2019-05-11T08:14:49.000Z
|
2019-05-17T05:33:00.000Z
|
booklable.py
|
suddharshan/creating-book-cover-using-python
|
69a052898e84033986a79c8e0771a48d16cbae12
|
[
"MIT"
] | null | null | null |
import csv
import PIL
from PIL import ImageFont
from PIL import Image
from PIL import ImageDraw
with open('books.csv',encoding='utf-8') as csv_file:
data = [row for row in csv.reader(csv_file)]
print("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^")
print(" BOOK COVER MAKER ")
print("--------------------------------------------------")
x=range(10)
for i in x:
bookname=(data[i][0])
author=(data[i][1])
print("booknumer=%d"%i)
print("label=%s"%bookname)
k=(i%9)
j=str(k)
a=len(bookname)
d=len(author)
b=bookname.split()
e=author.split()
c=len(b)
f=len(e)
print("No.of words in book name=%d"%(c))
print("No.of characters in book name=%d"%a)
print("No.of words in author=%d"%(f))
print("No.of characters in author=%d"%(d))
if(c==1):
if(a<=8):
img=Image.open(j+'.png')
font=ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1=ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw=ImageDraw.Draw(img)
draw.text((500,900),bookname,(255,255,255),font=font)
draw.text((600,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
if(8<a<=17):
img=Image.open(j+'.png')
font=ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1=ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw=ImageDraw.Draw(img)
draw.text((180,900),bookname,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
if(c==2):
if(a<=8):
img= Image.open(j+'.png')
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
draw.text((500,900),bookname,(255,255,255),font=font)
draw.text((600,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
if(8<a<=17):
img= Image.open(j+'.png')
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
draw.text((200,900),bookname,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
if(c==3):
if(8<a<=17):
img= Image.open(j+'.png')
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
draw.text((200,900),bookname,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
else:
img= Image.open(j+'.png')
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
ab=b[0]+" "+b[1]
bc=b[2]
draw.text((250,900),ab,(255,255,255),font=font)
draw.text((350,1050),bc,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
if(c==4):
if(8<a<=17):
img= Image.open(j+'.png')
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
draw.text((200,900),bookname,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
else:
img= Image.open("3.png")
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
ab=b[0]+" "+b[1]
bc=b[2]+" "+b[3]
draw.text((250,900),ab,(255,255,255),font=font)
draw.text((250,1050),bc,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
if(c==5):
ab=b[0]+" "+b[1]+" "+b[2]
bc=b[2]+" "+b[3]+" "+b[4]
if(8<a<=17):
img= Image.open(j+'.png')
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
draw.text((250,900),bookname,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
elif(len(ab)<=17):
img= Image.open(j+'.png')
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
ab=b[0]+" "+b[1]+" "+b[2]
bc=b[3]+" "+b[4]
draw.text((180,900),ab,(255,255,255),font=font)
draw.text((250,1050),bc,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
elif(len(ab)>17 and len(bc)<=17):
img= Image.open(j+'.png')
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
ab=b[0]+" "+b[1]
bc=b[2]+" "+b[3]+" "+b[4]
draw.text((250,900),ab,(255,255,255),font=font)
draw.text((200,1050),bc,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
else:
img= Image.open(j+'.png')
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
ab=b[0]+" "+b[1]
bc=b[2]+" "+b[3]
cd=b[4]
draw.text((250,900),ab,(255,255,255),font=font)
draw.text((250,1050),bc,(255,255,255),font=font)
draw.text((280,1200),cd,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
if(c==6):
ab=b[0]+" "+b[1]+" "+b[2]
bc=b[3]+" "+b[4]+" "+b[5]
if(8<a<=17):
img= Image.open(j+'.png')
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
draw.text((250,900),bookname,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
elif(len(ab)<17 and len(bc)<17):
img= Image.open(j+'.png')
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
ab=b[0]+" "+b[1]+" "+b[2]
bc=b[3]+" "+b[4]+" "+b[5]
draw.text((200,900),ab,(255,255,255),font=font)
draw.text((200,1050),bc,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
else:
img= Image.open(j+'.png')
font = ImageFont.truetype('aleo-bold-webfont.ttf',150,encoding="utf-8")
font1 = ImageFont.truetype('aleo-bold-webfont.ttf',110,encoding="utf-8")
draw = ImageDraw.Draw(img)
ab=b[0]+" "+b[1]
bc=b[2]+" "+b[3]
cd=b[4]+" "+b[5]
draw.text((225,900),ab,(255,255,255),font=font)
draw.text((225,1050),bc,(255,255,255),font=font)
draw.text((225,1200),cd,(255,255,255),font=font)
draw.text((400,2090),author,(255,255,255),font=font1)
img.save(bookname+'.png')
print("book cover saved as %s.png"%bookname)
print("**************************************************************************")
print("**************************************************************************")
| 46.862434
| 87
| 0.521847
| 1,231
| 8,857
| 3.753046
| 0.076361
| 0.101299
| 0.075974
| 0.10974
| 0.908009
| 0.891991
| 0.886364
| 0.885498
| 0.884199
| 0.868615
| 0
| 0.128232
| 0.253359
| 8,857
| 188
| 88
| 47.111702
| 0.570392
| 0
| 0
| 0.725806
| 0
| 0
| 0.157728
| 0.099131
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.026882
| 0
| 0.026882
| 0.064516
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
20e6792a7b75b46442b4776c3a34371d937fe0eb
| 12,337
|
py
|
Python
|
kitsu/models.py
|
MrArkon/kitsu.py
|
06435f83c72f5256c3c9bc82c8622f3eafac0e8d
|
[
"MIT"
] | 7
|
2021-09-05T06:13:04.000Z
|
2022-01-06T10:47:40.000Z
|
kitsu/models.py
|
MrArkon/kitsu.py
|
06435f83c72f5256c3c9bc82c8622f3eafac0e8d
|
[
"MIT"
] | null | null | null |
kitsu/models.py
|
MrArkon/kitsu.py
|
06435f83c72f5256c3c9bc82c8622f3eafac0e8d
|
[
"MIT"
] | 1
|
2021-12-05T13:27:56.000Z
|
2021-12-05T13:27:56.000Z
|
"""
MIT License
Copyright (c) 2021-present MrArkon
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import annotations
from datetime import datetime
from typing import Dict, List, Literal, Optional
from dateutil.parser import isoparse
__all__ = ("Anime", "Manga")
class Anime:
def __init__(self, payload: dict) -> None:
self._payload: dict = payload
def __repr__(self) -> str:
return f"<Anime id={self.id} title='{self.title}'>"
@property
def id(self) -> Optional[str]:
"""The anime's ID."""
return self._payload.get("id", None)
@property
def created_at(self) -> Optional[datetime]:
"""creation datetime"""
try:
return isoparse(self._payload["attributes"]["createdAt"])
except KeyError:
return None
except TypeError:
return None
@property
def updated_at(self) -> Optional[datetime]:
"""Returns the last modified datetime"""
try:
return isoparse(self._payload["attributes"]["updatedAt"])
except KeyError:
return None
except TypeError:
return None
@property
def slug(self) -> Optional[str]:
return self._payload["attributes"].get("slug", None)
@property
def synopsis(self) -> Optional[str]:
return self._payload["attributes"].get("synopsis", None)
@property
def title(self) -> Optional[str]:
"""The anime's title."""
value: Optional[str]
for value in self._payload["attributes"]["titles"].values():
if value:
return value
@property
def canonical_title(self) -> Optional[str]:
return self._payload["attributes"].get("canonicalTitle", None)
@property
def abbreviated_titles(self) -> Optional[List[str]]:
return self._payload["attributes"].get("abbreviatedTitles", None)
@property
def average_rating(self) -> Optional[float]:
try:
return float(self._payload["attributes"]["averageRating"])
except KeyError:
return None
except TypeError:
return None
@property
def rating_frequencies(self) -> Optional[Dict[str, str]]:
return self._payload["attributes"].get("ratingFrequencies", None)
@property
def user_count(self) -> Optional[int]:
try:
return int(self._payload["attributes"]["userCount"])
except KeyError:
return None
except TypeError:
return None
@property
def favorites_count(self) -> Optional[int]:
try:
return int(self._payload["attributes"]["favoritesCount"])
except KeyError:
return None
except TypeError:
return None
@property
def start_date(self) -> Optional[datetime]:
try:
return datetime.strptime(self._payload["attributes"]["startDate"], "%Y-%m-%d")
except KeyError:
return None
except TypeError:
return None
@property
def end_date(self) -> Optional[datetime]:
"""Returns the end date as a datetime object"""
try:
return datetime.strptime(self._payload["attributes"]["endDate"], "%Y-%m-%d")
except KeyError:
return None
except TypeError:
return None
@property
def popularity_rank(self) -> Optional[int]:
try:
return int(self._payload["attributes"]["popularityRank"])
except KeyError:
return None
except TypeError:
return None
@property
def rating_rank(self) -> Optional[int]:
try:
return int(self._payload["attributes"]["ratingRank"])
except KeyError:
return None
except TypeError:
return None
@property
def age_rating(self) -> Optional[Literal["G", "PG", "R", "R18"]]:
return self._payload["attributes"].get("ageRating", None)
@property
def age_rating_guide(self) -> Optional[str]:
return self._payload["attributes"].get("ageRatingGuide", None)
@property
def subtype(self) -> Optional[Literal["ONA", "OVA", "TV", "movie", "music", "special"]]:
return self._payload["attributes"].get("subtype", None)
@property
def status(self) -> Optional[Literal["current", "finished", "tba", "unreleased", "upcoming"]]:
return self._payload["attributes"].get("status", None)
@property
def tba(self) -> Optional[str]:
return self._payload["attributes"].get("tba", None)
def poster_image(
self, _type: Optional[Literal["tiny", "small", "medium", "large", "original"]] = "original"
) -> Optional[str]:
try:
return self._payload["attributes"]["posterImage"].get(_type, None)
except AttributeError:
return None
def cover_image(self, _type: Optional[Literal["tiny", "small", "large", "original"]] = "original") -> Optional[str]:
try:
return self._payload["attributes"]["coverImage"].get(_type, None)
except AttributeError:
return None
@property
def episode_count(self) -> Optional[int]:
try:
return int(self._payload["attributes"]["episodeCount"])
except KeyError:
return None
except TypeError:
return None
@property
def episode_length(self) -> Optional[int]:
"""length of each episode in minutes"""
try:
return int(self._payload["attributes"]["episodeLength"])
except KeyError:
return None
except TypeError:
return None
@property
def youtube_video_id(self) -> Optional[str]:
return self._payload["attributes"].get("youtubeVideoId", None)
@property
def nsfw(self) -> Optional[bool]:
return self._payload["attributes"].get("nsfw", None)
class Manga:
def __init__(self, payload: dict) -> None:
self._payload: dict = payload
def __str__(self) -> str:
return self.title or ""
@property
def id(self) -> Optional[str]:
"""The manga's ID."""
return self._payload.get("id", None)
@property
def created_at(self) -> Optional[datetime]:
"""creation datetime"""
try:
return isoparse(self._payload["attributes"]["createdAt"])
except KeyError:
return None
except TypeError:
return None
@property
def updated_at(self) -> Optional[datetime]:
"""last modified datetime"""
try:
return isoparse(self._payload["attributes"]["updatedAt"])
except KeyError:
return None
except TypeError:
return None
@property
def slug(self) -> Optional[str]:
return self._payload["attributes"].get("slug", None)
@property
def synopsis(self) -> Optional[str]:
return self._payload["attributes"].get("synopsis", None)
@property
def title(self) -> Optional[str]:
"""The manga's title."""
value: Optional[str]
for value in self._payload["attributes"]["titles"].values():
if value:
return value
@property
def canonical_title(self) -> Optional[str]:
return self._payload["attributes"].get("canonicalTitle", None)
@property
def abbreviated_titles(self) -> Optional[List[str]]:
return self._payload["attributes"].get("abbreviatedTitles", None)
@property
def average_rating(self) -> Optional[float]:
try:
return float(self._payload["attributes"]["averageRating"])
except KeyError:
return None
except TypeError:
return None
@property
def rating_frequencies(self) -> Optional[Dict[str, str]]:
return self._payload["attributes"].get("ratingFrequencies", None)
@property
def user_count(self) -> Optional[int]:
try:
return int(self._payload["attributes"]["userCount"])
except KeyError:
return None
except TypeError:
return None
@property
def favorites_count(self) -> Optional[int]:
try:
return int(self._payload["attributes"]["favoritesCount"])
except KeyError:
return None
except TypeError:
return None
@property
def start_date(self) -> Optional[datetime]:
try:
return datetime.strptime(self._payload["attributes"]["startDate"], "%Y-%m-%d")
except KeyError:
return None
except TypeError:
return None
@property
def end_date(self) -> Optional[datetime]:
try:
return datetime.strptime(self._payload["attributes"]["endDate"], "%Y-%m-%d")
except KeyError:
return None
except TypeError:
return None
@property
def popularity_rank(self) -> Optional[int]:
try:
return int(self._payload["attributes"]["popularityRank"])
except KeyError:
return None
except TypeError:
return None
@property
def rating_rank(self) -> Optional[int]:
try:
return int(self._payload["attributes"]["ratingRank"])
except KeyError:
return None
except TypeError:
return None
@property
def age_rating(self) -> Optional[Literal["G", "PG", "R", "R18"]]:
return self._payload["attributes"].get("ageRating", None)
@property
def age_rating_guide(self) -> Optional[str]:
return self._payload["attributes"].get("ageRatingGuide", None)
@property
def subtype(self) -> Optional[Literal["doujin", "manga", "manhua", "manhwa", "novel", "oel", "oneshot"]]:
return self._payload["attributes"].get("subtype", None)
@property
def status(self) -> Optional[Literal["current", "finished", "tba", "unreleased", "upcoming"]]:
return self._payload["attributes"].get("status", None)
@property
def tba(self) -> Optional[str]:
return self._payload["attributes"].get("tba", None)
def poster_image(
self, _type: Optional[Literal["tiny", "small", "medium", "large", "original"]] = "original"
) -> Optional[str]:
try:
return self._payload["attributes"]["posterImage"].get(_type, None)
except AttributeError:
return None
def cover_image(self, _type: Optional[Literal["tiny", "small", "large", "original"]] = "original") -> Optional[str]:
try:
return self._payload["attributes"]["coverImage"].get(_type, None)
except AttributeError:
return None
@property
def chapter_count(self) -> Optional[int]:
try:
return int(self._payload["attributes"]["chapterCount"])
except KeyError:
return None
except TypeError:
return None
@property
def volume_count(self) -> Optional[int]:
try:
return int(self._payload["attributes"]["volumeCount"])
except KeyError:
return None
except TypeError:
return None
@property
def serialization(self) -> Optional[str]:
return self._payload["attributes"].get("serialization", None)
| 30.997487
| 120
| 0.607279
| 1,304
| 12,337
| 5.648006
| 0.177147
| 0.085132
| 0.145418
| 0.098982
| 0.812899
| 0.801358
| 0.796877
| 0.796877
| 0.775967
| 0.775967
| 0
| 0.000891
| 0.272189
| 12,337
| 397
| 121
| 31.075567
| 0.819356
| 0.106266
| 0
| 0.910596
| 0
| 0
| 0.127371
| 0.001915
| 0
| 0
| 0
| 0
| 0
| 1
| 0.188742
| false
| 0
| 0.013245
| 0.082781
| 0.549669
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
454b7b1fe73aeb11f3105db2e9f4b84a82c7f455
| 4,361
|
py
|
Python
|
tests/flow/test_graph_versioning.py
|
10088/RedisGraph
|
ddccace7154ea13dbf96d1cfd5ccb26e80b17f06
|
[
"ISC",
"Apache-2.0",
"MIT",
"Ruby",
"BSD-3-Clause"
] | 313
|
2017-06-06T19:22:15.000Z
|
2018-11-02T09:42:37.000Z
|
tests/flow/test_graph_versioning.py
|
10088/RedisGraph
|
ddccace7154ea13dbf96d1cfd5ccb26e80b17f06
|
[
"ISC",
"Apache-2.0",
"MIT",
"Ruby",
"BSD-3-Clause"
] | 107
|
2018-03-20T07:59:03.000Z
|
2018-11-01T22:04:39.000Z
|
tests/flow/test_graph_versioning.py
|
10088/RedisGraph
|
ddccace7154ea13dbf96d1cfd5ccb26e80b17f06
|
[
"ISC",
"Apache-2.0",
"MIT",
"Ruby",
"BSD-3-Clause"
] | 30
|
2017-07-14T22:04:24.000Z
|
2018-10-28T03:17:50.000Z
|
from common import *
from redis import ResponseError
VERSION = 0
GRAPH_ID = "GraphVersion"
class testGraphVersioning(FlowTestsBase):
def __init__(self):
self.env = Env(decodeResponses=True)
# Make sure graph version changes once a new label is created
def test01_version_update_on_label_creation(self):
global VERSION
con = self.env.getConnection()
# Adding a node without a label shouldn't update graph version.
q = """CREATE ()"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertFalse(isinstance(res[0], ResponseError))
# Adding a labeled node should update graph version.
q = """CREATE (:L)"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertFalse(isinstance(res[0], ResponseError))
q = """RETURN 1"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertTrue(isinstance(res[0], ResponseError))
# Update version
VERSION = int(res[1])
# Adding a node with an existing label shouldn't update graph version
q = """CREATE (:L)"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertFalse(isinstance(res[0], ResponseError))
q = """RETURN 1"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertFalse(isinstance(res[0], ResponseError))
# Make sure graph version changes once a new relationship type is created
def test02_version_update_on_relation_creation(self):
global VERSION
con = self.env.getConnection()
# Adding edge with a new relationship type should update graph version
q = """CREATE ()-[:R]->()"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertFalse(isinstance(res[0], ResponseError))
q = """RETURN 1"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertTrue(isinstance(res[0], ResponseError))
# Update version
VERSION = int(res[1])
# Adding edge with existing relationship type shouldn't update graph version
q = """CREATE ()-[:R]->()"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertFalse(isinstance(res[0], ResponseError))
q = """RETURN 1"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertFalse(isinstance(res[0], ResponseError))
# Make sure graph version changes once a new attribute is created
def test03_version_update_on_attribute_creation(self):
global VERSION
con = self.env.getConnection()
# Adding a new attribute should update graph version
q = """CREATE ({v:1})"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertFalse(isinstance(res[0], ResponseError))
q = """RETURN 1"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertTrue(isinstance(res[0], ResponseError))
# Update version
VERSION = int(res[1])
# Adding a new node with existing attribute shouldn't update graph version
q = """CREATE ({v:1})"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertFalse(isinstance(res[0], ResponseError))
# Adding a new edge with a new attribute should update graph version
q = """CREATE ()-[:R {q:1}]->()"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertFalse(isinstance(res[0], ResponseError))
q = """RETURN 1"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertTrue(isinstance(res[0], ResponseError))
# Update version
VERSION = int(res[1])
# Adding a new edge with existing attribute shouldn't update graph version
q = """CREATE ()-[:R {v:1}]->()"""
res = con.execute_command("GRAPH.QUERY", GRAPH_ID, q, "version", VERSION)
self.env.assertFalse(isinstance(res[0], ResponseError))
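# Illustrative helper (a sketch, not part of this suite): each block above issues a
# versioned query, then checks whether the server rejected the stale version.
def version_is_stale(con, query, version):
    res = con.execute_command("GRAPH.QUERY", GRAPH_ID, query, "version", version)
    return isinstance(res[0], ResponseError), res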
| 40.37963
| 84
| 0.635634
| 540
| 4,361
| 5.040741
| 0.124074
| 0.048861
| 0.071639
| 0.110213
| 0.851212
| 0.845702
| 0.843497
| 0.840926
| 0.806025
| 0.763409
| 0
| 0.010733
| 0.23091
| 4,361
| 107
| 85
| 40.757009
| 0.800835
| 0.193534
| 0
| 0.8
| 0
| 0
| 0.135181
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 1
| 0.061538
| false
| 0
| 0.030769
| 0
| 0.107692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4598ac2667ccb8763e50ccb2a3769963c5340d24
| 13,127
|
py
|
Python
|
spark_fhir_schemas/stu3/complex_types/medicationadministration_dosage.py
|
icanbwell/SparkFhirSchemas
|
8c828313c39850b65f8676e67f526ee92b7d624e
|
[
"Apache-2.0"
] | 2
|
2020-10-31T23:25:01.000Z
|
2021-06-09T14:12:42.000Z
|
spark_fhir_schemas/stu3/complex_types/medicationadministration_dosage.py
|
icanbwell/SparkFhirSchemas
|
8c828313c39850b65f8676e67f526ee92b7d624e
|
[
"Apache-2.0"
] | null | null | null |
spark_fhir_schemas/stu3/complex_types/medicationadministration_dosage.py
|
icanbwell/SparkFhirSchemas
|
8c828313c39850b65f8676e67f526ee92b7d624e
|
[
"Apache-2.0"
] | null | null | null |
from typing import Union, List, Optional
from pyspark.sql.types import StructType, StructField, StringType, ArrayType, DataType
# This file is auto-generated by generate_schema so do not edit manually
# noinspection PyPep8Naming
class MedicationAdministration_DosageSchema:
"""
Describes the event of a patient consuming or otherwise being administered a
medication. This may be as simple as swallowing a tablet or it may be a long
running infusion. Related resources tie this event to the authorizing
prescription, and the specific encounter between patient and health care
practitioner.
"""
# noinspection PyDefaultArgument
@staticmethod
def get_schema(
max_nesting_depth: Optional[int] = 6,
nesting_depth: int = 0,
nesting_list: List[str] = [],
max_recursion_limit: Optional[int] = 2,
include_extension: Optional[bool] = False,
extension_fields: Optional[List[str]] = [
"valueBoolean",
"valueCode",
"valueDate",
"valueDateTime",
"valueDecimal",
"valueId",
"valueInteger",
"valuePositiveInt",
"valueString",
"valueTime",
"valueUnsignedInt",
"valueUri",
"valueQuantity",
],
extension_depth: int = 0,
max_extension_depth: Optional[int] = 2,
) -> Union[StructType, DataType]:
"""
Describes the event of a patient consuming or otherwise being administered a
medication. This may be as simple as swallowing a tablet or it may be a long
running infusion. Related resources tie this event to the authorizing
prescription, and the specific encounter between patient and health care
practitioner.
id: unique id for the element within a resource (for internal references). This
may be any string value that does not contain spaces.
extension: May be used to represent additional information that is not part of the basic
definition of the element. In order to make the use of extensions safe and
manageable, there is a strict set of governance applied to the definition and
use of extensions. Though any implementer is allowed to define an extension,
there is a set of requirements that SHALL be met as part of the definition of
the extension.
text: Free text dosage can be used for cases where the dosage administered is too
complex to code. When coded dosage is present, the free text dosage may still
be present for display to humans.
The dosage instructions should reflect the dosage of the medication that was
administered.
site: A coded specification of the anatomic site where the medication first entered
the body. For example, "left arm".
route: A code specifying the route or physiological path of administration of a
therapeutic agent into or onto the patient. For example, topical,
intravenous, etc.
method: A coded value indicating the method by which the medication is intended to be
or was introduced into or on the body. This attribute will most often NOT be
populated. It is most commonly used for injections. For example, Slow Push,
Deep IV.
dose: The amount of the medication given at one administration event. Use this
value when the administration is essentially an instantaneous event such as a
swallowing a tablet or giving an injection.
rateRatio: Identifies the speed with which the medication was or will be introduced into
the patient. Typically the rate for an infusion e.g. 100 ml per 1 hour or 100
ml/hr. May also be expressed as a rate per unit of time e.g. 500 ml per 2
hours. Other examples: 200 mcg/min or 200 mcg/1 minute; 1 liter/8 hours.
rateSimpleQuantity: Identifies the speed with which the medication was or will be introduced into
the patient. Typically the rate for an infusion e.g. 100 ml per 1 hour or 100
ml/hr. May also be expressed as a rate per unit of time e.g. 500 ml per 2
hours. Other examples: 200 mcg/min or 200 mcg/1 minute; 1 liter/8 hours.
"""
from spark_fhir_schemas.stu3.complex_types.extension import ExtensionSchema
from spark_fhir_schemas.stu3.complex_types.codeableconcept import (
CodeableConceptSchema,
)
from spark_fhir_schemas.stu3.complex_types.quantity import QuantitySchema
from spark_fhir_schemas.stu3.complex_types.ratio import RatioSchema
if (
max_recursion_limit
and nesting_list.count("MedicationAdministration_Dosage")
>= max_recursion_limit
) or (max_nesting_depth and nesting_depth >= max_nesting_depth):
return StructType([StructField("id", StringType(), True)])
# add my name to recursion list for later
my_nesting_list: List[str] = nesting_list + ["MedicationAdministration_Dosage"]
schema = StructType(
[
# unique id for the element within a resource (for internal references). This
# may be any string value that does not contain spaces.
StructField("id", StringType(), True),
# May be used to represent additional information that is not part of the basic
# definition of the element. In order to make the use of extensions safe and
# manageable, there is a strict set of governance applied to the definition and
# use of extensions. Though any implementer is allowed to define an extension,
# there is a set of requirements that SHALL be met as part of the definition of
# the extension.
StructField(
"extension",
ArrayType(
ExtensionSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth,
max_extension_depth=max_extension_depth,
)
),
True,
),
# Free text dosage can be used for cases where the dosage administered is too
# complex to code. When coded dosage is present, the free text dosage may still
# be present for display to humans.
#
# The dosage instructions should reflect the dosage of the medication that was
# administered.
StructField("text", StringType(), True),
# A coded specification of the anatomic site where the medication first entered
# the body. For example, "left arm".
StructField(
"site",
CodeableConceptSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
),
True,
),
# A code specifying the route or physiological path of administration of a
# therapeutic agent into or onto the patient. For example, topical,
# intravenous, etc.
StructField(
"route",
CodeableConceptSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
),
True,
),
# A coded value indicating the method by which the medication is intended to be
# or was introduced into or on the body. This attribute will most often NOT be
# populated. It is most commonly used for injections. For example, Slow Push,
# Deep IV.
StructField(
"method",
CodeableConceptSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
),
True,
),
            # The amount of the medication given at one administration event. Use this
            # value when the administration is essentially an instantaneous event such as
            # swallowing a tablet or giving an injection.
StructField(
"dose",
QuantitySchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
),
True,
),
# Identifies the speed with which the medication was or will be introduced into
# the patient. Typically the rate for an infusion e.g. 100 ml per 1 hour or 100
# ml/hr. May also be expressed as a rate per unit of time e.g. 500 ml per 2
# hours. Other examples: 200 mcg/min or 200 mcg/1 minute; 1 liter/8 hours.
StructField(
"rateRatio",
RatioSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
),
True,
),
# Identifies the speed with which the medication was or will be introduced into
# the patient. Typically the rate for an infusion e.g. 100 ml per 1 hour or 100
# ml/hr. May also be expressed as a rate per unit of time e.g. 500 ml per 2
# hours. Other examples: 200 mcg/min or 200 mcg/1 minute; 1 liter/8 hours.
StructField(
"rateSimpleQuantity",
QuantitySchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth + 1,
max_extension_depth=max_extension_depth,
),
True,
),
]
)
if not include_extension:
schema.fields = [
c
if c.name != "extension"
else StructField("extension", StringType(), True)
for c in schema.fields
]
return schema
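# Hedged usage sketch (added; not part of the original file). The enclosing
# class name and module path below are assumed from this package's naming
# conventions; only keyword arguments already used in the recursive
# get_schema calls above are relied upon.
#
#   from spark_fhir_schemas.stu3.complex_types.medicationadministration_dosage import (
#       MedicationAdministration_DosageSchema,  # assumed class name
#   )
#   schema = MedicationAdministration_DosageSchema.get_schema(
#       max_nesting_depth=3,
#       nesting_depth=0,
#       nesting_list=[],
#       max_recursion_limit=2,
#       include_extension=False,
#       extension_fields=None,  # assumed default; not visible in this snippet
#       extension_depth=0,
#       max_extension_depth=2,
#   )
#   df = spark.read.schema(schema).json("medication_administration_dosage.json")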
| 50.683398
| 105
| 0.575684
| 1,419
| 13,127
| 5.17759
| 0.181818
| 0.0539
| 0.034708
| 0.045733
| 0.811215
| 0.804274
| 0.804274
| 0.779502
| 0.779502
| 0.779502
| 0
| 0.012657
| 0.380056
| 13,127
| 258
| 106
| 50.879845
| 0.890145
| 0.400244
| 0
| 0.567742
| 1
| 0
| 0.038749
| 0.008284
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006452
| false
| 0
| 0.03871
| 0
| 0.064516
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
459c464163829c5d02d402d663106b60e473628e
| 109
|
py
|
Python
|
wsgi.py
|
Bigcheese/ph2svg
|
d5eb404240069505e0fef32180b20aef8df3427a
|
[
"MIT"
] | null | null | null |
wsgi.py
|
Bigcheese/ph2svg
|
d5eb404240069505e0fef32180b20aef8df3427a
|
[
"MIT"
] | null | null | null |
wsgi.py
|
Bigcheese/ph2svg
|
d5eb404240069505e0fef32180b20aef8df3427a
|
[
"MIT"
] | null | null | null |
from ph2svg.ph2svg import ph2svg
def application(env, start_response):
return ph2svg(env, start_response)
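# Hedged sketch (added): serving this WSGI callable locally with the standard
# library, assuming ph2svg follows the usual (environ, start_response) protocol
# that the pass-through above implies. Host and port are arbitrary.
if __name__ == "__main__":
    from wsgiref.simple_server import make_server

    with make_server("127.0.0.1", 8000, application) as httpd:
        httpd.serve_forever()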
| 21.8
| 37
| 0.807339
| 15
| 109
| 5.733333
| 0.6
| 0.186047
| 0.372093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0.119266
| 109
| 4
| 38
| 27.25
| 0.854167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
45bb4362d68e8d7be9c2884059880f3f56723f34
| 2,005
|
py
|
Python
|
tvm/tests/python/unittest/test_pass_vectorize.py
|
hj424/heterocl
|
e51b8f7f65ae6ad55c0c2426ab7192c3d8f6702b
|
[
"Apache-2.0"
] | 64
|
2021-05-02T14:42:34.000Z
|
2021-05-06T01:35:03.000Z
|
tvm/tests/python/unittest/test_pass_vectorize.py
|
hj424/heterocl
|
e51b8f7f65ae6ad55c0c2426ab7192c3d8f6702b
|
[
"Apache-2.0"
] | 23
|
2019-07-29T05:21:52.000Z
|
2020-08-31T18:51:42.000Z
|
tvm/tests/python/unittest/test_pass_vectorize.py
|
hj424/heterocl
|
e51b8f7f65ae6ad55c0c2426ab7192c3d8f6702b
|
[
"Apache-2.0"
] | 51
|
2019-07-12T05:10:25.000Z
|
2021-07-28T16:19:06.000Z
|
import tvm
def test_vectorize_loop():
dtype = 'int64'
n = tvm.var('n')
ib = tvm.ir_builder.create()
A = ib.pointer("float32", name="A")
with ib.for_range(0, n) as i:
with ib.for_range(0, 4, for_type="vectorize") as j:
A[j] = tvm.const(1, A.dtype)
stmt = ib.get()
assert isinstance(stmt.body, tvm.stmt.For)
stmt = tvm.ir_pass.VectorizeLoop(stmt)
assert isinstance(stmt, tvm.stmt.For)
assert not isinstance(stmt.body, tvm.stmt.For)
assert isinstance(stmt.body.index, tvm.expr.Ramp)
assert isinstance(stmt.body.value, tvm.expr.Broadcast)
def test_vectorize_vector():
dtype = 'int64'
n = tvm.var('n')
ib = tvm.ir_builder.create()
A = ib.pointer("float32x4", name="A")
with ib.for_range(0, n) as i:
with ib.for_range(0, 4, for_type="vectorize") as j:
A[j] = tvm.const(1, A.dtype)
stmt = ib.get()
assert isinstance(stmt.body, tvm.stmt.For)
stmt = tvm.ir_pass.VectorizeLoop(stmt)
assert isinstance(stmt, tvm.stmt.For)
assert not isinstance(stmt.body, tvm.stmt.For)
assert isinstance(stmt.body.index, tvm.expr.Ramp)
assert isinstance(stmt.body.value, tvm.expr.Broadcast)
def test_vectorize_with_if():
n = tvm.var('n')
x = tvm.var('x')
ib = tvm.ir_builder.create()
A = ib.pointer("float32", name="A")
with ib.for_range(0, 4, for_type="vectorize") as i:
with ib.if_scope(x < n):
A[i] = A[i] + 1
with ib.else_scope():
with ib.if_scope(i < n):
A[i] = 2.0
stmt = ib.get()
stmt = tvm.ir_pass.VectorizeLoop(stmt)
assert isinstance(stmt, tvm.stmt.IfThenElse)
assert isinstance(stmt.then_case.index, tvm.expr.Ramp)
assert isinstance(stmt.then_case.value, tvm.expr.Add)
assert stmt.then_case.value.dtype == "float32x4"
assert isinstance(stmt.else_case, tvm.stmt.For)
if __name__ == "__main__":
test_vectorize_vector()
test_vectorize_with_if()
test_vectorize_loop()
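# Hedged illustration (added, not part of the original tests): printing the IR
# before and after VectorizeLoop, using only ir_builder calls already exercised
# above. After the pass, the store indexes with a Ramp and writes a Broadcast
# value instead of looping.
def show_vectorize_effect():
    ib = tvm.ir_builder.create()
    A = ib.pointer("float32", name="A")
    with ib.for_range(0, 4, for_type="vectorize") as i:
        A[i] = tvm.const(1, A.dtype)
    stmt = ib.get()
    print(stmt)                             # scalar for-loop over i
    print(tvm.ir_pass.VectorizeLoop(stmt))  # Ramp index + Broadcast value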
| 33.416667
| 59
| 0.638404
| 309
| 2,005
| 4.003236
| 0.174757
| 0.158448
| 0.194018
| 0.116411
| 0.76152
| 0.732417
| 0.732417
| 0.703314
| 0.703314
| 0.703314
| 0
| 0.017165
| 0.215461
| 2,005
| 59
| 60
| 33.983051
| 0.769231
| 0
| 0
| 0.603774
| 0
| 0
| 0.041895
| 0
| 0
| 0
| 0
| 0
| 0.283019
| 1
| 0.056604
| false
| 0.056604
| 0.018868
| 0
| 0.075472
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
b32517b01d8ae6b028b3849c646e9be8d7f20e95
| 29,181
|
py
|
Python
|
networks/PyTorch/attentionModule.py
|
jbcnrlz/san
|
1eab20f83d3c7dba5607e22d1c70768905b62b12
|
[
"MIT"
] | null | null | null |
networks/PyTorch/attentionModule.py
|
jbcnrlz/san
|
1eab20f83d3c7dba5607e22d1c70768905b62b12
|
[
"MIT"
] | null | null | null |
networks/PyTorch/attentionModule.py
|
jbcnrlz/san
|
1eab20f83d3c7dba5607e22d1c70768905b62b12
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
from torch.nn import functional as F
from networks.PyTorch.dcn_impl import DeformableConv2d
torch_ver = torch.__version__[:3]
def kaiming_init(module,
a=0,
mode='fan_out',
nonlinearity='relu',
bias=0,
distribution='normal'):
assert distribution in ['uniform', 'normal']
if distribution == 'uniform':
nn.init.kaiming_uniform_(
module.weight, a=a, mode=mode, nonlinearity=nonlinearity)
else:
nn.init.kaiming_normal_(
module.weight, a=a, mode=mode, nonlinearity=nonlinearity)
if hasattr(module, 'bias') and module.bias is not None:
nn.init.constant_(module.bias, bias)
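# Hedged usage sketch (added): kaiming_init wraps nn.init.kaiming_uniform_ /
# kaiming_normal_ and additionally zero-fills the bias. Layer sizes below are
# illustrative only.
def _demo_kaiming_init():
    conv = nn.Conv2d(3, 16, kernel_size=3, padding=1)
    kaiming_init(conv, mode='fan_out', nonlinearity='relu')
    assert float(conv.bias.abs().sum()) == 0.0  # bias was zero-filled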
class PAM_Module(nn.Module):
""" Position attention module"""
# Ref from SAGAN
def __init__(self, in_dim):
super(PAM_Module, self).__init__()
        self.channel_in = in_dim
self.query_conv = nn.Conv2d(in_channels=in_dim, out_channels=in_dim // 8, kernel_size=1)
self.key_conv = nn.Conv2d(in_channels=in_dim, out_channels=in_dim // 8, kernel_size=1)
self.value_conv = nn.Conv2d(in_channels=in_dim, out_channels=in_dim, kernel_size=1)
self.gamma = nn.Parameter(torch.zeros(1))
self.softmax = nn.Softmax(dim=-1)
self.init_weights()
def init_weights(self):
kaiming_init(self.query_conv)
kaiming_init(self.key_conv)
kaiming_init(self.value_conv)
def forward(self, x):
"""
inputs :
x : input feature maps( B X C X H X W)
returns :
out : attention value + input feature
attention: B X (HxW) X (HxW)
"""
m_batchsize, C, height, width = x.size()
proj_query = self.query_conv(x).view(m_batchsize, -1, width * height).permute(0, 2, 1)
proj_key = self.key_conv(x).view(m_batchsize, -1, width * height)
energy = torch.bmm(proj_query, proj_key)
attention = self.softmax(energy)
proj_value = self.value_conv(x).view(m_batchsize, -1, width * height)
out = torch.bmm(proj_value, attention.permute(0, 2, 1))
out = out.view(m_batchsize, C, height, width)
out = self.gamma * out + x
return out
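# Hedged shape check (added): PAM_Module preserves the input shape because the
# gamma-scaled attention output is added residually back onto x.
def _demo_pam_module():
    pam = PAM_Module(in_dim=16)
    x = torch.randn(2, 16, 8, 8)  # B x C x H x W
    assert pam(x).shape == x.shape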
class CAM_Calculate(nn.Module):
""" Channel attention module"""
def __init__(self, in_dim):
super(CAM_Calculate, self).__init__()
        self.channel_in = in_dim
self.softmax = nn.Softmax(dim=-1)
def forward(self, x):
"""
inputs :
x : input feature maps( B X C X H X W)
returns :
attention: B X C X C
"""
m_batchsize, C, height, width = x.size()
proj_query = x.contiguous().view(m_batchsize, C, -1)
proj_key = x.contiguous().view(m_batchsize, C, -1).permute(0, 2, 1)
energy = torch.bmm(proj_query, proj_key)
energy_new = torch.max(energy, -1, keepdim=True)[0].expand_as(energy) - energy
attention = self.softmax(energy_new)
return attention
class CAM_Use(nn.Module):
""" Channel attention module"""
def __init__(self, in_dim):
super(CAM_Use, self).__init__()
        self.channel_in = in_dim
self.gamma = nn.Parameter(torch.zeros(1))
def forward(self, x, attention):
"""
inputs :
x : input feature maps( B X C X H X W)
attention: B X C X C
returns :
out : attention value + input feature
"""
m_batchsize, C, height, width = x.size()
proj_value = x.contiguous().view(m_batchsize, C, -1)
out = torch.bmm(attention, proj_value)
out = out.view(m_batchsize, C, height, width)
out = self.gamma * out + x
return out
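# Hedged usage sketch (added): CAM_Calculate produces a B x C x C channel
# affinity map that CAM_Use applies back to a (possibly different) feature map;
# this split is what enables the cross-branch terms in the FeatureEnhance
# modules further below.
def _demo_cam_pair():
    x = torch.randn(2, 16, 8, 8)
    attention = CAM_Calculate(in_dim=16)(x)
    out = CAM_Use(in_dim=16)(x, attention)
    assert attention.shape == (2, 16, 16) and out.shape == x.shape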
class _NonLocalBlockND(nn.Module):
def __init__(self, in_channels, inter_channels=None, dimension=3, sub_sample=True, bn_layer=True):
super(_NonLocalBlockND, self).__init__()
assert dimension in [1, 2, 3]
self.dimension = dimension
self.sub_sample = sub_sample
self.in_channels = in_channels
self.inter_channels = inter_channels
if self.inter_channels is None:
self.inter_channels = in_channels // 2
if self.inter_channels == 0:
self.inter_channels = 1
if dimension == 3:
conv_nd = nn.Conv3d
max_pool_layer = nn.MaxPool3d(kernel_size=(1, 2, 2))
bn = nn.BatchNorm3d
elif dimension == 2:
conv_nd = nn.Conv2d
max_pool_layer = nn.MaxPool2d(kernel_size=(2, 2))
bn = nn.BatchNorm2d
else:
conv_nd = nn.Conv1d
max_pool_layer = nn.MaxPool1d(kernel_size=(2))
bn = nn.BatchNorm1d
self.g = conv_nd(in_channels=self.in_channels, out_channels=self.inter_channels,
kernel_size=1, stride=1, padding=0)
if bn_layer:
self.W = nn.Sequential(
conv_nd(in_channels=self.inter_channels, out_channels=self.in_channels,
kernel_size=1, stride=1, padding=0),
bn(self.in_channels)
)
nn.init.constant_(self.W[1].weight, 0)
nn.init.constant_(self.W[1].bias, 0)
else:
self.W = conv_nd(in_channels=self.inter_channels, out_channels=self.in_channels,
kernel_size=1, stride=1, padding=0)
nn.init.constant_(self.W.weight, 0)
nn.init.constant_(self.W.bias, 0)
self.theta = conv_nd(in_channels=self.in_channels, out_channels=self.inter_channels,
kernel_size=1, stride=1, padding=0)
self.phi = conv_nd(in_channels=self.in_channels, out_channels=self.inter_channels,
kernel_size=1, stride=1, padding=0)
if sub_sample:
self.g = nn.Sequential(self.g, max_pool_layer)
self.phi = nn.Sequential(self.phi, max_pool_layer)
def forward(self, x):
'''
:param x: (b, c, t, h, w)
:return:
'''
batch_size = x.size(0)
g_x = self.g(x).view(batch_size, self.inter_channels, -1)
g_x = g_x.permute(0, 2, 1)
theta_x = self.theta(x).view(batch_size, self.inter_channels, -1)
theta_x = theta_x.permute(0, 2, 1)
phi_x = self.phi(x).view(batch_size, self.inter_channels, -1)
f = torch.matmul(theta_x, phi_x)
f_div_C = F.softmax(f, dim=-1)
y = torch.matmul(f_div_C, g_x)
y = y.permute(0, 2, 1).contiguous()
y = y.view(batch_size, self.inter_channels, *x.size()[2:])
W_y = self.W(y)
z = W_y + x
return z
class NONLocalBlock2D(_NonLocalBlockND):
def __init__(self, in_channels, inter_channels=None, sub_sample=True, bn_layer=True):
super(NONLocalBlock2D, self).__init__(in_channels,
inter_channels=inter_channels,
dimension=2, sub_sample=sub_sample,
bn_layer=bn_layer)
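# Hedged usage sketch (added): the non-local block is residual (z = W(y) + x),
# so spatial and channel dimensions are preserved even with sub_sample=True.
def _demo_nonlocal_block():
    block = NONLocalBlock2D(in_channels=16)
    x = torch.randn(2, 16, 8, 8)
    assert block(x).shape == x.shape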
class FeatureEnhance(nn.Module):
depthLayers = 1
def __init__(self,
in_channels=256,
out_channels=256):
super(FeatureEnhance, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.pams = nn.ModuleList()
self.cam_cals = nn.ModuleList()
self.cam_uses = nn.ModuleList()
self.deform_convs = nn.ModuleList()
for i in range(self.depthLayers):
self.pams.append(PAM_Module(self.in_channels))
self.cam_cals.append(CAM_Calculate(self.in_channels))
self.cam_uses.append(CAM_Use(self.in_channels))
self.deform_convs.append(nn.Conv2d(self.in_channels, self.out_channels, kernel_size=3, padding=1))
#self.deform_convs.append(DeformableConv2d(in_channels=self.in_channels,
# out_channels=self.out_channels,
# kernel_size=3,
# padding=1))
def forward(self, r_f, g_f, b_f, a_f):
for idx in range(self.depthLayers):
r_sp_feat = self.pams[idx](r_f)
g_sp_feat = self.pams[idx](g_f)
b_sp_feat = self.pams[idx](b_f)
a_sp_feat = self.pams[idx](a_f)
r_attention = self.cam_cals[idx](r_f)
g_attention = self.cam_cals[idx](g_f)
b_attention = self.cam_cals[idx](b_f)
a_attention = self.cam_cals[idx](a_f)
rr_sc_feat = self.cam_uses[idx](r_f, r_attention)
gg_sc_feat = self.cam_uses[idx](g_f, g_attention)
bb_sc_feat = self.cam_uses[idx](b_f, b_attention)
aa_sc_feat = self.cam_uses[idx](a_f, a_attention)
rg_sc_feat = self.cam_uses[idx](r_f, g_attention)
rb_sc_feat = self.cam_uses[idx](r_f, b_attention)
ra_sc_feat = self.cam_uses[idx](r_f, a_attention)
gr_sc_feat = self.cam_uses[idx](g_f, r_attention)
gb_sc_feat = self.cam_uses[idx](g_f, b_attention)
ga_sc_feat = self.cam_uses[idx](g_f, a_attention)
bg_sc_feat = self.cam_uses[idx](b_f, g_attention)
br_sc_feat = self.cam_uses[idx](b_f, r_attention)
ba_sc_feat = self.cam_uses[idx](b_f, a_attention)
ag_sc_feat = self.cam_uses[idx](a_f, g_attention)
ab_sc_feat = self.cam_uses[idx](a_f, b_attention)
ar_sc_feat = self.cam_uses[idx](a_f, r_attention)
#z_f = z_sp_feat + zz_sc_feat + zx_sc_feat
#x_f = x_sp_feat + xx_sc_feat + xz_sc_feat
r_f = r_sp_feat + rr_sc_feat + rg_sc_feat + rb_sc_feat + ra_sc_feat
g_f = g_sp_feat + gg_sc_feat + gr_sc_feat + gb_sc_feat + ga_sc_feat
b_f = b_sp_feat + bb_sc_feat + br_sc_feat + bg_sc_feat + ba_sc_feat
a_f = a_sp_feat + aa_sc_feat + ag_sc_feat + ab_sc_feat + ar_sc_feat
r_f = self.deform_convs[idx](r_f)
g_f = self.deform_convs[idx](g_f)
b_f = self.deform_convs[idx](b_f)
a_f = self.deform_convs[idx](a_f)
#z_f = self.deform_convs[idx](z_f)
#x_f = self.deform_convs[idx](x_f)
return r_f, g_f, b_f, a_f
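# Hedged usage sketch (added): FeatureEnhance mixes four same-shaped feature
# maps (named r/g/b/a after the forward arguments above) via self- and
# cross-channel attention; every output keeps the input shape.
def _demo_feature_enhance():
    fe = FeatureEnhance(in_channels=16, out_channels=16)
    r, g, b, a = (torch.randn(2, 16, 8, 8) for _ in range(4))
    r2, g2, b2, a2 = fe(r, g, b, a)
    assert r2.shape == r.shape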
class FeatureEnhanceNoCross(nn.Module):
depthLayers = 1
def __init__(self,
in_channels=256,
out_channels=256):
super(FeatureEnhanceNoCross, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.pams = nn.ModuleList()
self.cam_cals = nn.ModuleList()
self.cam_uses = nn.ModuleList()
self.deform_convs = nn.ModuleList()
for i in range(self.depthLayers):
self.pams.append(PAM_Module(self.in_channels))
self.cam_cals.append(CAM_Calculate(self.in_channels))
self.cam_uses.append(CAM_Use(self.in_channels))
self.deform_convs.append(nn.Conv2d(self.in_channels, self.out_channels, kernel_size=3, padding=1))
#self.deform_convs.append(DeformableConv2d(in_channels=self.in_channels,
# out_channels=self.out_channels,
# kernel_size=3,
# padding=1))
def forward(self, r_f):
for idx in range(self.depthLayers):
r_sp_feat = self.pams[idx](r_f)
r_attention = self.cam_cals[idx](r_f)
rr_sc_feat = self.cam_uses[idx](r_f, r_attention)
r_f = r_sp_feat + rr_sc_feat
r_f = self.deform_convs[idx](r_f)
return r_f
class FeatureEnhanceDepthDI(nn.Module):
depthLayers = 1
def __init__(self,
in_channels=256,
out_channels=256):
super(FeatureEnhanceDepthDI, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.pams = nn.ModuleList()
self.cam_cals = nn.ModuleList()
self.cam_uses = nn.ModuleList()
self.deform_convs = nn.ModuleList()
for i in range(self.depthLayers):
self.pams.append(PAM_Module(self.in_channels))
self.cam_cals.append(CAM_Calculate(self.in_channels))
self.cam_uses.append(CAM_Use(self.in_channels))
self.deform_convs.append(nn.Conv2d(self.in_channels, self.out_channels, kernel_size=3, padding=1))
#self.deform_convs.append(DeformableConv2d(in_channels=self.in_channels,
# out_channels=self.out_channels,
# kernel_size=3,
# padding=1))
    def forward(self, r_f, g_f, b_f, a_f, d_f):
for idx in range(self.depthLayers):
r_sp_feat = self.pams[idx](r_f)
g_sp_feat = self.pams[idx](g_f)
b_sp_feat = self.pams[idx](b_f)
a_sp_feat = self.pams[idx](a_f)
d_sp_feat = self.pams[idx](d_f)
r_attention = self.cam_cals[idx](r_f)
g_attention = self.cam_cals[idx](g_f)
b_attention = self.cam_cals[idx](b_f)
a_attention = self.cam_cals[idx](a_f)
d_attention = self.cam_cals[idx](d_f)
rr_sc_feat = self.cam_uses[idx](r_f, r_attention)
gg_sc_feat = self.cam_uses[idx](g_f, g_attention)
bb_sc_feat = self.cam_uses[idx](b_f, b_attention)
aa_sc_feat = self.cam_uses[idx](a_f, a_attention)
dd_sc_feat = self.cam_uses[idx](d_f, d_attention)
rg_sc_feat = self.cam_uses[idx](r_f, g_attention)
rb_sc_feat = self.cam_uses[idx](r_f, b_attention)
ra_sc_feat = self.cam_uses[idx](r_f, a_attention)
rd_sc_feat = self.cam_uses[idx](r_f, d_attention)
gr_sc_feat = self.cam_uses[idx](g_f, r_attention)
gb_sc_feat = self.cam_uses[idx](g_f, b_attention)
ga_sc_feat = self.cam_uses[idx](g_f, a_attention)
gd_sc_feat = self.cam_uses[idx](g_f, d_attention)
bg_sc_feat = self.cam_uses[idx](b_f, g_attention)
br_sc_feat = self.cam_uses[idx](b_f, r_attention)
ba_sc_feat = self.cam_uses[idx](b_f, a_attention)
bd_sc_feat = self.cam_uses[idx](b_f, d_attention)
ag_sc_feat = self.cam_uses[idx](a_f, g_attention)
ab_sc_feat = self.cam_uses[idx](a_f, b_attention)
ar_sc_feat = self.cam_uses[idx](a_f, r_attention)
ad_sc_feat = self.cam_uses[idx](a_f, d_attention)
dr_sc_feat = self.cam_uses[idx](d_f, r_attention)
dg_sc_feat = self.cam_uses[idx](d_f, g_attention)
db_sc_feat = self.cam_uses[idx](d_f, b_attention)
da_sc_feat = self.cam_uses[idx](d_f, a_attention)
r_f = r_sp_feat + rr_sc_feat + rg_sc_feat + rb_sc_feat + ra_sc_feat + rd_sc_feat
g_f = g_sp_feat + gg_sc_feat + gr_sc_feat + gb_sc_feat + ga_sc_feat + gd_sc_feat
b_f = b_sp_feat + bb_sc_feat + br_sc_feat + bg_sc_feat + ba_sc_feat + bd_sc_feat
a_f = a_sp_feat + aa_sc_feat + ag_sc_feat + ab_sc_feat + ar_sc_feat + ad_sc_feat
d_f = d_sp_feat + dd_sc_feat + dr_sc_feat + dg_sc_feat + da_sc_feat + db_sc_feat
r_f = self.deform_convs[idx](r_f)
g_f = self.deform_convs[idx](g_f)
b_f = self.deform_convs[idx](b_f)
a_f = self.deform_convs[idx](a_f)
d_f = self.deform_convs[idx](d_f)
return r_f, g_f, b_f, a_f, d_f
class FeatureEnhanceDINoCross(nn.Module):
depthLayers = 1
def __init__(self,
in_channels=256,
out_channels=256):
super(FeatureEnhanceDINoCross, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.pams = nn.ModuleList()
self.cam_cals = nn.ModuleList()
self.cam_uses = nn.ModuleList()
self.deform_convs = nn.ModuleList()
for i in range(self.depthLayers):
self.pams.append(PAM_Module(self.in_channels))
self.cam_cals.append(CAM_Calculate(self.in_channels))
self.cam_uses.append(CAM_Use(self.in_channels))
self.deform_convs.append(nn.Conv2d(self.in_channels, self.out_channels, kernel_size=3, padding=1))
#self.deform_convs.append(DeformableConv2d(in_channels=self.in_channels,
# out_channels=self.out_channels,
# kernel_size=3,
# padding=1))
def forward(self, r_f, g_f, b_f, a_f):
for idx in range(self.depthLayers):
r_sp_feat = self.pams[idx](r_f)
g_sp_feat = self.pams[idx](g_f)
b_sp_feat = self.pams[idx](b_f)
a_sp_feat = self.pams[idx](a_f)
r_attention = self.cam_cals[idx](r_f)
g_attention = self.cam_cals[idx](g_f)
b_attention = self.cam_cals[idx](b_f)
a_attention = self.cam_cals[idx](a_f)
rr_sc_feat = self.cam_uses[idx](r_f, r_attention)
gg_sc_feat = self.cam_uses[idx](g_f, g_attention)
bb_sc_feat = self.cam_uses[idx](b_f, b_attention)
aa_sc_feat = self.cam_uses[idx](a_f, a_attention)
r_f = r_sp_feat + rr_sc_feat
g_f = g_sp_feat + gg_sc_feat
b_f = b_sp_feat + bb_sc_feat
a_f = a_sp_feat + aa_sc_feat
r_f = self.deform_convs[idx](r_f)
g_f = self.deform_convs[idx](g_f)
b_f = self.deform_convs[idx](b_f)
a_f = self.deform_convs[idx](a_f)
return r_f, g_f, b_f, a_f
class FeatureEnhanceDI(nn.Module):
depthLayers = 1
def __init__(self,
in_channels=256,
out_channels=256):
super(FeatureEnhanceDI, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.pams = nn.ModuleList()
self.cam_cals = nn.ModuleList()
self.cam_uses = nn.ModuleList()
self.deform_convs = nn.ModuleList()
for i in range(self.depthLayers):
self.pams.append(PAM_Module(self.in_channels))
self.cam_cals.append(CAM_Calculate(self.in_channels))
self.cam_uses.append(CAM_Use(self.in_channels))
self.deform_convs.append(nn.Conv2d(self.in_channels, self.out_channels, kernel_size=3, padding=1))
#self.deform_convs.append(DeformableConv2d(in_channels=self.in_channels,
# out_channels=self.out_channels,
# kernel_size=3,
# padding=1))
def forward(self, r_f, g_f, b_f, a_f):
for idx in range(self.depthLayers):
r_sp_feat = self.pams[idx](r_f)
g_sp_feat = self.pams[idx](g_f)
b_sp_feat = self.pams[idx](b_f)
a_sp_feat = self.pams[idx](a_f)
r_attention = self.cam_cals[idx](r_f)
g_attention = self.cam_cals[idx](g_f)
b_attention = self.cam_cals[idx](b_f)
a_attention = self.cam_cals[idx](a_f)
rr_sc_feat = self.cam_uses[idx](r_f, r_attention)
gg_sc_feat = self.cam_uses[idx](g_f, g_attention)
bb_sc_feat = self.cam_uses[idx](b_f, b_attention)
aa_sc_feat = self.cam_uses[idx](a_f, a_attention)
rg_sc_feat = self.cam_uses[idx](r_f, g_attention)
rb_sc_feat = self.cam_uses[idx](r_f, b_attention)
ra_sc_feat = self.cam_uses[idx](r_f, a_attention)
gr_sc_feat = self.cam_uses[idx](g_f, r_attention)
gb_sc_feat = self.cam_uses[idx](g_f, b_attention)
ga_sc_feat = self.cam_uses[idx](g_f, a_attention)
bg_sc_feat = self.cam_uses[idx](b_f, g_attention)
br_sc_feat = self.cam_uses[idx](b_f, r_attention)
ba_sc_feat = self.cam_uses[idx](b_f, a_attention)
ag_sc_feat = self.cam_uses[idx](a_f, g_attention)
ab_sc_feat = self.cam_uses[idx](a_f, b_attention)
ar_sc_feat = self.cam_uses[idx](a_f, r_attention)
r_f = r_sp_feat + rr_sc_feat + rg_sc_feat + rb_sc_feat + ra_sc_feat
g_f = g_sp_feat + gg_sc_feat + gr_sc_feat + gb_sc_feat + ga_sc_feat
b_f = b_sp_feat + bb_sc_feat + br_sc_feat + bg_sc_feat + ba_sc_feat
a_f = a_sp_feat + aa_sc_feat + ag_sc_feat + ab_sc_feat + ar_sc_feat
r_f = self.deform_convs[idx](r_f)
g_f = self.deform_convs[idx](g_f)
b_f = self.deform_convs[idx](b_f)
a_f = self.deform_convs[idx](a_f)
return r_f, g_f, b_f, a_f
class FeatureEnhanceDepth(nn.Module):
depthLayers = 1
def __init__(self,
in_channels=256,
out_channels=256):
super(FeatureEnhanceDepth, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.cam_uses = nn.ModuleList()
self.pams = nn.ModuleList()
self.deform_convs = nn.ModuleList()
for i in range(self.depthLayers):
self.pams.append(PAM_Module(self.in_channels))
self.cam_uses.append(CAM_Use(self.in_channels))
self.deform_convs.append(nn.Conv2d(self.in_channels, self.out_channels, kernel_size=3, padding=1))
#self.deform_convs.append(DeformableConv2d(in_channels=self.in_channels,
# out_channels=self.out_channels,
# kernel_size=3,
# padding=1))
def forward(self, d_f):
for idx in range(self.depthLayers):
d_sp_feat = self.pams[idx](d_f)
d_f = self.deform_convs[idx](d_sp_feat)
return d_f
class FeatureEnhanceDepthDIOnlyCross(nn.Module):
depthLayers = 1
def __init__(self,
in_channels=256,
out_channels=256):
super(FeatureEnhanceDepthDIOnlyCross, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.cam_cals = nn.ModuleList()
self.cam_uses = nn.ModuleList()
self.deform_convs = nn.ModuleList()
for i in range(self.depthLayers):
self.cam_cals.append(CAM_Calculate(self.in_channels))
self.cam_uses.append(CAM_Use(self.in_channels))
self.deform_convs.append(nn.Conv2d(self.in_channels, self.out_channels, kernel_size=3, padding=1))
#self.deform_convs.append(DeformableConv2d(in_channels=self.in_channels,
# out_channels=self.out_channels,
# kernel_size=3,
# padding=1))
    def forward(self, r_f, g_f, b_f, a_f, d_f):
for idx in range(self.depthLayers):
            # Unlike the g/b/a/d branches below, this is a raw (unnormalized)
            # Gram matrix rather than self.cam_cals[idx](r_f); the asymmetry is
            # preserved from the source, though it looks unintentional.
            r_flat = r_f.reshape(r_f.shape[0], r_f.shape[1], -1)
            r_attention = torch.bmm(r_flat, r_flat.permute(0, 2, 1))
g_attention = self.cam_cals[idx](g_f)
b_attention = self.cam_cals[idx](b_f)
a_attention = self.cam_cals[idx](a_f)
d_attention = self.cam_cals[idx](d_f)
rg_sc_feat = self.cam_uses[idx](r_f, g_attention)
rb_sc_feat = self.cam_uses[idx](r_f, b_attention)
ra_sc_feat = self.cam_uses[idx](r_f, a_attention)
rd_sc_feat = self.cam_uses[idx](r_f, d_attention)
gr_sc_feat = self.cam_uses[idx](g_f, r_attention)
gb_sc_feat = self.cam_uses[idx](g_f, b_attention)
ga_sc_feat = self.cam_uses[idx](g_f, a_attention)
gd_sc_feat = self.cam_uses[idx](g_f, d_attention)
bg_sc_feat = self.cam_uses[idx](b_f, g_attention)
br_sc_feat = self.cam_uses[idx](b_f, r_attention)
ba_sc_feat = self.cam_uses[idx](b_f, a_attention)
bd_sc_feat = self.cam_uses[idx](b_f, d_attention)
ag_sc_feat = self.cam_uses[idx](a_f, g_attention)
ab_sc_feat = self.cam_uses[idx](a_f, b_attention)
ar_sc_feat = self.cam_uses[idx](a_f, r_attention)
ad_sc_feat = self.cam_uses[idx](a_f, d_attention)
dr_sc_feat = self.cam_uses[idx](d_f, r_attention)
dg_sc_feat = self.cam_uses[idx](d_f, g_attention)
db_sc_feat = self.cam_uses[idx](d_f, b_attention)
da_sc_feat = self.cam_uses[idx](d_f, a_attention)
r_f = rg_sc_feat + rb_sc_feat + ra_sc_feat + rd_sc_feat
g_f = gr_sc_feat + gb_sc_feat + ga_sc_feat + gd_sc_feat
b_f = br_sc_feat + bg_sc_feat + ba_sc_feat + bd_sc_feat
a_f = ag_sc_feat + ab_sc_feat + ar_sc_feat + ad_sc_feat
d_f = dr_sc_feat + dg_sc_feat + da_sc_feat + db_sc_feat
r_f = self.deform_convs[idx](r_f)
g_f = self.deform_convs[idx](g_f)
b_f = self.deform_convs[idx](b_f)
a_f = self.deform_convs[idx](a_f)
d_f = self.deform_convs[idx](d_f)
return r_f, g_f, b_f, a_f, d_f
'''
class FeatureEnhanceSA(nn.Module):
depthLayers = 1
def __init__(self,
in_channels=256,
out_channels=256):
super(FeatureEnhanceSA, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.cam_uses = nn.ModuleList()
self.pams = nn.ModuleList()
self.deform_convs = nn.ModuleList()
for i in range(self.depthLayers):
self.pams.append(PAM_Module(self.in_channels))
self.cam_uses.append(CAM_Use(self.in_channels))
self.deform_convs.append(nn.Conv2d(self.in_channels, self.out_channels, kernel_size=3, padding=1))
def forward(self, d_f):
for idx in range(self.depthLayers):
r_sp_feat = self.pams[idx](r_f)
g_sp_feat = self.pams[idx](g_f)
b_sp_feat = self.pams[idx](b_f)
a_sp_feat = self.pams[idx](a_f)
r_attention = self.cam_cals[idx](r_f)
g_attention = self.cam_cals[idx](g_f)
return d_f
'''
class FeatureEnhanceDIDepthNoCross(nn.Module):
depthLayers = 1
def __init__(self,
in_channels=256,
out_channels=256):
super(FeatureEnhanceDIDepthNoCross, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.pams = nn.ModuleList()
self.cam_cals = nn.ModuleList()
self.cam_uses = nn.ModuleList()
self.deform_convs = nn.ModuleList()
for i in range(self.depthLayers):
self.pams.append(PAM_Module(self.in_channels))
self.cam_cals.append(CAM_Calculate(self.in_channels))
self.cam_uses.append(CAM_Use(self.in_channels))
self.deform_convs.append(nn.Conv2d(self.in_channels, self.out_channels, kernel_size=3, padding=1))
#self.deform_convs.append(DeformableConv2d(in_channels=self.in_channels,
# out_channels=self.out_channels,
# kernel_size=3,
# padding=1))
def forward(self, r_f, g_f, b_f, a_f, d_f):
for idx in range(self.depthLayers):
r_sp_feat = self.pams[idx](r_f)
g_sp_feat = self.pams[idx](g_f)
b_sp_feat = self.pams[idx](b_f)
a_sp_feat = self.pams[idx](a_f)
d_sp_feat = self.pams[idx](d_f)
r_attention = self.cam_cals[idx](r_f)
g_attention = self.cam_cals[idx](g_f)
b_attention = self.cam_cals[idx](b_f)
a_attention = self.cam_cals[idx](a_f)
d_attention = self.cam_cals[idx](d_f)
rr_sc_feat = self.cam_uses[idx](r_f, r_attention)
gg_sc_feat = self.cam_uses[idx](g_f, g_attention)
bb_sc_feat = self.cam_uses[idx](b_f, b_attention)
aa_sc_feat = self.cam_uses[idx](a_f, a_attention)
dd_sc_feat = self.cam_uses[idx](d_f, d_attention)
r_f = r_sp_feat + rr_sc_feat
g_f = g_sp_feat + gg_sc_feat
b_f = b_sp_feat + bb_sc_feat
a_f = a_sp_feat + aa_sc_feat
d_f = d_sp_feat + dd_sc_feat
r_f = self.deform_convs[idx](r_f)
g_f = self.deform_convs[idx](g_f)
b_f = self.deform_convs[idx](b_f)
a_f = self.deform_convs[idx](a_f)
d_f = self.deform_convs[idx](d_f)
return r_f, g_f, b_f, a_f, d_f
| 39.756131
| 135
| 0.58401
| 4,269
| 29,181
| 3.627079
| 0.044273
| 0.068974
| 0.074593
| 0.073043
| 0.85062
| 0.843258
| 0.826724
| 0.806768
| 0.773444
| 0.767179
| 0
| 0.010894
| 0.311127
| 29,181
| 733
| 136
| 39.810368
| 0.759377
| 0.089202
| 0
| 0.721881
| 0
| 0
| 0.001625
| 0
| 0
| 0
| 0
| 0
| 0.00409
| 1
| 0.055215
| false
| 0
| 0.00818
| 0
| 0.130879
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2fb37f9839225f2fef9bcbb5339e844746a29827
| 1,569
|
py
|
Python
|
messengerext/home/migrations/0009_auto_20160529_1706.py
|
groupsome/groupsome
|
4edcf30d66ff458c4df37d3198ef187219a768d7
|
[
"MIT"
] | 6
|
2016-10-07T13:43:17.000Z
|
2017-10-07T22:34:44.000Z
|
messengerext/home/migrations/0009_auto_20160529_1706.py
|
groupsome/groupsome
|
4edcf30d66ff458c4df37d3198ef187219a768d7
|
[
"MIT"
] | null | null | null |
messengerext/home/migrations/0009_auto_20160529_1706.py
|
groupsome/groupsome
|
4edcf30d66ff458c4df37d3198ef187219a768d7
|
[
"MIT"
] | 1
|
2020-07-15T04:29:31.000Z
|
2020-07-15T04:29:31.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-29 17:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0008_remove_sticker_emoji'),
]
operations = [
migrations.AddField(
model_name='audio',
name='telegram_id',
field=models.IntegerField(null=True, verbose_name='Telegram ID'),
),
migrations.AddField(
model_name='file',
name='telegram_id',
field=models.IntegerField(null=True, verbose_name='Telegram ID'),
),
migrations.AddField(
model_name='link',
name='telegram_id',
field=models.IntegerField(null=True, verbose_name='Telegram ID'),
),
migrations.AddField(
model_name='photo',
name='telegram_id',
field=models.IntegerField(null=True, verbose_name='Telegram ID'),
),
migrations.AddField(
model_name='sticker',
name='telegram_id',
field=models.IntegerField(null=True, verbose_name='Telegram ID'),
),
migrations.AddField(
model_name='text',
name='telegram_id',
field=models.IntegerField(null=True, verbose_name='Telegram ID'),
),
migrations.AddField(
model_name='video',
name='telegram_id',
field=models.IntegerField(null=True, verbose_name='Telegram ID'),
),
]
| 30.764706
| 77
| 0.576163
| 156
| 1,569
| 5.608974
| 0.307692
| 0.192
| 0.224
| 0.216
| 0.713143
| 0.713143
| 0.713143
| 0.713143
| 0.713143
| 0.713143
| 0
| 0.018298
| 0.303378
| 1,569
| 50
| 78
| 31.38
| 0.782251
| 0.042702
| 0
| 0.651163
| 1
| 0
| 0.144763
| 0.016678
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.046512
| 0
| 0.116279
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2fbe2d4eb6521743caac93bd9ab0e0ee1b58a442
| 141
|
py
|
Python
|
lib/backbone/__init__.py
|
yaopengUSTC/mbit-skin-cancer
|
a82a87b2abebaf724dbe2a7b7e833c434c1b56a0
|
[
"MIT"
] | 3
|
2022-01-23T05:27:43.000Z
|
2022-03-08T07:29:25.000Z
|
lib/backbone/__init__.py
|
yaopengUSTC/mbit-skin-cancer
|
a82a87b2abebaf724dbe2a7b7e833c434c1b56a0
|
[
"MIT"
] | null | null | null |
lib/backbone/__init__.py
|
yaopengUSTC/mbit-skin-cancer
|
a82a87b2abebaf724dbe2a7b7e833c434c1b56a0
|
[
"MIT"
] | null | null | null |
from .all_models import *
from .build_regnet import *
from .regnet import *
from .regnet_dropblock import *
from .efficientnet import *
| 23.5
| 32
| 0.751773
| 18
| 141
| 5.722222
| 0.444444
| 0.38835
| 0.31068
| 0.427184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177305
| 141
| 5
| 33
| 28.2
| 0.887931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
6421faf5ab6f8982161b187253309ea98b2e2f8b
| 9,818
|
py
|
Python
|
skyportal/tests/frontend/test_followup_requests.py
|
dannygoldstein/skyportal
|
3f3518136530fcf5bd1787a4c890782164627fce
|
[
"BSD-3-Clause"
] | null | null | null |
skyportal/tests/frontend/test_followup_requests.py
|
dannygoldstein/skyportal
|
3f3518136530fcf5bd1787a4c890782164627fce
|
[
"BSD-3-Clause"
] | null | null | null |
skyportal/tests/frontend/test_followup_requests.py
|
dannygoldstein/skyportal
|
3f3518136530fcf5bd1787a4c890782164627fce
|
[
"BSD-3-Clause"
] | null | null | null |
import uuid
import pytest
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import (
ElementClickInterceptedException,
TimeoutException,
)
from skyportal.tests import api
def add_telescope_and_instrument(instrument_name, token):
status, data = api("GET", f"instrument?name={instrument_name}", token=token)
if len(data["data"]) == 1:
return data["data"][0]
telescope_name = str(uuid.uuid4())
status, data = api(
"POST",
"telescope",
data={
"name": telescope_name,
"nickname": telescope_name,
"lat": 0.0,
"lon": 0.0,
"elevation": 0.0,
"diameter": 10.0,
"robotic": True,
},
token=token,
)
assert status == 200
assert data["status"] == "success"
telescope_id = data["data"]["id"]
status, data = api(
"POST",
"instrument",
data={
"name": instrument_name,
"type": "imager",
"band": "Optical",
"telescope_id": telescope_id,
"filters": ["ztfg"],
},
token=token,
)
assert status == 200
assert data["status"] == "success"
return data["data"]
@pytest.mark.flaky(reruns=2)
def test_submit_new_followup_request(
driver, super_admin_user, public_source, super_admin_token
):
add_telescope_and_instrument("P60 Camera", super_admin_token)
driver.get(f"/become_user/{super_admin_user.id}")
driver.get(f"/source/{public_source.id}")
instrument_select = driver.wait_for_xpath(
'//*[@id="mui-component-select-instrument_id"]'
)
driver.scroll_to_element_and_click(instrument_select)
driver.scroll_to_element_and_click(
driver.wait_for_xpath('//*[text()="P60 Camera"]')
)
submit_button = driver.wait_for_xpath(
'//*[@name="createNewFollowupRequestSubmitButton"]'
)
driver.execute_script("arguments[0].scrollIntoView();", submit_button)
driver.execute_script(
"arguments[0].click();", driver.wait_for_xpath('//input[@name="start_date"]')
)
driver.execute_script(
"arguments[0].click();",
driver.wait_for_xpath(
'//div[contains(@class,"react-datepicker__day react-datepicker__day--013")]'
),
)
driver.execute_script(
"arguments[0].click();", driver.wait_for_xpath('//input[@name="end_date"]')
)
driver.execute_script(
"arguments[0].click();",
driver.wait_for_xpath(
'//div[contains(@class,"react-datepicker__day react-datepicker__day--014")]'
),
)
driver.execute_script(
"arguments[0].click();", driver.wait_for_xpath('//input[@value="sdssu"]')
)
exposure_select = Select(driver.wait_for_xpath('//select[@name="exposure_time"]'))
exposure_select.select_by_visible_text("120s")
priority_select = Select(driver.wait_for_xpath('//select[@name="priority"]'))
priority_select.select_by_visible_text("1")
driver.execute_script("arguments[0].click();", submit_button)
driver.wait_for_xpath("//td[contains(.,'P60 Camera')]")
driver.wait_for_xpath("//td[contains(.,'pending')]")
driver.wait_for_xpath("//td[contains(.,'1')]")
@pytest.mark.flaky(reruns=2)
def test_edit_existing_followup_request(
driver, super_admin_user, public_source, super_admin_token
):
add_telescope_and_instrument("P60 Camera", super_admin_token)
driver.get(f"/become_user/{super_admin_user.id}")
driver.get(f"/source/{public_source.id}")
instrument_select = driver.wait_for_xpath(
'//*[@id="mui-component-select-instrument_id"]'
)
driver.scroll_to_element_and_click(instrument_select)
driver.scroll_to_element_and_click(
driver.wait_for_xpath('//*[text()="P60 Camera"]')
)
submit_button = driver.wait_for_xpath(
'//*[@name="createNewFollowupRequestSubmitButton"]'
)
driver.execute_script("arguments[0].scrollIntoView();", submit_button)
driver.execute_script(
"arguments[0].click();", driver.wait_for_xpath('//input[@name="start_date"]')
)
driver.execute_script(
"arguments[0].click();",
driver.wait_for_xpath(
'//div[contains(@class,"react-datepicker__day react-datepicker__day--013")]'
),
)
driver.execute_script(
"arguments[0].click();", driver.wait_for_xpath('//input[@name="end_date"]')
)
driver.execute_script(
"arguments[0].click();",
driver.wait_for_xpath(
'//div[contains(@class,"react-datepicker__day react-datepicker__day--014")]'
),
)
driver.execute_script(
"arguments[0].click();", driver.wait_for_xpath('//input[@value="sdssu"]')
)
exposure_select = Select(driver.wait_for_xpath('//select[@name="exposure_time"]'))
exposure_select.select_by_visible_text("120s")
priority_select = Select(driver.wait_for_xpath('//select[@name="priority"]'))
priority_select.select_by_visible_text("1")
submit_button.click()
driver.wait_for_xpath("//td[contains(.,'1')]")
driver.scroll_to_element_and_click(driver.wait_for_xpath('//button[text()="Edit"]'))
priority_select = Select(driver.wait_for_xpath('//select[@name="priority"]'))
priority_select.select_by_visible_text("5")
submit_button = driver.wait_for_xpath(
'//*[@name="editExistingFollowupRequestSubmitButton"]'
)
driver.execute_script("arguments[0].click();", submit_button)
try:
driver.wait_for_xpath("//td[contains(.,'5')]")
except TimeoutException:
driver.refresh()
driver.wait_for_xpath("//td[contains(.,'5')]")
@pytest.mark.flaky(reruns=2)
def test_delete_followup_request(
driver, super_admin_user, public_source, super_admin_token
):
add_telescope_and_instrument("P60 Camera", super_admin_token)
driver.get(f"/become_user/{super_admin_user.id}")
driver.get(f"/source/{public_source.id}")
instrument_select = driver.wait_for_xpath(
'//*[@id="mui-component-select-instrument_id"]'
)
driver.scroll_to_element_and_click(instrument_select)
driver.scroll_to_element_and_click(
driver.wait_for_xpath('//*[text()="P60 Camera"]')
)
submit_button = driver.wait_for_xpath(
'//*[@name="createNewFollowupRequestSubmitButton"]'
)
driver.execute_script("arguments[0].scrollIntoView();", submit_button)
driver.execute_script(
"arguments[0].click();", driver.wait_for_xpath('//input[@name="start_date"]')
)
driver.execute_script(
"arguments[0].click();",
driver.wait_for_xpath(
'//div[contains(@class,"react-datepicker__day react-datepicker__day--013")]'
),
)
driver.execute_script(
"arguments[0].click();", driver.wait_for_xpath('//input[@name="end_date"]')
)
driver.execute_script(
"arguments[0].click();",
driver.wait_for_xpath(
'//div[contains(@class,"react-datepicker__day react-datepicker__day--014")]'
),
)
driver.execute_script(
"arguments[0].click();", driver.wait_for_xpath('//input[@value="sdssu"]')
)
exposure_select = Select(driver.wait_for_xpath('//select[@name="exposure_time"]'))
exposure_select.select_by_visible_text("120s")
priority_select = Select(driver.wait_for_xpath('//select[@name="priority"]'))
priority_select.select_by_visible_text("1")
driver.execute_script("arguments[0].click();", submit_button)
driver.wait_for_xpath("//td[contains(.,'P60 Camera')]")
driver.wait_for_xpath("//td[contains(.,'pending')]")
driver.wait_for_xpath("//td[contains(.,'1')]")
driver.scroll_to_element_and_click(
driver.wait_for_xpath('//button[text()="Delete"]')
)
driver.wait_for_xpath_to_disappear('//button[text()="Delete"]')
@pytest.mark.flaky(reruns=2)
def test_cannot_edit_uneditable_followup_request(
driver, super_admin_user, public_source, super_admin_token
):
add_telescope_and_instrument("ALFOSC", super_admin_token)
driver.get(f"/become_user/{super_admin_user.id}")
driver.get(f"/source/{public_source.id}")
instrument_select = driver.wait_for_xpath(
'//*[@id="mui-component-select-instrument_id"]'
)
driver.scroll_to_element_and_click(instrument_select)
driver.scroll_to_element_and_click(driver.wait_for_xpath('//*[text()="ALFOSC"]'))
submit_button = driver.wait_for_xpath(
'//*[@name="createNewFollowupRequestSubmitButton"]'
)
driver.execute_script("arguments[0].scrollIntoView();", submit_button)
driver.wait_for_xpath(
'//*[contains(.,"WARNING: You will not be able to edit or delete this request once submitted.")]'
)
driver.execute_script(
"arguments[0].click();", driver.wait_for_xpath('//input[@name="start_date"]')
)
driver.execute_script(
"arguments[0].click();",
driver.wait_for_xpath(
'//div[contains(@class,"react-datepicker__day react-datepicker__day--013")]'
),
)
driver.execute_script(
"arguments[0].click();", driver.wait_for_xpath('//input[@name="end_date"]')
)
driver.execute_script(
"arguments[0].click();",
driver.wait_for_xpath(
'//div[contains(@class,"react-datepicker__day react-datepicker__day--014")]'
),
)
filter_select = Select(driver.wait_for_xpath('//select[@name="filters"]'))
filter_select.select_by_visible_text("sdssu")
priority_select = Select(driver.wait_for_xpath('//select[@name="priority"]'))
priority_select.select_by_visible_text("1")
driver.execute_script("arguments[0].click();", submit_button)
driver.wait_for_xpath("//td[contains(.,'1')]")
driver.wait_for_xpath_to_disappear('//button[text()="Edit"]')
| 37.049057
| 105
| 0.659197
| 1,151
| 9,818
| 5.29192
| 0.114683
| 0.091939
| 0.119521
| 0.16549
| 0.862092
| 0.857166
| 0.855196
| 0.821047
| 0.793958
| 0.778197
| 0
| 0.01349
| 0.177022
| 9,818
| 264
| 106
| 37.189394
| 0.740347
| 0
| 0
| 0.651639
| 0
| 0
| 0.322469
| 0.276329
| 0
| 0
| 0
| 0
| 0.016393
| 1
| 0.020492
| false
| 0
| 0.020492
| 0
| 0.04918
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
643fb86b695b720b6afcc16bfc6bfefe62340849
| 184
|
py
|
Python
|
src/server/api/ModelUploadAPI.py
|
OdysseyMomentum/Bangalore-Torpedo
|
423abf4ab07c84e603fcaf948c30c7ce54f26188
|
[
"MIT"
] | 1
|
2020-12-02T20:49:31.000Z
|
2020-12-02T20:49:31.000Z
|
src/server/api/ModelUploadAPI.py
|
OdysseyMomentum/Bangalore-Torpedo
|
423abf4ab07c84e603fcaf948c30c7ce54f26188
|
[
"MIT"
] | null | null | null |
src/server/api/ModelUploadAPI.py
|
OdysseyMomentum/Bangalore-Torpedo
|
423abf4ab07c84e603fcaf948c30c7ce54f26188
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
model_upload_api = Blueprint('model_upload_api', __name__)
@model_upload_api.route("/model_upload")
def model_upload():
return "Model Upload Response"
| 26.285714
| 58
| 0.793478
| 25
| 184
| 5.36
| 0.48
| 0.492537
| 0.313433
| 0.343284
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 184
| 7
| 59
| 26.285714
| 0.817073
| 0
| 0
| 0
| 0
| 0
| 0.27027
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0.4
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
ff2b2fe54dd0aedc282520dcc7206b4f780bfee8
| 246
|
py
|
Python
|
graphene-django/examples/cookbook/cookbook/schema.py
|
dialoguemd/graphene
|
ceffc4de691509968f200065642731fcc4acd217
|
[
"MIT"
] | null | null | null |
graphene-django/examples/cookbook/cookbook/schema.py
|
dialoguemd/graphene
|
ceffc4de691509968f200065642731fcc4acd217
|
[
"MIT"
] | null | null | null |
graphene-django/examples/cookbook/cookbook/schema.py
|
dialoguemd/graphene
|
ceffc4de691509968f200065642731fcc4acd217
|
[
"MIT"
] | null | null | null |
import graphene
import cookbook.ingredients.schema
# print cookbook.ingredients.schema.Query._meta.graphql_type.get_fields()['allIngredients'].args
class Query(cookbook.ingredients.schema.Query):
pass
schema = graphene.Schema(query=Query)
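# Hedged usage sketch (added): any graphene schema answers the standard GraphQL
# introspection query, so this check needs no knowledge of cookbook's fields.
if __name__ == "__main__":
    result = schema.execute("{ __schema { queryType { name } } }")
    print(result.data)  # expected: {'__schema': {'queryType': {'name': 'Query'}}}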
| 24.6
| 96
| 0.808943
| 30
| 246
| 6.533333
| 0.533333
| 0.290816
| 0.382653
| 0.306122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081301
| 246
| 9
| 97
| 27.333333
| 0.867257
| 0.382114
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0.4
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
ff77a924a5582a482edeb27d0b61ed2f27e3fcbe
| 2,321
|
py
|
Python
|
web/src/utils/exceptions.py
|
saurabh1e/SuperFlaskSeed
|
a533daee568ca349be8d9ef4a7a9d5065abb2324
|
[
"MIT"
] | 11
|
2017-01-19T16:27:07.000Z
|
2022-01-19T07:18:47.000Z
|
web/src/utils/exceptions.py
|
saurabh1e/SuperFlaskSeed
|
a533daee568ca349be8d9ef4a7a9d5065abb2324
|
[
"MIT"
] | null | null | null |
web/src/utils/exceptions.py
|
saurabh1e/SuperFlaskSeed
|
a533daee568ca349be8d9ef4a7a9d5065abb2324
|
[
"MIT"
] | 6
|
2016-11-13T14:07:25.000Z
|
2019-12-04T15:34:09.000Z
|
from sqlalchemy.exc import OperationalError, IntegrityError
class SQlOperationalError(OperationalError):
def __init__(self, data, message, operation, status):
self.message = self.construct_error_message(data, message, operation)
self.status = status
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
def _get_status(self):
return self._status
def _set_status(self, status):
self._status = status
status = property(_get_status, _set_status)
@staticmethod
def construct_error_message(data, message, operation):
return {'data': data, 'message': message, 'operation': operation}
class SQLIntegrityError(IntegrityError):
def __init__(self, data, message, operation, status):
self.message = self.construct_error_message(data, message, operation)
self.status = status
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
def _get_status(self):
return self._status
def _set_status(self, status):
self._status = status
status = property(_get_status, _set_status)
@staticmethod
def construct_error_message(data, message, operation):
return {'data': data, 'message': message, 'operation': operation}
class CustomException(Exception):
def __init__(self, data, message, operation, status=400):
self.message = self.construct_error_message(data, message, operation)
self.status = status
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
def _get_status(self):
return self._status
def _set_status(self, status):
self._status = status
status = property(_get_status, _set_status)
@staticmethod
def construct_error_message(data, message, operation):
return {'data': data, 'message': message, 'operation': operation}
class ResourceNotFound(CustomException):
status = 404
class RequestNotAllowed(CustomException):
status = 401
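# Hedged usage sketch (added): raising and unpacking one of these errors. Note
# that __init__ sets an instance-level status (default 400), which shadows the
# 404/401 class attributes on the subclasses at read time.
def _demo_custom_exception():
    try:
        raise ResourceNotFound({'id': 42}, 'object not found', 'GET')
    except CustomException as exc:
        # exc.message == {'data': {'id': 42}, 'message': 'object not found',
        # 'operation': 'GET'}; exc.status == 400 (the __init__ default)
        return exc.message, exc.status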
| 24.691489
| 77
| 0.692805
| 254
| 2,321
| 6
| 0.11811
| 0.086614
| 0.11811
| 0.098425
| 0.844488
| 0.844488
| 0.844488
| 0.82021
| 0.82021
| 0.82021
| 0
| 0.004953
| 0.217148
| 2,321
| 93
| 78
| 24.956989
| 0.833792
| 0
| 0
| 0.839286
| 0
| 0
| 0.025851
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.321429
| false
| 0
| 0.017857
| 0.160714
| 0.732143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
441f13800acc530c79215e68fdbe30197aeadc31
| 9,174
|
py
|
Python
|
test/test_SM/test_ploshadki/test_ploshadka_sberbank_act_pao_sberbank_ask.py
|
IrinaSlobodchikova/marker
|
72f981134fb025a94348cd2bc829fa8430a01372
|
[
"Apache-2.0"
] | null | null | null |
test/test_SM/test_ploshadki/test_ploshadka_sberbank_act_pao_sberbank_ask.py
|
IrinaSlobodchikova/marker
|
72f981134fb025a94348cd2bc829fa8430a01372
|
[
"Apache-2.0"
] | null | null | null |
test/test_SM/test_ploshadki/test_ploshadka_sberbank_act_pao_sberbank_ask.py
|
IrinaSlobodchikova/marker
|
72f981134fb025a94348cd2bc829fa8430a01372
|
[
"Apache-2.0"
] | null | null | null |
def test_sm_sberbank_act_include_eic_yestoday(app):
app.testhelpersm.refresh_page()
app.session.open_SM_page(app.smPurchases)
app.session.ensure_login_sm(app.username, app.password)
app.session.ensure_login_sm(app.username, app.password)
app.session.open_SM_page(app.smPurchases)
app.testHelperSMSearch.expand_show_hide()
    # Search within a container (total containers + 1; container number, 0 = random pick;
    # row number within the container, 0 = random pick)
name = 'СБЕРБАНК-АСТ'
app.testHelperSMSearch.select_first_publish_date(3, 0)
app.testHelperSMSearch.find_torgovaya_ploschadka(name)
app.testHelperSMSearch.press_search_button()
assert app.testHelperSMSearch.check_results() != '0'
assert int(app.testHelperSMSearch.check_results()) > 2800
def test_sm_sberbank_act_without_eic_yestoday(app):
app.testhelpersm.refresh_page()
app.session.open_SM_page(app.smPurchases)
app.session.ensure_login_sm(app.username, app.password)
app.session.ensure_login_sm(app.username, app.password)
app.session.open_SM_page(app.smPurchases)
app.testHelperSMSearch.expand_show_hide()
    # Search within a container (total containers + 1; container number, 0 = random pick;
    # row number within the container, 0 = random pick)
name = 'СБЕРБАНК-АСТ'
app.testHelperSMSearch.select_first_publish_date(3, 0)
app.testHelperSMSearch.find_torgovaya_ploschadka(name)
app.testHelperSMSearch.find_in_container_number(11, 2, 1)
app.testHelperSMSearch.find_in_container_number(11, 6, 3)
app.testHelperSMSearch.find_in_container_number(11, 6, 4)
app.testHelperSMSearch.press_search_button()
assert app.testHelperSMSearch.check_results() != '0'
assert int(app.testHelperSMSearch.check_results()) > 50
def test_sm_sberbank_act_include_eic_yestoday_today(app):
app.testhelpersm.refresh_page()
app.session.open_SM_page(app.smPurchases)
app.session.ensure_login_sm(app.username, app.password)
app.session.ensure_login_sm(app.username, app.password)
app.session.open_SM_page(app.smPurchases)
app.testHelperSMSearch.expand_show_hide()
    # Search within a container (total containers + 1; container number, 0 = random pick;
    # row number within the container, 0 = random pick)
name = 'СБЕРБАНК-АСТ'
app.testHelperSMSearch.select_first_publish_date(11, 0)
app.testHelperSMSearch.find_torgovaya_ploschadka(name)
app.testHelperSMSearch.press_search_button()
assert app.testHelperSMSearch.check_results() != '0'
assert int(app.testHelperSMSearch.check_results()) > 2800
def test_sm_sberbank_act_without_eic_yestoday_today(app):
app.testhelpersm.refresh_page()
app.session.open_SM_page(app.smPurchases)
app.session.ensure_login_sm(app.username, app.password)
app.session.ensure_login_sm(app.username, app.password)
app.session.open_SM_page(app.smPurchases)
app.testHelperSMSearch.expand_show_hide()
    # Search within a container (total containers + 1; container number, 0 = random pick;
    # row number within the container, 0 = random pick)
name = 'СБЕРБАНК-АСТ'
app.testHelperSMSearch.select_first_publish_date(11, 0)
app.testHelperSMSearch.find_torgovaya_ploschadka(name)
app.testHelperSMSearch.find_in_container_number(11, 2, 1)
app.testHelperSMSearch.find_in_container_number(11, 6, 3)
app.testHelperSMSearch.find_in_container_number(11, 6, 4)
app.testHelperSMSearch.press_search_button()
assert app.testHelperSMSearch.check_results() != '0'
assert int(app.testHelperSMSearch.check_results()) > 50
def test_sm_sberbank_act_include_eic_7_days(app):
app.testhelpersm.refresh_page()
app.session.open_SM_page(app.smPurchases)
app.session.ensure_login_sm(app.username, app.password)
app.session.ensure_login_sm(app.username, app.password)
app.session.open_SM_page(app.smPurchases)
app.testHelperSMSearch.expand_show_hide()
    # Search within a container (total containers + 1; container number, 0 = random pick;
    # row number within the container, 0 = random pick)
name = 'СБЕРБАНК-АСТ'
app.testHelperSMSearch.select_first_publish_date(4, 0)
app.testHelperSMSearch.find_torgovaya_ploschadka(name)
app.testHelperSMSearch.press_search_button()
assert app.testHelperSMSearch.check_results() != '0'
assert int(app.testHelperSMSearch.check_results()) > 15000
def test_sm_sberbank_act_without_eic_7_days(app):
app.testhelpersm.refresh_page()
app.session.open_SM_page(app.smPurchases)
app.session.ensure_login_sm(app.username, app.password)
app.session.ensure_login_sm(app.username, app.password)
app.session.open_SM_page(app.smPurchases)
app.testHelperSMSearch.expand_show_hide()
    # Search within a container (total containers + 1; container number, 0 = random pick;
    # row number within the container, 0 = random pick)
name = 'СБЕРБАНК-АСТ'
app.testHelperSMSearch.select_first_publish_date(4, 0)
app.testHelperSMSearch.find_torgovaya_ploschadka(name)
app.testHelperSMSearch.find_in_container_number(11, 2, 1)
app.testHelperSMSearch.find_in_container_number(11, 6, 3)
app.testHelperSMSearch.find_in_container_number(11, 6, 4)
app.testHelperSMSearch.press_search_button()
assert app.testHelperSMSearch.check_results() != '0'
assert int(app.testHelperSMSearch.check_results()) > 250
def test_sm_sberbank_act_include_eic_current_month(app):
app.testhelpersm.refresh_page()
app.session.open_SM_page(app.smPurchases)
app.session.ensure_login_sm(app.username, app.password)
app.session.ensure_login_sm(app.username, app.password)
app.session.open_SM_page(app.smPurchases)
app.testHelperSMSearch.expand_show_hide()
    # Search within a container (total containers + 1; container number, 0 = random pick;
    # row number within the container, 0 = random pick)
name = 'СБЕРБАНК-АСТ'
app.testHelperSMSearch.select_first_publish_date(5, 0)
app.testHelperSMSearch.find_torgovaya_ploschadka(name)
app.testHelperSMSearch.press_search_button()
assert app.testHelperSMSearch.check_results() != '0'
assert int(app.testHelperSMSearch.check_results()) > int(app.testHelperSMSearch.current_date_time_day())*150
def test_sm_sberbank_act_without_eic_current_month(app):
app.testhelpersm.refresh_page()
app.session.open_SM_page(app.smPurchases)
app.session.ensure_login_sm(app.username, app.password)
app.session.ensure_login_sm(app.username, app.password)
app.session.open_SM_page(app.smPurchases)
app.testHelperSMSearch.expand_show_hide()
    # Search within a container (total containers + 1; container number, 0 = random pick;
    # row number within the container, 0 = random pick)
name = 'СБЕРБАНК-АСТ'
app.testHelperSMSearch.select_first_publish_date(5, 0)
app.testHelperSMSearch.find_torgovaya_ploschadka(name)
app.testHelperSMSearch.find_in_container_number(11, 2, 1)
app.testHelperSMSearch.find_in_container_number(11, 6, 3)
app.testHelperSMSearch.find_in_container_number(11, 6, 4)
app.testHelperSMSearch.press_search_button()
assert app.testHelperSMSearch.check_results() != '0'
assert int(app.testHelperSMSearch.check_results()) > int(app.testHelperSMSearch.current_date_time_day())*25
def test_sm_sberbank_act_include_eic_prev_month(app):
app.testhelpersm.refresh_page()
app.session.open_SM_page(app.smPurchases)
app.session.ensure_login_sm(app.username, app.password)
app.session.ensure_login_sm(app.username, app.password)
app.session.open_SM_page(app.smPurchases)
app.testHelperSMSearch.expand_show_hide()
# Search in a container (total number of containers + 1, container number (0 = random choice), row number
# within the container (0 = random choice))
name = 'СБЕРБАНК-АСТ'
app.testHelperSMSearch.select_first_publish_date(6, 0)
app.testHelperSMSearch.find_torgovaya_ploschadka(name)
app.testHelperSMSearch.press_search_button()
assert app.testHelperSMSearch.check_results() != '0'
assert int(app.testHelperSMSearch.check_results()) > 65000
def test_sm_sberbank_act_without_eic_prev_month(app):
app.testhelpersm.refresh_page()
app.session.open_SM_page(app.smPurchases)
app.session.ensure_login_sm(app.username, app.password)
app.session.ensure_login_sm(app.username, app.password)
app.session.open_SM_page(app.smPurchases)
app.testHelperSMSearch.expand_show_hide()
# Search in a container (total number of containers + 1, container number (0 = random choice), row number
# within the container (0 = random choice))
name = 'СБЕРБАНК-АСТ'
app.testHelperSMSearch.select_first_publish_date(6, 0)
app.testHelperSMSearch.find_torgovaya_ploschadka(name)
app.testHelperSMSearch.find_in_container_number(11, 2, 1)
app.testHelperSMSearch.find_in_container_number(11, 6, 3)
app.testHelperSMSearch.find_in_container_number(11, 6, 4)
app.testHelperSMSearch.press_search_button()
assert app.testHelperSMSearch.check_results() != '0'
assert int(app.testHelperSMSearch.check_results()) > 1500
| 48.797872
| 112
| 0.775343
| 1,207
| 9,174
| 5.628832
| 0.065452
| 0.238004
| 0.091993
| 0.0471
| 0.996762
| 0.996762
| 0.996762
| 0.976891
| 0.972917
| 0.972917
| 0
| 0.019757
| 0.128297
| 9,174
| 188
| 113
| 48.797872
| 0.829811
| 0.152496
| 0
| 0.889655
| 0
| 0
| 0.01677
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 1
| 0.068966
| false
| 0.137931
| 0
| 0
| 0.068966
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
446f24471146451113d5d3bf50c30e6cbafa4070
| 321
|
py
|
Python
|
src/consistent_faker/formats/promotions/__init__.py
|
linkerzx/ConsistentFaker
|
6de78aaac2e08b8f2b2e8f687076271159bbc871
|
[
"MIT"
] | null | null | null |
src/consistent_faker/formats/promotions/__init__.py
|
linkerzx/ConsistentFaker
|
6de78aaac2e08b8f2b2e8f687076271159bbc871
|
[
"MIT"
] | null | null | null |
src/consistent_faker/formats/promotions/__init__.py
|
linkerzx/ConsistentFaker
|
6de78aaac2e08b8f2b2e8f687076271159bbc871
|
[
"MIT"
] | null | null | null |
"""
REPR formats for the Fake Promotion objects
Not currently used
"""
from consistent_faker.formats.promotions.promotion_repr import COUPON_PROMOTION_REPR
from consistent_faker.formats.promotions.promotion_repr import NO_PROMOTION_REPR
from consistent_faker.formats.promotions.promotion_repr import OTHER_PROMOTION_REPR
| 40.125
| 84
| 0.878505
| 43
| 321
| 6.27907
| 0.418605
| 0.288889
| 0.211111
| 0.288889
| 0.707407
| 0.707407
| 0.707407
| 0.707407
| 0.503704
| 0.503704
| 0
| 0
| 0.074766
| 321
| 7
| 85
| 45.857143
| 0.909091
| 0.193146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
4471527ed28973889b32d9a4ea8d0e5949e2116e
| 28,122
|
py
|
Python
|
pyclustering/cluster/tests/unit/ut_kmedoids.py
|
JosephChataignon/pyclustering
|
bf4f51a472622292627ec8c294eb205585e50f52
|
[
"BSD-3-Clause"
] | 1,013
|
2015-01-26T19:50:14.000Z
|
2022-03-31T07:38:48.000Z
|
pyclustering/cluster/tests/unit/ut_kmedoids.py
|
peterlau0626/pyclustering
|
bf4f51a472622292627ec8c294eb205585e50f52
|
[
"BSD-3-Clause"
] | 542
|
2015-01-20T16:44:32.000Z
|
2022-01-29T14:57:20.000Z
|
pyclustering/cluster/tests/unit/ut_kmedoids.py
|
peterlau0626/pyclustering
|
bf4f51a472622292627ec8c294eb205585e50f52
|
[
"BSD-3-Clause"
] | 262
|
2015-03-19T07:28:12.000Z
|
2022-03-30T07:28:24.000Z
|
"""!
@brief Unit-tests for K-Medoids algorithm.
@authors Andrei Novikov (pyclustering@yandex.ru)
@date 2014-2020
@copyright BSD-3-Clause
"""
import unittest
# Generate images without having a window appear.
import matplotlib
matplotlib.use('Agg')
from pyclustering.cluster.kmedoids import kmedoids, build, pam
from pyclustering.cluster.tests.kmedoids_templates import kmedoids_test_template
from pyclustering.samples.definitions import SIMPLE_SAMPLES, SIMPLE_ANSWERS
from pyclustering.utils import read_sample
from pyclustering.utils.metric import type_metric, distance_metric
class KmedoidsUnitTest(unittest.TestCase):
def testClusterAllocationSampleSimple1(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], False)
def testClusterAllocationSampleSimple1WrongInitials1(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [1, 2, 3, 4], [2, 2, 3, 3], False)
def testClusterAllocationSampleSimple1DistanceMatrix(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], False, data_type='distance_matrix')
def testClusterAllocationSampleSimple1DistanceMatrixNumpy(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], False, data_type='distance_matrix', input_type='numpy')
def testClusterAllocationSampleSimple1Euclidean(self):
metric = distance_metric(type_metric.EUCLIDEAN)
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False)
def testClusterAllocationSampleSimple1EuclideanDistanceMatrix(self):
metric = distance_metric(type_metric.EUCLIDEAN)
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False, data_type='distance_matrix')
def testClusterAllocationSampleSimple1SquareEuclidean(self):
metric = distance_metric(type_metric.EUCLIDEAN_SQUARE)
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False)
def testClusterAllocationSampleSimple1SquareEuclideanDistanceMatrix(self):
metric = distance_metric(type_metric.EUCLIDEAN_SQUARE)
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False, data_type='distance_matrix')
def testClusterAllocationSampleSimple1Manhattan(self):
metric = distance_metric(type_metric.MANHATTAN)
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False)
def testClusterAllocationSampleSimple1ManhattanDistanceMatrix(self):
metric = distance_metric(type_metric.MANHATTAN)
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False, data_type='distance_matrix')
def testClusterAllocationSampleSimple1Chebyshev(self):
metric = distance_metric(type_metric.CHEBYSHEV)
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False)
def testClusterAllocationSampleSimple1ChebyshevDistanceMatrix(self):
metric = distance_metric(type_metric.CHEBYSHEV)
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False, data_type='distance_matrix')
def testClusterAllocationSampleSimple1Minkowski(self):
metric = distance_metric(type_metric.MINKOWSKI, degree=2.0)
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False)
def testClusterAllocationSampleSimple1MinkowskiDistanceMatrix(self):
metric = distance_metric(type_metric.MINKOWSKI, degree=2.0)
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False, data_type='distance_matrix')
def testClusterAllocationSampleSimple1Gower(self):
metric = distance_metric(type_metric.GOWER, data=read_sample(SIMPLE_SAMPLES.SAMPLE_SIMPLE1))
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False)
def testClusterAllocationSampleSimple1GowerDistanceMatrix(self):
metric = distance_metric(type_metric.GOWER, data=read_sample(SIMPLE_SAMPLES.SAMPLE_SIMPLE1))
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False, data_type='distance_matrix')
def testClusterAllocationSampleSimple1UserDefined(self):
metric = distance_metric(type_metric.USER_DEFINED, func=distance_metric(type_metric.EUCLIDEAN))
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False)
def testClusterAllocationSampleSimple1UserDefinedDistanceMatrix(self):
metric = distance_metric(type_metric.USER_DEFINED, func=distance_metric(type_metric.EUCLIDEAN))
kmedoids_test_template.templateLengthProcessWithMetric(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], metric, False, data_type='distance_matrix')
def testClusterOneAllocationSampleSimple1(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [5], [10], False)
def testClusterOneAllocationSampleSimple1DistanceMatrix(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [5], [10], False, data_type='distance_matrix')
def testClusterAllocationSampleSimple1WithMedoidsInitializer(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, None, [5, 5], False, initialize_medoids=2)
def testClusterAllocationSampleSimple2(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE2, [3, 12, 20], [10, 5, 8], False)
def testClusterAllocationSampleSimple2DistanceMatrix(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE2, [3, 12, 20], [10, 5, 8], False, data_type='distance_matrix')
def testClusterOneAllocationSampleSimple2(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE2, [10], [23], False)
def testClusterOneAllocationSampleSimple2DistanceMatrix(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE2, [10], [23], False, data_type='distance_matrix')
def testClusterAllocationSampleSimple2WithMedoidsInitializer(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE2, None, [10, 5, 8], False, initialize_medoids=3)
def testClusterAllocationSampleSimple3(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, [4, 12, 25, 37], [10, 10, 10, 30], False)
def testClusterAllocationSampleSimple3DistanceMatrix(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, [4, 12, 25, 37], [10, 10, 10, 30], False, data_type='distance_matrix')
def testClusterOneAllocationSampleSimple3(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, [30], [60], False)
def testClusterAllocationSampleSimple3WithMedoidsInitializer(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, None, [10, 10, 10, 30], False, initialize_medoids=4)
def testClusterAllocationSampleSimple5(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE5, [4, 18, 34, 55], [15, 15, 15, 15], False)
def testClusterAllocationSampleSimple5DistanceMatrix(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE5, [4, 18, 34, 55], [15, 15, 15, 15], False, data_type='distance_matrix')
def testClusterOneAllocationSampleSimple5(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE5, [35], [60], False)
def testClusterTheSameData1(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE9, [2, 20], [10, 20], False)
def testClusterTheSameData1DistanceMatrix(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE9, [2, 20], [10, 20], False, data_type='distance_matrix')
def testClusterTheSameData2(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE12, [2, 7, 12], [5, 5, 5], False)
def testClusterTheSameData2DistanceMatrix(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE12, [2, 7, 12], [5, 5, 5], False, data_type='distance_matrix')
def testClusterAllocationOneDimensionData(self):
kmedoids_test_template.templateClusterAllocationOneDimensionData(False)
def testClusterAllocationTheSameObjectsOneInitialMedoid(self):
kmedoids_test_template.templateClusterAllocationTheSameObjects(20, 1, False)
def testClusterAllocationTheSameObjectsTwoInitialMedoids(self):
kmedoids_test_template.templateClusterAllocationTheSameObjects(15, 2, False)
def testClusterAllocationTheSameObjectsThreeInitialMedoids(self):
kmedoids_test_template.templateClusterAllocationTheSameObjects(25, 3, False)
def testPredictOnePoint(self):
medoids = [4, 12, 25, 37]
kmedoids_test_template.templatePredict(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, medoids, [[0.3, 0.2]], [0], False)
kmedoids_test_template.templatePredict(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, medoids, [[4.1, 1.1]], [3], False)
kmedoids_test_template.templatePredict(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, medoids, [[2.1, 1.9]], [2], False)
kmedoids_test_template.templatePredict(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, medoids, [[2.1, 4.1]], [1], False)
def testPredictOnePointUserMetric(self):
medoids = [4, 12, 25, 37]
metric = distance_metric(type_metric.USER_DEFINED, func=distance_metric(type_metric.EUCLIDEAN))
kmedoids_test_template.templatePredict(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, medoids, [[0.3, 0.2]], [0], False, metric=metric)
def testPredictTwoPoints(self):
medoids = [4, 12, 25, 37]
kmedoids_test_template.templatePredict(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, medoids, [[0.3, 0.2], [2.1, 1.9]], [0, 2], False)
kmedoids_test_template.templatePredict(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, medoids, [[2.1, 4.1], [2.1, 1.9]], [1, 2], False)
def testPredictTwoPointsUserMetric(self):
medoids = [4, 12, 25, 37]
metric = distance_metric(type_metric.USER_DEFINED, func=distance_metric(type_metric.EUCLIDEAN))
kmedoids_test_template.templatePredict(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, medoids, [[0.3, 0.2], [2.1, 1.9]], [0, 2], False, metric=metric)
def testPredictFourPoints(self):
medoids = [4, 12, 25, 37]
to_predict = [[0.3, 0.2], [4.1, 1.1], [2.1, 1.9], [2.1, 4.1]]
kmedoids_test_template.templatePredict(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, medoids, to_predict, [0, 3, 2, 1], False)
def testPredictFivePoints(self):
medoids = [4, 12, 25, 37]
to_predict = [[0.3, 0.2], [4.1, 1.1], [3.9, 1.1], [2.1, 1.9], [2.1, 4.1]]
kmedoids_test_template.templatePredict(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, medoids, to_predict, [0, 3, 3, 2, 1], False)
def testPredictFivePointsUserMetric(self):
medoids = [4, 12, 25, 37]
to_predict = [[0.3, 0.2], [4.1, 1.1], [3.9, 1.1], [2.1, 1.9], [2.1, 4.1]]
metric = distance_metric(type_metric.USER_DEFINED, func=distance_metric(type_metric.EUCLIDEAN))
kmedoids_test_template.templatePredict(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, medoids, to_predict, [0, 3, 3, 2, 1], False, metric=metric)
def testAllocatedRequestedClustersSampleSimple01(self):
sample = read_sample(SIMPLE_SAMPLES.SAMPLE_SIMPLE1)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 1, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 2, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 3, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 4, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 5, None, False)
def testAllocatedRequestedClustersSampleSimple02(self):
sample = read_sample(SIMPLE_SAMPLES.SAMPLE_SIMPLE2)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 1, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 2, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 3, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 4, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 5, None, False)
def testAllocatedRequestedClustersSampleSimple03(self):
sample = read_sample(SIMPLE_SAMPLES.SAMPLE_SIMPLE3)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 2, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 5, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 8, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 10, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 15, None, False)
def testAllocatedRequestedClustersSampleSimple04(self):
sample = read_sample(SIMPLE_SAMPLES.SAMPLE_SIMPLE4)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 10, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 25, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 40, None, False)
def testAllocatedRequestedClustersWithTheSamePoints1(self):
# Bug issue #366 - Kmedoids returns incorrect number of clusters.
sample = [[0.0, 0.0], [0.1, 0.1], [0.0, 0.0], [0.1, 0.2]]
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 3, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 3, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 2, None, False)
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 1, None, False)
def testAllocatedRequestedClustersWithTheSamePoints2(self):
sample = [[0.23, 0.2], [-0.1, 0.1], [0.0, 0.9], [0.1, -0.2], [0.8, 0.1], [-0.1, 0.1], [-0.4, -0.2], [0.0, 0.9]]
answers = [1, 2, 3, 4, 5, 6, 6, 6]
for expected_amount in answers:
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, expected_amount, None, False)
def testAllocatedRequestedClustersWithTotallyTheSamePoints(self):
# Bug issue #366 - Kmedoids returns incorrect number of clusters.
sample = [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]]
kmedoids_test_template.templateAllocateRequestedClusterAmount(sample, 1, None, False)
def testItermax0(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [], False, itermax=0)
def testItermax1(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], False, itermax=1)
def testItermax10Simple01(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, [2, 9], [5, 5], False, itermax=10)
def testItermax10Simple02(self):
kmedoids_test_template.templateLengthProcessData(SIMPLE_SAMPLES.SAMPLE_SIMPLE2, [3, 12, 20], [10, 5, 8], False, itermax=10)
def testSimple01AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, SIMPLE_ANSWERS.ANSWER_SIMPLE1, False, random_state=1000)
def testSimple01AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, SIMPLE_ANSWERS.ANSWER_SIMPLE1, False, random_state=1000, data_type='distance_matrix')
def testSimple02AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE2, SIMPLE_ANSWERS.ANSWER_SIMPLE2, False, random_state=1000)
def testSimple02AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE2, SIMPLE_ANSWERS.ANSWER_SIMPLE2, False, random_state=1000, data_type='distance_matrix')
def testSimple03AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, SIMPLE_ANSWERS.ANSWER_SIMPLE3, False, random_state=1000)
def testSimple03AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, SIMPLE_ANSWERS.ANSWER_SIMPLE3, False, random_state=1000, data_type='distance_matrix')
def testSimple04AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE4, SIMPLE_ANSWERS.ANSWER_SIMPLE4, False, random_state=1000)
def testSimple04AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE4, SIMPLE_ANSWERS.ANSWER_SIMPLE4, False, random_state=1000, data_type='distance_matrix')
def testSimple05AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE5, SIMPLE_ANSWERS.ANSWER_SIMPLE5, False, random_state=1000)
def testSimple05AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE5, SIMPLE_ANSWERS.ANSWER_SIMPLE5, False, random_state=1000, data_type='distance_matrix')
def testSimple06AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE6, SIMPLE_ANSWERS.ANSWER_SIMPLE6, False, random_state=1000)
def testSimple06AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE6, SIMPLE_ANSWERS.ANSWER_SIMPLE6, False, random_state=1000, data_type='distance_matrix')
def testSimple07AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE7, SIMPLE_ANSWERS.ANSWER_SIMPLE7, False, random_state=1000)
def testSimple07AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE7, SIMPLE_ANSWERS.ANSWER_SIMPLE7, False, random_state=1000, data_type='distance_matrix')
def testSimple08AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE8, SIMPLE_ANSWERS.ANSWER_SIMPLE8, False, random_state=1000)
def testSimple08AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE8, SIMPLE_ANSWERS.ANSWER_SIMPLE8, False, random_state=1000, data_type='distance_matrix')
def testSimple09AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE9, SIMPLE_ANSWERS.ANSWER_SIMPLE9, False, random_state=1000)
def testSimple09AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE9, SIMPLE_ANSWERS.ANSWER_SIMPLE9, False, random_state=1000, data_type='distance_matrix')
def testSimple10AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE10, SIMPLE_ANSWERS.ANSWER_SIMPLE10, False, random_state=1000)
def testSimple10AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE10, SIMPLE_ANSWERS.ANSWER_SIMPLE10, False, random_state=1000, data_type='distance_matrix')
def testSimple11AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE11, SIMPLE_ANSWERS.ANSWER_SIMPLE11, False, random_state=1000)
def testSimple11AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE11, SIMPLE_ANSWERS.ANSWER_SIMPLE11, False, random_state=1000, data_type='distance_matrix')
def testSimple12AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE12, SIMPLE_ANSWERS.ANSWER_SIMPLE12, False, random_state=1000)
def testSimple12AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE12, SIMPLE_ANSWERS.ANSWER_SIMPLE12, False, random_state=1000, data_type='distance_matrix')
def testSimple13AnswerByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE13, SIMPLE_ANSWERS.ANSWER_SIMPLE13, False, random_state=1000)
def testSimple13AnswerDistanceMatrixByCore(self):
kmedoids_test_template.clustering_with_answer(SIMPLE_SAMPLES.SAMPLE_SIMPLE13, SIMPLE_ANSWERS.ANSWER_SIMPLE13, False, random_state=1000, data_type='distance_matrix')
def test_incorrect_data(self):
self.assertRaises(ValueError, kmedoids, [], [1])
def test_incorrect_centers(self):
self.assertRaises(ValueError, kmedoids, [[0], [1], [2]], [])
def test_incorrect_tolerance(self):
self.assertRaises(ValueError, kmedoids, [[0], [1], [2]], [1], -1.0)
def test_incorrect_itermax(self):
self.assertRaises(ValueError, kmedoids, [[0], [1], [2]], [1], itermax=-5)
def test_pam_alias(self):
self.assertRaises(ValueError, pam, [], [1])
self.assertRaises(ValueError, pam, [[0], [1], [2]], [])
self.assertRaises(ValueError, pam, [[0], [1], [2]], [1], -1.0)
self.assertRaises(ValueError, pam, [[0], [1], [2]], [1], itermax=-5)
def test_initial_medoids_sample01(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 2, [4, 8], False)
def test_initial_medoids_sample01_matrix(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 2, [4, 8], False, data_type='distance_matrix')
def test_initial_medoids_sample01_wrong_amount_1(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 1, [4], False)
def test_initial_medoids_sample01_wrong_amount_3(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 3, [4, 8, 0], False)
def test_initial_medoids_sample01_wrong_amount_10(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 10, [4, 8, 0, 9, 1, 7, 6, 5, 2, 3], False)
def test_initial_medoids_sample01_euclidean(self):
metric = distance_metric(type_metric.EUCLIDEAN)
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 2, [4, 8], False, metric=metric)
def test_initial_medoids_sample01_euclidean_square(self):
metric = distance_metric(type_metric.EUCLIDEAN_SQUARE)
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 2, [4, 8], False, metric=metric)
def test_initial_medoids_sample01_euclidean_square_matrix(self):
metric = distance_metric(type_metric.EUCLIDEAN_SQUARE)
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 2, [4, 8], False, metric=metric, data_type='distance_matrix')
def test_initial_medoids_sample01_euclidean_manhattan(self):
metric = distance_metric(type_metric.MANHATTAN)
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 2, [4, 8], False, metric=metric)
def test_initial_medoids_sample01_euclidean_manhattan_matrix(self):
metric = distance_metric(type_metric.MANHATTAN)
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 2, [4, 8], False, metric=metric, data_type='distance_matrix')
def test_initial_medoids_sample01_euclidean_chebyshev(self):
metric = distance_metric(type_metric.CHEBYSHEV)
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 2, [4, 8], False, metric=metric)
def test_initial_medoids_sample01_euclidean_chebyshev_matrix(self):
metric = distance_metric(type_metric.CHEBYSHEV)
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 2, [4, 8], False, metric=metric, data_type='distance_matrix')
def test_initial_medoids_sample02(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE2, 3, [3, 20, 14], False)
def test_initial_medoids_sample02_matrix(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE2, 3, [3, 20, 14], False, data_type='distance_matrix')
def test_initial_medoids_sample03(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, 4, [28, 56, 5, 34], False)
def test_initial_medoids_sample03_matrix(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE3, 4, [28, 56, 5, 34], False, data_type='distance_matrix')
def test_initial_medoids_sample04(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE4, 5, [44, 7, 64, 25, 55], False)
def test_initial_medoids_sample04_matrix(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE4, 5, [44, 7, 64, 25, 55], False, data_type='distance_matrix')
def test_initial_medoids_one_dimensional(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE9, 2, [0, 20], False)
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE9, 1, [0], False)
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE9, 3, [0, 20, 1], False)
def test_initial_medoids_one_dimensional_matrix(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE9, 2, [0, 20], False, data_type='distance_matrix')
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE9, 1, [0], False, data_type='distance_matrix')
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE9, 3, [0, 20, 1], False, data_type='distance_matrix')
def test_initial_medoids_three_dimensional(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE11, 2, [15, 4], False)
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE11, 1, [15], False)
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE11, 3, [15, 4, 14], False)
def test_initial_medoids_three_dimensional_matrix(self):
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE11, 2, [15, 4], False, data_type='distance_matrix')
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE11, 1, [15], False, data_type='distance_matrix')
kmedoids_test_template.initialize_medoids(SIMPLE_SAMPLES.SAMPLE_SIMPLE11, 3, [15, 4, 14], False, data_type='distance_matrix')
def test_initial_medoids_incorrect_data(self):
self.assertRaises(ValueError, build, [], 1)
self.assertRaises(ValueError, build, None, 1)
def test_initial_medoids_incorrect_amount(self):
self.assertRaises(ValueError, build, [[0], [1], [2]], 0)
self.assertRaises(ValueError, build, [[0], [1], [2]], 4)
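# A minimal usage sketch, added for illustration only (not part of the original
# test suite). It assumes the public pyclustering k-medoids API exercised above:
# construct with data and initial medoid indices, call process(), read the results.
def example_kmedoids_usage():
    sample = [[1.0, 1.0], [1.2, 0.9], [5.0, 5.1], [5.2, 4.9]]
    # ccore=False selects the pure-Python implementation, avoiding the C extension.
    instance = kmedoids(sample, initial_index_medoids=[0, 2], ccore=False)
    instance.process()
    clusters = instance.get_clusters()  # point indices per cluster, e.g. [[0, 1], [2, 3]]
    medoids = instance.get_medoids()    # indices of the chosen medoid points
    return clusters, medoids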
| 61.267974
| 173
| 0.763424
| 3,112
| 28,122
| 6.591581
| 0.076478
| 0.080144
| 0.133574
| 0.084239
| 0.782577
| 0.758348
| 0.740311
| 0.716911
| 0.70448
| 0.647687
| 0
| 0.047572
| 0.141882
| 28,122
| 458
| 174
| 61.401747
| 0.802461
| 0.011023
| 0
| 0.232484
| 1
| 0
| 0.023334
| 0
| 0
| 0
| 0
| 0
| 0.038217
| 1
| 0.363057
| false
| 0
| 0.022293
| 0
| 0.388535
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
924296e36c0fadb76a005c6abe38c63df61f99c8
| 160
|
py
|
Python
|
pos_wechat/models/__init__.py
|
nahualventure/pos-addons
|
3c911c28c259967fb74e311ddcc8e6ca032c005d
|
[
"MIT"
] | null | null | null |
pos_wechat/models/__init__.py
|
nahualventure/pos-addons
|
3c911c28c259967fb74e311ddcc8e6ca032c005d
|
[
"MIT"
] | null | null | null |
pos_wechat/models/__init__.py
|
nahualventure/pos-addons
|
3c911c28c259967fb74e311ddcc8e6ca032c005d
|
[
"MIT"
] | 3
|
2021-06-15T05:45:42.000Z
|
2021-07-27T12:28:53.000Z
|
# License MIT (https://opensource.org/licenses/MIT).
from . import wechat_pos
from . import wechat_micropay
from . import wechat_order
from . import pos_config
| 26.666667
| 52
| 0.7875
| 23
| 160
| 5.304348
| 0.565217
| 0.327869
| 0.393443
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 160
| 5
| 53
| 32
| 0.871429
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
927ec4a9aa97700f8a8b02205afe2aca4868877b
| 147
|
py
|
Python
|
CA117/Lab_2/password_12.py
|
PRITI1999/OneLineWonders
|
91a7368e0796e5a3b5839c9165f9fbe5460879f5
|
[
"MIT"
] | 6
|
2016-02-04T00:15:20.000Z
|
2019-10-13T13:53:16.000Z
|
CA117/Lab_2/password_12.py
|
PRITI1999/OneLineWonders
|
91a7368e0796e5a3b5839c9165f9fbe5460879f5
|
[
"MIT"
] | 2
|
2016-03-14T04:01:36.000Z
|
2019-10-16T12:45:34.000Z
|
CA117/Lab_2/password_12.py
|
PRITI1999/OneLineWonders
|
91a7368e0796e5a3b5839c9165f9fbe5460879f5
|
[
"MIT"
] | 10
|
2016-02-09T14:38:32.000Z
|
2021-05-25T08:16:26.000Z
|
(lambda p,s:print((p.lower()!=p)+(p.upper()!=p)+([]!=s("[0-9]",p))+([]!=s("[^0-9A-Za-z]",p))))(__import__("sys").argv[1],__import__('re').findall)
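# A readable equivalent of the one-liner above (a sketch added for clarity, not
# part of the original solution): the printed score counts which of four
# character classes appear in the password given as the first CLI argument.
import re
import sys

def password_score(p):
    score = 0
    score += p.lower() != p                        # has an uppercase letter
    score += p.upper() != p                        # has a lowercase letter
    score += re.findall("[0-9]", p) != []          # has a digit
    score += re.findall("[^0-9A-Za-z]", p) != []   # has a non-alphanumeric character
    return score

# print(password_score(sys.argv[1]))  # same output as the one-liner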
| 73.5
| 146
| 0.517007
| 27
| 147
| 2.518519
| 0.62963
| 0.088235
| 0.088235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034483
| 0.013605
| 147
| 1
| 147
| 147
| 0.434483
| 0
| 0
| 0
| 0
| 0
| 0.14966
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
9297c31d2a23e3da8645651f8f3c6d63580d3a8c
| 11,561
|
py
|
Python
|
tests/logic/schemas/test_convert_to_schema.py
|
nilsholle/sampledb
|
90d7487a3990995ca2ec5dfd8b59d4739d6a9a87
|
[
"MIT"
] | 5
|
2020-02-13T15:25:37.000Z
|
2021-05-06T21:05:14.000Z
|
tests/logic/schemas/test_convert_to_schema.py
|
nilsholle/sampledb
|
90d7487a3990995ca2ec5dfd8b59d4739d6a9a87
|
[
"MIT"
] | 28
|
2019-11-12T14:14:08.000Z
|
2022-03-11T16:29:27.000Z
|
tests/logic/schemas/test_convert_to_schema.py
|
nilsholle/sampledb
|
90d7487a3990995ca2ec5dfd8b59d4739d6a9a87
|
[
"MIT"
] | 8
|
2019-12-10T15:46:02.000Z
|
2021-11-02T12:24:52.000Z
|
# coding: utf-8
"""
"""
import pytest
import sampledb
from sampledb.logic.schemas import validate, convert_to_schema
from sampledb.logic.schemas.generate_placeholder import SchemaError
def test_convert_same_schema():
data = {
'_type': 'text',
'text': 'Example Text'
}
previous_schema = {
'type': 'text',
'title': 'Test'
}
new_schema = {
'type': 'text',
'title': 'Test'
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == data
assert not warnings
def test_convert_same_type():
data = {
'_type': 'text',
'text': 'Example Text'
}
previous_schema = {
'type': 'text',
'title': 'Test',
'minLength': 1
}
new_schema = {
'type': 'text',
'title': 'Test'
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == data
assert not warnings
def test_convert_object():
data = {
'name': {
'_type': 'text',
'text': 'Example, Text'
},
'keywords': {
'_type': 'text',
'text': 'tag1, tag2'
}
}
previous_schema = {
'type': 'object',
'title': 'Test',
'properties': {
'name': {
'title': 'Name',
'type': 'text'
},
'keywords': {
'title': 'Keywords',
'type': 'text'
}
}
}
new_schema = {
'type': 'object',
'title': 'Test',
'properties': {
'name': {
'title': 'Name',
'type': 'text'
},
'keywords': {
'title': 'Keywords',
'type': 'tags'
}
}
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == {
'name': {
'_type': 'text',
'text': 'Example, Text'
},
'keywords': {
'_type': 'tags',
'tags': ['tag1', 'tag2']
}
}
assert not warnings
def test_convert_schema_with_unknown_type():
data = {}
previous_schema = {
'type': 'unknown',
'title': 'Test'
}
new_schema = {
'type': 'unknown',
'title': 'Test'
}
with pytest.raises(SchemaError):
convert_to_schema(data, previous_schema, new_schema)
def test_convert_incompatible_schemas():
data = {
'_type': 'text',
'text': 'Example Text'
}
previous_schema = {
'type': 'text',
'title': 'Test'
}
new_schema = {
'type': 'bool',
'title': 'Test'
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data is None
assert warnings
def test_convert_text_to_tags():
data = {
'_type': 'text',
'text': 'Tag1 ,Tag2, Tag3'
}
previous_schema = {
'type': 'text',
'title': 'Test'
}
new_schema = {
'type': 'tags',
'title': 'Test'
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == {
'_type': 'tags',
'tags': [
'tag1', 'tag2', 'tag3'
]
}
assert not warnings
def test_convert_quantities_same_dimensionality():
data = {
'_type': 'quantity',
'dimensionality': '[length]',
'magnitude_in_base_units': 1,
'units': 'm'
}
previous_schema = {
'type': 'quantity',
'title': 'Test',
'units': 'm'
}
new_schema = {
'type': 'quantity',
'title': 'Test',
'units': 'cm'
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == data
assert not warnings
def test_convert_quantities_differing_dimensionality():
data = {
'_type': 'quantity',
'dimensionality': '[length]',
'magnitude_in_base_units': 1,
'units': 'm'
}
previous_schema = {
'type': 'quantity',
'title': 'Test',
'units': 'm'
}
new_schema = {
'type': 'quantity',
'title': 'Test',
'units': 'ms'
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data is None
assert warnings
def test_convert_array():
data = [
{
'_type': 'text',
'text': 'Example, Text'
},
{
'_type': 'text',
'text': 'Example Text'
}
]
previous_schema = {
'type': 'array',
'title': 'Test',
'items': {
'type': 'text'
}
}
new_schema = {
'type': 'array',
'title': 'Test',
'items': {
'type': 'tags'
}
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == [
{
'_type': 'tags',
'tags': ['example', 'text']
},
{
'_type': 'tags',
'tags': ['example text']
}
]
assert not warnings
def create_object_of_type(action_type_id):
user = sampledb.logic.users.create_user(
name='Example User',
email='email@example.com',
type=sampledb.logic.users.UserType.OTHER
)
action = sampledb.logic.actions.create_action(
action_type_id=action_type_id,
schema={
'type': 'object',
'title': 'Object Information',
'properties': {
'name': {
'type': 'text',
'title': 'Object Name'
}
},
'required': ['name']
}
)
object = sampledb.logic.objects.create_object(
action_id=action.id,
data={
'name': {
'_type': 'text',
'text': 'Example Object'
}
},
user_id=user.id
)
return object.id
def test_convert_sample_to_object_reference():
object_id = create_object_of_type(sampledb.models.ActionType.SAMPLE_CREATION)
data = {
'_type': 'sample',
'object_id': object_id
}
previous_schema = {
'type': 'sample',
'title': 'Test'
}
new_schema = {
'type': 'object_reference',
'title': 'Test'
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == {
'_type': 'object_reference',
'object_id': object_id
}
assert not warnings
new_schema = {
'type': 'object_reference',
'title': 'Test',
'action_type_id': None
}
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == {
'_type': 'object_reference',
'object_id': object_id
}
assert not warnings
new_schema = {
'type': 'object_reference',
'title': 'Test',
'action_type_id': sampledb.models.ActionType.SAMPLE_CREATION
}
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == {
'_type': 'object_reference',
'object_id': object_id
}
assert not warnings
new_schema = {
'type': 'object_reference',
'title': 'Test',
'action_type_id': sampledb.models.ActionType.MEASUREMENT
}
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data is None
assert warnings
new_schema = {
'type': 'object_reference',
'title': 'Test',
'action_id': sampledb.logic.objects.get_object(object_id).action_id
}
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == {
'_type': 'object_reference',
'object_id': object_id
}
assert not warnings
new_schema = {
'type': 'object_reference',
'title': 'Test',
'action_id': sampledb.logic.objects.get_object(object_id).action_id + 1
}
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data is None
assert warnings
def test_convert_object_reference_to_sample():
object_id = create_object_of_type(sampledb.models.ActionType.SAMPLE_CREATION)
data = {
'_type': 'object_reference',
'object_id': object_id
}
previous_schema = {
'type': 'object_reference',
'title': 'Test'
}
new_schema = {
'type': 'sample',
'title': 'Test'
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == data
assert not warnings
previous_schema = {
'type': 'object_reference',
'title': 'Test',
'action_type_id': None
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == data
assert not warnings
previous_schema = {
'type': 'object_reference',
'title': 'Test',
'action_type_id': sampledb.models.ActionType.SAMPLE_CREATION
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == data
assert not warnings
def test_convert_object_reference_to_measurement():
object_id = create_object_of_type(sampledb.models.ActionType.MEASUREMENT)
data = {
'_type': 'object_reference',
'object_id': object_id
}
previous_schema = {
'type': 'object_reference',
'title': 'Test'
}
new_schema = {
'type': 'measurement',
'title': 'Test'
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == data
assert not warnings
previous_schema = {
'type': 'object_reference',
'title': 'Test',
'action_type_id': None
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == data
assert not warnings
previous_schema = {
'type': 'object_reference',
'title': 'Test',
'action_type_id': sampledb.models.ActionType.MEASUREMENT
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == data
assert not warnings
def test_convert_schema_missing_values():
data = {}
previous_schema = {
'type': 'object',
'properties': {
'text': {
'title': 'Text',
'type': 'text'
}
}
}
new_schema = {
'type': 'object',
'properties': {
'text': {
'title': 'Text2',
'type': 'text'
}
}
}
validate(data, previous_schema)
new_data, warnings = convert_to_schema(data, previous_schema, new_schema)
assert new_data == data
assert not warnings
| 25.02381
| 81
| 0.547617
| 1,143
| 11,561
| 5.23622
| 0.074366
| 0.128655
| 0.120301
| 0.133333
| 0.857978
| 0.828404
| 0.804678
| 0.765246
| 0.751378
| 0.721972
| 0
| 0.002051
| 0.325231
| 11,561
| 461
| 82
| 25.078091
| 0.765158
| 0.001124
| 0
| 0.661098
| 1
| 0
| 0.168573
| 0.003987
| 0
| 0
| 0
| 0
| 0.100239
| 1
| 0.033413
| false
| 0
| 0.009547
| 0
| 0.045346
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92a9ab90bf9e1de557da0c518ec1706c76147764
| 971
|
py
|
Python
|
test/test_product_match.py
|
Cloudmersive/Cloudmersive.APIClient.Python.Barcode
|
e584de80304ebddbcce99ee6ff42196d46486421
|
[
"Apache-2.0"
] | 1
|
2018-06-24T04:50:28.000Z
|
2018-06-24T04:50:28.000Z
|
test/test_product_match.py
|
Cloudmersive/Cloudmersive.APIClient.Python.Barcode
|
e584de80304ebddbcce99ee6ff42196d46486421
|
[
"Apache-2.0"
] | 1
|
2019-02-25T18:23:23.000Z
|
2019-02-25T18:23:23.000Z
|
test/test_product_match.py
|
Cloudmersive/Cloudmersive.APIClient.Python.Barcode
|
e584de80304ebddbcce99ee6ff42196d46486421
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
barcodeapi
Barcode APIs let you generate barcode images, and recognize values from images of barcodes. # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import cloudmersive_barcode_api_client
from cloudmersive_barcode_api_client.models.product_match import ProductMatch # noqa: E501
from cloudmersive_barcode_api_client.rest import ApiException
class TestProductMatch(unittest.TestCase):
"""ProductMatch unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testProductMatch(self):
"""Test ProductMatch"""
# FIXME: construct object with mandatory attributes with example values
# model = cloudmersive_barcode_api_client.models.product_match.ProductMatch() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 23.682927
| 109
| 0.725026
| 113
| 971
| 5.99115
| 0.575221
| 0.11226
| 0.129985
| 0.165436
| 0.189069
| 0.135894
| 0.135894
| 0
| 0
| 0
| 0
| 0.014157
| 0.199794
| 971
| 40
| 110
| 24.275
| 0.857143
| 0.454171
| 0
| 0.214286
| 1
| 0
| 0.016393
| 0
| 0
| 0
| 0
| 0.025
| 0
| 1
| 0.214286
| false
| 0.214286
| 0.357143
| 0
| 0.642857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
2ba151a11483da7024f1f262003da893053e3e65
| 8,956
|
py
|
Python
|
tests/security/flavors/test_fuzz_flavors.py
|
jqxin2006/poppy
|
10636e6255c7370172422afece4a5c3d95c1e937
|
[
"Apache-2.0"
] | null | null | null |
tests/security/flavors/test_fuzz_flavors.py
|
jqxin2006/poppy
|
10636e6255c7370172422afece4a5c3d95c1e937
|
[
"Apache-2.0"
] | null | null | null |
tests/security/flavors/test_fuzz_flavors.py
|
jqxin2006/poppy
|
10636e6255c7370172422afece4a5c3d95c1e937
|
[
"Apache-2.0"
] | null | null | null |
# coding= utf-8
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
import ddt
from nose.plugins import attrib
from tests.api import base
from tests.api import providers
from tests.api.utils.schema import flavors
@ddt.ddt
class TestFuzzCreateFlavor(providers.TestProviderBase):
"""Security Tests for Fuzzing Create Flavor."""
def setUp(self):
super(TestFuzzCreateFlavor, self).setUp()
self.flavor_id = str(uuid.uuid1())
@attrib.attr('fuzz')
@ddt.file_data('../services/data_fuzz.json')
def test_fuzz_flavor_id(self, test_data):
provider_list = [{"provider": "fastly",
"links": [{"href": "www.watermelon.com", "rel": "provider_url"}]}]
limits = [{"origins": {"min": 1, "max": 5}},
{"domains": {"min": 1, "max": 5}},
{"caching": {"min": 3600, "max": 604800, "incr": 300}}]
flavor_id = test_data['fuzz_string']
resp = self.client.create_flavor(flavor_id=flavor_id,
provider_list=provider_list,
limits=limits)
self.assertLess(resp.status_code, 500)
@attrib.attr('fuzz')
@ddt.file_data('../services/data_fuzz.json')
def test_fuzz_provider(self, test_data):
provider_list = [{"provider": test_data['fuzz_string'],
"links": [{"href": "www.watermelon.com", "rel": "provider_url"}]}]
limits = [{"origins": {"min": 1, "max": 5}},
{"domains": {"min": 1, "max": 5}},
{"caching": {"min": 3600, "max": 604800, "incr": 300}}]
resp = self.client.create_flavor(flavor_id=self.flavor_id,
provider_list=provider_list,
limits=limits)
self.assertLess(resp.status_code, 500)
@attrib.attr('fuzz')
@ddt.file_data('../services/data_fuzz.json')
def test_fuzz_href(self, test_data):
provider_list = [{"provider": "fastly",
"links": [{"href": test_data['fuzz_string'], "rel": "provider_url"}]}]
limits = [{"origins": {"min": 1, "max": 5}},
{"domains": {"min": 1, "max": 5}},
{"caching": {"min": 3600, "max": 604800, "incr": 300}}]
resp = self.client.create_flavor(flavor_id=self.flavor_id,
provider_list=provider_list,
limits=limits)
self.assertLess(resp.status_code, 500)
@attrib.attr('fuzz')
@ddt.file_data('../services/data_fuzz.json')
def test_fuzz_rel(self, test_data):
provider_list = [{"provider": "fastly",
"links": [{"href": "www.watermelon.com", "rel": test_data['fuzz_string']}]}]
limits = [{"origins": {"min": 1, "max": 5}},
{"domains": {"min": 1, "max": 5}},
{"caching": {"min": 3600, "max": 604800, "incr": 300}}]
resp = self.client.create_flavor(flavor_id=self.flavor_id,
provider_list=provider_list,
limits=limits)
self.assertLess(resp.status_code, 500)
@attrib.attr('fuzz')
@ddt.file_data('../services/data_fuzz.json')
def test_fuzz_origins_min(self, test_data):
provider_list = [{"provider": "fastly",
"links": [{"href": "www.watermelon.com", "rel": "provider_url"}]}]
limits = [{"origins": {"min": test_data['fuzz_string'], "max": 5}},
{"domains": {"min": 1, "max": 5}},
{"caching": {"min": 3600, "max": 604800, "incr": 300}}]
resp = self.client.create_flavor(flavor_id=self.flavor_id,
provider_list=provider_list,
limits=limits)
self.assertLess(resp.status_code, 500)
@attrib.attr('fuzz')
@ddt.file_data('../services/data_fuzz.json')
def test_fuzz_origins_max(self, test_data):
provider_list = [{"provider": "fastly",
"links": [{"href": "www.watermelon.com", "rel": "provider_url"}]}]
limits = [{"origins": {"min": 1, "max": test_data['fuzz_string']}},
{"domains": {"min": 1, "max": 5}},
{"caching": {"min": 3600, "max": 604800, "incr": 300}}]
resp = self.client.create_flavor(flavor_id=self.flavor_id,
provider_list=provider_list,
limits=limits)
self.assertLess(resp.status_code, 500)
@attrib.attr('fuzz')
@ddt.file_data('../services/data_fuzz.json')
def test_fuzz_domains_min(self, test_data):
provider_list = [{"provider": "fastly",
"links": [{"href": "www.watermelon.com", "rel": "provider_url"}]}]
limits = [{"origins": {"min": 1, "max": 5}},
{"domains": {"min": test_data['fuzz_string'], "max": 5}},
{"caching": {"min": 3600, "max": 604800, "incr": 300}}]
resp = self.client.create_flavor(flavor_id=self.flavor_id,
provider_list=provider_list,
limits=limits)
self.assertLess(resp.status_code, 500)
@attrib.attr('fuzz')
@ddt.file_data('../services/data_fuzz.json')
def test_fuzz_domains_max(self, test_data):
provider_list = [{"provider": "fastly",
"links": [{"href": "www.watermelon.com", "rel": "provider_url"}]}]
limits = [{"origins": {"min": 1, "max": 5}},
{"domains": {"min": 1, "max": test_data['fuzz_string']}},
{"caching": {"min": 3600, "max": 604800, "incr": 300}}]
resp = self.client.create_flavor(flavor_id=self.flavor_id,
provider_list=provider_list,
limits=limits)
self.assertLess(resp.status_code, 500)
@attrib.attr('fuzz')
@ddt.file_data('../services/data_fuzz.json')
def test_fuzz_caching_min(self, test_data):
provider_list = [{"provider": "fastly",
"links": [{"href": "www.watermelon.com", "rel": "provider_url"}]}]
limits = [{"origins": {"min": 1, "max": 5}},
{"domains": {"min": 1, "max": 5}},
{"caching": {"min": test_data['fuzz_string'], "max": 604800, "incr": 300}}]
resp = self.client.create_flavor(flavor_id=self.flavor_id,
provider_list=provider_list,
limits=limits)
self.assertLess(resp.status_code, 500)
@attrib.attr('fuzz')
@ddt.file_data('../services/data_fuzz.json')
def test_fuzz_caching_max(self, test_data):
provider_list = [{"provider": "fastly",
"links": [{"href": "www.watermelon.com", "rel": "provider_url"}]}]
limits = [{"origins": {"min": 1, "max": 5}},
{"domains": {"min": 1, "max": 5}},
{"caching": {"min": 3600, "max": test_data['fuzz_string'], "incr": 300}}]
resp = self.client.create_flavor(flavor_id=self.flavor_id,
provider_list=provider_list,
limits=limits)
self.assertLess(resp.status_code, 500)
@attrib.attr('fuzz')
@ddt.file_data('../services/data_fuzz.json')
def test_fuzz_caching_incr(self, test_data):
provider_list = [{"provider": "fastly",
"links": [{"href": "www.watermelon.com", "rel": "provider_url"}]}]
limits = [{"origins": {"min": 1, "max": 5}},
{"domains": {"min": 1, "max": 5}},
{"caching": {"min": 3600, "max": 604800, "incr": test_data['fuzz_string']}}]
resp = self.client.create_flavor(flavor_id=self.flavor_id,
provider_list=provider_list,
limits=limits)
self.assertLess(resp.status_code, 500)
def tearDown(self):
self.client.delete_flavor(flavor_id=self.flavor_id)
super(TestFuzzCreateFlavor, self).tearDown()
| 43.475728
| 102
| 0.524006
| 960
| 8,956
| 4.704167
| 0.136458
| 0.087688
| 0.097431
| 0.031887
| 0.800487
| 0.796501
| 0.779672
| 0.763729
| 0.763729
| 0.753322
| 0
| 0.03473
| 0.315208
| 8,956
| 205
| 103
| 43.687805
| 0.701614
| 0.068334
| 0
| 0.748299
| 0
| 0
| 0.180682
| 0.034358
| 0
| 0
| 0
| 0
| 0.07483
| 1
| 0.088435
| false
| 0
| 0.040816
| 0
| 0.136054
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2bbc6a9991d7ccd15c6a29193d3c045ab1382cbf
| 186
|
py
|
Python
|
oncopolicy/models/__init__.py
|
yala/Tempo
|
bf3e0e78d64869bb2079c582a4a35982f78386ad
|
[
"MIT"
] | 6
|
2022-01-15T11:57:19.000Z
|
2022-02-13T21:15:22.000Z
|
oncopolicy/models/__init__.py
|
yala/Tempo
|
bf3e0e78d64869bb2079c582a4a35982f78386ad
|
[
"MIT"
] | null | null | null |
oncopolicy/models/__init__.py
|
yala/Tempo
|
bf3e0e78d64869bb2079c582a4a35982f78386ad
|
[
"MIT"
] | 2
|
2022-02-02T13:09:29.000Z
|
2022-02-18T07:06:19.000Z
|
import oncopolicy.models.deterministic_screening
import oncopolicy.models.deterministic_progression
import oncopolicy.models.neural_progression
import oncopolicy.models.neural_screening
| 37.2
| 50
| 0.913978
| 20
| 186
| 8.3
| 0.35
| 0.385542
| 0.53012
| 0.421687
| 0.46988
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043011
| 186
| 4
| 51
| 46.5
| 0.932584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2bd9daaac4c2852ecf84d50d691a7a997fd1f72d
| 11,097
|
py
|
Python
|
depthEstimation_nick_gpu/launch_script_on_screen.py
|
nicolasrosa-forks/evaluating_bdl
|
2973b0d018551de0c9f087e2ae4e6b2c22f2ce3c
|
[
"MIT"
] | null | null | null |
depthEstimation_nick_gpu/launch_script_on_screen.py
|
nicolasrosa-forks/evaluating_bdl
|
2973b0d018551de0c9f087e2ae4e6b2c22f2ce3c
|
[
"MIT"
] | null | null | null |
depthEstimation_nick_gpu/launch_script_on_screen.py
|
nicolasrosa-forks/evaluating_bdl
|
2973b0d018551de0c9f087e2ae4e6b2c22f2ce3c
|
[
"MIT"
] | null | null | null |
import subprocess
import time
def launch(screen_name, command):
print(f"{screen_name} | {command}")
subprocess.call(f"screen -dmS {screen_name} {command} &", shell=True)
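# screen -dmS <name> starts a detached GNU screen session with the given name,
# so each launched command keeps running independently of the calling terminal.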
if __name__ == "__main__":
print("Launching...")
# launch("test", "python3 test.py")
# launch("tensorboard", "./run_tensorboard.sh")
# (running) launch("nicolas_exp1_14", "python3 ensembling_train_epoch.py -M 1 -f relu -o adam -b 1 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adam -b 2 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adam -b 4 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adam -b 6 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adam -b 8 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adam -b 10 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adam -b 12 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adam -b 14 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# (running) launch("nicolas_exp1_23", "python3 ensembling_train_epoch.py -M 1 -f relu -o adabelief -b 1 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adabelief -b 2 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adabelief -b 4 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adabelief -b 6 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adabelief -b 8 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adabelief -b 10 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adabelief -b 12 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o adabelief -b 14 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# (running) launch("nicolas_exp1_32", "python3 ensembling_train_epoch.py -M 1 -f relu -o noam -b 1 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o noam -b 2 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o noam -b 4 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o noam -b 6 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o noam -b 8 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o noam -b 10 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f relu -o noam -b 12 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_39", "python3 ensembling_train_epoch.py -M 1 -f relu -o noam -b 14 -e 5 -r 'runs/2021-11-14_21-07-32, M0, imgs=85898, relu, opt=adam, bs=2, lr=1e-05, wd=0.0005, 5/model_M0_epoch_5.pth'")
# launch("nicolas_exp1_41", "python3 ensembling_train_epoch.py -M 1 -f elu -o adam -b 1 -e 5 -r 'runs/2021-11-19_15-38-48, M0, imgs=78982, elu, opt=noam, bs=1, lr=0, wd=0.0005, 5/model_M0_epoch_1.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adam -b 2 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adam -b 4 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adam -b 6 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adam -b 8 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adam -b 10 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adam -b 12 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adam -b 14 -e 5")
# launch("nicolas_exp1_50", "python3 ensembling_train_epoch.py -M 1 -f elu -o adabelief -b 1 -e 5 -r 'runs/2021-11-19_15-38-48, M0, imgs=78982, elu, opt=noam, bs=1, lr=0, wd=0.0005, 5/model_M0_epoch_1.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adabelief -b 2 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adabelief -b 4 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adabelief -b 6 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adabelief -b 8 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adabelief -b 10 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adabelief -b 12 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o adabelief -b 14 -e 5")
# (running) launch("nicolas_exp1_59", "python3 ensembling_train_epoch.py -M 1 -f elu -o noam -b 1 -e 5 -r 'runs/2021-11-19_15-38-48, M0, imgs=78982, elu, opt=noam, bs=1, lr=0, wd=0.0005, 5/model_M0_epoch_1.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o noam -b 2 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o noam -b 4 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o noam -b 6 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o noam -b 8 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o noam -b 10 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o noam -b 12 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f elu -o noam -b 14 -e 5")
# launch("nicolas_exp1_68", "python3 ensembling_train_epoch.py -M 1 -f selu -o adam -b 1 -e 5 -r 'runs/2021-11-15_20-16-34, M0, imgs=85898, selu, opt=adam, bs=1, lr=1e-05, wd=0.0005, 5/model_M0_epoch_2.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adam -b 2 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adam -b 4 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adam -b 6 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adam -b 8 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adam -b 10 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adam -b 12 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adam -b 14 -e 5")
# launch("nicolas_exp1_77", "python3 ensembling_train_epoch.py -M 1 -f selu -o adabelief -b 1 -e 5 -r 'runs/2021-11-15_20-16-34, M0, imgs=85898, selu, opt=adam, bs=1, lr=1e-05, wd=0.0005, 5/model_M0_epoch_2.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adabelief -b 2 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adabelief -b 4 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adabelief -b 6 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adabelief -b 8 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adabelief -b 10 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adabelief -b 12 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o adabelief -b 14 -e 5")
# launch("nicolas_exp1_86", "python3 ensembling_train_epoch.py -M 1 -f selu -o noam -b 1 -e 5 -r 'runs/2021-11-15_20-16-34, M0, imgs=85898, selu, opt=adam, bs=1, lr=1e-05, wd=0.0005, 5/model_M0_epoch_2.pth'")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o noam -b 2 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o noam -b 4 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o noam -b 6 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o noam -b 8 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o noam -b 10 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o noam -b 12 -e 5")
# launch("nicolas_exp1_X", "python3 ensembling_train_epoch.py -M 1 -f selu -o noam -b 14 -e 5")
print("Done.\n")
time.sleep(1)
subprocess.call("screen -ls", shell=True)
| 113.234694
| 227
| 0.683158
| 2,260
| 11,097
| 3.168584
| 0.039381
| 0.130708
| 0.170926
| 0.27147
| 0.954755
| 0.954755
| 0.953638
| 0.947773
| 0.947773
| 0.941908
| 0
| 0.144708
| 0.151212
| 11,097
| 97
| 228
| 114.402062
| 0.615564
| 0.930251
| 0
| 0
| 0
| 0
| 0.141631
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.2
| 0
| 0.3
| 0.3
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
92016dd85d131c1dfabb413063bb03b70e47800a
| 11,220
|
py
|
Python
|
test/test_benchmark.py
|
Hasenpfote/test_py
|
7bf9faaeb1c3a022e4d47ef03c03a6e0ecd04121
|
[
"MIT"
] | null | null | null |
test/test_benchmark.py
|
Hasenpfote/test_py
|
7bf9faaeb1c3a022e4d47ef03c03a6e0ecd04121
|
[
"MIT"
] | 2
|
2021-03-19T23:57:09.000Z
|
2021-06-01T22:48:01.000Z
|
test/test_benchmark.py
|
Hasenpfote/perfbench
|
846cc46689c78810081a2345e1c99926d60128f4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from unittest import TestCase
import os
import sys
sys.path.append('../')
from perfbench import *


class TestBenchmark(TestCase):
    def test__default_colors(self):
        actual = Benchmark._default_colors()
        self.assertTrue(isinstance(actual, list))
        self.assertTrue(len(actual) > 0)

    def test__color(self):
        actual = Benchmark._color(index=0)
        self.assertTrue(isinstance(actual, str))

    def test__axis_range(self):
        actual = Benchmark._axis_range(sequence=[1, 10, 100])
        self.assertTrue(isinstance(actual, list))
        self.assertTrue(actual == [1, 100])
        actual = Benchmark._axis_range(sequence=[1, 10, 100], use_log_scale=True)
        self.assertTrue(isinstance(actual, list))
        self.assertTrue(actual == [0, 2])

    def test__label_rgba(self):
        actual = Benchmark._label_rgba(colors=(32, 64, 128, 0.5))
        self.assertTrue(isinstance(actual, str))
        self.assertTrue(actual == 'rgba(32, 64, 128, 0.5)')

    def test_plot(self):
        bm = Benchmark(
            datasets=[
                Dataset(
                    factories=[
                        lambda n: [i for i in range(n)],
                    ],
                    title=''
                ),
            ],
            dataset_sizes=[2 ** n for n in range(2)],
            kernels=[
                Kernel(
                    stmt='[value + 2 for value in DATASET]',
                    label='add'
                ),
                Kernel(
                    stmt='[value * 2 for value in DATASET]',
                    label='multiply'
                )
            ],
            xlabel='dataset sizes',
            title='test'
        )
        bm.run(disable_tqdm=True)
        bm.plot(auto_open=False)

        bm = Benchmark(
            datasets=[
                Dataset(
                    factories=[
                        lambda n: [int(i) for i in range(n)],
                    ],
                    title='int'
                ),
                Dataset(
                    factories=[
                        lambda n: [float(i) for i in range(n)],
                    ],
                    title='float'
                )
            ],
            dataset_sizes=[2 ** n for n in range(2)],
            kernels=[
                Kernel(
                    stmt='[value + 2 for value in DATASET]',
                    label='add'
                ),
                Kernel(
                    stmt='[value * 2 for value in DATASET]',
                    label='multiply'
                )
            ],
            xlabel='dataset sizes',
            title='test'
        )
        bm.run(disable_tqdm=True)
        bm.plot(auto_open=False)

    def test_plot_by_statistics(self):
        bm = Benchmark(
            datasets=[
                Dataset(
                    factories=[
                        lambda n: [i for i in range(n)],
                    ],
                    title=''
                )
            ],
            dataset_sizes=[2 ** n for n in range(2)],
            kernels=[
                Kernel(
                    stmt='[value + 2 for value in DATASET]',
                    label='add'
                ),
                Kernel(
                    stmt='[value * 2 for value in DATASET]',
                    label='multiply'
                )
            ],
            measurement_mode=MeasurementMode.STATISTICS,
            xlabel='dataset sizes',
            title='test'
        )
        bm.run(disable_tqdm=True)
        bm.plot(auto_open=False)

        bm = Benchmark(
            datasets=[
                Dataset(
                    factories=[
                        lambda n: [int(i) for i in range(n)],
                    ],
                    title='int'
                ),
                Dataset(
                    factories=[
                        lambda n: [float(i) for i in range(n)],
                    ],
                    title='float'
                )
            ],
            dataset_sizes=[2 ** n for n in range(2)],
            kernels=[
                Kernel(
                    stmt='[value + 2 for value in DATASET]',
                    label='add'
                ),
                Kernel(
                    stmt='[value * 2 for value in DATASET]',
                    label='multiply'
                )
            ],
            measurement_mode=MeasurementMode.STATISTICS,
            xlabel='dataset sizes',
            title='test'
        )
        bm.run(disable_tqdm=True)
        bm.plot(auto_open=False)

    def test_save_as_html(self):
        bm = Benchmark(
            datasets=[
                Dataset(
                    factories=[
                        lambda n: [i for i in range(n)],
                    ],
                    title=''
                )
            ],
            dataset_sizes=[2 ** n for n in range(2)],
            kernels=[
                Kernel(
                    stmt='[value + 2 for value in DATASET]',
                    label='add'
                ),
                Kernel(
                    stmt='[value * 2 for value in DATASET]',
                    label='multiply'
                )
            ],
            xlabel='dataset sizes',
            title='test'
        )
        bm.run(disable_tqdm=True)
        bm.save_as_html(filepath='test.html')
        self.assertTrue(os.path.exists('./test.html'))

    def test_save_as_png(self):
        bm = Benchmark(
            datasets=[
                Dataset(
                    factories=[
                        lambda n: [i for i in range(n)],
                    ],
                    title=''
                )
            ],
            dataset_sizes=[2 ** n for n in range(2)],
            kernels=[
                Kernel(
                    stmt='[value + 2 for value in DATASET]',
                    label='add'
                ),
                Kernel(
                    stmt='[value * 2 for value in DATASET]',
                    label='multiply'
                )
            ],
            xlabel='dataset sizes',
            title='test'
        )
        bm.run(disable_tqdm=True)
        ret = bm.save_as_png(filepath='test.png')
        self.assertTrue(ret)
        # self.assertTrue(os.path.exists('./test.png'))

    def test_layout_sizes(self):
        bm = Benchmark(
            datasets=[
                Dataset(
                    factories=[
                        lambda n: [i for i in range(n)],
                    ],
                    title=''
                )
            ],
            dataset_sizes=[2 ** n for n in range(2)],
            kernels=[
                Kernel(
                    stmt='[value + 2 for value in DATASET]',
                    label='add'
                ),
                Kernel(
                    stmt='[value * 2 for value in DATASET]',
                    label='multiply'
                )
            ],
            xlabel='dataset sizes',
            title='test',
            layout_sizes=[
                LayoutSize(width=640, height=480, label='VGA'),
                LayoutSize(width=800, height=600, label='SVGA'),
                LayoutSize(width=1024, height=768, label='XGA'),
                LayoutSize(width=1280, height=960, label='HD 720p'),
            ]
        )
        bm.run(disable_tqdm=True)
        bm.plot(auto_open=False)

        bm = Benchmark(
            datasets=[
                Dataset(
                    factories=[
                        lambda n: [int(i) for i in range(n)],
                    ],
                    title='int'
                ),
                Dataset(
                    factories=[
                        lambda n: [float(i) for i in range(n)],
                    ],
                    title='float'
                )
            ],
            dataset_sizes=[2 ** n for n in range(2)],
            kernels=[
                Kernel(
                    stmt='[value + 2 for value in DATASET]',
                    label='add'
                ),
                Kernel(
                    stmt='[value * 2 for value in DATASET]',
                    label='multiply'
                )
            ],
            xlabel='dataset sizes',
            title='test',
            layout_sizes=[
                LayoutSize(width=640, height=480, label='VGA'),
                LayoutSize(width=800, height=600, label='SVGA'),
                LayoutSize(width=1024, height=768, label='XGA'),
                LayoutSize(width=1280, height=960, label='HD 720p'),
            ]
        )
        bm.run(disable_tqdm=True)
        bm.plot(auto_open=False)

    def test_results_are_not_ready(self):
        bm = Benchmark(
            datasets=[
                Dataset(
                    factories=[
                        lambda n: [i for i in range(n)],
                    ],
                    title=''
                )
            ],
            dataset_sizes=[2 ** n for n in range(2)],
            kernels=[
                Kernel(
                    stmt='[value + 2 for value in DATASET]',
                    label='add'
                ),
            ],
            xlabel='dataset sizes',
            title='test',
        )
        with self.assertRaises(NotReadyError):
            bm.plot(auto_open=False)
        with self.assertRaises(NotReadyError):
            bm.save_as_html(filepath='test.html')
        with self.assertRaises(NotReadyError):
            bm.save_as_png(filepath='test.png')

    def test_extra_args(self):
        bm = Benchmark(
            datasets=[
                Dataset(
                    factories=[
                        lambda n: [i for i in range(n)],
                    ],
                    title='',
                    extra_args=dict(
                        foo=1,
                        bar=2
                    )
                )
            ],
            dataset_sizes=[2 ** n for n in range(2)],
            kernels=[
                Kernel(
                    stmt="[value + EXTRA_ARGS['foo'] + EXTRA_ARGS['bar'] for value in DATASET]",
                    label='add'
                )
            ],
            xlabel='dataset sizes',
            title='test'
        )
        bm.run(disable_tqdm=True)
        bm.plot(auto_open=False)

    def test_dataset_factories(self):
        bm = Benchmark(
            datasets=[
                Dataset(
                    factories=[
                        lambda n: [+i for i in range(n)],  # for kernels[0]
                        lambda n: [-i for i in range(n)],  # for kernels[1]
                    ],
                    title=''
                )
            ],
            dataset_sizes=[2 ** n for n in range(2)],
            kernels=[
                Kernel(
                    stmt='DATASET',
                    label=''
                ),
                Kernel(
                    stmt='DATASET',
                    label=''
                ),
            ],
            xlabel='dataset sizes',
            title='test'
        )
        bm.run(disable_tqdm=True)
        bm.plot(auto_open=False)
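
# Editor's note: a hedged sketch, not part of the original test suite. Every
# test above rebuilds nearly the same Benchmark by hand; a small factory that
# uses only constructor arguments already exercised in these tests (the helper
# name `make_benchmark` is hypothetical) would keep each case focused on what
# it actually varies:
def make_benchmark(**overrides):
    # Defaults mirror the single-dataset, add-kernel setup repeated above.
    defaults = dict(
        datasets=[Dataset(factories=[lambda n: [i for i in range(n)]], title='')],
        dataset_sizes=[2 ** n for n in range(2)],
        kernels=[Kernel(stmt='[value + 2 for value in DATASET]', label='add')],
        xlabel='dataset sizes',
        title='test',
    )
    defaults.update(overrides)
    return Benchmark(**defaults)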
| 30.655738
| 96
| 0.391087
| 976
| 11,220
| 4.406762
| 0.120902
| 0.042316
| 0.062776
| 0.071146
| 0.86631
| 0.83469
| 0.820739
| 0.769588
| 0.726808
| 0.726808
| 0
| 0.025255
| 0.502406
| 11,220
| 365
| 97
| 30.739726
| 0.745119
| 0.010517
| 0
| 0.792398
| 0
| 0
| 0.093268
| 0
| 0
| 0
| 0
| 0
| 0.040936
| 1
| 0.035088
| false
| 0
| 0.011696
| 0
| 0.049708
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a64c9585d13d6ac5e0cbc04cdb4be8f5041c743b
| 15,214
|
py
|
Python
|
src/tests/unit/platform/test_reputation_overrides.py
|
kphsugntuedutw/carbon-black-cloud-sdk-python
|
5277be067223bc3eda0348c57b2a0004fa70f3e9
|
[
"MIT"
] | 24
|
2020-10-16T22:07:38.000Z
|
2022-03-24T14:58:03.000Z
|
src/tests/unit/platform/test_reputation_overrides.py
|
kphsugntuedutw/carbon-black-cloud-sdk-python
|
5277be067223bc3eda0348c57b2a0004fa70f3e9
|
[
"MIT"
] | 63
|
2020-10-26T18:26:15.000Z
|
2022-03-31T17:31:02.000Z
|
src/tests/unit/platform/test_reputation_overrides.py
|
kphsugntuedutw/carbon-black-cloud-sdk-python
|
5277be067223bc3eda0348c57b2a0004fa70f3e9
|
[
"MIT"
] | 10
|
2020-11-09T11:54:23.000Z
|
2022-03-24T20:44:00.000Z
|
# *******************************************************
# Copyright (c) VMware, Inc. 2021. All Rights Reserved.
# SPDX-License-Identifier: MIT
# *******************************************************
# *
# * DISCLAIMER. THIS PROGRAM IS PROVIDED TO YOU "AS IS" WITHOUT
# * WARRANTIES OR CONDITIONS OF ANY KIND, WHETHER ORAL OR WRITTEN,
# * EXPRESS OR IMPLIED. THE AUTHOR SPECIFICALLY DISCLAIMS ANY IMPLIED
# * WARRANTIES OR CONDITIONS OF MERCHANTABILITY, SATISFACTORY QUALITY,
# * NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE.
"""Testing ReputationOverride objects of cbc_sdk.platform"""

import pytest
import logging
from cbc_sdk.platform import ReputationOverride, Process
from cbc_sdk.endpoint_standard import EnrichedEvent
from cbc_sdk.rest_api import CBCloudAPI
from tests.unit.fixtures.CBCSDKMock import CBCSDKMock
from tests.unit.fixtures.platform.mock_reputation_override import (REPUTATION_OVERRIDE_SHA256_REQUEST,
                                                                   REPUTATION_OVERRIDE_SHA256_RESPONSE,
                                                                   REPUTATION_OVERRIDE_SHA256_SEARCH_RESPONSE)
from tests.unit.fixtures.platform.mock_process import (GET_PROCESS_VALIDATION_RESP,
                                                       POST_PROCESS_SEARCH_JOB_RESP,
                                                       GET_PROCESS_SEARCH_JOB_RESP,
                                                       GET_PROCESS_SEARCH_JOB_RESULTS_RESP)
from tests.unit.fixtures.endpoint_standard.mock_enriched_events import (POST_ENRICHED_EVENTS_SEARCH_JOB_RESP,
                                                                        GET_ENRICHED_EVENTS_SEARCH_JOB_RESULTS_RESP,
                                                                        GET_ENRICHED_EVENTS_SEARCH_JOB_RESULTS_RESP_1)

# Note: basicConfig() returns None, so the original "log =" binding was dropped.
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG, filename='log.txt')


@pytest.fixture(scope="function")
def cb():
    """Create CBCloudAPI singleton"""
    return CBCloudAPI(url="https://example.com",
                      org_key="test",
                      token="abcd/1234",
                      ssl_verify=False)


@pytest.fixture(scope="function")
def cbcsdk_mock(monkeypatch, cb):
    """Mocks CBC SDK for unit tests"""
    return CBCSDKMock(monkeypatch, cb)


# ==================================== UNIT TESTS BELOW ====================================

def test_reputation_override_create(cbcsdk_mock):
    """Testing Reputation Override with .create(api, {})"""
    def _test_request(url, body, **kwargs):
        assert body == REPUTATION_OVERRIDE_SHA256_REQUEST
        return REPUTATION_OVERRIDE_SHA256_RESPONSE

    cbcsdk_mock.mock_request("POST",
                             "/appservices/v6/orgs/test/reputations/overrides",
                             _test_request)
    api = cbcsdk_mock.api
    reputation_override = ReputationOverride.create(api, REPUTATION_OVERRIDE_SHA256_REQUEST)
    assert isinstance(reputation_override, ReputationOverride)
    assert reputation_override.sha256_hash == "af62e6b3d475879c4234fe7bd8ba67ff6544ce6510131a069aaac75aa92aee7a"


def test_reputation_override_select(cbcsdk_mock):
    """Testing Reputation Override with .select(ReputationOverride)"""
    def _test_request(url, body, **kwargs):
        assert body == {
            "criteria": {
                "override_list": "BLACK_LIST",
                "override_type": "SHA256"
            },
            "query": "foo",
            "sort_field": "create_time",
            "sort_order": "asc"
        }
        return REPUTATION_OVERRIDE_SHA256_SEARCH_RESPONSE

    cbcsdk_mock.mock_request("POST",
                             "/appservices/v6/orgs/test/reputations/overrides/_search",
                             _test_request)
    api = cbcsdk_mock.api
    reputation_override_query = api.select(ReputationOverride) \
        .where("foo") \
        .set_override_list("BLACK_LIST") \
        .set_override_type("SHA256") \
        .sort_by("create_time", "asc")
    assert len(reputation_override_query) == 1
    reputation_override = reputation_override_query.first()
    assert isinstance(reputation_override, ReputationOverride)
    assert reputation_override.sha256_hash == "af62e6b3d475879c4234fe7bd8ba67ff6544ce6510131a069aaac75aa92aee7a"


def test_reputation_override_select_async(cbcsdk_mock):
    """Testing Reputation Override with .select(ReputationOverride) async"""
    def _test_request(url, body, **kwargs):
        assert body == {
            "criteria": {
                "override_list": "BLACK_LIST",
                "override_type": "SHA256"
            },
            "query": "foo",
            "sort_field": "create_time",
            "sort_order": "asc"
        }
        return REPUTATION_OVERRIDE_SHA256_SEARCH_RESPONSE

    cbcsdk_mock.mock_request("POST",
                             "/appservices/v6/orgs/test/reputations/overrides/_search",
                             _test_request)
    api = cbcsdk_mock.api
    future = api.select(ReputationOverride) \
        .where("foo") \
        .set_override_list("BLACK_LIST") \
        .set_override_type("SHA256") \
        .sort_by("create_time", "asc") \
        .execute_async()
    results = future.result()
    assert len(results) == 1
    assert isinstance(results[0], ReputationOverride)
    assert results[0].sha256_hash == "af62e6b3d475879c4234fe7bd8ba67ff6544ce6510131a069aaac75aa92aee7a"


def test_reputation_override_select_by_id(cbcsdk_mock):
    """Testing Reputation Override with .select(ReputationOverride, "id")"""
    cbcsdk_mock.mock_request("GET",
                             "/appservices/v6/orgs/test/reputations/overrides/e9410b754ea011ebbfd0db2585a41b07",
                             REPUTATION_OVERRIDE_SHA256_RESPONSE)
    api = cbcsdk_mock.api
    reputation_override = api.select(ReputationOverride, "e9410b754ea011ebbfd0db2585a41b07")
    reputation_override.refresh()
    assert isinstance(reputation_override, ReputationOverride)
    assert reputation_override.sha256_hash == "af62e6b3d475879c4234fe7bd8ba67ff6544ce6510131a069aaac75aa92aee7a"


def test_reputation_override_delete(cbcsdk_mock):
    """Testing Reputation Override with .delete()"""
    cbcsdk_mock.mock_request("GET",
                             "/appservices/v6/orgs/test/reputations/overrides/e9410b754ea011ebbfd0db2585a41b07",
                             REPUTATION_OVERRIDE_SHA256_RESPONSE)
    cbcsdk_mock.mock_request("DELETE",
                             "/appservices/v6/orgs/test/reputations/overrides/e9410b754ea011ebbfd0db2585a41b07",
                             None)
    api = cbcsdk_mock.api
    reputation_override = api.select(ReputationOverride, "e9410b754ea011ebbfd0db2585a41b07")
    reputation_override.delete()
    assert reputation_override._is_deleted


def test_reputation_override_bulk_delete(cbcsdk_mock):
    """Testing Reputation Override with .bulk_delete()"""
    def _test_request(url, body, **kwargs):
        assert body == ["ID_1", "ID_2"]
        return {
            "results": body,
            "errors": []
        }

    cbcsdk_mock.mock_request("POST",
                             "/appservices/v6/orgs/test/reputations/overrides/_delete",
                             _test_request)
    api = cbcsdk_mock.api
    response = ReputationOverride.bulk_delete(api, ["ID_1", "ID_2"])
    # The original line lacked the assert, so the comparison was a no-op.
    assert response["results"] == ["ID_1", "ID_2"]


def test_reputation_override_process_ban_process_sha256(cbcsdk_mock):
    """Testing Reputation Override creation from process"""
    # mock the search validation
    cbcsdk_mock.mock_request("GET", "/api/investigate/v1/orgs/test/processes/search_validation",
                             GET_PROCESS_VALIDATION_RESP)
    # mock the POST of a search
    cbcsdk_mock.mock_request("POST", "/api/investigate/v2/orgs/test/processes/search_job",
                             POST_PROCESS_SEARCH_JOB_RESP)
    # mock the GET to check search status
    cbcsdk_mock.mock_request("GET", ("/api/investigate/v1/orgs/test/processes/"
                                     "search_jobs/2c292717-80ed-4f0d-845f-779e09470920"),
                             GET_PROCESS_SEARCH_JOB_RESP)
    # mock the GET to get search results
    cbcsdk_mock.mock_request("GET", ("/api/investigate/v2/orgs/test/processes/search_jobs/"
                                     "2c292717-80ed-4f0d-845f-779e09470920/results"),
                             GET_PROCESS_SEARCH_JOB_RESULTS_RESP)
    api = cbcsdk_mock.api
    guid = 'WNEXFKQ7-0002b226-000015bd-00000000-1d6225bbba74c00'
    process = api.select(Process, guid)

    def _test_request(url, body, **kwargs):
        resp = body
        resp.update({
            "id": "e9410b754ea011ebbfd0db2585a41b07",
            "created_by": "example@example.com",
            "create_time": "2021-01-04T15:24:18.002Z"
        })
        return resp

    cbcsdk_mock.mock_request("POST", "/appservices/v6/orgs/test/reputations/overrides",
                             _test_request)
    reputation_override = process.ban_process_sha256("Test ban application")
    assert isinstance(reputation_override, ReputationOverride)
    assert reputation_override.override_list == "BLACK_LIST"
    assert reputation_override.override_type == "SHA256"
    assert reputation_override.sha256_hash == "5920199e4fbfa47c1717b863814722148a353e54f8c10912cf1f991a1c86309d"
    assert reputation_override.filename == "vmtoolsd.exe"


def test_reputation_override_process_approve_process_sha256(cbcsdk_mock):
    """Testing Reputation Override creation from process"""
    # mock the search validation
    cbcsdk_mock.mock_request("GET", "/api/investigate/v1/orgs/test/processes/search_validation",
                             GET_PROCESS_VALIDATION_RESP)
    # mock the POST of a search
    cbcsdk_mock.mock_request("POST", "/api/investigate/v2/orgs/test/processes/search_job",
                             POST_PROCESS_SEARCH_JOB_RESP)
    # mock the GET to check search status
    cbcsdk_mock.mock_request("GET", ("/api/investigate/v1/orgs/test/processes/"
                                     "search_jobs/2c292717-80ed-4f0d-845f-779e09470920"),
                             GET_PROCESS_SEARCH_JOB_RESP)
    # mock the GET to get search results
    cbcsdk_mock.mock_request("GET", ("/api/investigate/v2/orgs/test/processes/search_jobs/"
                                     "2c292717-80ed-4f0d-845f-779e09470920/results"),
                             GET_PROCESS_SEARCH_JOB_RESULTS_RESP)
    api = cbcsdk_mock.api
    guid = 'WNEXFKQ7-0002b226-000015bd-00000000-1d6225bbba74c00'
    process = api.select(Process, guid)

    def _test_request(url, body, **kwargs):
        resp = body
        resp.update({
            "id": "e9410b754ea011ebbfd0db2585a41b07",
            "created_by": "example@example.com",
            "create_time": "2021-01-04T15:24:18.002Z"
        })
        return resp

    cbcsdk_mock.mock_request("POST", "/appservices/v6/orgs/test/reputations/overrides",
                             _test_request)
    reputation_override = process.approve_process_sha256("Test approve application")
    assert isinstance(reputation_override, ReputationOverride)
    assert reputation_override.override_list == "WHITE_LIST"
    assert reputation_override.override_type == "SHA256"
    assert reputation_override.sha256_hash == "5920199e4fbfa47c1717b863814722148a353e54f8c10912cf1f991a1c86309d"
    assert reputation_override.filename == "vmtoolsd.exe"


def test_reputation_override_enriched_event_ban_process_sha256(cbcsdk_mock):
    """Testing Reputation Override creation from enriched event"""
    cbcsdk_mock.mock_request("POST", "/api/investigate/v2/orgs/test/enriched_events/search_job",
                             POST_ENRICHED_EVENTS_SEARCH_JOB_RESP)
    cbcsdk_mock.mock_request("GET",
                             "/api/investigate/v1/orgs/test/enriched_events/search_jobs/08ffa932-b633-4107-ba56-8741e929e48b",  # noqa: E501
                             GET_ENRICHED_EVENTS_SEARCH_JOB_RESULTS_RESP)
    cbcsdk_mock.mock_request("GET",
                             "/api/investigate/v2/orgs/test/enriched_events/search_jobs/08ffa932-b633-4107-ba56-8741e929e48b/results",  # noqa: E501
                             GET_ENRICHED_EVENTS_SEARCH_JOB_RESULTS_RESP_1)
    api = cbcsdk_mock.api
    event = api.select(EnrichedEvent, "27a278d5150911eb86f1011a55e73b72")

    def _test_request(url, body, **kwargs):
        resp = body
        resp.update({
            "id": "e9410b754ea011ebbfd0db2585a41b07",
            "created_by": "example@example.com",
            "create_time": "2021-01-04T15:24:18.002Z"
        })
        return resp

    cbcsdk_mock.mock_request("POST", "/appservices/v6/orgs/test/reputations/overrides",
                             _test_request)
    reputation_override = event.ban_process_sha256("Test ban application")
    assert isinstance(reputation_override, ReputationOverride)
    assert reputation_override.override_list == "BLACK_LIST"
    assert reputation_override.override_type == "SHA256"
    assert reputation_override.sha256_hash == "6c02d54afe705d7df7db7ee94d92afdefb2fb91f9d1805c970126a096df52786"
    assert reputation_override.filename == "scrcons.exe"


def test_reputation_override_enriched_event_approve_process_sha256(cbcsdk_mock):
    """Testing Reputation Override creation from enriched event"""
    cbcsdk_mock.mock_request("POST", "/api/investigate/v2/orgs/test/enriched_events/search_job",
                             POST_ENRICHED_EVENTS_SEARCH_JOB_RESP)
    cbcsdk_mock.mock_request("GET",
                             "/api/investigate/v1/orgs/test/enriched_events/search_jobs/08ffa932-b633-4107-ba56-8741e929e48b",  # noqa: E501
                             GET_ENRICHED_EVENTS_SEARCH_JOB_RESULTS_RESP)
    cbcsdk_mock.mock_request("GET",
                             "/api/investigate/v2/orgs/test/enriched_events/search_jobs/08ffa932-b633-4107-ba56-8741e929e48b/results",  # noqa: E501
                             GET_ENRICHED_EVENTS_SEARCH_JOB_RESULTS_RESP_1)
    api = cbcsdk_mock.api
    event = api.select(EnrichedEvent, "27a278d5150911eb86f1011a55e73b72")

    def _test_request(url, body, **kwargs):
        resp = body
        resp.update({
            "id": "e9410b754ea011ebbfd0db2585a41b07",
            "created_by": "example@example.com",
            "create_time": "2021-01-04T15:24:18.002Z"
        })
        return resp

    cbcsdk_mock.mock_request("POST", "/appservices/v6/orgs/test/reputations/overrides",
                             _test_request)
    reputation_override = event.approve_process_sha256("Test approve application")
    assert isinstance(reputation_override, ReputationOverride)
    assert reputation_override.override_list == "WHITE_LIST"
    assert reputation_override.override_type == "SHA256"
    assert reputation_override.sha256_hash == "6c02d54afe705d7df7db7ee94d92afdefb2fb91f9d1805c970126a096df52786"
    assert reputation_override.filename == "scrcons.exe"
| 46.10303
| 148
| 0.649796
| 1,496
| 15,214
| 6.31484
| 0.138369
| 0.135281
| 0.037049
| 0.055573
| 0.833598
| 0.822272
| 0.800889
| 0.778236
| 0.714089
| 0.714089
| 0
| 0.088976
| 0.248718
| 15,214
| 329
| 149
| 46.243161
| 0.737533
| 0.102734
| 0
| 0.710638
| 0
| 0.017021
| 0.260562
| 0.200324
| 0
| 0
| 0
| 0
| 0.148936
| 1
| 0.085106
| false
| 0
| 0.038298
| 0
| 0.165957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a66a4d81268682c39e0dbad9ebddece9de0903a8
| 184
|
py
|
Python
|
boa3_test/test_sc/interop_test/blockchain/CurrentHash.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 25
|
2020-07-22T19:37:43.000Z
|
2022-03-08T03:23:55.000Z
|
boa3_test/test_sc/interop_test/blockchain/CurrentHash.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 419
|
2020-04-23T17:48:14.000Z
|
2022-03-31T13:17:45.000Z
|
boa3_test/test_sc/interop_test/blockchain/CurrentHash.py
|
hal0x2328/neo3-boa
|
6825a3533384cb01660773050719402a9703065b
|
[
"Apache-2.0"
] | 15
|
2020-05-21T21:54:24.000Z
|
2021-11-18T06:17:24.000Z
|
from boa3.builtin import public
from boa3.builtin.interop.blockchain import current_hash
from boa3.builtin.type import UInt256


@public
def main() -> UInt256:
    return current_hash
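
# Editor's note, not part of the original contract: `current_hash` comes from
# neo3-boa's blockchain interop layer and, as far as the interop name
# suggests, exposes the hash of the current block as a UInt256; the test
# contract simply returns it to the caller.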
| 20.444444
| 56
| 0.798913
| 26
| 184
| 5.576923
| 0.538462
| 0.165517
| 0.310345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056604
| 0.13587
| 184
| 8
| 57
| 23
| 0.855346
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.5
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
a6e581dfa1aa45660c64f6062f1498b3547b952e
| 197
|
py
|
Python
|
tests/test_my_project.py
|
makkus/my-project
|
c7de07b7ff3a11cb2b1862c77fce0be40c0b384c
|
[
"BlueOak-1.0.0",
"Apache-2.0"
] | null | null | null |
tests/test_my_project.py
|
makkus/my-project
|
c7de07b7ff3a11cb2b1862c77fce0be40c0b384c
|
[
"BlueOak-1.0.0",
"Apache-2.0"
] | 1
|
2021-06-18T01:34:29.000Z
|
2021-06-18T01:35:04.000Z
|
tests/test_my_project.py
|
makkus/my-project
|
c7de07b7ff3a11cb2b1862c77fce0be40c0b384c
|
[
"BlueOak-1.0.0",
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `my_project` package."""
import pytest  # noqa
import my_project


def test_assert():
    assert my_project.get_version() is not None
| 14.071429
| 47
| 0.670051
| 29
| 197
| 4.37931
| 0.793103
| 0.212598
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006173
| 0.177665
| 197
| 13
| 48
| 15.153846
| 0.777778
| 0.401015
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|