hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0631438c7f8ae6fdf09a93b0b3e27564659fcee2
| 44
|
py
|
Python
|
source/ports/scala_port/src/test/scala/scripts/s1.py
|
Duckxz/core
|
8761daab0874e9f5c46f8e0b08d95c9d8709f5ee
|
[
"Apache-2.0"
] | null | null | null |
source/ports/scala_port/src/test/scala/scripts/s1.py
|
Duckxz/core
|
8761daab0874e9f5c46f8e0b08d95c9d8709f5ee
|
[
"Apache-2.0"
] | null | null | null |
source/ports/scala_port/src/test/scala/scripts/s1.py
|
Duckxz/core
|
8761daab0874e9f5c46f8e0b08d95c9d8709f5ee
|
[
"Apache-2.0"
] | null | null | null |
def fn_in_s1():
return 'Hello from s1'
| 11
| 26
| 0.636364
| 8
| 44
| 3.25
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 0.25
| 44
| 3
| 27
| 14.666667
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0.302326
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
231c2c14987b6d3cd169130d7566080a52ee38bd
| 108
|
py
|
Python
|
janus/mm_wrapper/__init__.py
|
zhenglz/janus
|
f3f1ed3f2b6e377c51e958cae2d919069d221eda
|
[
"BSD-3-Clause"
] | 16
|
2019-04-18T15:45:02.000Z
|
2021-12-17T17:51:18.000Z
|
janus/mm_wrapper/__init__.py
|
zhenglz/janus
|
f3f1ed3f2b6e377c51e958cae2d919069d221eda
|
[
"BSD-3-Clause"
] | 2
|
2019-06-20T16:56:08.000Z
|
2020-08-28T16:09:16.000Z
|
janus/mm_wrapper/__init__.py
|
zhenglz/janus
|
f3f1ed3f2b6e377c51e958cae2d919069d221eda
|
[
"BSD-3-Clause"
] | 8
|
2018-11-16T17:00:58.000Z
|
2022-01-11T05:36:50.000Z
|
from janus.mm_wrapper.mm_wrapper import MMWrapper
from janus.mm_wrapper.openmm_wrapper import OpenMMWrapper
| 36
| 57
| 0.888889
| 16
| 108
| 5.75
| 0.5
| 0.293478
| 0.23913
| 0.391304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 108
| 2
| 58
| 54
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
234a4201bfc293c89c7e8430c2be8255b0ba3503
| 151,831
|
py
|
Python
|
boto3_type_annotations_with_docs/boto3_type_annotations/backup/client.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 119
|
2018-12-01T18:20:57.000Z
|
2022-02-02T10:31:29.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/backup/client.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 15
|
2018-11-16T00:16:44.000Z
|
2021-11-13T03:44:18.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/backup/client.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 11
|
2019-05-06T05:26:51.000Z
|
2021-09-28T15:27:59.000Z
|
from typing import Optional
from botocore.client import BaseClient
from typing import Dict
from botocore.paginate import Paginator
from datetime import datetime
from botocore.waiter import Waiter
from typing import Union
from typing import List
class Client(BaseClient):
def can_paginate(self, operation_name: str = None):
"""
Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is ``create_foo``, and you\'d normally invoke the
operation as ``client.create_foo(**kwargs)``, if the
``create_foo`` operation can be paginated, you can use the
call ``client.get_paginator(\"create_foo\")``.
:return: ``True`` if the operation can be paginated,
``False`` otherwise.
"""
pass
def create_backup_plan(self, BackupPlan: Dict, BackupPlanTags: Dict = None, CreatorRequestId: str = None) -> Dict:
"""
Backup plans are documents that contain information that AWS Backup uses to schedule tasks that create recovery points of resources.
If you call ``CreateBackupPlan`` with a plan that already exists, the existing ``backupPlanId`` is returned.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/CreateBackupPlan>`_
**Request Syntax**
::
response = client.create_backup_plan(
BackupPlan={
'BackupPlanName': 'string',
'Rules': [
{
'RuleName': 'string',
'TargetBackupVaultName': 'string',
'ScheduleExpression': 'string',
'StartWindowMinutes': 123,
'CompletionWindowMinutes': 123,
'Lifecycle': {
'MoveToColdStorageAfterDays': 123,
'DeleteAfterDays': 123
},
'RecoveryPointTags': {
'string': 'string'
}
},
]
},
BackupPlanTags={
'string': 'string'
},
CreatorRequestId='string'
)
**Response Syntax**
::
{
'BackupPlanId': 'string',
'BackupPlanArn': 'string',
'CreationDate': datetime(2015, 1, 1),
'VersionId': 'string'
}
**Response Structure**
- *(dict) --*
- **BackupPlanId** *(string) --*
Uniquely identifies a backup plan.
- **BackupPlanArn** *(string) --*
An Amazon Resource Name (ARN) that uniquely identifies a backup plan; for example, ``arn:aws:backup:us-east-1:123456789012:plan:8F81F553-3A74-4A3F-B93D-B3360DC80C50`` .
- **CreationDate** *(datetime) --*
The date and time that a backup plan is created, in Unix format and Coordinated Universal Time (UTC). The value of ``CreationDate`` is accurate to milliseconds. For example, the value 1516925490.087 represents Friday, January 26, 2018 12:11:30.087 AM.
- **VersionId** *(string) --*
Unique, randomly generated, Unicode, UTF-8 encoded strings that are at most 1024 bytes long. They cannot be edited.
:type BackupPlan: dict
:param BackupPlan: **[REQUIRED]**
Specifies the body of a backup plan. Includes a ``BackupPlanName`` and one or more sets of ``Rules`` .
- **BackupPlanName** *(string) --* **[REQUIRED]**
The display name of a backup plan.
- **Rules** *(list) --* **[REQUIRED]**
An array of ``BackupRule`` objects, each of which specifies a scheduled task that is used to back up a selection of resources.
- *(dict) --*
Specifies a scheduled task used to back up a selection of resources.
- **RuleName** *(string) --* **[REQUIRED]**
>An optional display name for a backup rule.
- **TargetBackupVaultName** *(string) --* **[REQUIRED]**
The name of a logical container where backups are stored. Backup vaults are identified by names that are unique to the account used to create them and the AWS Region where they are created. They consist of lowercase letters, numbers, and hyphens.
- **ScheduleExpression** *(string) --*
A CRON expression specifying when AWS Backup initiates a backup job.
- **StartWindowMinutes** *(integer) --*
The amount of time in minutes before beginning a backup.
- **CompletionWindowMinutes** *(integer) --*
The amount of time AWS Backup attempts a backup before canceling the job and returning an error.
- **Lifecycle** *(dict) --*
The lifecycle defines when a protected resource is transitioned to cold storage and when it expires. AWS Backup will transition and expire backups automatically according to the lifecycle that you define.
Backups transitioned to cold storage must be stored in cold storage for a minimum of 90 days. Therefore, the “expire after days” setting must be 90 days greater than the “transition to cold after days”. The “transition to cold after days” setting cannot be changed after a backup has been transitioned to cold.
- **MoveToColdStorageAfterDays** *(integer) --*
Specifies the number of days after creation that a recovery point is moved to cold storage.
- **DeleteAfterDays** *(integer) --*
Specifies the number of days after creation that a recovery point is deleted. Must be greater than ``MoveToColdStorageAfterDays`` .
- **RecoveryPointTags** *(dict) --*
To help organize your resources, you can assign your own metadata to the resources that you create. Each tag is a key-value pair.
- *(string) --*
- *(string) --*
:type BackupPlanTags: dict
:param BackupPlanTags:
To help organize your resources, you can assign your own metadata to the resources that you create. Each tag is a key-value pair. The specified tags are assigned to all backups created with this plan.
- *(string) --*
- *(string) --*
:type CreatorRequestId: string
:param CreatorRequestId:
Identifies the request and allows failed requests to be retried without the risk of executing the operation twice. If the request includes a ``CreatorRequestId`` that matches an existing backup plan, that plan is returned. This parameter is optional.
:rtype: dict
:returns:
"""
pass
def create_backup_selection(self, BackupPlanId: str, BackupSelection: Dict, CreatorRequestId: str = None) -> Dict:
"""
Creates a JSON document that specifies a set of resources to assign to a backup plan. Resources can be included by specifying patterns for a ``ListOfTags`` and selected ``Resources`` .
For example, consider the following patterns:
* ``Resources: "arn:aws:ec2:region:account-id:volume/volume-id"``
* ``ConditionKey:"department"`` ``ConditionValue:"finance"`` ``ConditionType:"StringEquals"``
* ``ConditionKey:"importance"`` ``ConditionValue:"critical"`` ``ConditionType:"StringEquals"``
Using these patterns would back up all Amazon Elastic Block Store (Amazon EBS) volumes that are tagged as ``"department=finance"`` , ``"importance=critical"`` , in addition to an EBS volume with the specified volume Id.
Resources and conditions are additive in that all resources that match the pattern are selected. This shouldn't be confused with a logical AND, where all conditions must match. The matching patterns are logically 'put together using the OR operator. In other words, all patterns that match are selected for backup.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/CreateBackupSelection>`_
**Request Syntax**
::
response = client.create_backup_selection(
BackupPlanId='string',
BackupSelection={
'SelectionName': 'string',
'IamRoleArn': 'string',
'Resources': [
'string',
],
'ListOfTags': [
{
'ConditionType': 'STRINGEQUALS',
'ConditionKey': 'string',
'ConditionValue': 'string'
},
]
},
CreatorRequestId='string'
)
**Response Syntax**
::
{
'SelectionId': 'string',
'BackupPlanId': 'string',
'CreationDate': datetime(2015, 1, 1)
}
**Response Structure**
- *(dict) --*
- **SelectionId** *(string) --*
Uniquely identifies the body of a request to assign a set of resources to a backup plan.
- **BackupPlanId** *(string) --*
Uniquely identifies a backup plan.
- **CreationDate** *(datetime) --*
The date and time a backup selection is created, in Unix format and Coordinated Universal Time (UTC). The value of ``CreationDate`` is accurate to milliseconds. For example, the value 1516925490.087 represents Friday, January 26, 2018 12:11:30.087 AM.
:type BackupPlanId: string
:param BackupPlanId: **[REQUIRED]**
Uniquely identifies the backup plan to be associated with the selection of resources.
:type BackupSelection: dict
:param BackupSelection: **[REQUIRED]**
Specifies the body of a request to assign a set of resources to a backup plan.
It includes an array of resources, an optional array of patterns to exclude resources, an optional role to provide access to the AWS service the resource belongs to, and an optional array of tags used to identify a set of resources.
- **SelectionName** *(string) --* **[REQUIRED]**
The display name of a resource selection document.
- **IamRoleArn** *(string) --* **[REQUIRED]**
The ARN of the IAM role that AWS Backup uses to authenticate when restoring the target resource; for example, ``arn:aws:iam::123456789012:role/S3Access`` .
- **Resources** *(list) --*
An array of strings that either contain Amazon Resource Names (ARNs) or match patterns such as \"``arn:aws:ec2:us-east-1:123456789012:volume/*`` \" of resources to assign to a backup plan.
- *(string) --*
- **ListOfTags** *(list) --*
An array of conditions used to specify a set of resources to assign to a backup plan; for example, ``\"StringEquals\": {\"ec2:ResourceTag/Department\": \"accounting\"`` .
- *(dict) --*
Contains an array of triplets made up of a condition type (such as ``StringEquals`` ), a key, and a value. Conditions are used to filter resources in a selection that is assigned to a backup plan.
- **ConditionType** *(string) --* **[REQUIRED]**
An operation, such as ``StringEquals`` , that is applied to a key-value pair used to filter resources in a selection.
- **ConditionKey** *(string) --* **[REQUIRED]**
The key in a key-value pair. For example, in ``\"ec2:ResourceTag/Department\": \"accounting\"`` , ``\"ec2:ResourceTag/Department\"`` is the key.
- **ConditionValue** *(string) --* **[REQUIRED]**
The value in a key-value pair. For example, in ``\"ec2:ResourceTag/Department\": \"accounting\"`` , ``\"accounting\"`` is the value.
:type CreatorRequestId: string
:param CreatorRequestId:
A unique string that identifies the request and allows failed requests to be retried without the risk of executing the operation twice.
:rtype: dict
:returns:
"""
pass
def create_backup_vault(self, BackupVaultName: str, BackupVaultTags: Dict = None, EncryptionKeyArn: str = None, CreatorRequestId: str = None) -> Dict:
"""
Creates a logical container where backups are stored. A ``CreateBackupVault`` request includes a name, optionally one or more resource tags, an encryption key, and a request ID.
.. note::
Sensitive data, such as passport numbers, should not be included the name of a backup vault.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/CreateBackupVault>`_
**Request Syntax**
::
response = client.create_backup_vault(
BackupVaultName='string',
BackupVaultTags={
'string': 'string'
},
EncryptionKeyArn='string',
CreatorRequestId='string'
)
**Response Syntax**
::
{
'BackupVaultName': 'string',
'BackupVaultArn': 'string',
'CreationDate': datetime(2015, 1, 1)
}
**Response Structure**
- *(dict) --*
- **BackupVaultName** *(string) --*
The name of a logical container where backups are stored. Backup vaults are identified by names that are unique to the account used to create them and the Region where they are created. They consist of lowercase letters, numbers, and hyphens.
- **BackupVaultArn** *(string) --*
An Amazon Resource Name (ARN) that uniquely identifies a backup vault; for example, ``arn:aws:backup:us-east-1:123456789012:vault:aBackupVault`` .
- **CreationDate** *(datetime) --*
The date and time a backup vault is created, in Unix format and Coordinated Universal Time (UTC). The value of ``CreationDate`` is accurate to milliseconds. For example, the value 1516925490.087 represents Friday, January 26, 2018 12:11:30.087 AM.
:type BackupVaultName: string
:param BackupVaultName: **[REQUIRED]**
The name of a logical container where backups are stored. Backup vaults are identified by names that are unique to the account used to create them and the AWS Region where they are created. They consist of lowercase letters, numbers, and hyphens.
:type BackupVaultTags: dict
:param BackupVaultTags:
Metadata that you can assign to help organize the resources that you create. Each tag is a key-value pair.
- *(string) --*
- *(string) --*
:type EncryptionKeyArn: string
:param EncryptionKeyArn:
The server-side encryption key that is used to protect your backups; for example, ``arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab`` .
:type CreatorRequestId: string
:param CreatorRequestId:
A unique string that identifies the request and allows failed requests to be retried without the risk of executing the operation twice.
:rtype: dict
:returns:
"""
pass
def delete_backup_plan(self, BackupPlanId: str) -> Dict:
"""
Deletes a backup plan. A backup plan can only be deleted after all associated selections of resources have been deleted. Deleting a backup plan deletes the current version of a backup plan. Previous versions, if any, will still exist.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/DeleteBackupPlan>`_
**Request Syntax**
::
response = client.delete_backup_plan(
BackupPlanId='string'
)
**Response Syntax**
::
{
'BackupPlanId': 'string',
'BackupPlanArn': 'string',
'DeletionDate': datetime(2015, 1, 1),
'VersionId': 'string'
}
**Response Structure**
- *(dict) --*
- **BackupPlanId** *(string) --*
Uniquely identifies a backup plan.
- **BackupPlanArn** *(string) --*
An Amazon Resource Name (ARN) that uniquely identifies a backup plan; for example, ``arn:aws:backup:us-east-1:123456789012:plan:8F81F553-3A74-4A3F-B93D-B3360DC80C50`` .
- **DeletionDate** *(datetime) --*
The date and time a backup plan is deleted, in Unix format and Coordinated Universal Time (UTC). The value of ``CreationDate`` is accurate to milliseconds. For example, the value 1516925490.087 represents Friday, January 26, 2018 12:11:30.087 AM.
- **VersionId** *(string) --*
Unique, randomly generated, Unicode, UTF-8 encoded strings that are at most 1,024 bytes long. Version Ids cannot be edited.
:type BackupPlanId: string
:param BackupPlanId: **[REQUIRED]**
Uniquely identifies a backup plan.
:rtype: dict
:returns:
"""
pass
def delete_backup_selection(self, BackupPlanId: str, SelectionId: str):
"""
Deletes the resource selection associated with a backup plan that is specified by the ``SelectionId`` .
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/DeleteBackupSelection>`_
**Request Syntax**
::
response = client.delete_backup_selection(
BackupPlanId='string',
SelectionId='string'
)
:type BackupPlanId: string
:param BackupPlanId: **[REQUIRED]**
Uniquely identifies a backup plan.
:type SelectionId: string
:param SelectionId: **[REQUIRED]**
Uniquely identifies the body of a request to assign a set of resources to a backup plan.
:returns: None
"""
pass
def delete_backup_vault(self, BackupVaultName: str):
"""
Deletes the backup vault identified by its name. A vault can be deleted only if it is empty.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/DeleteBackupVault>`_
**Request Syntax**
::
response = client.delete_backup_vault(
BackupVaultName='string'
)
:type BackupVaultName: string
:param BackupVaultName: **[REQUIRED]**
The name of a logical container where backups are stored. Backup vaults are identified by names that are unique to the account used to create them and theAWS Region where they are created. They consist of lowercase letters, numbers, and hyphens.
:returns: None
"""
pass
def delete_backup_vault_access_policy(self, BackupVaultName: str):
"""
Deletes the policy document that manages permissions on a backup vault.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/DeleteBackupVaultAccessPolicy>`_
**Request Syntax**
::
response = client.delete_backup_vault_access_policy(
BackupVaultName='string'
)
:type BackupVaultName: string
:param BackupVaultName: **[REQUIRED]**
The name of a logical container where backups are stored. Backup vaults are identified by names that are unique to the account used to create them and the AWS Region where they are created. They consist of lowercase letters, numbers, and hyphens.
:returns: None
"""
pass
def delete_backup_vault_notifications(self, BackupVaultName: str):
"""
Deletes event notifications for the specified backup vault.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/DeleteBackupVaultNotifications>`_
**Request Syntax**
::
response = client.delete_backup_vault_notifications(
BackupVaultName='string'
)
:type BackupVaultName: string
:param BackupVaultName: **[REQUIRED]**
The name of a logical container where backups are stored. Backup vaults are identified by names that are unique to the account used to create them and the Region where they are created. They consist of lowercase letters, numbers, and hyphens.
:returns: None
"""
pass
def delete_recovery_point(self, BackupVaultName: str, RecoveryPointArn: str):
"""
Deletes the recovery point specified by a recovery point ID.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/DeleteRecoveryPoint>`_
**Request Syntax**
::
response = client.delete_recovery_point(
BackupVaultName='string',
RecoveryPointArn='string'
)
:type BackupVaultName: string
:param BackupVaultName: **[REQUIRED]**
The name of a logical container where backups are stored. Backup vaults are identified by names that are unique to the account used to create them and the AWS Region where they are created. They consist of lowercase letters, numbers, and hyphens.
:type RecoveryPointArn: string
:param RecoveryPointArn: **[REQUIRED]**
An Amazon Resource Name (ARN) that uniquely identifies a recovery point; for example, ``arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45`` .
:returns: None
"""
pass
def describe_backup_job(self, BackupJobId: str) -> Dict:
"""
Returns metadata associated with creating a backup of a resource.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/DescribeBackupJob>`_
**Request Syntax**
::
response = client.describe_backup_job(
BackupJobId='string'
)
**Response Syntax**
::
{
'BackupJobId': 'string',
'BackupVaultName': 'string',
'BackupVaultArn': 'string',
'RecoveryPointArn': 'string',
'ResourceArn': 'string',
'CreationDate': datetime(2015, 1, 1),
'CompletionDate': datetime(2015, 1, 1),
'State': 'CREATED'|'PENDING'|'RUNNING'|'ABORTING'|'ABORTED'|'COMPLETED'|'FAILED'|'EXPIRED',
'StatusMessage': 'string',
'PercentDone': 'string',
'BackupSizeInBytes': 123,
'IamRoleArn': 'string',
'CreatedBy': {
'BackupPlanId': 'string',
'BackupPlanArn': 'string',
'BackupPlanVersion': 'string',
'BackupRuleId': 'string'
},
'ResourceType': 'string',
'BytesTransferred': 123,
'ExpectedCompletionDate': datetime(2015, 1, 1),
'StartBy': datetime(2015, 1, 1)
}
**Response Structure**
- *(dict) --*
- **BackupJobId** *(string) --*
Uniquely identifies a request to AWS Backup to back up a resource.
- **BackupVaultName** *(string) --*
The name of a logical container where backups are stored. Backup vaults are identified by names that are unique to the account used to create them and the AWS Region where they are created. They consist of lowercase letters, numbers, and hyphens.
- **BackupVaultArn** *(string) --*
An Amazon Resource Name (ARN) that uniquely identifies a backup vault; for example, ``arn:aws:backup:us-east-1:123456789012:vault:aBackupVault`` .
- **RecoveryPointArn** *(string) --*
An ARN that uniquely identifies a recovery point; for example, ``arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45`` .
- **ResourceArn** *(string) --*
An ARN that uniquely identifies a saved resource. The format of the ARN depends on the resource type.
- **CreationDate** *(datetime) --*
The date and time that a backup job is created, in Unix format and Coordinated Universal Time (UTC). The value of ``CreationDate`` is accurate to milliseconds. For example, the value 1516925490.087 represents Friday, January 26, 2018 12:11:30.087 AM.
- **CompletionDate** *(datetime) --*
The date and time that a job to create a backup job is completed, in Unix format and Coordinated Universal Time (UTC). The value of ``CreationDate`` is accurate to milliseconds. For example, the value 1516925490.087 represents Friday, January 26, 2018 12:11:30.087 AM.
- **State** *(string) --*
The current state of a resource recovery point.
- **StatusMessage** *(string) --*
A detailed message explaining the status of the job to back up a resource.
- **PercentDone** *(string) --*
Contains an estimated percentage that is complete of a job at the time the job status was queried.
- **BackupSizeInBytes** *(integer) --*
The size, in bytes, of a backup.
- **IamRoleArn** *(string) --*
Specifies the IAM role ARN used to create the target recovery point; for example, ``arn:aws:iam::123456789012:role/S3Access`` .
- **CreatedBy** *(dict) --*
Contains identifying information about the creation of a backup job, including the ``BackupPlanArn`` , ``BackupPlanId`` , ``BackupPlanVersion`` , and ``BackupRuleId`` of the backup plan that is used to create it.
- **BackupPlanId** *(string) --*
Uniquely identifies a backup plan.
- **BackupPlanArn** *(string) --*
An Amazon Resource Name (ARN) that uniquely identifies a backup plan; for example, ``arn:aws:backup:us-east-1:123456789012:plan:8F81F553-3A74-4A3F-B93D-B3360DC80C50`` .
- **BackupPlanVersion** *(string) --*
Version IDs are unique, randomly generated, Unicode, UTF-8 encoded strings that are at most 1,024 bytes long. They cannot be edited.
- **BackupRuleId** *(string) --*
Uniquely identifies a rule used to schedule the backup of a selection of resources.
- **ResourceType** *(string) --*
The type of AWS resource to be backed-up; for example, an Amazon Elastic Block Store (Amazon EBS) volume or an Amazon Relational Database Service (Amazon RDS) database.
- **BytesTransferred** *(integer) --*
The size in bytes transferred to a backup vault at the time that the job status was queried.
- **ExpectedCompletionDate** *(datetime) --*
The date and time that a job to back up resources is expected to be completed, in Unix format and Coordinated Universal Time (UTC). The value of ``ExpectedCompletionDate`` is accurate to milliseconds. For example, the value 1516925490.087 represents Friday, January 26, 2018 12:11:30.087 AM.
- **StartBy** *(datetime) --*
Specifies the time in Unix format and Coordinated Universal Time (UTC) when a backup job must be started before it is canceled. The value is calculated by adding the start window to the scheduled time. So if the scheduled time were 6:00 PM and the start window is 2 hours, the ``StartBy`` time would be 8:00 PM on the date specified. The value of ``StartBy`` is accurate to milliseconds. For example, the value 1516925490.087 represents Friday, January 26, 2018 12:11:30.087 AM.
:type BackupJobId: string
:param BackupJobId: **[REQUIRED]**
Uniquely identifies a request to AWS Backup to back up a resource.
:rtype: dict
:returns:
"""
pass
def describe_backup_vault(self, BackupVaultName: str) -> Dict:
"""
Returns metadata about a backup vault specified by its name.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/DescribeBackupVault>`_
**Request Syntax**
::
response = client.describe_backup_vault(
BackupVaultName='string'
)
**Response Syntax**
::
{
'BackupVaultName': 'string',
'BackupVaultArn': 'string',
'EncryptionKeyArn': 'string',
'CreationDate': datetime(2015, 1, 1),
'CreatorRequestId': 'string',
'NumberOfRecoveryPoints': 123
}
**Response Structure**
- *(dict) --*
- **BackupVaultName** *(string) --*
The name of a logical container where backups are stored. Backup vaults are identified by names that are unique to the account used to create them and the Region where they are created. They consist of lowercase letters, numbers, and hyphens.
- **BackupVaultArn** *(string) --*
An Amazon Resource Name (ARN) that uniquely identifies a backup vault; for example, ``arn:aws:backup:us-east-1:123456789012:vault:aBackupVault`` .
- **EncryptionKeyArn** *(string) --*
The server-side encryption key that is used to protect your backups; for example, ``arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab`` .
- **CreationDate** *(datetime) --*
The date and time that a backup vault is created, in Unix format and Coordinated Universal Time (UTC). The value of ``CreationDate`` is accurate to milliseconds. For example, the value 1516925490.087 represents Friday, January 26, 2018 12:11:30.087 AM.
- **CreatorRequestId** *(string) --*
A unique string that identifies the request and allows failed requests to be retried without the risk of executing the operation twice.
- **NumberOfRecoveryPoints** *(integer) --*
The number of recovery points that are stored in a backup vault.
:type BackupVaultName: string
:param BackupVaultName: **[REQUIRED]**
The name of a logical container where backups are stored. Backup vaults are identified by names that are unique to the account used to create them and the AWS Region where they are created. They consist of lowercase letters, numbers, and hyphens.
:rtype: dict
:returns:
"""
pass
def describe_protected_resource(self, ResourceArn: str) -> Dict:
    """Describe a saved (protected) resource.

    Looks up a resource by its Amazon Resource Name (ARN) and reports the
    AWS service type of the resource and the last time it was backed up.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/DescribeProtectedResource>`_

    :type ResourceArn: string
    :param ResourceArn: **[REQUIRED]** An ARN that uniquely identifies a
        resource. The format of the ARN depends on the resource type.
    :rtype: dict
    :returns: A dict with ``ResourceArn``, ``ResourceType``, and
        ``LastBackupTime`` (Unix-format UTC datetime, accurate to
        milliseconds) keys.
    """
    pass
def describe_recovery_point(self, BackupVaultName: str, RecoveryPointArn: str) -> Dict:
    """Describe a recovery point stored in a backup vault.

    Returns metadata associated with a recovery point, including its ID,
    status, encryption settings, and lifecycle.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/DescribeRecoveryPoint>`_

    :type BackupVaultName: string
    :param BackupVaultName: **[REQUIRED]** The name of the logical container
        where the backup is stored. Vault names are unique to the account
        and AWS Region that created them and consist of lowercase letters,
        numbers, and hyphens.
    :type RecoveryPointArn: string
    :param RecoveryPointArn: **[REQUIRED]** An ARN that uniquely identifies
        the recovery point; for example,
        ``arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45``.
    :rtype: dict
    :returns: A dict describing the recovery point. Keys include
        ``RecoveryPointArn``, ``BackupVaultName``, ``BackupVaultArn``,
        ``ResourceArn``, ``ResourceType``, ``CreatedBy`` (the originating
        backup plan/rule identifiers), ``IamRoleArn``, ``Status`` (one of
        ``'COMPLETED'|'PARTIAL'|'DELETING'|'EXPIRED'``; a partial status
        means the recovery point was not successfully re-created and must
        be retried), ``CreationDate``, ``CompletionDate``,
        ``BackupSizeInBytes``, ``CalculatedLifecycle``
        (``MoveToColdStorageAt``/``DeleteAt`` timestamps), ``Lifecycle``
        (``MoveToColdStorageAfterDays``/``DeleteAfterDays``; backups moved
        to cold storage stay there at least 90 days, so the expire-after
        setting must exceed the transition setting by 90 days and the
        transition setting cannot change once a backup is cold),
        ``EncryptionKeyArn``, ``IsEncrypted``, ``StorageClass``
        (``'WARM'|'COLD'|'DELETED'``), and ``LastRestoreTime``.
    """
    pass
def describe_restore_job(self, RestoreJobId: str) -> Dict:
    """Describe a restore job by its job ID.

    Returns metadata associated with the restore job, such as its status,
    progress, and the resource created by the restore.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/DescribeRestoreJob>`_

    :type RestoreJobId: string
    :param RestoreJobId: **[REQUIRED]** Uniquely identifies the job that
        restores a recovery point.
    :rtype: dict
    :returns: A dict with keys ``RestoreJobId``, ``RecoveryPointArn``,
        ``CreationDate``, ``CompletionDate``, ``Status`` (one of
        ``'PENDING'|'RUNNING'|'COMPLETED'|'ABORTED'|'FAILED'``),
        ``StatusMessage``, ``PercentDone`` (estimated completion
        percentage at query time, as a string), ``BackupSizeInBytes``,
        ``IamRoleArn``, ``ExpectedCompletionTimeMinutes``, and
        ``CreatedResourceArn`` (ARN of the restored resource; format
        depends on the resource type).
    """
    pass
def export_backup_plan_template(self, BackupPlanId: str) -> Dict:
    """Export a backup plan as a backup template.

    Returns the backup plan that is specified by the plan ID as a backup
    template in JSON form.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/ExportBackupPlanTemplate>`_

    :type BackupPlanId: string
    :param BackupPlanId: **[REQUIRED]** Uniquely identifies a backup plan.
    :rtype: dict
    :returns: A dict with a single key, ``BackupPlanTemplateJson`` — the
        body of the backup plan template in JSON format.

        .. note::
            This is a signed JSON document that cannot be modified before
            being passed to ``GetBackupPlanFromJSON.``
    """
    pass
def generate_presigned_url(self, ClientMethod: str = None, Params: Dict = None, ExpiresIn: int = None, HttpMethod: str = None):
    """Generate a presigned URL for a client method.

    :type ClientMethod: string
    :param ClientMethod: The client method to presign for.
    :type Params: dict
    :param Params: The parameters normally passed to ``ClientMethod``.
    :type ExpiresIn: int
    :param ExpiresIn: How many seconds the presigned URL remains valid.
        The default expiry is one hour (3600 seconds).
    :type HttpMethod: string
    :param HttpMethod: The HTTP method to use on the generated URL.
        Defaults to whatever method the operation's model uses.
    :returns: The presigned URL.
    """
    pass
def get_backup_plan(self, BackupPlanId: str, VersionId: str = None) -> Dict:
    """Fetch the body of a backup plan in JSON format plus plan metadata.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/GetBackupPlan>`_

    :type BackupPlanId: string
    :param BackupPlanId: **[REQUIRED]** Uniquely identifies a backup plan.
    :type VersionId: string
    :param VersionId: Unique, randomly generated, Unicode, UTF-8 encoded
        string, at most 1,024 bytes long. Version IDs cannot be edited.
    :rtype: dict
    :returns: A dict with keys ``BackupPlan``, ``BackupPlanId``,
        ``BackupPlanArn``, ``VersionId``, ``CreatorRequestId``,
        ``CreationDate``, ``DeletionDate``, and ``LastExecutionDate``.
        ``BackupPlan`` is the body of the plan — a ``BackupPlanName`` plus
        a ``Rules`` list. Each rule is a scheduled task for backing up a
        selection of resources and carries ``RuleName``,
        ``TargetBackupVaultName``, ``ScheduleExpression`` (a CRON
        expression that triggers the backup job), ``StartWindowMinutes``
        (optional cancel-if-not-started window), ``CompletionWindowMinutes``
        (optional must-complete-by window), ``Lifecycle``
        (``MoveToColdStorageAfterDays``/``DeleteAfterDays``; cold-storage
        backups are retained at least 90 days, so the delete setting must
        exceed the transition setting by 90 days and the transition
        setting cannot change once a backup is cold),
        ``RecoveryPointTags`` (key-value strings applied to restored
        resources), and ``RuleId``.
    """
    pass
def get_backup_plan_from_json(self, BackupPlanTemplateJson: str) -> Dict:
    """Validate a JSON document as a backup plan.

    Returns a valid JSON document specifying a backup plan, or an error.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/GetBackupPlanFromJSON>`_

    :type BackupPlanTemplateJson: string
    :param BackupPlanTemplateJson: **[REQUIRED]** A customer-supplied
        backup plan document in JSON format.
    :rtype: dict
    :returns: A dict with a single ``BackupPlan`` key holding the plan
        body: a ``BackupPlanName`` plus a ``Rules`` list. Each rule is a
        scheduled task for backing up a selection of resources with
        ``RuleName``, ``TargetBackupVaultName``, ``ScheduleExpression``
        (CRON), ``StartWindowMinutes``, ``CompletionWindowMinutes``,
        ``Lifecycle`` (``MoveToColdStorageAfterDays``/``DeleteAfterDays``;
        cold-storage backups are retained at least 90 days, so the delete
        setting must exceed the transition setting by 90 days and the
        transition setting cannot change once a backup is cold),
        ``RecoveryPointTags``, and ``RuleId``.
    """
    pass
def get_backup_plan_from_template(self, BackupPlanTemplateId: str) -> Dict:
    """Materialize a stored template as a backup plan.

    Returns the template specified by its ``templateId`` as a backup plan.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/GetBackupPlanFromTemplate>`_

    :type BackupPlanTemplateId: string
    :param BackupPlanTemplateId: **[REQUIRED]** Uniquely identifies a
        stored backup plan template.
    :rtype: dict
    :returns: A dict with a single ``BackupPlanDocument`` key — the body
        of a backup plan based on the target template, including the name,
        rules, and backup vault of the plan. The document holds a
        ``BackupPlanName`` plus a ``Rules`` list; each rule carries
        ``RuleName``, ``TargetBackupVaultName``, ``ScheduleExpression``
        (CRON), ``StartWindowMinutes``, ``CompletionWindowMinutes``,
        ``Lifecycle`` (``MoveToColdStorageAfterDays``/``DeleteAfterDays``;
        cold-storage backups are retained at least 90 days, so the delete
        setting must exceed the transition setting by 90 days and the
        transition setting cannot change once a backup is cold),
        ``RecoveryPointTags``, and ``RuleId``.
    """
    pass
def get_backup_selection(self, BackupPlanId: str, SelectionId: str) -> Dict:
    """Fetch a backup plan's resource-selection document.

    Returns selection metadata and a JSON document that specifies the list
    of resources associated with a backup plan.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/GetBackupSelection>`_

    :type BackupPlanId: string
    :param BackupPlanId: **[REQUIRED]** Uniquely identifies a backup plan.
    :type SelectionId: string
    :param SelectionId: **[REQUIRED]** Uniquely identifies the body of a
        request to assign a set of resources to a backup plan.
    :rtype: dict
    :returns: A dict with keys ``BackupSelection``, ``SelectionId``,
        ``BackupPlanId``, ``CreationDate``, and ``CreatorRequestId``.
        ``BackupSelection`` is the body of the assignment request: a
        ``SelectionName``, an ``IamRoleArn`` (role AWS Backup uses to
        authenticate when restoring, e.g.
        ``arn:aws:iam::123456789012:role/S3Access``), ``Resources`` (ARNs
        or match patterns such as
        ``arn:aws:ec2:us-east-1:123456789012:volume/*``), and
        ``ListOfTags`` — condition triplets (``ConditionType`` such as
        ``STRINGEQUALS``, ``ConditionKey``, ``ConditionValue``) used to
        filter resources into the selection.
    """
    pass
def get_backup_vault_access_policy(self, BackupVaultName: str) -> Dict:
    """Fetch the access policy document of a backup vault.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/GetBackupVaultAccessPolicy>`_

    :type BackupVaultName: string
    :param BackupVaultName: **[REQUIRED]** The name of the logical
        container where backups are stored. Vault names are unique to the
        account and AWS Region that created them and consist of lowercase
        letters, numbers, and hyphens.
    :rtype: dict
    :returns: A dict with keys ``BackupVaultName``, ``BackupVaultArn``
        (e.g. ``arn:aws:backup:us-east-1:123456789012:vault:aBackupVault``),
        and ``Policy`` — the vault access policy document in JSON format.
    """
    pass
def get_backup_vault_notifications(self, BackupVaultName: str) -> Dict:
    """Fetch event notifications configured for a backup vault.

    See also: `AWS API Documentation
    <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/GetBackupVaultNotifications>`_

    :type BackupVaultName: string
    :param BackupVaultName: **[REQUIRED]** The name of the logical
        container where backups are stored. Vault names are unique to the
        account and AWS Region that created them and consist of lowercase
        letters, numbers, and hyphens.
    :rtype: dict
    :returns: A dict with keys ``BackupVaultName``, ``BackupVaultArn``,
        ``SNSTopicArn`` (the Amazon SNS topic that receives the events,
        e.g. ``arn:aws:sns:us-west-2:111122223333:MyTopic``), and
        ``BackupVaultEvents`` — a list of job-status event names drawn
        from ``BACKUP_JOB_STARTED``, ``BACKUP_JOB_COMPLETED``,
        ``RESTORE_JOB_STARTED``, ``RESTORE_JOB_COMPLETED``,
        ``RECOVERY_POINT_MODIFIED``, ``BACKUP_PLAN_CREATED``, and
        ``BACKUP_PLAN_MODIFIED``.
    """
    pass
def get_paginator(self, operation_name: str = None) -> Paginator:
    """Create a paginator for an operation.

    :type operation_name: string
    :param operation_name: The operation name — the same name as the
        client method. For example, if the method is ``create_foo`` and
        you would normally call ``client.create_foo(**kwargs)``, then,
        provided ``create_foo`` can be paginated, you can use
        ``client.get_paginator(\"create_foo\")``.
    :raise OperationNotPageableError: Raised when the operation is not
        pageable; ``client.can_paginate`` checks pageability in advance.
    :rtype: L{botocore.paginate.Paginator}
    :return: A paginator object.
    """
    pass
def get_recovery_point_restore_metadata(self, BackupVaultName: str, RecoveryPointArn: str) -> Dict:
    """
    Returns two sets of metadata key-value pairs: the metadata the recovery
    point was created with, and the metadata key-value pairs required to
    restore it. The two sets can be the same, or the restore metadata can
    contain different values if the target service to be restored has
    changed since the recovery point was created and now requires
    additional or different information in order to be restored.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/GetRecoveryPointRestoreMetadata>`_

    **Request Syntax**
    ::
        response = client.get_recovery_point_restore_metadata(
            BackupVaultName='string',
            RecoveryPointArn='string'
        )

    **Response Syntax**
    ::
        {
            'BackupVaultArn': 'string',
            'RecoveryPointArn': 'string',
            'RestoreMetadata': {
                'string': 'string'
            }
        }

    :type BackupVaultName: string
    :param BackupVaultName: **[REQUIRED]** The name of a logical container
        where backups are stored. Backup vault names are unique to the
        account used to create them and the AWS Region where they are
        created, and consist of lowercase letters, numbers, and hyphens.
    :type RecoveryPointArn: string
    :param RecoveryPointArn: **[REQUIRED]** An Amazon Resource Name (ARN)
        that uniquely identifies a recovery point; for example,
        ``arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45`` .
    :rtype: dict
    :returns: A dict with ``BackupVaultArn``, ``RecoveryPointArn``, and the
        ``RestoreMetadata`` key-value pairs required to restore the
        recovery point.
    """
    pass
def get_supported_resource_types(self) -> Dict:
    """
    Returns the AWS resource types supported by AWS Backup.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/GetSupportedResourceTypes>`_

    **Request Syntax**
    ::
        response = client.get_supported_resource_types()

    **Response Syntax**
    ::
        {
            'ResourceTypes': [
                'string',
            ]
        }

    ``ResourceTypes`` contains strings naming the supported AWS resource
    types:

    * ``EBS`` for Amazon Elastic Block Store
    * ``SGW`` for AWS Storage Gateway
    * ``RDS`` for Amazon Relational Database Service
    * ``DDB`` for Amazon DynamoDB
    * ``EFS`` for Amazon Elastic File System

    :rtype: dict
    :returns: A dict with the ``ResourceTypes`` list.
    """
    pass
def get_waiter(self, waiter_name: str = None) -> Waiter:
    """
    Returns an object that can wait for some condition.

    :type waiter_name: str
    :param waiter_name: The name of the waiter to get. The waiters section
        of the service documentation lists the available waiters.
    :returns: The specified waiter object.
    :rtype: botocore.waiter.Waiter
    """
    pass
def list_backup_jobs(self, NextToken: str = None, MaxResults: int = None, ByResourceArn: str = None, ByState: str = None, ByBackupVaultName: str = None, ByCreatedBefore: datetime = None, ByCreatedAfter: datetime = None, ByResourceType: str = None) -> Dict:
    """
    Returns metadata about your backup jobs.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/ListBackupJobs>`_

    **Request Syntax**
    ::
        response = client.list_backup_jobs(
            NextToken='string',
            MaxResults=123,
            ByResourceArn='string',
            ByState='CREATED'|'PENDING'|'RUNNING'|'ABORTING'|'ABORTED'|'COMPLETED'|'FAILED'|'EXPIRED',
            ByBackupVaultName='string',
            ByCreatedBefore=datetime(2015, 1, 1),
            ByCreatedAfter=datetime(2015, 1, 1),
            ByResourceType='string'
        )

    The response carries ``BackupJobs``, an array of structures returned in
    JSON format with detailed information about each backup job —
    ``BackupJobId``, ``BackupVaultName``, ``BackupVaultArn``,
    ``RecoveryPointArn``, ``ResourceArn``, ``CreationDate``,
    ``CompletionDate``, ``State``, ``StatusMessage``, ``PercentDone``,
    ``BackupSizeInBytes``, ``IamRoleArn``, ``CreatedBy`` (the
    ``BackupPlanId``, ``BackupPlanArn``, ``BackupPlanVersion``, and
    ``BackupRuleId`` of the backup plan used to create the job),
    ``ExpectedCompletionDate``, ``StartBy``, ``ResourceType``, and
    ``BytesTransferred`` — plus a ``NextToken`` for continuing a partial
    list. Timestamps are in Unix format and Coordinated Universal Time
    (UTC), accurate to milliseconds; for example, the value 1516925490.087
    represents Friday, January 26, 2018 12:11:30.087 AM.

    :type NextToken: string
    :param NextToken: The next item following a partial list of returned
        items. If a request is made to return ``maxResults`` number of
        items, ``NextToken`` allows you to return more items in your list
        starting at the location pointed to by the next token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be returned.
    :type ByResourceArn: string
    :param ByResourceArn: Returns only backup jobs that match the specified
        resource Amazon Resource Name (ARN).
    :type ByState: string
    :param ByState: Returns only backup jobs that are in the specified
        state.
    :type ByBackupVaultName: string
    :param ByBackupVaultName: Returns only backup jobs that will be stored
        in the specified backup vault. Vault names are unique to the
        account used to create them and the AWS Region where they are
        created, and consist of lowercase letters, numbers, and hyphens.
    :type ByCreatedBefore: datetime
    :param ByCreatedBefore: Returns only backup jobs that were created
        before the specified date.
    :type ByCreatedAfter: datetime
    :param ByCreatedAfter: Returns only backup jobs that were created after
        the specified date.
    :type ByResourceType: string
    :param ByResourceType: Returns only backup jobs for the specified
        resources: ``EBS`` for Amazon Elastic Block Store, ``SGW`` for AWS
        Storage Gateway, ``RDS`` for Amazon Relational Database Service,
        ``DDB`` for Amazon DynamoDB, or ``EFS`` for Amazon Elastic File
        System.
    :rtype: dict
    :returns: A dict with ``BackupJobs`` and ``NextToken``.
    """
    pass
def list_backup_plan_templates(self, NextToken: str = None, MaxResults: int = None) -> Dict:
    """
    Returns metadata of your saved backup plan templates, including the
    template ID, name, and the creation and deletion dates.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/ListBackupPlanTemplates>`_

    **Request Syntax**
    ::
        response = client.list_backup_plan_templates(
            NextToken='string',
            MaxResults=123
        )

    **Response Syntax**
    ::
        {
            'NextToken': 'string',
            'BackupPlanTemplatesList': [
                {
                    'BackupPlanTemplateId': 'string',
                    'BackupPlanTemplateName': 'string'
                },
            ]
        }

    ``BackupPlanTemplatesList`` holds one item per saved template, each
    carrying the template's unique ID and optional display name;
    ``NextToken`` allows a partial list to be continued.

    :type NextToken: string
    :param NextToken: The next item following a partial list of returned
        items. If a request is made to return ``maxResults`` number of
        items, ``NextToken`` allows you to return more items in your list
        starting at the location pointed to by the next token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be returned.
    :rtype: dict
    :returns: A dict with ``NextToken`` and ``BackupPlanTemplatesList``.
    """
    pass
def list_backup_plan_versions(self, BackupPlanId: str, NextToken: str = None, MaxResults: int = None) -> Dict:
    """
    Returns version metadata of your backup plans, including Amazon
    Resource Names (ARNs), backup plan IDs, creation and deletion dates,
    plan names, and version IDs.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/ListBackupPlanVersions>`_

    **Request Syntax**
    ::
        response = client.list_backup_plan_versions(
            BackupPlanId='string',
            NextToken='string',
            MaxResults=123
        )

    The response carries ``BackupPlanVersionsList``, one entry per plan
    version with ``BackupPlanArn``, ``BackupPlanId``, ``CreationDate``,
    ``DeletionDate``, ``VersionId``, ``BackupPlanName``,
    ``CreatorRequestId``, and ``LastExecutionDate``, plus a ``NextToken``
    for continuing a partial list. Version IDs are unique, randomly
    generated, Unicode, UTF-8 encoded strings that are at most 1,024 bytes
    long and cannot be edited. Timestamps are in Unix format and
    Coordinated Universal Time (UTC), accurate to milliseconds.

    :type BackupPlanId: string
    :param BackupPlanId: **[REQUIRED]** Uniquely identifies a backup plan.
    :type NextToken: string
    :param NextToken: The next item following a partial list of returned
        items. If a request is made to return ``maxResults`` number of
        items, ``NextToken`` allows you to return more items in your list
        starting at the location pointed to by the next token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be returned.
    :rtype: dict
    :returns: A dict with ``NextToken`` and ``BackupPlanVersionsList``.
    """
    pass
def list_backup_plans(self, NextToken: str = None, MaxResults: int = None, IncludeDeleted: bool = None) -> Dict:
    """
    Returns metadata of your saved backup plans, including Amazon Resource
    Names (ARNs), plan IDs, creation and deletion dates, version IDs, plan
    names, and creator request IDs.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/ListBackupPlans>`_

    **Request Syntax**
    ::
        response = client.list_backup_plans(
            NextToken='string',
            MaxResults=123,
            IncludeDeleted=True|False
        )

    The response carries ``BackupPlansList``, one entry per saved backup
    plan with ``BackupPlanArn``, ``BackupPlanId``, ``CreationDate``,
    ``DeletionDate``, ``VersionId``, ``BackupPlanName``,
    ``CreatorRequestId``, and ``LastExecutionDate``, plus a ``NextToken``
    for continuing a partial list. Version IDs are unique, randomly
    generated, Unicode, UTF-8 encoded strings that are at most 1,024 bytes
    long and cannot be edited. Timestamps are in Unix format and
    Coordinated Universal Time (UTC), accurate to milliseconds.

    :type NextToken: string
    :param NextToken: The next item following a partial list of returned
        items. If a request is made to return ``maxResults`` number of
        items, ``NextToken`` allows you to return more items in your list
        starting at the location pointed to by the next token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be returned.
    :type IncludeDeleted: boolean
    :param IncludeDeleted: A Boolean value with a default value of
        ``FALSE`` that returns deleted backup plans when set to ``TRUE`` .
    :rtype: dict
    :returns: A dict with ``NextToken`` and ``BackupPlansList``.
    """
    pass
def list_backup_selections(self, BackupPlanId: str, NextToken: str = None, MaxResults: int = None) -> Dict:
    """
    Returns an array containing metadata of the resources associated with
    the target backup plan.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/ListBackupSelections>`_

    **Request Syntax**
    ::
        response = client.list_backup_selections(
            BackupPlanId='string',
            NextToken='string',
            MaxResults=123
        )

    The response carries ``BackupSelectionsList``, one entry per
    ``BackupSelection`` with ``SelectionId`` (uniquely identifies the
    request to assign a set of resources to a backup plan),
    ``SelectionName``, ``BackupPlanId``, ``CreationDate``,
    ``CreatorRequestId``, and ``IamRoleArn`` (the IAM role ARN used to
    create the target recovery point; for example,
    ``arn:aws:iam::123456789012:role/S3Access`` ), plus a ``NextToken`` for
    continuing a partial list.

    :type BackupPlanId: string
    :param BackupPlanId: **[REQUIRED]** Uniquely identifies a backup plan.
    :type NextToken: string
    :param NextToken: The next item following a partial list of returned
        items. If a request is made to return ``maxResults`` number of
        items, ``NextToken`` allows you to return more items in your list
        starting at the location pointed to by the next token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be returned.
    :rtype: dict
    :returns: A dict with ``NextToken`` and ``BackupSelectionsList``.
    """
    pass
def list_backup_vaults(self, NextToken: str = None, MaxResults: int = None) -> Dict:
    """
    Returns a list of recovery point storage containers along with
    information about them.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/ListBackupVaults>`_

    **Request Syntax**
    ::
        response = client.list_backup_vaults(
            NextToken='string',
            MaxResults=123
        )

    The response carries ``BackupVaultList``, an array of vault metadata —
    ``BackupVaultName``, ``BackupVaultArn``, ``CreationDate``,
    ``EncryptionKeyArn`` (the server-side encryption key protecting the
    backups, if the resources saved in the vault are encrypted),
    ``CreatorRequestId``, and ``NumberOfRecoveryPoints`` — plus a
    ``NextToken`` for continuing a partial list. Timestamps are in Unix
    format and Coordinated Universal Time (UTC), accurate to milliseconds.

    :type NextToken: string
    :param NextToken: The next item following a partial list of returned
        items. If a request is made to return ``maxResults`` number of
        items, ``NextToken`` allows you to return more items in your list
        starting at the location pointed to by the next token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be returned.
    :rtype: dict
    :returns: A dict with ``BackupVaultList`` and ``NextToken``.
    """
    pass
def list_protected_resources(self, NextToken: str = None, MaxResults: int = None) -> Dict:
    """
    Returns an array of resources successfully backed up by AWS Backup,
    including the time the resource was saved, an Amazon Resource Name
    (ARN) of the resource, and a resource type.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/ListProtectedResources>`_

    **Request Syntax**
    ::
        response = client.list_protected_resources(
            NextToken='string',
            MaxResults=123
        )

    **Response Syntax**
    ::
        {
            'Results': [
                {
                    'ResourceArn': 'string',
                    'ResourceType': 'string',
                    'LastBackupTime': datetime(2015, 1, 1)
                },
            ],
            'NextToken': 'string'
        }

    Each entry in ``Results`` describes a backed-up resource:
    ``ResourceArn`` (format depends on the resource type),
    ``ResourceType`` (for example, an Amazon EBS volume or an Amazon RDS
    database), and ``LastBackupTime`` in Unix format and Coordinated
    Universal Time (UTC), accurate to milliseconds.

    :type NextToken: string
    :param NextToken: The next item following a partial list of returned
        items. If a request is made to return ``maxResults`` number of
        items, ``NextToken`` allows you to return more items in your list
        starting at the location pointed to by the next token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be returned.
    :rtype: dict
    :returns: A dict with ``Results`` and ``NextToken``.
    """
    pass
def list_recovery_points_by_backup_vault(self, BackupVaultName: str, NextToken: str = None, MaxResults: int = None, ByResourceArn: str = None, ByResourceType: str = None, ByBackupPlanId: str = None, ByCreatedBefore: datetime = None, ByCreatedAfter: datetime = None) -> Dict:
    """
    Returns detailed information about the recovery points stored in a
    backup vault.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/ListRecoveryPointsByBackupVault>`_

    **Request Syntax**
    ::
        response = client.list_recovery_points_by_backup_vault(
            BackupVaultName='string',
            NextToken='string',
            MaxResults=123,
            ByResourceArn='string',
            ByResourceType='string',
            ByBackupPlanId='string',
            ByCreatedBefore=datetime(2015, 1, 1),
            ByCreatedAfter=datetime(2015, 1, 1)
        )

    The response carries ``RecoveryPoints``, one entry per saved recovery
    point with ``RecoveryPointArn``, ``BackupVaultName``,
    ``BackupVaultArn``, ``ResourceArn``, ``ResourceType``, ``CreatedBy``
    (the ``BackupPlanId``, ``BackupPlanArn``, ``BackupPlanVersion``, and
    ``BackupRuleId`` of the backup plan used to create it), ``IamRoleArn``,
    ``Status`` (``COMPLETED`` | ``PARTIAL`` | ``DELETING`` | ``EXPIRED``),
    ``CreationDate``, ``CompletionDate``, ``BackupSizeInBytes``,
    ``CalculatedLifecycle`` (``MoveToColdStorageAt`` / ``DeleteAt``
    timestamps), ``Lifecycle`` (``MoveToColdStorageAfterDays`` /
    ``DeleteAfterDays``), ``EncryptionKeyArn``, ``IsEncrypted``, and
    ``LastRestoreTime``, plus a ``NextToken`` for continuing a partial
    list. The lifecycle defines when a protected resource transitions to
    cold storage and when it expires; backups transitioned to cold storage
    must be stored there for a minimum of 90 days, so the "expire after
    days" setting must be 90 days greater than the "transition to cold
    after days" setting, and the latter cannot be changed after a backup
    has been transitioned to cold. Timestamps are in Unix format and
    Coordinated Universal Time (UTC), accurate to milliseconds.

    :type BackupVaultName: string
    :param BackupVaultName: **[REQUIRED]** The name of a logical container
        where backups are stored. Backup vault names are unique to the
        account used to create them and the AWS Region where they are
        created, and consist of lowercase letters, numbers, and hyphens.
    :type NextToken: string
    :param NextToken: The next item following a partial list of returned
        items. If a request is made to return ``maxResults`` number of
        items, ``NextToken`` allows you to return more items in your list
        starting at the location pointed to by the next token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be returned.
    :type ByResourceArn: string
    :param ByResourceArn: Returns only recovery points that match the
        specified resource Amazon Resource Name (ARN).
    :type ByResourceType: string
    :param ByResourceType: Returns only recovery points that match the
        specified resource type.
    :type ByBackupPlanId: string
    :param ByBackupPlanId: Returns only recovery points that match the
        specified backup plan ID.
    :type ByCreatedBefore: datetime
    :param ByCreatedBefore: Returns only recovery points that were created
        before the specified timestamp.
    :type ByCreatedAfter: datetime
    :param ByCreatedAfter: Returns only recovery points that were created
        after the specified timestamp.
    :rtype: dict
    :returns: A dict with ``NextToken`` and ``RecoveryPoints``.
    """
    pass
def list_recovery_points_by_resource(self, ResourceArn: str, NextToken: str = None, MaxResults: int = None) -> Dict:
    """Return detailed information about recovery points of the type specified by a resource ARN.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/ListRecoveryPointsByResource>`_

    :type ResourceArn: string
    :param ResourceArn: **[REQUIRED]** An ARN that uniquely identifies a resource; the format depends on the resource type.
    :type NextToken: string
    :param NextToken: Pagination token: the next item following a partial list of returned items.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be returned.
    :rtype: dict
    :returns: ``NextToken`` plus ``RecoveryPoints``, a list of dicts with ``RecoveryPointArn``,
        ``CreationDate``, ``Status`` (COMPLETED|PARTIAL|DELETING|EXPIRED), ``EncryptionKeyArn``,
        ``BackupSizeBytes`` and ``BackupVaultName``.
    """
    # Generated stub: the real request dispatch is injected by botocore at runtime.
    pass
def list_restore_jobs(self, NextToken: str = None, MaxResults: int = None) -> Dict:
    """Return a list of jobs that AWS Backup initiated to restore saved resources, with recovery-process metadata.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/ListRestoreJobs>`_

    :type NextToken: string
    :param NextToken: Pagination token: the next item following a partial list of returned items.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be returned.
    :rtype: dict
    :returns: ``RestoreJobs`` (each with ``RestoreJobId``, ``RecoveryPointArn``, ``CreationDate``,
        ``CompletionDate``, ``Status`` (PENDING|RUNNING|COMPLETED|ABORTED|FAILED), ``StatusMessage``,
        ``PercentDone``, ``BackupSizeInBytes``, ``IamRoleArn``, ``ExpectedCompletionTimeMinutes``,
        ``CreatedResourceArn``) plus ``NextToken``.
    """
    # Generated stub: the real request dispatch is injected by botocore at runtime.
    pass
def list_tags(self, ResourceArn: str, NextToken: str = None, MaxResults: int = None) -> Dict:
    """Return the key-value pairs assigned to a target recovery point, backup plan, or backup vault.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/ListTags>`_

    :type ResourceArn: string
    :param ResourceArn: **[REQUIRED]** ARN of the tagged resource; valid targets are recovery
        points, backup plans, and backup vaults.
    :type NextToken: string
    :param NextToken: Pagination token: the next item following a partial list of returned items.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be returned.
    :rtype: dict
    :returns: ``NextToken`` plus ``Tags``, a string-to-string mapping of the resource's metadata.
    """
    # Generated stub: the real request dispatch is injected by botocore at runtime.
    pass
def put_backup_vault_access_policy(self, BackupVaultName: str, Policy: str = None):
    """Set a resource-based access policy on the target backup vault.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/PutBackupVaultAccessPolicy>`_

    :type BackupVaultName: string
    :param BackupVaultName: **[REQUIRED]** Name of the logical container where backups are stored;
        unique per account and AWS Region (lowercase letters, numbers, and hyphens).
    :type Policy: string
    :param Policy: The backup vault access policy document in JSON format.
    :returns: None
    """
    # Generated stub: the real request dispatch is injected by botocore at runtime.
    pass
def put_backup_vault_notifications(self, BackupVaultName: str, SNSTopicArn: str, BackupVaultEvents: List):
    """Turn on notifications on a backup vault for the specified SNS topic and events.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/PutBackupVaultNotifications>`_

    :type BackupVaultName: string
    :param BackupVaultName: **[REQUIRED]** Name of the logical container where backups are stored;
        unique per account and AWS Region (lowercase letters, numbers, and hyphens).
    :type SNSTopicArn: string
    :param SNSTopicArn: **[REQUIRED]** ARN of the topic for the vault's events; for example,
        ``arn:aws:sns:us-west-2:111122223333:MyVaultTopic``.
    :type BackupVaultEvents: list
    :param BackupVaultEvents: **[REQUIRED]** Event names such as ``BACKUP_JOB_STARTED``,
        ``BACKUP_JOB_COMPLETED``, ``RESTORE_JOB_STARTED``, ``RESTORE_JOB_COMPLETED``,
        ``RECOVERY_POINT_MODIFIED``, ``BACKUP_PLAN_CREATED``, ``BACKUP_PLAN_MODIFIED``.
    :returns: None
    """
    # Generated stub: the real request dispatch is injected by botocore at runtime.
    pass
def start_backup_job(self, BackupVaultName: str, ResourceArn: str, IamRoleArn: str, IdempotencyToken: str = None, StartWindowMinutes: int = None, CompleteWindowMinutes: int = None, Lifecycle: Dict = None, RecoveryPointTags: Dict = None) -> Dict:
    """Start a job to create a one-time backup of the specified resource.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/StartBackupJob>`_

    :type BackupVaultName: string
    :param BackupVaultName: **[REQUIRED]** Name of the logical container where backups are stored;
        unique per account and AWS Region (lowercase letters, numbers, and hyphens).
    :type ResourceArn: string
    :param ResourceArn: **[REQUIRED]** ARN of the resource to back up; format depends on the resource type.
    :type IamRoleArn: string
    :param IamRoleArn: **[REQUIRED]** IAM role ARN used to create the target recovery point; for
        example, ``arn:aws:iam::123456789012:role/S3Access``.
    :type IdempotencyToken: string
    :param IdempotencyToken: Customer-chosen string distinguishing ``StartBackupJob`` calls; tokens
        time out after one hour, so identical tokens within that window start only one job.
    :type StartWindowMinutes: integer
    :param StartWindowMinutes: Minutes before beginning a backup.
    :type CompleteWindowMinutes: integer
    :param CompleteWindowMinutes: Minutes AWS Backup attempts a backup before canceling with an error.
    :type Lifecycle: dict
    :param Lifecycle: ``MoveToColdStorageAfterDays`` and ``DeleteAfterDays``; the delete setting
        must exceed the cold-storage setting by at least 90 days, and the cold-storage setting
        cannot change after a backup has been transitioned to cold.
    :type RecoveryPointTags: dict
    :param RecoveryPointTags: Key-value metadata to assign to the recovery point.
    :rtype: dict
    :returns: ``BackupJobId``, ``RecoveryPointArn`` and ``CreationDate``.
    """
    # Generated stub: the real request dispatch is injected by botocore at runtime.
    pass
def start_restore_job(self, RecoveryPointArn: str, Metadata: Dict, IamRoleArn: str, IdempotencyToken: str = None, ResourceType: str = None) -> Dict:
    """Recover the saved resource identified by an ARN.

    If a resource ARN is supplied, the last complete backup of that resource is recovered; if a
    recovery-point ARN is supplied, that recovery point is restored.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/StartRestoreJob>`_

    :type RecoveryPointArn: string
    :param RecoveryPointArn: **[REQUIRED]** ARN that uniquely identifies a recovery point.
    :type Metadata: dict
    :param Metadata: **[REQUIRED]** Key-value metadata the recovery point was created with.
    :type IamRoleArn: string
    :param IamRoleArn: **[REQUIRED]** ARN of the IAM role AWS Backup uses to create the target
        recovery point; for example, ``arn:aws:iam::123456789012:role/S3Access``.
    :type IdempotencyToken: string
    :param IdempotencyToken: Customer-chosen string distinguishing ``StartRestoreJob`` calls;
        tokens time out after one hour.
    :type ResourceType: string
    :param ResourceType: One of ``EBS``, ``SGW``, ``RDS``, ``DDB`` or ``EFS``.
    :rtype: dict
    :returns: ``RestoreJobId``, which uniquely identifies the restore job.
    """
    # Generated stub: the real request dispatch is injected by botocore at runtime.
    pass
def stop_backup_job(self, BackupJobId: str):
    """Attempt to cancel a job that is creating a one-time backup of a resource.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/StopBackupJob>`_

    :type BackupJobId: string
    :param BackupJobId: **[REQUIRED]** Uniquely identifies a request to AWS Backup to back up a resource.
    :returns: None
    """
    # Generated stub: the real request dispatch is injected by botocore at runtime.
    pass
def tag_resource(self, ResourceArn: str, Tags: Dict):
    """Assign key-value pairs to a recovery point, backup plan, or backup vault identified by ARN.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/TagResource>`_

    :type ResourceArn: string
    :param ResourceArn: **[REQUIRED]** ARN of the tagged resource; format depends on the resource type.
    :type Tags: dict
    :param Tags: **[REQUIRED]** Key-value pairs used to help organize your resources.
    :returns: None
    """
    # Generated stub: the real request dispatch is injected by botocore at runtime.
    pass
def untag_resource(self, ResourceArn: str, TagKeyList: List):
    """Remove key-value pairs from a recovery point, backup plan, or backup vault identified by ARN.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/UntagResource>`_

    :type ResourceArn: string
    :param ResourceArn: **[REQUIRED]** ARN of the tagged resource; format depends on the resource type.
    :type TagKeyList: list
    :param TagKeyList: **[REQUIRED]** Keys identifying which key-value tags to remove.
    :returns: None
    """
    # Generated stub: the real request dispatch is injected by botocore at runtime.
    pass
def update_backup_plan(self, BackupPlanId: str, BackupPlan: Dict) -> Dict:
    """Replace the body of a saved backup plan with the given JSON document.

    The new version is uniquely identified by a ``VersionId``.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/UpdateBackupPlan>`_

    :type BackupPlanId: string
    :param BackupPlanId: **[REQUIRED]** Uniquely identifies a backup plan.
    :type BackupPlan: dict
    :param BackupPlan: **[REQUIRED]** Plan body: a ``BackupPlanName`` plus ``Rules``, an array of
        ``BackupRule`` dicts. Each rule carries a required ``RuleName`` and
        ``TargetBackupVaultName``, and optionally ``ScheduleExpression`` (CRON),
        ``StartWindowMinutes``, ``CompletionWindowMinutes``, a ``Lifecycle`` dict
        (``MoveToColdStorageAfterDays``, ``DeleteAfterDays`` — delete must exceed cold-storage by
        at least 90 days) and ``RecoveryPointTags``.
    :rtype: dict
    :returns: ``BackupPlanId``, ``BackupPlanArn``, ``CreationDate`` and ``VersionId`` (unique,
        randomly generated, at most 1,024 bytes; not editable).
    """
    # Generated stub: the real request dispatch is injected by botocore at runtime.
    pass
def update_recovery_point_lifecycle(self, BackupVaultName: str, RecoveryPointArn: str, Lifecycle: Dict = None) -> Dict:
    """Set the transition lifecycle of a recovery point.

    The lifecycle defines when a protected resource is transitioned to cold storage and when it
    expires. Backups must stay in cold storage at least 90 days, so ``DeleteAfterDays`` must be at
    least 90 days greater than ``MoveToColdStorageAfterDays``, and the cold-storage setting cannot
    change after a backup has been transitioned to cold.

    See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/UpdateRecoveryPointLifecycle>`_

    :type BackupVaultName: string
    :param BackupVaultName: **[REQUIRED]** Name of the logical container where backups are stored;
        unique per account and AWS Region (lowercase letters, numbers, and hyphens).
    :type RecoveryPointArn: string
    :param RecoveryPointArn: **[REQUIRED]** ARN that uniquely identifies a recovery point.
    :type Lifecycle: dict
    :param Lifecycle: ``MoveToColdStorageAfterDays`` and ``DeleteAfterDays`` (see rule above).
    :rtype: dict
    :returns: ``BackupVaultArn``, ``RecoveryPointArn``, the applied ``Lifecycle``, and
        ``CalculatedLifecycle`` with ``MoveToColdStorageAt``/``DeleteAt`` timestamps.
    """
    # Generated stub: the real request dispatch is injected by botocore at runtime.
    pass
| 60.659608
| 496
| 0.595267
| 16,376
| 151,831
| 5.5
| 0.042196
| 0.012124
| 0.018142
| 0.00977
| 0.872696
| 0.848248
| 0.822956
| 0.79742
| 0.782387
| 0.768764
| 0
| 0.033005
| 0.315337
| 151,831
| 2,502
| 497
| 60.683853
| 0.833425
| 0.836845
| 0
| 0.456311
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.456311
| false
| 0.456311
| 0.07767
| 0
| 0.543689
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
88f1894643514c7980a71a1462d1bc0a44dc70f4
| 974
|
py
|
Python
|
src/test_echo_server.py
|
linus87/drl_shape_optimization
|
39e6b66bd5b70dfce07e145aafe815071bc1b6fe
|
[
"MIT"
] | 17
|
2020-12-28T16:25:47.000Z
|
2022-03-27T18:28:44.000Z
|
src/test_echo_server.py
|
linus87/drl_shape_optimization
|
39e6b66bd5b70dfce07e145aafe815071bc1b6fe
|
[
"MIT"
] | 2
|
2021-04-18T03:40:02.000Z
|
2022-01-24T08:40:10.000Z
|
src/test_echo_server.py
|
linus87/drl_shape_optimization
|
39e6b66bd5b70dfce07e145aafe815071bc1b6fe
|
[
"MIT"
] | 8
|
2020-12-23T05:59:52.000Z
|
2022-03-28T12:06:35.000Z
|
from __future__ import print_function
from echo_server import EchoServer
import numpy as np
# check with a list
print()
print("Test with a list")
data = [1, 2, 3]
echo_server_instance = EchoServer(verbose=5)
encoded = echo_server_instance.encode_message('DUMP', data)
print(encoded)
decoded = echo_server_instance.decode_message(encoded)
print(decoded)
# check with a numpy array
print()
print("Test with a np array")
data = np.array([1.0, 2.0, 3.0])
echo_server_instance = EchoServer(verbose=5)
encoded = echo_server_instance.encode_message('DUMP', data)
print(encoded)
decoded = echo_server_instance.decode_message(encoded)
print(decoded)
# check with a dict
print()
print("Test with a np array")
data = {'a': np.array([1.0, 2.0, 3.0]), 'b': [1, 2, 3], 4 : "blabla"}
echo_server_instance = EchoServer(verbose=5)
encoded = echo_server_instance.encode_message('DUMP', data)
print(encoded)
decoded = echo_server_instance.decode_message(encoded)
print(decoded)
| 21.173913
| 69
| 0.752567
| 151
| 974
| 4.655629
| 0.231788
| 0.142248
| 0.230441
| 0.076814
| 0.822191
| 0.795164
| 0.795164
| 0.795164
| 0.672831
| 0.672831
| 0
| 0.025731
| 0.122177
| 974
| 45
| 70
| 21.644444
| 0.796491
| 0.061602
| 0
| 0.740741
| 0
| 0
| 0.083516
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0.481481
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
0023081cca9c1d1a17ed7792764dfd7817bf6b1f
| 8,276
|
py
|
Python
|
tests/test_endem.py
|
rombr/endem-su-test-task
|
5ee92273049499b7ae50ca7db4efe108e81eccfb
|
[
"MIT"
] | null | null | null |
tests/test_endem.py
|
rombr/endem-su-test-task
|
5ee92273049499b7ae50ca7db4efe108e81eccfb
|
[
"MIT"
] | null | null | null |
tests/test_endem.py
|
rombr/endem-su-test-task
|
5ee92273049499b7ae50ca7db4efe108e81eccfb
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_endem
----------------------------------
Tests for `endem` module.
"""
import pytest
import smtplib
from endem import endem
@pytest.fixture
def emaillib_gmail():
    """Fixture: an EmailSender wired to the 'gmail' provider."""
    sender = endem.EmailSender('gmail')
    return sender
@pytest.fixture
def emaillib_yandex():
    """Fixture: an EmailSender wired to the 'yandex' provider."""
    sender = endem.EmailSender('yandex')
    return sender
@pytest.fixture
def emaillib_mail_ru():
    """Fixture: an EmailSender wired to the 'mail_ru' provider."""
    sender = endem.EmailSender('mail_ru')
    return sender
def test_emaillib_init():
    """Library initialization succeeds with a valid provider."""
    sender = endem.EmailSender('gmail')
    assert sender
def test_emaillib_wrong_init():
    """Library initialization without arguments must raise TypeError."""
    with pytest.raises(TypeError):
        endem.EmailSender()
def test_emaillib_wrong_provider_init():
    """An unknown provider name must raise EmailSenderError mentioning 'Invalid provider'."""
    with pytest.raises(endem.EmailSenderError) as excinfo:
        endem.EmailSender('wrong_provider')
    assert 'Invalid provider' in str(excinfo.value)
class TestGmailSend:
    """Send tests against the 'gmail' provider via the emaillib_gmail fixture."""

    # Account used by the live-send tests below.
    # NOTE(review): these look like placeholders; real credentials appear to be
    # required for the send tests to pass — confirm before running.
    sender_email = 'user@gmail.com'
    sender_password = 'secret'
    to_email = 'user@gmail.com'

    def test_send_with_wrong_to_email(self, emaillib_gmail):
        '''
        Sending a message with an invalid recipient email address
        '''
        with pytest.raises(endem.EmailSenderError) as excinfo:
            emaillib_gmail.send(
                'email@example.com', 'password',
                'wrong',
                'subject', 'message'
            )
        assert 'invalid email' in str(excinfo.value)

    def test_send_with_wrong_from_email(self, emaillib_gmail):
        '''
        Sending a message with an invalid sender email address
        '''
        with pytest.raises(endem.EmailSenderError) as excinfo:
            emaillib_gmail.send(
                'wrong', 'password',
                'email@example.com',
                'subject', 'message'
            )
        assert 'invalid email' in str(excinfo.value)

    def test_send_email(self, emaillib_gmail):
        '''
        Sending a message
        '''
        assert emaillib_gmail.send(
            self.sender_email, self.sender_password,
            self.to_email,
            'From test run!', 'message'
        )

    def test_send_email_to_multi_receivers(self, emaillib_gmail):
        '''
        Sending a message to several recipients
        '''
        assert emaillib_gmail.send(
            self.sender_email, self.sender_password,
            [self.to_email, 'user@example.com'],
            'From test run!', 'message'
        )

    def test_send_email_with_markdown(self, emaillib_gmail):
        '''
        Sending a message in markdown format
        '''
        res = emaillib_gmail.send(
            self.sender_email, self.sender_password,
            self.to_email,
            'From test run!',
            '# Title\n\ntext `code`'
        )
        # The markdown source must be rendered to HTML in the message body.
        assert res.get('body') == (
            '<h1>Title</h1>\n'
            '<p>text <code>code</code></p>'
        )

    def test_send_email_with_wrong_password(self, emaillib_gmail):
        '''
        Sending a message with a wrong password
        '''
        with pytest.raises(smtplib.SMTPAuthenticationError) as excinfo:
            emaillib_gmail.send(
                self.sender_email, 'wrong_password',
                self.to_email,
                'From test run!', 'message'
            )
        assert 'Username and Password not accepted' in str(excinfo.value)
class TestYandexSend:
    """Sending e-mail through the Yandex provider."""

    # Credentials and recipient used by the happy-path tests.
    sender_email = 'user@yandex.ru'
    sender_password = 'secret'
    to_email = 'user@yandex.ru'

    def test_send_with_wrong_to_email(self, emaillib_yandex):
        """A malformed recipient address raises ``EmailSenderError``."""
        with pytest.raises(endem.EmailSenderError) as err:
            emaillib_yandex.send(
                'email@example.com', 'password', 'wrong', 'subject', 'message'
            )
        assert 'invalid email' in str(err.value)

    def test_send_with_wrong_from_email(self, emaillib_yandex):
        """A malformed sender address raises ``EmailSenderError``."""
        with pytest.raises(endem.EmailSenderError) as err:
            emaillib_yandex.send(
                'wrong', 'password', 'email@example.com', 'subject', 'message'
            )
        assert 'invalid email' in str(err.value)

    def test_send_email(self, emaillib_yandex):
        """A plain message to one recipient is sent successfully."""
        result = emaillib_yandex.send(
            self.sender_email, self.sender_password, self.to_email,
            'From test run!', 'message'
        )
        assert result

    def test_send_email_to_multi_receivers(self, emaillib_yandex):
        """A message addressed to several recipients is sent successfully."""
        recipients = [self.to_email, 'user@example.com']
        result = emaillib_yandex.send(
            self.sender_email, self.sender_password, recipients,
            'From test run!', 'message'
        )
        assert result

    def test_send_email_with_markdown(self, emaillib_yandex):
        """A markdown body is converted to HTML before sending."""
        res = emaillib_yandex.send(
            self.sender_email, self.sender_password, self.to_email,
            'From test run!', '# Title\n\ntext `code`'
        )
        assert res.get('body') == '<h1>Title</h1>\n<p>text <code>code</code></p>'

    def test_send_email_with_wrong_password(self, emaillib_yandex):
        """A bad password surfaces the SMTP authentication failure."""
        with pytest.raises(smtplib.SMTPAuthenticationError) as err:
            emaillib_yandex.send(
                self.sender_email, 'wrong_password', self.to_email,
                'From test run!', 'message'
            )
        assert 'Invalid user or password!' in str(err.value)
class TestMailRuSend:
    """Sending e-mail through the Mail.ru provider."""

    # Credentials and recipient used by the happy-path tests.
    sender_email = 'user@mail.ru'
    sender_password = 'secret'
    to_email = 'user@mail.ru'

    def test_send_with_wrong_to_email(self, emaillib_mail_ru):
        """A malformed recipient address raises ``EmailSenderError``."""
        with pytest.raises(endem.EmailSenderError) as err:
            emaillib_mail_ru.send(
                'email@example.com', 'password', 'wrong', 'subject', 'message'
            )
        assert 'invalid email' in str(err.value)

    def test_send_with_wrong_from_email(self, emaillib_mail_ru):
        """A malformed sender address raises ``EmailSenderError``."""
        with pytest.raises(endem.EmailSenderError) as err:
            emaillib_mail_ru.send(
                'wrong', 'password', 'email@example.com', 'subject', 'message'
            )
        assert 'invalid email' in str(err.value)

    def test_send_email(self, emaillib_mail_ru):
        """A plain message to one recipient is sent successfully."""
        result = emaillib_mail_ru.send(
            self.sender_email, self.sender_password, self.to_email,
            'From test run!', 'message'
        )
        assert result

    def test_send_email_to_multi_receivers(self, emaillib_mail_ru):
        """A message addressed to several recipients is sent successfully."""
        recipients = [self.to_email, 'user@example.com']
        result = emaillib_mail_ru.send(
            self.sender_email, self.sender_password, recipients,
            'From test run!', 'message'
        )
        assert result

    def test_send_email_with_markdown(self, emaillib_mail_ru):
        """A markdown body is converted to HTML before sending."""
        res = emaillib_mail_ru.send(
            self.sender_email, self.sender_password, self.to_email,
            'From test run!', '# Title\n\ntext `code`'
        )
        assert res.get('body') == '<h1>Title</h1>\n<p>text <code>code</code></p>'

    def test_send_email_with_wrong_password(self, emaillib_mail_ru):
        """A bad password surfaces the SMTP authentication failure."""
        with pytest.raises(smtplib.SMTPAuthenticationError) as err:
            emaillib_mail_ru.send(
                self.sender_email, 'wrong_password', self.to_email,
                'From test run!', 'message'
            )
        assert 'Authentication failed.' in str(err.value)
| 28.937063
| 73
| 0.574795
| 862
| 8,276
| 5.301624
| 0.109049
| 0.032166
| 0.043326
| 0.042013
| 0.830416
| 0.808753
| 0.764114
| 0.712254
| 0.712254
| 0.698906
| 0
| 0.001242
| 0.319116
| 8,276
| 285
| 74
| 29.038596
| 0.80976
| 0.105123
| 0
| 0.591954
| 0
| 0
| 0.1597
| 0.00908
| 0
| 0
| 0
| 0
| 0.114943
| 1
| 0.137931
| false
| 0.149425
| 0.017241
| 0.017241
| 0.241379
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
002f08bae27ae34c9caaa436e3fc5f491c037f0a
| 42,764
|
py
|
Python
|
business_register/migrations/0158_auto_20210806_0930.py
|
roman-tiukh/Data_converter
|
3a0db911417a853ba6cffc4210fe83ba9959955c
|
[
"MIT"
] | null | null | null |
business_register/migrations/0158_auto_20210806_0930.py
|
roman-tiukh/Data_converter
|
3a0db911417a853ba6cffc4210fe83ba9959955c
|
[
"MIT"
] | null | null | null |
business_register/migrations/0158_auto_20210806_0930.py
|
roman-tiukh/Data_converter
|
3a0db911417a853ba6cffc4210fe83ba9959955c
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.12 on 2021-08-06 09:30
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('location_register', '0024_auto_20210526_0845'),
('business_register', '0157_auto_20210805_0925'),
]
operations = [
migrations.AddField(
model_name='companysanction',
name='decree_number',
field=models.CharField(default='', help_text='number of the decree of the President of Ukraine', max_length=9, validators=[django.core.validators.RegexValidator(message='Invalid format of the Decree number, example: 350/2021', regex='^\\d{1,4}\\/20\\d\\d$')], verbose_name='decree number'),
preserve_default=False,
),
migrations.AddField(
model_name='countrysanction',
name='decree_number',
field=models.CharField(default='', help_text='number of the decree of the President of Ukraine', max_length=9, validators=[django.core.validators.RegexValidator(message='Invalid format of the Decree number, example: 350/2021', regex='^\\d{1,4}\\/20\\d\\d$')], verbose_name='decree number'),
preserve_default=False,
),
migrations.AddField(
model_name='personsanction',
name='decree_number',
field=models.CharField(default='', help_text='number of the decree of the President of Ukraine', max_length=9, validators=[django.core.validators.RegexValidator(message='Invalid format of the Decree number, example: 350/2021', regex='^\\d{1,4}\\/20\\d\\d$')], verbose_name='decree number'),
preserve_default=False,
),
migrations.AlterField(
model_name='beneficiary',
name='company',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='beneficiaries_from_declarations', to='business_register.company'),
),
migrations.AlterField(
model_name='beneficiary',
name='company_address',
field=models.TextField(blank=True, default='', help_text='address of the company'),
),
migrations.AlterField(
model_name='beneficiary',
name='company_email',
field=models.TextField(blank=True, default='', help_text='email of the company'),
),
migrations.AlterField(
model_name='beneficiary',
name='company_fax',
field=models.TextField(blank=True, default='', help_text='fax number of the company'),
),
migrations.AlterField(
model_name='beneficiary',
name='company_name',
field=models.TextField(blank=True, default='', help_text='name of the company', max_length=75),
),
migrations.AlterField(
model_name='beneficiary',
name='company_name_eng',
field=models.TextField(blank=True, default='', help_text='name in English of the company', max_length=75),
),
migrations.AlterField(
model_name='beneficiary',
name='company_phone',
field=models.TextField(blank=True, default='', help_text='phone number of the company'),
),
migrations.AlterField(
model_name='beneficiary',
name='company_registration_number',
field=models.CharField(blank=True, default='', help_text='number of registration of the company', max_length=100),
),
migrations.AlterField(
model_name='beneficiary',
name='company_type_name',
field=models.TextField(blank=True, default='', help_text='name of type of the company'),
),
migrations.AlterField(
model_name='beneficiary',
name='country',
field=models.ForeignKey(blank=True, default=None, help_text='country where the company is registered', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='declared_pep_beneficiaries', to='location_register.country'),
),
migrations.AlterField(
model_name='beneficiary',
name='declaration',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='beneficiaries', to='business_register.declaration'),
),
migrations.AlterField(
model_name='corporaterights',
name='company',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='corporate_rights_from_declarations', to='business_register.company'),
),
migrations.AlterField(
model_name='corporaterights',
name='company_name',
field=models.TextField(blank=True, default='', help_text='name of the company', max_length=75),
),
migrations.AlterField(
model_name='corporaterights',
name='company_name_eng',
field=models.TextField(blank=True, default='', help_text='name in English of the company', max_length=75),
),
migrations.AlterField(
model_name='corporaterights',
name='company_registration_number',
field=models.CharField(blank=True, default='', help_text='registration number of the company', max_length=100),
),
migrations.AlterField(
model_name='corporaterights',
name='company_type_name',
field=models.TextField(blank=True, default='', help_text='name of type of the company'),
),
migrations.AlterField(
model_name='corporaterights',
name='country',
field=models.ForeignKey(blank=True, default=None, help_text='country where the company is registered', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='declared_pep_corporate_rights', to='location_register.country'),
),
migrations.AlterField(
model_name='corporaterights',
name='declaration',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='corporate_rights', to='business_register.declaration'),
),
migrations.AlterField(
model_name='corporaterights',
name='is_transferred',
field=models.BooleanField(blank=True, default=None, help_text='is corporate rights transferred to another person or company', null=True),
),
migrations.AlterField(
model_name='corporaterights',
name='share',
field=models.DecimalField(blank=True, decimal_places=2, help_text='company share', max_digits=12, null=True),
),
migrations.AlterField(
model_name='corporaterights',
name='value',
field=models.DecimalField(blank=True, decimal_places=2, help_text='value of rights', max_digits=12, null=True),
),
migrations.AlterField(
model_name='corporaterightsright',
name='acquisition_date',
field=models.DateField(blank=True, help_text='date of acquisition of the right', null=True),
),
migrations.AlterField(
model_name='corporaterightsright',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about the right'),
),
migrations.AlterField(
model_name='corporaterightsright',
name='company',
field=models.ForeignKey(blank=True, default=None, help_text='company or organisation that owns the right', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='business_register_corporaterightsright_rights', to='business_register.company'),
),
migrations.AlterField(
model_name='corporaterightsright',
name='corporate_rights',
field=models.ForeignKey(help_text='right to corporate rights', on_delete=django.db.models.deletion.CASCADE, related_name='rights', to='business_register.corporaterights'),
),
migrations.AlterField(
model_name='corporaterightsright',
name='full_name',
field=models.CharField(blank=True, default='', help_text='full name of the person that owns the right', max_length=75),
),
migrations.AlterField(
model_name='corporaterightsright',
name='owner_type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Declarant'), (2, 'Family member'), (3, 'Ukraine citizen'), (4, 'Foreign citizen'), (5, 'Legal entity registered in Ukraine'), (6, 'Legal entity registered abroad')], help_text='type of the owner', null=True),
),
migrations.AlterField(
model_name='corporaterightsright',
name='share',
field=models.DecimalField(blank=True, decimal_places=2, help_text='share of the right', max_digits=12, null=True),
),
migrations.AlterField(
model_name='corporaterightsright',
name='type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Ownership'), (2, 'Beneficial ownership'), (3, 'Joint ownership'), (4, 'Common property'), (5, 'Rent'), (6, 'Usage'), (10, 'Other right of usage'), (7, 'Owner is another person'), (20, 'Family member did not provide the information'), (21, 'Family member did not consent to the processing of personal data')], help_text='type of the right', null=True),
),
migrations.AlterField(
model_name='declaration',
name='last_employer',
field=models.TextField(blank=True, default='', help_text='last employer of the declarant'),
),
migrations.AlterField(
model_name='declaration',
name='last_job_title',
field=models.TextField(blank=True, default='', help_text='title of the last job of the declarant'),
),
migrations.AlterField(
model_name='declaration',
name='spouse',
field=models.ForeignKey(blank=True, default=None, help_text='spouse of the declarant', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='spouse', to='business_register.pep'),
),
migrations.AlterField(
model_name='declaration',
name='submission_date',
field=models.DateField(blank=True, help_text='date of submission of the declaration', null=True),
),
migrations.AlterField(
model_name='declaration',
name='type',
field=models.PositiveSmallIntegerField(choices=[(1, 'annual declaration'), (3, 'declaration after resignation'), (2, 'declaration before resignation'), (4, 'declaration of the candidate')], help_text='type of the declaration'),
),
migrations.AlterField(
model_name='income',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about the income'),
),
migrations.AlterField(
model_name='income',
name='amount',
field=models.DecimalField(blank=True, decimal_places=2, help_text='amount of income', max_digits=12, null=True),
),
migrations.AlterField(
model_name='income',
name='company_registration_number',
field=models.CharField(blank=True, default='', help_text='number of registration of the company or organisation that paid', max_length=100),
),
migrations.AlterField(
model_name='income',
name='declaration',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='incomes', to='business_register.declaration'),
),
migrations.AlterField(
model_name='income',
name='paid_by_company',
field=models.ForeignKey(blank=True, default=None, help_text='company or organisation that paid', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='paid_to', to='business_register.company'),
),
migrations.AlterField(
model_name='income',
name='paid_by_person',
field=models.TextField(blank=True, default='', help_text='full name of the person that paid'),
),
migrations.AlterField(
model_name='income',
name='recipient',
field=models.ForeignKey(help_text='person that got income', on_delete=django.db.models.deletion.PROTECT, related_name='incomes', to='business_register.pep'),
),
migrations.AlterField(
model_name='income',
name='type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Salary'), (2, 'Interest'), (3, 'Dividends'), (5, 'From sale of securities or corporate rights'), (6, 'Business'), (7, 'Gift in cash'), (8, 'Gift'), (9, 'Fees and other payments'), (10, 'Other'), (11, 'Income from renting property'), (12, 'Pension'), (13, 'Insurance payments'), (14, 'Sale of securities and corporate rights'), (15, 'Prize'), (16, 'Charity'), (17, 'Sale of property'), (18, 'Legacy'), (19, 'Salary from part-time job'), (20, 'Sale of movables'), (21, 'Self-employment'), (22, 'Royalty'), (30, 'Family member did not provide the information')], help_text='type of income', null=True),
),
migrations.AlterField(
model_name='intangibleassetright',
name='acquisition_date',
field=models.DateField(blank=True, help_text='date of acquisition of the right', null=True),
),
migrations.AlterField(
model_name='intangibleassetright',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about the right'),
),
migrations.AlterField(
model_name='intangibleassetright',
name='company',
field=models.ForeignKey(blank=True, default=None, help_text='company or organisation that owns the right', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='business_register_intangibleassetright_rights', to='business_register.company'),
),
migrations.AlterField(
model_name='intangibleassetright',
name='full_name',
field=models.CharField(blank=True, default='', help_text='full name of the person that owns the right', max_length=75),
),
migrations.AlterField(
model_name='intangibleassetright',
name='intangible_assets',
field=models.ForeignKey(help_text='right to the intangible asset', on_delete=django.db.models.deletion.CASCADE, related_name='rights', to='business_register.intangibleasset'),
),
migrations.AlterField(
model_name='intangibleassetright',
name='owner_type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Declarant'), (2, 'Family member'), (3, 'Ukraine citizen'), (4, 'Foreign citizen'), (5, 'Legal entity registered in Ukraine'), (6, 'Legal entity registered abroad')], help_text='type of the owner', null=True),
),
migrations.AlterField(
model_name='intangibleassetright',
name='share',
field=models.DecimalField(blank=True, decimal_places=2, help_text='share of the right', max_digits=12, null=True),
),
migrations.AlterField(
model_name='intangibleassetright',
name='type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Ownership'), (2, 'Beneficial ownership'), (3, 'Joint ownership'), (4, 'Common property'), (5, 'Rent'), (6, 'Usage'), (10, 'Other right of usage'), (7, 'Owner is another person'), (20, 'Family member did not provide the information'), (21, 'Family member did not consent to the processing of personal data')], help_text='type of the right', null=True),
),
migrations.AlterField(
model_name='liability',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about the liability'),
),
migrations.AlterField(
model_name='liability',
name='amount',
field=models.DecimalField(blank=True, decimal_places=2, help_text='amount of the liability', max_digits=12, null=True),
),
migrations.AlterField(
model_name='liability',
name='bank',
field=models.ForeignKey(blank=True, default=None, help_text='bank or company to whom money is owed', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='lent_money', to='business_register.company'),
),
migrations.AlterField(
model_name='liability',
name='bank_name',
field=models.TextField(blank=True, default='', help_text='name of the bank', max_length=75),
),
migrations.AlterField(
model_name='liability',
name='creditor_full_name',
field=models.CharField(blank=True, default='', help_text='fullname of the person to whom money is owed', max_length=75),
),
migrations.AlterField(
model_name='liability',
name='currency',
field=models.CharField(blank=True, default='', max_length=33),
),
migrations.AlterField(
model_name='liability',
name='date',
field=models.DateField(blank=True, help_text='liability date', null=True),
),
migrations.AlterField(
model_name='liability',
name='declaration',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='liabilities', to='business_register.declaration'),
),
migrations.AlterField(
model_name='liability',
name='guarantee_amount',
field=models.DecimalField(blank=True, decimal_places=2, help_text='amount of the loan guarantee', max_digits=12, null=True),
),
migrations.AlterField(
model_name='liability',
name='interest_paid',
field=models.DecimalField(blank=True, decimal_places=2, help_text='amount of the interest of the loan that was paid during declaration`s period', max_digits=12, null=True),
),
migrations.AlterField(
model_name='liability',
name='loan_paid',
field=models.DecimalField(blank=True, decimal_places=2, help_text='amount of the body of the loan that was paid during declaration`s period', max_digits=12, null=True),
),
migrations.AlterField(
model_name='liability',
name='loan_rest',
field=models.DecimalField(blank=True, decimal_places=2, help_text='amount of the rest of the loan', max_digits=12, null=True),
),
migrations.AlterField(
model_name='liability',
name='type',
field=models.PositiveSmallIntegerField(choices=[(1, 'Loan'), (2, 'Money borrowed by another person'), (3, 'Tax debt'), (4, 'Liabilities under pension insurance contract'), (5, 'Liabilities under insurance contract'), (6, 'Liabilities under leasing contract'), (7, 'loan payments'), (8, 'interest payments on the loan'), (10, 'Other')], help_text='type of the liability'),
),
migrations.AlterField(
model_name='luxuryitem',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about the item'),
),
migrations.AlterField(
model_name='luxuryitem',
name='declaration',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='luxuries', to='business_register.declaration'),
),
migrations.AlterField(
model_name='luxuryitem',
name='description',
field=models.TextField(blank=True, default='', help_text='description of the item'),
),
migrations.AlterField(
model_name='luxuryitem',
name='producer',
field=models.TextField(blank=True, default='', help_text='producer of the item'),
),
migrations.AlterField(
model_name='luxuryitem',
name='trademark',
field=models.CharField(blank=True, default='', help_text='trademark of the item', max_length=100),
),
migrations.AlterField(
model_name='luxuryitem',
name='type',
field=models.PositiveSmallIntegerField(choices=[(1, 'Art'), (2, 'Personal or home electronic devices'), (3, 'Antiques'), (4, 'Clothes'), (5, 'Jewelry'), (10, 'Other')], help_text='type of the item'),
),
migrations.AlterField(
model_name='luxuryitemright',
name='acquisition_date',
field=models.DateField(blank=True, help_text='date of acquisition of the right', null=True),
),
migrations.AlterField(
model_name='luxuryitemright',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about the right'),
),
migrations.AlterField(
model_name='luxuryitemright',
name='company',
field=models.ForeignKey(blank=True, default=None, help_text='company or organisation that owns the right', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='business_register_luxuryitemright_rights', to='business_register.company'),
),
migrations.AlterField(
model_name='luxuryitemright',
name='full_name',
field=models.CharField(blank=True, default='', help_text='full name of the person that owns the right', max_length=75),
),
migrations.AlterField(
model_name='luxuryitemright',
name='luxury_item',
field=models.ForeignKey(help_text='right to the luxury item', on_delete=django.db.models.deletion.CASCADE, related_name='rights', to='business_register.luxuryitem'),
),
migrations.AlterField(
model_name='luxuryitemright',
name='owner_type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Declarant'), (2, 'Family member'), (3, 'Ukraine citizen'), (4, 'Foreign citizen'), (5, 'Legal entity registered in Ukraine'), (6, 'Legal entity registered abroad')], help_text='type of the owner', null=True),
),
migrations.AlterField(
model_name='luxuryitemright',
name='share',
field=models.DecimalField(blank=True, decimal_places=2, help_text='share of the right', max_digits=12, null=True),
),
migrations.AlterField(
model_name='luxuryitemright',
name='type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Ownership'), (2, 'Beneficial ownership'), (3, 'Joint ownership'), (4, 'Common property'), (5, 'Rent'), (6, 'Usage'), (10, 'Other right of usage'), (7, 'Owner is another person'), (20, 'Family member did not provide the information'), (21, 'Family member did not consent to the processing of personal data')], help_text='type of the right', null=True),
),
migrations.AlterField(
model_name='money',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about the money'),
),
migrations.AlterField(
model_name='money',
name='amount',
field=models.DecimalField(blank=True, decimal_places=2, help_text='amount of money', max_digits=12, null=True),
),
migrations.AlterField(
model_name='money',
name='bank',
field=models.ForeignKey(blank=True, default=None, help_text='bank, credit union or investment fund where the money is stored', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='money_in_banks', to='business_register.company'),
),
migrations.AlterField(
model_name='money',
name='currency',
field=models.CharField(blank=True, default='', help_text='currency', max_length=33),
),
migrations.AlterField(
model_name='money',
name='declaration',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='money', to='business_register.declaration'),
),
migrations.AlterField(
model_name='money',
name='owner',
field=models.ForeignKey(help_text='owner of money', on_delete=django.db.models.deletion.PROTECT, related_name='money', to='business_register.pep'),
),
migrations.AlterField(
model_name='money',
name='type',
field=models.PositiveSmallIntegerField(choices=[(1, 'Bank account'), (2, 'Hard cash'), (3, 'Contribution to the credit union or investment fund'), (4, 'Money lent to another person'), (5, 'Precious metals'), (10, 'Other')], help_text='type'),
),
migrations.AlterField(
model_name='ngoparticipation',
name='declaration',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ngo_participation', to='business_register.declaration'),
),
migrations.AlterField(
model_name='ngoparticipation',
name='participation_type',
field=models.PositiveSmallIntegerField(choices=[(1, 'Membership'), (2, 'Leadership')], help_text='type of the participation in the NGO'),
),
migrations.AlterField(
model_name='parttimejob',
name='declaration',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='part_time_jobs', to='business_register.declaration'),
),
migrations.AlterField(
model_name='parttimejob',
name='description',
field=models.TextField(blank=True, default='', help_text='description of the PEP`s part-time job'),
),
migrations.AlterField(
model_name='parttimejob',
name='employer',
field=models.ForeignKey(blank=True, default=None, help_text='employer of the PEP for part-time job', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='peps_employees', to='business_register.company'),
),
migrations.AlterField(
model_name='parttimejob',
name='employer_full_name',
field=models.TextField(blank=True, default='', help_text='full name of the person that gave PEP part-time job'),
),
migrations.AlterField(
model_name='parttimejob',
name='is_paid',
field=models.BooleanField(blank=True, default=None, help_text='is the job paid', null=True),
),
migrations.AlterField(
model_name='property',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about the property'),
),
migrations.AlterField(
model_name='property',
name='country',
field=models.ForeignKey(blank=True, default=None, help_text='country where the property is located', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='declared_pep_properties', to='location_register.country'),
),
migrations.AlterField(
model_name='property',
name='declaration',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='properties', to='business_register.declaration'),
),
migrations.AlterField(
model_name='property',
name='type',
field=models.PositiveSmallIntegerField(choices=[(1, 'House'), (2, 'Summer house'), (3, 'Apartment'), (4, 'Room'), (5, 'Garage'), (6, 'Unfinished construction'), (7, 'Land'), (8, 'Office'), (10, 'Other')], help_text='type of the property'),
),
migrations.AlterField(
model_name='property',
name='valuation',
field=models.DecimalField(blank=True, decimal_places=2, help_text='valuation of the property', max_digits=12, null=True),
),
migrations.AlterField(
model_name='propertyright',
name='acquisition_date',
field=models.DateField(blank=True, help_text='date of acquisition of the right', null=True),
),
migrations.AlterField(
model_name='propertyright',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about the right'),
),
migrations.AlterField(
model_name='propertyright',
name='company',
field=models.ForeignKey(blank=True, default=None, help_text='company or organisation that owns the right', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='business_register_propertyright_rights', to='business_register.company'),
),
migrations.AlterField(
model_name='propertyright',
name='full_name',
field=models.CharField(blank=True, default='', help_text='full name of the person that owns the right', max_length=75),
),
migrations.AlterField(
model_name='propertyright',
name='owner_type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Declarant'), (2, 'Family member'), (3, 'Ukraine citizen'), (4, 'Foreign citizen'), (5, 'Legal entity registered in Ukraine'), (6, 'Legal entity registered abroad')], help_text='type of the owner', null=True),
),
migrations.AlterField(
model_name='propertyright',
name='property',
field=models.ForeignKey(help_text='right to the property', on_delete=django.db.models.deletion.CASCADE, related_name='rights', to='business_register.property'),
),
migrations.AlterField(
model_name='propertyright',
name='share',
field=models.DecimalField(blank=True, decimal_places=2, help_text='share of the right', max_digits=12, null=True),
),
migrations.AlterField(
model_name='propertyright',
name='type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Ownership'), (2, 'Beneficial ownership'), (3, 'Joint ownership'), (4, 'Common property'), (5, 'Rent'), (6, 'Usage'), (10, 'Other right of usage'), (7, 'Owner is another person'), (20, 'Family member did not provide the information'), (21, 'Family member did not consent to the processing of personal data')], help_text='type of the right', null=True),
),
migrations.AlterField(
model_name='securities',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about securities'),
),
migrations.AlterField(
model_name='securities',
name='declaration',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='securities', to='business_register.declaration'),
),
migrations.AlterField(
model_name='securities',
name='issuer_address',
field=models.TextField(blank=True, default='', help_text='address of the issuer of securities'),
),
migrations.AlterField(
model_name='securities',
name='issuer_company',
field=models.ForeignKey(blank=True, default=None, help_text='company that issued the securities', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='shares_from_declarations', to='business_register.company'),
),
migrations.AlterField(
model_name='securities',
name='issuer_name',
field=models.TextField(blank=True, default='', help_text='name of the issuer of securities', max_length=75),
),
migrations.AlterField(
model_name='securities',
name='issuer_registration_number',
field=models.CharField(blank=True, default='', help_text='number of registration of the issuer of securities', max_length=100),
),
migrations.AlterField(
model_name='securities',
name='nominal_value',
field=models.DecimalField(blank=True, decimal_places=2, help_text='nominal value of securities', max_digits=12, null=True),
),
migrations.AlterField(
model_name='securities',
name='quantity',
field=models.DecimalField(blank=True, decimal_places=2, help_text='quantity of securities', max_digits=12, null=True),
),
migrations.AlterField(
model_name='securities',
name='transfer_date',
field=models.DateField(blank=True, help_text='date of the transfer of the securities', null=True),
),
migrations.AlterField(
model_name='securities',
name='trustee_company',
field=models.ForeignKey(blank=True, default=None, help_text='company that is a trustee of the securities', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='shares_in_trust', to='business_register.company'),
),
migrations.AlterField(
model_name='securities',
name='type',
field=models.PositiveSmallIntegerField(choices=[(1, 'Share'), (2, 'Corporate right'), (3, 'Mortgage securities'), (4, 'Commodity securities'), (5, 'Derivatives'), (6, 'Debt securities'), (7, 'Privatization securities (vouchers, etc)'), (8, 'Investment certificates)'), (9, 'Check'), (10, 'Other')], help_text='type of securities'),
),
migrations.AlterField(
model_name='securitiesright',
name='acquisition_date',
field=models.DateField(blank=True, help_text='date of acquisition of the right', null=True),
),
migrations.AlterField(
model_name='securitiesright',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about the right'),
),
migrations.AlterField(
model_name='securitiesright',
name='company',
field=models.ForeignKey(blank=True, default=None, help_text='company or organisation that owns the right', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='business_register_securitiesright_rights', to='business_register.company'),
),
migrations.AlterField(
model_name='securitiesright',
name='full_name',
field=models.CharField(blank=True, default='', help_text='full name of the person that owns the right', max_length=75),
),
migrations.AlterField(
model_name='securitiesright',
name='owner_type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Declarant'), (2, 'Family member'), (3, 'Ukraine citizen'), (4, 'Foreign citizen'), (5, 'Legal entity registered in Ukraine'), (6, 'Legal entity registered abroad')], help_text='type of the owner', null=True),
),
migrations.AlterField(
model_name='securitiesright',
name='securities',
field=models.ForeignKey(help_text='right to securities', on_delete=django.db.models.deletion.CASCADE, related_name='rights', to='business_register.securities'),
),
migrations.AlterField(
model_name='securitiesright',
name='share',
field=models.DecimalField(blank=True, decimal_places=2, help_text='share of the right', max_digits=12, null=True),
),
migrations.AlterField(
model_name='securitiesright',
name='type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Ownership'), (2, 'Beneficial ownership'), (3, 'Joint ownership'), (4, 'Common property'), (5, 'Rent'), (6, 'Usage'), (10, 'Other right of usage'), (7, 'Owner is another person'), (20, 'Family member did not provide the information'), (21, 'Family member did not consent to the processing of personal data')], help_text='type of the right', null=True),
),
migrations.AlterField(
model_name='transaction',
name='amount',
field=models.DecimalField(blank=True, decimal_places=2, help_text='amount of the transaction', max_digits=12, null=True),
),
migrations.AlterField(
model_name='transaction',
name='country',
field=models.ForeignKey(blank=True, default=None, help_text='country where the transaction is registered', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='declared_pep_transactions', to='location_register.country'),
),
migrations.AlterField(
model_name='transaction',
name='date',
field=models.DateField(blank=True, help_text='date of the transaction', null=True),
),
migrations.AlterField(
model_name='transaction',
name='declaration',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='transactions', to='business_register.declaration'),
),
migrations.AlterField(
model_name='transaction',
name='is_money_spent',
field=models.BooleanField(blank=True, default=None, help_text='whether the money spent during the transaction', null=True),
),
migrations.AlterField(
model_name='transaction',
name='transaction_object',
field=models.TextField(blank=True, default='', help_text='object of the transaction'),
),
migrations.AlterField(
model_name='transaction',
name='transaction_object_type',
field=models.TextField(blank=True, default='', help_text='type of the object of the transaction'),
),
migrations.AlterField(
model_name='transaction',
name='transaction_result',
field=models.TextField(blank=True, default='', help_text='result of the transaction'),
),
migrations.AlterField(
model_name='vehicle',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about the vehicle'),
),
migrations.AlterField(
model_name='vehicle',
name='brand',
field=models.CharField(blank=True, default='', max_length=80),
),
migrations.AlterField(
model_name='vehicle',
name='declaration',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vehicles', to='business_register.declaration'),
),
migrations.AlterField(
model_name='vehicle',
name='is_luxury',
field=models.BooleanField(blank=True, default=None, null=True),
),
migrations.AlterField(
model_name='vehicle',
name='model',
field=models.CharField(blank=True, default='', max_length=140),
),
migrations.AlterField(
model_name='vehicle',
name='type',
field=models.PositiveSmallIntegerField(choices=[(1, 'Car'), (2, 'Truck'), (4, 'Boat'), (5, 'Agricultural machinery'), (6, 'Air_means'), (10, 'Other')], help_text='type of the vehicle'),
),
migrations.AlterField(
model_name='vehicle',
name='valuation',
field=models.PositiveIntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='vehicleright',
name='acquisition_date',
field=models.DateField(blank=True, help_text='date of acquisition of the right', null=True),
),
migrations.AlterField(
model_name='vehicleright',
name='additional_info',
field=models.TextField(blank=True, default='', help_text='additional info about the right'),
),
migrations.AlterField(
model_name='vehicleright',
name='car',
field=models.ForeignKey(help_text='right to the vehicle', on_delete=django.db.models.deletion.CASCADE, related_name='rights', to='business_register.vehicle'),
),
migrations.AlterField(
model_name='vehicleright',
name='company',
field=models.ForeignKey(blank=True, default=None, help_text='company or organisation that owns the right', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='business_register_vehicleright_rights', to='business_register.company'),
),
migrations.AlterField(
model_name='vehicleright',
name='full_name',
field=models.CharField(blank=True, default='', help_text='full name of the person that owns the right', max_length=75),
),
migrations.AlterField(
model_name='vehicleright',
name='owner_type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Declarant'), (2, 'Family member'), (3, 'Ukraine citizen'), (4, 'Foreign citizen'), (5, 'Legal entity registered in Ukraine'), (6, 'Legal entity registered abroad')], help_text='type of the owner', null=True),
),
migrations.AlterField(
model_name='vehicleright',
name='share',
field=models.DecimalField(blank=True, decimal_places=2, help_text='share of the right', max_digits=12, null=True),
),
migrations.AlterField(
model_name='vehicleright',
name='type',
field=models.PositiveSmallIntegerField(blank=True, choices=[(1, 'Ownership'), (2, 'Beneficial ownership'), (3, 'Joint ownership'), (4, 'Common property'), (5, 'Rent'), (6, 'Usage'), (10, 'Other right of usage'), (7, 'Owner is another person'), (20, 'Family member did not provide the information'), (21, 'Family member did not consent to the processing of personal data')], help_text='type of the right', null=True),
),
]
| 55.900654
| 676
| 0.631115
| 4,601
| 42,764
| 5.733102
| 0.071072
| 0.050838
| 0.138373
| 0.160513
| 0.895898
| 0.885586
| 0.835772
| 0.783494
| 0.672113
| 0.62264
| 0
| 0.012281
| 0.240249
| 42,764
| 764
| 677
| 55.973822
| 0.7996
| 0.001076
| 0
| 0.783641
| 1
| 0
| 0.291624
| 0.040032
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003958
| 0
| 0.007916
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
cc6f51d93ed47bc1de6d9c5af2f20b7104573b51
| 38
|
py
|
Python
|
src/lib/shelve.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/shelve.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/shelve.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
import _sk_fail; _sk_fail._("shelve")
| 19
| 37
| 0.763158
| 6
| 38
| 4
| 0.666667
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 38
| 1
| 38
| 38
| 0.685714
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cc80a3e8dce98ff536cfd10a7c60aad3ec559548
| 8,160
|
py
|
Python
|
dlkit/abstract_osid/authorization/receivers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/abstract_osid/authorization/receivers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/abstract_osid/authorization/receivers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Implementations of authorization abstract base class receivers."""
# pylint: disable=invalid-name
# Method names comply with OSID specification.
# pylint: disable=no-init
# Abstract classes do not define __init__.
# pylint: disable=too-few-public-methods
# Some interfaces are specified as 'markers' and include no methods.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
# pylint: disable=too-many-arguments
# Argument signature defined in specification.
# pylint: disable=duplicate-code
# All apparent duplicates have been inspected. They aren't.
import abc
class AuthorizationReceiver:
"""The authorization receiver is the consumer supplied interface for receiving notifications pertaining to new, updated or deleted ``Authorizations``."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def new_authorizations(self, notification_id, authorization_ids):
"""The callback for notifications of new authorizations.
:param notification_id: the notification ``Id``
:type notification_id: ``osid.id.Id``
:param authorization_ids: the Id of the new ``Authorizations``
:type authorization_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def changed_authorizations(self, notification_id, authorization_ids):
"""The callback for notification of updated authorization.
:param notification_id: the notification ``Id``
:type notification_id: ``osid.id.Id``
:param authorization_ids: the Id of the updated ``Authorizations``
:type authorization_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def deleted_authorizations(self, notification_id, authorization_ids):
"""The callback for notification of deleted authorizations.
:param notification_id: the notification ``Id``
:type notification_id: ``osid.id.Id``
:param authorization_ids: the Id of the deleted ``Authorizations``
:type authorization_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
class FunctionReceiver:
"""The function receiver is the consumer supplied interface for receiving notifications pertaining to new, updated or deleted ``Functions``."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def new_functions(self, notification_id, function_ids):
"""The callback for notifications of new functions.
:param notification_id: the notification Id
:type notification_id: ``osid.id.Id``
:param function_ids: the Id of the new ``Functions``
:type function_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def changed_functions(self, notification_id, function_ids):
"""The callback for notification of updated functions.
:param notification_id: the notification Id
:type notification_id: ``osid.id.Id``
:param function_ids: the Id of the updated ``Functions``
:type function_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def deleted_functions(self, notification_id, function_ids):
"""The callback for notification of deleted functions.
:param notification_id: the notification Id
:type notification_id: ``osid.id.Id``
:param function_ids: the Id of the deleted ``Functions``
:type function_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
class QualifierReceiver:
"""The qualifier receiver is the consumer supplied interface for receiving notifications pertaining to new, updated or deleted ``Qualifier`` objects."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def new_qualifiers(self, notification_id, qualifier_ids):
"""The callback for notifications of new qualifiers.
:param notification_id: the notification ``Id``
:type notification_id: ``osid.id.Id``
:param qualifier_ids: the ``Id`` of the new ``Qualifiers``
:type qualifier_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def changed_qualifiers(self, notification_id, qualifier_ids):
"""The callback for notification of updated qualifiers.
:param notification_id: the notification ``Id``
:type notification_id: ``osid.id.Id``
:param qualifier_ids: the ``Id`` of the updated ``Qualifiers``
:type qualifier_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def deleted_qualifiers(self, notification_id, qualifier_ids):
"""the callback for notification of deleted qualifiers.
:param notification_id: the notification ``Id``
:type notification_id: ``osid.id.Id``
:param qualifier_ids: the ``Id`` of the deleted ``Qualifiers``
:type qualifier_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def changed_child_of_billings(self, notification_id, qualifier_ids):
"""The callback for notifications of changes to children of qualifier hierarchy nodes.
:param notification_id: the notification ``Id``
:type notification_id: ``osid.id.Id``
:param qualifier_ids: the ``Ids`` of the ``Qualifiers`` whose children have changed
:type qualifier_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
class VaultReceiver:
"""The vault receiver is the consumer supplied interface for receiving notifications pertaining to new, updated or deleted ``Vault`` objects."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def new_vaults(self, notification_id, vault_ids):
"""The callback for notifications of new vaults.
:param notification_id: the notification Id
:type notification_id: ``osid.id.Id``
:param vault_ids: the ``Id`` of the new ``Vaults``
:type vault_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def changed_vaults(self, notification_id, vault_ids):
"""The callback for notification of updated vaults.
:param notification_id: the notification Id
:type notification_id: ``osid.id.Id``
:param vault_ids: the ``Id`` of the updated ``Vaults``
:type vault_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def deleted_vaults(self, notification_id, vault_ids):
"""The callback for notification of deleted vaults.
:param notification_id: the notification Id
:type notification_id: ``osid.id.Id``
:param vault_ids: the ``Id`` of the deleted ``Vaults``
:type vault_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def changed_child_of_vaults(self, notification_id, vault_ids):
"""The callback for notifications of changes to children of vault hierarchy nodes.
:param notification_id: the notification Id
:type notification_id: ``osid.id.Id``
:param vault_ids: the ``Ids`` of the ``Vaults`` whose children have changed
:type vault_ids: ``osid.id.IdList``
*compliance: mandatory -- This method must be implemented.*
"""
pass
| 33.036437
| 157
| 0.668382
| 932
| 8,160
| 5.72103
| 0.119099
| 0.147037
| 0.052513
| 0.044636
| 0.882596
| 0.864029
| 0.851275
| 0.801388
| 0.783008
| 0.759752
| 0
| 0
| 0.234436
| 8,160
| 246
| 158
| 33.170732
| 0.85353
| 0.674265
| 0
| 0.627451
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.27451
| false
| 0.27451
| 0.019608
| 0
| 0.45098
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
ccd3e54c44e1e124c2de8d42b7474943241d0978
| 87
|
py
|
Python
|
secure_ml/__init__.py
|
Koukyosyumei/secure_ml
|
9da24f4ce4782ec2f6dd63b0437f657a0e190e40
|
[
"MIT"
] | 10
|
2021-02-23T01:32:48.000Z
|
2021-11-16T06:02:26.000Z
|
secure_ml/__init__.py
|
ahatamiz/secure_ml
|
1242148e0686d0374795f99143fcb0a8f34f71f2
|
[
"MIT"
] | 2
|
2021-05-16T08:38:19.000Z
|
2021-06-20T09:01:45.000Z
|
secure_ml/__init__.py
|
ahatamiz/secure_ml
|
1242148e0686d0374795f99143fcb0a8f34f71f2
|
[
"MIT"
] | 4
|
2021-02-25T04:33:06.000Z
|
2021-08-17T05:43:47.000Z
|
from secure_ml import utils
from secure_ml import attack
from secure_ml import defense
| 21.75
| 29
| 0.862069
| 15
| 87
| 4.8
| 0.466667
| 0.416667
| 0.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 87
| 3
| 30
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
aeadf08a10c109853a2f7df35406489e1809a077
| 2,800
|
py
|
Python
|
test_rows2atoms.py
|
rphilander/proteus
|
840ed265131c7399b983802afaae61306f02ba3b
|
[
"MIT"
] | null | null | null |
test_rows2atoms.py
|
rphilander/proteus
|
840ed265131c7399b983802afaae61306f02ba3b
|
[
"MIT"
] | null | null | null |
test_rows2atoms.py
|
rphilander/proteus
|
840ed265131c7399b983802afaae61306f02ba3b
|
[
"MIT"
] | null | null | null |
import unittest
from core import Atom
from rows2atoms import transform
headers = ['athlete', 'age', 'country', 'year', 'sport', 'golds', 'silvers', 'bronzes', 'total_medals']
class Rows2AtomsTests(unittest.TestCase):
def test_empty(self):
self.assertEqual(list(transform([headers])), [])
def test_one_row_nine(self):
row = ['Rory Bumpkins', 42, 'Slovenia', 4042, 'Nether Bowling', 7, 2, 5, 99]
result = list(transform([headers, row]))
self.assertEqual(len(result), 9)
self.assertTrue(Atom('athlete', 'Rory Bumpkins') in result)
self.assertTrue(Atom('age', 42) in result)
self.assertTrue(Atom('country', 'Slovenia') in result)
self.assertTrue(Atom('year', 4042) in result)
self.assertTrue(Atom('sport', 'Nether Bowling') in result)
self.assertTrue(Atom('golds', 7) in result)
self.assertTrue(Atom('silvers', 2) in result)
self.assertTrue(Atom('bronzes', 5) in result)
self.assertTrue(Atom('total_medals', 99) in result)
def test_one_row_seven(self):
row = ['Slovenia', 4042, 'Nether Bowling', 7, 2, 5, 99]
result = list(transform([headers, row]))
self.assertEqual(len(result), 7)
self.assertTrue(Atom('country', 'Slovenia') in result)
self.assertTrue(Atom('year', 4042) in result)
self.assertTrue(Atom('sport', 'Nether Bowling') in result)
self.assertTrue(Atom('golds', 7) in result)
self.assertTrue(Atom('silvers', 2) in result)
self.assertTrue(Atom('bronzes', 5) in result)
self.assertTrue(Atom('total_medals', 99) in result)
def test_two_rows(self):
rows = [ ['Rory Bumpkins', 42, 'Slovenia', 4042, 'Nether Bowling', 7, 2, 5, 99],
['Rory Bumpkins', 42, 'New Slovenia', 9088, 'Nether Bowling', 0, 0, 16, 99] ]
result = list(transform([headers] + rows))
self.assertEqual(len(result), 14)
self.assertTrue(Atom('athlete', 'Rory Bumpkins') in result)
self.assertTrue(Atom('age', 42) in result)
self.assertTrue(Atom('country', 'Slovenia') in result)
self.assertTrue(Atom('country', 'New Slovenia') in result)
self.assertTrue(Atom('year', 4042) in result)
self.assertTrue(Atom('year', 9088) in result)
self.assertTrue(Atom('sport', 'Nether Bowling') in result)
self.assertTrue(Atom('golds', 7) in result)
self.assertTrue(Atom('golds', 0) in result)
self.assertTrue(Atom('silvers', 2) in result)
self.assertTrue(Atom('silvers', 0) in result)
self.assertTrue(Atom('bronzes', 5) in result)
self.assertTrue(Atom('bronzes', 16) in result)
self.assertTrue(Atom('total_medals', 99) in result)
if __name__ == '__main__':
unittest.main()
| 43.75
| 103
| 0.628571
| 350
| 2,800
| 4.968571
| 0.162857
| 0.241518
| 0.310523
| 0.341576
| 0.783209
| 0.767108
| 0.748706
| 0.748706
| 0.748706
| 0.748706
| 0
| 0.040164
| 0.2175
| 2,800
| 63
| 104
| 44.444444
| 0.753537
| 0
| 0
| 0.519231
| 0
| 0
| 0.175357
| 0
| 0
| 0
| 0
| 0
| 0.653846
| 1
| 0.076923
| false
| 0
| 0.057692
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
aedcb25961afd836530e10b91c0d29278ee4150a
| 142
|
py
|
Python
|
CRM-Project/accounts/admin.py
|
wiky-avis/trainee-domclick-test
|
a12727563a9a728b2a3472b7fee7b16cf11b298b
|
[
"MIT"
] | null | null | null |
CRM-Project/accounts/admin.py
|
wiky-avis/trainee-domclick-test
|
a12727563a9a728b2a3472b7fee7b16cf11b298b
|
[
"MIT"
] | 2
|
2022-01-13T03:52:59.000Z
|
2022-03-12T01:00:19.000Z
|
CRM-Project/accounts/admin.py
|
wiky-avis/trainee-domclick-test
|
a12727563a9a728b2a3472b7fee7b16cf11b298b
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import ClientProfile, Profile
admin.site.register(Profile)
admin.site.register(ClientProfile)
| 20.285714
| 42
| 0.830986
| 18
| 142
| 6.555556
| 0.555556
| 0.20339
| 0.271186
| 0.40678
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091549
| 142
| 6
| 43
| 23.666667
| 0.914729
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
4e15e9bbd5ae48d400029634fc9bdb548d58aabf
| 9,338
|
py
|
Python
|
mayan/apps/tags/tests/test_document_tag_api.py
|
atitaya1412/Mayan-EDMS
|
bda9302ba4b743e7d829ad118b8b836221888172
|
[
"Apache-2.0"
] | 343
|
2015-01-05T14:19:35.000Z
|
2018-12-10T19:07:48.000Z
|
mayan/apps/tags/tests/test_document_tag_api.py
|
atitaya1412/Mayan-EDMS
|
bda9302ba4b743e7d829ad118b8b836221888172
|
[
"Apache-2.0"
] | 191
|
2015-01-03T00:48:19.000Z
|
2018-11-30T09:10:25.000Z
|
mayan/apps/tags/tests/test_document_tag_api.py
|
atitaya1412/Mayan-EDMS
|
bda9302ba4b743e7d829ad118b8b836221888172
|
[
"Apache-2.0"
] | 257
|
2019-05-14T10:26:37.000Z
|
2022-03-30T03:37:36.000Z
|
from rest_framework import status
from mayan.apps.documents.tests.mixins.document_mixins import DocumentTestMixin
from mayan.apps.rest_api.tests.base import BaseAPITestCase
from ..events import event_tag_attached, event_tag_removed
from ..permissions import (
permission_tag_attach, permission_tag_remove, permission_tag_view
)
from .mixins import TagAPIViewTestMixin, TagTestMixin
class DocumentTagAPIViewTestCase(
DocumentTestMixin, TagAPIViewTestMixin, TagTestMixin, BaseAPITestCase
):
auto_upload_test_document = False
def setUp(self):
super().setUp()
self._create_test_tag()
self._create_test_document_stub()
def test_document_attach_tag_api_view_no_permission(self):
self._clear_events()
response = self._request_test_document_tag_attach_api_view()
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertTrue(self.test_tag not in self.test_document.tags.all())
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_attach_tag_api_view_with_document_access(self):
self.grant_access(
obj=self.test_document, permission=permission_tag_attach
)
self._clear_events()
response = self._request_test_document_tag_attach_api_view()
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertTrue(self.test_tag not in self.test_document.tags.all())
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_attach_tag_api_view_with_tag_access(self):
self.grant_access(
obj=self.test_tag, permission=permission_tag_attach
)
self._clear_events()
response = self._request_test_document_tag_attach_api_view()
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertTrue(self.test_tag not in self.test_document.tags.all())
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_attach_tag_api_view_with_full_access(self):
self.grant_access(
obj=self.test_document, permission=permission_tag_attach
)
self.grant_access(
obj=self.test_tag, permission=permission_tag_attach
)
self._clear_events()
response = self._request_test_document_tag_attach_api_view()
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertTrue(self.test_tag in self.test_document.tags.all())
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].action_object, self.test_tag)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_document)
self.assertEqual(events[0].verb, event_tag_attached.id)
def test_trashed_document_attach_tag_api_view_with_full_access(self):
self.grant_access(
obj=self.test_document, permission=permission_tag_attach
)
self.grant_access(
obj=self.test_tag, permission=permission_tag_attach
)
self.test_document.delete()
self._clear_events()
response = self._request_test_document_tag_attach_api_view()
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertTrue(self.test_tag not in self.test_document.tags.all())
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_tag_list_api_view_no_permission(self):
self.test_tag.documents.add(self.test_document)
self._clear_events()
response = self._request_test_document_tag_list_api_view()
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_tag_list_api_view_with_document_access(self):
self.test_tag.documents.add(self.test_document)
self.grant_access(
obj=self.test_document, permission=permission_tag_view
)
self._clear_events()
response = self._request_test_document_tag_list_api_view()
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['count'], 0)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_tag_list_api_view_with_tag_access(self):
self.test_tag.documents.add(self.test_document)
self.grant_access(obj=self.test_tag, permission=permission_tag_view)
self._clear_events()
response = self._request_test_document_tag_list_api_view()
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_tag_list_api_view_with_full_access(self):
self.test_tag.documents.add(self.test_document)
self.grant_access(
obj=self.test_document, permission=permission_tag_view
)
self.grant_access(obj=self.test_tag, permission=permission_tag_view)
self._clear_events()
response = self._request_test_document_tag_list_api_view()
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['results'][0]['label'], self.test_tag.label)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_trashed_document_tag_list_api_view_with_full_access(self):
self.test_tag.documents.add(self.test_document)
self.grant_access(
obj=self.test_document, permission=permission_tag_view
)
self.grant_access(obj=self.test_tag, permission=permission_tag_view)
self.test_document.delete()
self._clear_events()
response = self._request_test_document_tag_list_api_view()
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_tag_remove_api_view_no_permission(self):
self.test_tag.documents.add(self.test_document)
self._clear_events()
response = self._request_test_document_tag_remove_api_view()
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertTrue(self.test_tag in self.test_document.tags.all())
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_tag_remove_api_view_with_document_access(self):
self.test_tag.documents.add(self.test_document)
self.grant_access(
obj=self.test_document, permission=permission_tag_remove
)
self._clear_events()
response = self._request_test_document_tag_remove_api_view()
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertTrue(self.test_tag in self.test_document.tags.all())
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_tag_remove_api_view_with_tag_access(self):
self.test_tag.documents.add(self.test_document)
self.grant_access(obj=self.test_tag, permission=permission_tag_remove)
self._clear_events()
response = self._request_test_document_tag_remove_api_view()
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertTrue(self.test_tag in self.test_document.tags.all())
events = self._get_test_events()
self.assertEqual(events.count(), 0)
def test_document_tag_remove_api_view_with_full_access(self):
self.test_tag.documents.add(self.test_document)
self.grant_access(
obj=self.test_document, permission=permission_tag_remove
)
self.grant_access(obj=self.test_tag, permission=permission_tag_remove)
self._clear_events()
response = self._request_test_document_tag_remove_api_view()
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertFalse(self.test_tag in self.test_document.tags.all())
events = self._get_test_events()
self.assertEqual(events.count(), 1)
self.assertEqual(events[0].action_object, self.test_tag)
self.assertEqual(events[0].actor, self._test_case_user)
self.assertEqual(events[0].target, self.test_document)
self.assertEqual(events[0].verb, event_tag_removed.id)
def test_trashed_document_tag_remove_api_view_with_full_access(self):
self.test_tag.documents.add(self.test_document)
self.grant_access(
obj=self.test_document, permission=permission_tag_remove
)
self.grant_access(obj=self.test_tag, permission=permission_tag_remove)
self.test_document.delete()
self._clear_events()
response = self._request_test_document_tag_remove_api_view()
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertTrue(self.test_tag in self.test_document.tags.all())
events = self._get_test_events()
self.assertEqual(events.count(), 0)
| 34.585185
| 83
| 0.718676
| 1,207
| 9,338
| 5.136703
| 0.067937
| 0.087742
| 0.087742
| 0.052258
| 0.90871
| 0.902419
| 0.899839
| 0.893065
| 0.891452
| 0.891452
| 0
| 0.00929
| 0.193082
| 9,338
| 269
| 84
| 34.713755
| 0.813537
| 0
| 0
| 0.734807
| 0
| 0
| 0.001821
| 0
| 0
| 0
| 0
| 0
| 0.276243
| 1
| 0.088398
| false
| 0
| 0.033149
| 0
| 0.132597
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4e3befa174fa8b5b8d88a334fe52793718dc335a
| 14,608
|
py
|
Python
|
safe_transaction_service/contracts/tests/test_etherscan_api.py
|
peekpi/safe-transaction-service
|
d75008096e6ee9d87dcb274478777b4984f4b71e
|
[
"MIT"
] | 5
|
2021-01-28T17:41:42.000Z
|
2021-11-14T17:09:18.000Z
|
safe_transaction_service/contracts/tests/test_etherscan_api.py
|
peekpi/safe-transaction-service
|
d75008096e6ee9d87dcb274478777b4984f4b71e
|
[
"MIT"
] | 5
|
2021-06-09T18:47:49.000Z
|
2022-03-12T00:56:46.000Z
|
safe_transaction_service/contracts/tests/test_etherscan_api.py
|
peekpi/safe-transaction-service
|
d75008096e6ee9d87dcb274478777b4984f4b71e
|
[
"MIT"
] | 5
|
2021-04-06T17:20:02.000Z
|
2022-01-13T10:58:08.000Z
|
from django.test import TestCase
from gnosis.eth.ethereum_client import EthereumNetwork
from ..clients import EtherscanApi
from ..clients.etherscan_api import RateLimitError
class TestEtherscanApi(TestCase):
    def test_etherscan_get_abi(self):
        """Fetch a verified contract ABI from Etherscan and sanity-check it.

        NOTE(review): the original compared the response against a
        hard-coded ~14 KB copy of the Safe master-copy ABI, which is
        unmaintainable and impossible to review. We now assert the
        structural invariants of the returned ABI and the presence of
        well-known Safe entry points, and still verify that an address
        without verified source code yields ``None``.
        """
        try:
            etherscan_api = EtherscanApi(EthereumNetwork.MAINNET)

            abi = etherscan_api.get_contract_abi(
                '0xaE32496491b53841efb51829d6f886387708F99B'
            )
            self.assertIsInstance(abi, list)
            self.assertTrue(abi)
            self.assertTrue(all(isinstance(entry, dict) for entry in abi))

            # A few Safe master-copy entry points that must appear in the
            # verified ABI of this address.
            names = {entry.get('name') for entry in abi}
            for expected_name in (
                'getOwners', 'getThreshold', 'execTransaction',
                'getTransactionHash', 'setup'
            ):
                self.assertIn(expected_name, names)

            # An address with no verified source code returns no ABI.
            self.assertIsNone(
                etherscan_api.get_contract_abi(
                    '0xaE32496491b53841efb51829d6f886387708F99a'
                )
            )
        except RateLimitError:
            self.skipTest('Etherscan rate limit reached')
| 730.4
| 13,922
| 0.622673
| 1,311
| 14,608
| 6.909992
| 0.109077
| 0.064356
| 0.09394
| 0.061596
| 0.810575
| 0.783972
| 0.768076
| 0.731096
| 0.698421
| 0.693896
| 0
| 0.02336
| 0.097412
| 14,608
| 19
| 13,923
| 768.842105
| 0.663709
| 0
| 0
| 0
| 0
| 0
| 0.56339
| 0.018004
| 0
| 0
| 0.00575
| 0
| 0.142857
| 1
| 0.071429
| false
| 0
| 0.285714
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9d6c6be7029c3d70f5d87dfcd7764d898d4c0f06
| 3,927
|
py
|
Python
|
source code/functions.py
|
muhammadBadawy/napster
|
54205e6cd097148529a6e6f6fd512495dd606def
|
[
"MIT"
] | null | null | null |
source code/functions.py
|
muhammadBadawy/napster
|
54205e6cd097148529a6e6f6fd512495dd606def
|
[
"MIT"
] | null | null | null |
source code/functions.py
|
muhammadBadawy/napster
|
54205e6cd097148529a6e6f6fd512495dd606def
|
[
"MIT"
] | null | null | null |
import pickle
# def register(conn, addr, nick):
#
# try:
# users = pickle.load(open("users", "rb"))
# # print users
# except:
# users = {}
# pickle.dump(users, open("users", "wb"))
# # print users[addr[0]]
# try:
# nickname = users[addr[0]]['nick']
# # print 'hi'
# conn.sendall('User already registered with nickname ' + nickname)
# except:
# users[addr[0]] = {}
# users[addr[0]]['nick'] = nick
# users[addr[0]]['fileList'] = {}
# conn.sendall('You have been registered with nickname ' + nick)
#
# pickle.dump(users, open("users", "wb"))
def register(conn, addr, nick, index):
    """Register peer *index* under nickname *nick* in the "users" pickle store.

    Sends a confirmation (or an already-registered notice) back on *conn*.
    *addr* is kept for interface compatibility but is not used; records are
    keyed by the stringified *index*.
    """
    print(index)
    key = str(index)
    try:
        with open("users", "rb") as handle:
            users = pickle.load(handle)
    except (IOError, OSError, EOFError, pickle.PickleError):
        # First run (or unreadable store): start an empty registry on disk.
        users = {}
        with open("users", "wb") as handle:
            pickle.dump(users, handle)
    try:
        # Records are always stored under str(index), so look up the same
        # way. The original looked up the raw index and therefore silently
        # re-registered already-known peers whenever index was an int.
        nickname = users[key]['nick']
        conn.sendall('User already registered with nickname ' + nickname)
    except KeyError:
        users[key] = {'nick': nick, 'fileList': {}}
        conn.sendall('You have been registered with nickname ' + nick)
    with open("users", "wb") as handle:
        pickle.dump(users, handle)
def share(conn, addr, file, index):
    """Add a file to peer *index*'s share list in the "users" pickle store.

    *file* is the raw command payload; its first whitespace-separated token
    is the file name. Replies on *conn* with a confirmation, or an error if
    the peer is not registered.
    """
    try:
        with open("users", "rb") as handle:
            users = pickle.load(handle)
        # Raises KeyError when this peer has no registration record.
        users[str(index)]['nick']
    except (IOError, OSError, EOFError, pickle.PickleError, KeyError):
        conn.sendall('You need to register first')
        return
    fileName = file.split(' ')[0]
    # NOTE(review): an earlier protocol version carried a separate path as
    # the second token; currently the name doubles as the stored value.
    users[str(index)]['fileList'][fileName] = fileName
    with open("users", "wb") as handle:
        pickle.dump(users, handle)
    conn.sendall('File ' + fileName + ' added')
def search(conn, addr, fileName, activePeers):
    """Reply on *conn* with str(dict) of registered peers sharing *fileName*.

    *activePeers* is accepted for interface compatibility but is currently
    unused (the active-peer filter is commented out in this codebase).
    """
    try:
        with open("users", "rb") as handle:
            users = pickle.load(handle)
    except (IOError, OSError, EOFError, pickle.PickleError):
        conn.sendall('ERROR\nNo users registered till now')
        return
    usersHavingFile = {}
    for user, record in users.items():
        if fileName in record['fileList']:
            # NOTE(review): 'port' is assigned the whole user record, not a
            # port number — looks wrong but is preserved; confirm intent.
            usersHavingFile[user] = {
                'nick': record['nick'],
                'filePath': record['fileList'][fileName],
                'port': record,
            }
    conn.sendall(str(usersHavingFile))
# def share(conn, addr, file):
# try:
# users = pickle.load(open("users", "rb"))
# except:
# conn.sendall('You need to register first')
# return
# try:
# nickname = users[addr[0]]['nick']
# except:
# conn.sendall('You need to register first')
# return
#
# fileName = file.split(' ')[0]
# # print fileName
# filePath = file.split(' ')[1]
# # print filePath
# users[addr[0]]['fileList'][fileName] = filePath
# # print users
# pickle.dump(users, open("users", "wb"))
# conn.sendall('File ' + fileName + ' added')
# def search(conn, addr, fileName, activePeers):
# try:
# users = pickle.load(open("users", "rb"))
# # print users
# except:
# conn.sendall('ERROR\nNo users registered till now')
# return
#
# usersHavingFile = {}
# userList = users.keys()
# for user in userList:
# found = False
# # print users[user]['fileList'].keys()
# if fileName in users[user]['fileList'].keys():
# if user in activePeers:
# usersHavingFile[user] = {}
# usersHavingFile[user]['nick'] = users[user]['nick']
# usersHavingFile[user]['filePath'] = users[user]['fileList'][fileName]
#
# conn.sendall(str(usersHavingFile))
| 29.088889
| 87
| 0.554367
| 424
| 3,927
| 5.134434
| 0.132075
| 0.07074
| 0.036748
| 0.04961
| 0.88011
| 0.843362
| 0.838769
| 0.838769
| 0.834175
| 0.834175
| 0
| 0.004258
| 0.282404
| 3,927
| 134
| 88
| 29.30597
| 0.768275
| 0.514897
| 0
| 0.4375
| 0
| 0
| 0.154436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.020833
| null | null | 0.020833
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9dc8ed3c4c4fa2706cc6a37767873c88535a78d8
| 115
|
py
|
Python
|
test/unit/reference_dummy.py
|
barberj/bridge-python
|
1c33df5fa1d92ac6c54bbb6d868c71e1f883e8fe
|
[
"MIT"
] | null | null | null |
test/unit/reference_dummy.py
|
barberj/bridge-python
|
1c33df5fa1d92ac6c54bbb6d868c71e1f883e8fe
|
[
"MIT"
] | null | null | null |
test/unit/reference_dummy.py
|
barberj/bridge-python
|
1c33df5fa1d92ac6c54bbb6d868c71e1f883e8fe
|
[
"MIT"
] | null | null | null |
class ReferenceDummy():
    """No-op stand-in used by reference-handling unit tests."""

    def __init__(self, *args):
        """Accept and ignore any constructor arguments."""
        pass

    def _to_dict(self, *args):
        """Serialize to a fixed placeholder regardless of arguments."""
        return "dummy"
| 16.428571
| 28
| 0.643478
| 14
| 115
| 4.857143
| 0.714286
| 0.235294
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.226087
| 115
| 6
| 29
| 19.166667
| 0.764045
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
9dd1fe9277b140d77a1bcbd39826bf7b60188b87
| 141
|
py
|
Python
|
8kyu/(8 kyu) Return Negative/(8 kyu) Return Negative.py
|
e1r0nd/codewars
|
9b05e32a26ee5f36a4b3f1e76a71e0c79b3c865b
|
[
"MIT"
] | 49
|
2018-04-30T06:42:45.000Z
|
2021-07-22T16:39:02.000Z
|
(8 kyu) Return Negative/(8 kyu) Return Negative.py
|
novsunheng/codewars
|
c54b1d822356889b91587b088d02ca0bd3d8dc9e
|
[
"MIT"
] | 1
|
2020-08-31T02:36:53.000Z
|
2020-08-31T10:14:00.000Z
|
(8 kyu) Return Negative/(8 kyu) Return Negative.py
|
novsunheng/codewars
|
c54b1d822356889b91587b088d02ca0bd3d8dc9e
|
[
"MIT"
] | 25
|
2018-04-02T20:57:58.000Z
|
2021-05-28T15:24:51.000Z
|
# #1
# def make_negative( number ):
# return number if number < 0 else - number
# #2
def make_negative( number ):
    # Negative inputs pass through unchanged; everything else is negated.
    return number if number < 0 else -number
| 23.5
| 47
| 0.652482
| 20
| 141
| 4.5
| 0.55
| 0.155556
| 0.333333
| 0.466667
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027523
| 0.22695
| 141
| 6
| 48
| 23.5
| 0.798165
| 0.553191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d18031999d0f81aab48fc108b2e5c575cf2385a4
| 3,351
|
py
|
Python
|
jimmy.py
|
mvaenskae/cil-2018-road-segmentation
|
512db116f064da519a8ad20483b7dcda7567d596
|
[
"BSD-3-Clause"
] | 1
|
2018-09-16T18:36:01.000Z
|
2018-09-16T18:36:01.000Z
|
jimmy.py
|
mvaenskae/cil-2018-road-segmentation
|
512db116f064da519a8ad20483b7dcda7567d596
|
[
"BSD-3-Clause"
] | null | null | null |
jimmy.py
|
mvaenskae/cil-2018-road-segmentation
|
512db116f064da519a8ad20483b7dcda7567d596
|
[
"BSD-3-Clause"
] | 2
|
2018-09-16T18:36:39.000Z
|
2020-03-29T15:15:36.000Z
|
from cnn_model import *
from label_cnn import LabelCNN
from keras.layers import Activation, Concatenate
class VEGGG(CnnModel):
    """VGG-style patch classifier: repeated strided conv + max-pool stages
    feeding a dropout-regularized dense head with softmax output."""

    # Filter count per downsampling stage; every stage is a 3x3 strided
    # CBR conv followed by 2x2 max-pooling. Replaces six copy-pasted
    # stage blocks in the original.
    _FILTERS = (32, 64, 128, 256, 512, 512)

    def __init__(self, model_name):
        super().__init__()
        self.MODEL_NAME = model_name

    def build_model(self):
        """Assemble the Keras graph and store it in ``self.model``."""
        layers = BasicLayers(relu_version='parametric')
        input_tensor = Input(shape=self.INPUT_SHAPE)
        x = input_tensor
        for filters in self._FILTERS:
            x = layers.cbr(x, filters, kernel_size=(3, 3), strides=(2, 2),
                           dilation_rate=(1, 1), padding='same')
            x = layers._max_pool(x, pool=(2, 2), strides=(2, 2), padding='same')
        x = layers._flatten(x)
        # Hidden width scales with how many patches fit into the context
        # window.
        x = layers._dense(x, 2 * ((self.CONTEXT * self.CONTEXT) // (self.PATCH_SIZE * self.PATCH_SIZE)))
        x = layers._dropout(x, rate=0.5)
        x = layers._act_fun(x)
        x = layers._dense(x, self.NB_CLASSES)
        x = Activation('softmax')(x)
        self.model = Model(inputs=input_tensor, outputs=x)
class Simple(LabelCNN):
    """Compact CNN label classifier on 64x64 inputs with a softmax head."""

    # (filters, kernel_size) per downsampling stage; only the first stage
    # uses a 5x5 kernel. Replaces five copy-pasted stage blocks in the
    # original.
    _STAGES = (
        (64, (5, 5)),
        (128, (3, 3)),
        (256, (3, 3)),
        (512, (3, 3)),
        (512, (3, 3)),
    )

    def __init__(self, model_name):
        super().__init__(image_size=64)
        self.MODEL_NAME = model_name

    def build_model(self):
        """Assemble the Keras graph and store it in ``self.model``."""
        layers = BasicLayers(relu_version='leaky')
        input_tensor = Input(shape=self.INPUT_SHAPE)
        x = input_tensor
        for filters, kernel_size in self._STAGES:
            x = layers.cbr(x, filters, kernel_size=kernel_size, strides=(2, 2),
                           dilation_rate=(1, 1), padding='same')
            x = layers._max_pool(x, pool=(2, 2), strides=(2, 2), padding='same')
        x = layers._flatten(x)
        x = layers._dense(x, 2 * 256)
        x = layers._dropout(x, rate=0.5)
        x = layers._act_fun(x)
        x = layers._dense(x, self.NB_CLASSES)
        x = Activation('softmax')(x)
        self.model = Model(inputs=input_tensor, outputs=x)
| 47.871429
| 106
| 0.600119
| 518
| 3,351
| 3.694981
| 0.127413
| 0.034483
| 0.103448
| 0.188088
| 0.893417
| 0.893417
| 0.893417
| 0.855799
| 0.855799
| 0.855799
| 0
| 0.066515
| 0.214861
| 3,351
| 69
| 107
| 48.565217
| 0.660965
| 0
| 0
| 0.745455
| 0
| 0
| 0.034936
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072727
| false
| 0
| 0.054545
| 0
| 0.163636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d188e271348baf17cd1bc0f0dd7b861beb006e91
| 1,421
|
py
|
Python
|
plotshape.py
|
martinchristen/pyconhk-2020
|
ff4ad82809fb8ce0c1ea4ae69dc32be81728e93c
|
[
"MIT"
] | 2
|
2020-05-09T18:45:22.000Z
|
2020-06-06T20:48:51.000Z
|
plotshape.py
|
martinchristen/pyconhk-2020
|
ff4ad82809fb8ce0c1ea4ae69dc32be81728e93c
|
[
"MIT"
] | null | null | null |
plotshape.py
|
martinchristen/pyconhk-2020
|
ff4ad82809fb8ce0c1ea4ae69dc32be81728e93c
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
from cartopy.feature import ShapelyFeature
from shapely.geometry import shape
def plot(s):
    """Draw shapely geometry *s* on a Plate Carree map with labelled gridlines.

    Returns the gridliner so the caller can tweak label styling further.
    """
    data_crs = ccrs.PlateCarree()
    ax = plt.axes(projection=ccrs.PlateCarree())
    minx, miny, maxx, maxy = s.bounds
    ax.set_extent((minx, maxx, miny, maxy), crs=data_crs)
    feature = ShapelyFeature([s], data_crs, facecolor='#AAFFAA', edgecolor='k')
    ax.add_feature(feature)
    gl = ax.gridlines(
        crs=data_crs, draw_labels=True,
        linewidth=2, color='gray', alpha=0.5, linestyle='--'
    )
    # Only label the bottom and right edges.
    gl.xlabels_top = False
    gl.ylabels_left = False
    label_style = {'size': 10, 'color': 'black'}
    gl.xlabel_style = label_style
    gl.ylabel_style = label_style
    return gl
def plot_merc(s):
    """Draw shapely geometry *s* on a Mercator map with labelled gridlines.

    The extent and feature are still supplied in Plate Carree (lon/lat)
    coordinates; only the display projection differs from ``plot``.
    Returns the gridliner so the caller can tweak label styling further.
    """
    data_crs = ccrs.PlateCarree()
    ax = plt.axes(projection=ccrs.Mercator())
    minx, miny, maxx, maxy = s.bounds
    ax.set_extent((minx, maxx, miny, maxy), crs=data_crs)
    feature = ShapelyFeature([s], data_crs, facecolor='#AAFFAA', edgecolor='k')
    ax.add_feature(feature)
    gl = ax.gridlines(
        crs=data_crs, draw_labels=True,
        linewidth=2, color='gray', alpha=0.5, linestyle='--'
    )
    # Only label the bottom and right edges.
    gl.xlabels_top = False
    gl.ylabels_left = False
    label_style = {'size': 10, 'color': 'black'}
    gl.xlabel_style = label_style
    gl.ylabel_style = label_style
    return gl
| 34.658537
| 95
| 0.635468
| 191
| 1,421
| 4.628272
| 0.308901
| 0.063348
| 0.081448
| 0.072398
| 0.80543
| 0.80543
| 0.80543
| 0.80543
| 0.80543
| 0.80543
| 0
| 0.019383
| 0.201267
| 1,421
| 41
| 96
| 34.658537
| 0.759471
| 0
| 0
| 0.733333
| 0
| 0
| 0.059072
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.133333
| 0
| 0.266667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d1ae488d4e262ffe388fd943071a2cab19caa4ab
| 1,465
|
py
|
Python
|
terrascript/gitlab/r.py
|
amlodzianowski/python-terrascript
|
1111affe6cd30d9b8b7bc74ae4e27590f7d4dc49
|
[
"BSD-2-Clause"
] | null | null | null |
terrascript/gitlab/r.py
|
amlodzianowski/python-terrascript
|
1111affe6cd30d9b8b7bc74ae4e27590f7d4dc49
|
[
"BSD-2-Clause"
] | null | null | null |
terrascript/gitlab/r.py
|
amlodzianowski/python-terrascript
|
1111affe6cd30d9b8b7bc74ae4e27590f7d4dc49
|
[
"BSD-2-Clause"
] | null | null | null |
# terrascript/gitlab/r.py
# Resource stubs for the GitLab Terraform provider: one subclass per
# resource type. The class NAME is what terrascript serializes into the
# Terraform configuration, so no per-resource behavior is needed and
# every body is just ``pass``. (Appears machine-generated — keep the
# flat, repetitive layout if regenerating.)
import terrascript
class gitlab_branch_protection(terrascript.Resource):
    pass
class gitlab_tag_protection(terrascript.Resource):
    pass
class gitlab_group(terrascript.Resource):
    pass
class gitlab_project(terrascript.Resource):
    pass
class gitlab_label(terrascript.Resource):
    pass
class gitlab_group_label(terrascript.Resource):
    pass
class gitlab_pipeline_schedule(terrascript.Resource):
    pass
class gitlab_pipeline_schedule_variable(terrascript.Resource):
    pass
class gitlab_pipeline_trigger(terrascript.Resource):
    pass
class gitlab_project_hook(terrascript.Resource):
    pass
class gitlab_project_push_rules(terrascript.Resource):
    pass
class gitlab_deploy_key(terrascript.Resource):
    pass
class gitlab_deploy_key_enable(terrascript.Resource):
    pass
class gitlab_user(terrascript.Resource):
    pass
class gitlab_project_membership(terrascript.Resource):
    pass
class gitlab_group_membership(terrascript.Resource):
    pass
class gitlab_project_variable(terrascript.Resource):
    pass
class gitlab_group_variable(terrascript.Resource):
    pass
class gitlab_project_cluster(terrascript.Resource):
    pass
class gitlab_service_slack(terrascript.Resource):
    pass
class gitlab_service_jira(terrascript.Resource):
    pass
class gitlab_project_share_group(terrascript.Resource):
    pass
class gitlab_group_cluster(terrascript.Resource):
    pass
| 15.260417
| 62
| 0.789761
| 167
| 1,465
| 6.652695
| 0.185629
| 0.227723
| 0.476148
| 0.554455
| 0.875788
| 0.814581
| 0.167417
| 0
| 0
| 0
| 0
| 0
| 0.14471
| 1,465
| 95
| 63
| 15.421053
| 0.886672
| 0.0157
| 0
| 0.489362
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.489362
| 0.021277
| 0
| 0.510638
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
ae21219f9eaee4b2eb6ba5675746666711634b4a
| 9,991
|
py
|
Python
|
data/get_data.py
|
victorUTF/loterias-datamining
|
878799c89c7b9b02baa9b25a8e627f2264d7027c
|
[
"MIT"
] | null | null | null |
data/get_data.py
|
victorUTF/loterias-datamining
|
878799c89c7b9b02baa9b25a8e627f2264d7027c
|
[
"MIT"
] | null | null | null |
data/get_data.py
|
victorUTF/loterias-datamining
|
878799c89c7b9b02baa9b25a8e627f2264d7027c
|
[
"MIT"
] | null | null | null |
import requests
import zipfile
from bs4 import BeautifulSoup
import codecs
# Getting data from Loterias Caixa and saving files
# # URL of the file containing the data we need
downloadMega = 'http://www1.caixa.gov.br/loterias/_arquivos/loterias/D_mgsasc.zip'
downloadLoto = 'http://www1.caixa.gov.br/loterias/_arquivos/loterias/D_lotfac.zip'
rMega = requests.get(downloadMega, allow_redirects=True)
rLoto = requests.get(downloadLoto, allow_redirects=True)
# 'with' guarantees the archives are flushed and closed (the original used
# bare open(...).write(...), which leaks the file handles).
with open('mega.zip', 'wb') as archive:
    archive.write(rMega.content)
with open('loto.zip', 'wb') as archive:
    archive.write(rLoto.content)
# Unzip these files
with zipfile.ZipFile('mega.zip', 'r') as zip_ref:
    zip_ref.extractall('./unziped_files/mega')
with zipfile.ZipFile('loto.zip', 'r') as zip_ref:
    zip_ref.extractall('./unziped_files/loto')
# Create BeautifulSoup objects for web crawling; the extracted HTML result
# tables are Latin-1 encoded.  Close each handle once parsing is done.
with codecs.open('./unziped_files/loto/d_lotfac.htm', "r", "latin1") as page:
    soupLoto = BeautifulSoup(page, 'html.parser')
with codecs.open('./unziped_files/mega/d_megasc.htm', "r", "latin1") as page:
    soupMega = BeautifulSoup(page, 'html.parser')
# Find all "td" tags — each draw is laid out as one long run of table cells
extractionLoto = soupLoto.find_all('td')
extractionMega = soupMega.find_all('td')
# Lists that accumulate one dict per draw (contest metadata / drawn balls)
concursoLoto = []
sorteioLoto = []
concursoMega = []
sorteioMega = []
# Rolling 'start date' of the current contest (end date of the previous one)
dataInicio = ''
def toFloat(string):
    """Convert a Brazilian-formatted money string (e.g. '1.234,56') to float.

    '.' is the thousands separator and ',' the decimal mark, so the former is
    stripped and the latter swapped before parsing.  This also fixes the digit
    -count bug of the original (which parsed '0,5' as 0.05 via aux[1]/100) and
    accepts values that have no decimal part at all.
    """
    return float(string.replace('.', '').replace(',', '.'))
# ================================== Lotofácil ==================================
# Starts from "Concurso 429" ~ because some data is missing before it
# # Data: 25/05/2009 - Index: 16823
for idx, extract in enumerate(extractionLoto):
    if idx < 16822:
        continue
    # The first harvested draw sits at a fixed index; every later draw is
    # recognised by its end-date cell — the only cell whose text contains '/'.
    first = idx == 16823
    if not first and '/' not in extract.text:
        continue
    if first:
        dataInicio = '21/05/2009'  # start date of the first harvested contest
    # Cell offsets relative to the date cell: -1 contest number, +1..+15 balls,
    # +16 total collected, +17/+20..+23 winner counts, +30 prize value.
    concursoLoto.append({
        'id_concurso': int(extractionLoto[idx-1].text),
        'dataInicio': dataInicio,
        'dataFim': extractionLoto[idx].text,
        'arrecadacaoTotal': toFloat(extractionLoto[idx+16].text),
        'valorDoPremio': toFloat(extractionLoto[idx+30].text),
        'id_tipo': 1,
        # NOTE(review): the original hard-codes id_sort=1 for the first draw
        # only (Megasena uses the contest number in both branches); kept as-is
        # to preserve the generated data — confirm whether this is intended.
        'id_sort': 1 if first else int(extractionLoto[idx-1].text),
        'ganhadores15': int(extractionLoto[idx+17].text),
        'ganhadores14': int(extractionLoto[idx+20].text),
        'ganhadores13': int(extractionLoto[idx+21].text),
        'ganhadores12': int(extractionLoto[idx+22].text),
        'ganhadores11': int(extractionLoto[idx+23].text)
    })
    sorteioLoto.append({
        'concursoPremiado': int(extractionLoto[idx-1].text),
        # bola1..bola15 follow the date cell directly
        **{'bola%d' % n: int(extractionLoto[idx+n].text) for n in range(1, 16)}
    })
    # This draw's end date becomes the next contest's start date
    dataInicio = extractionLoto[idx].text
# Maps the HTML cell text "SIM"/"NÃO" onto the integers 1/0.
def toBool(string):
    """Return 1 when *string* equals 'SIM', otherwise 0."""
    return 1 if string == 'SIM' else 0
# ================================== Megasena ==================================
# Starts from "Concurso 1077" ~ because some data is missing before it
# # Data: 27/05/2009 - Index: 22765
for idx, extract in enumerate(extractionMega):
    if idx < 22765:
        continue
    # First harvested draw sits at a fixed index; later draws are recognised
    # by their end-date cell — the only cell whose text contains '/'.
    first = idx == 22765
    if not first and '/' not in extract.text:
        continue
    if first:
        dataInicio = '23/05/2009'  # start date of the first harvested contest
    # Cell offsets relative to the date cell: -1 contest number, +1..+6 balls,
    # +7 total collected, +8/+12/+14 winner counts, +16 rollover flag,
    # +18 prize value, +19 Mega da Virada accumulator.
    concursoMega.append({
        'id_concurso': int(extractionMega[idx-1].text),
        'dataInicio': dataInicio,
        'dataFim': extractionMega[idx].text,
        'arrecadacaoTotal': toFloat(extractionMega[idx+7].text),
        'valorDoPremio': toFloat(extractionMega[idx+18].text),
        'id_tipo': 2,
        'id_sort': int(extractionMega[idx-1].text),
        'acumulado': toBool(extractionMega[idx+16].text),
        'somadorMegaDaVirada': toFloat(extractionMega[idx+19].text),
        'ganhadores6': int(extractionMega[idx+8].text),
        'ganhadores5': int(extractionMega[idx+12].text),
        'ganhadores4': int(extractionMega[idx+14].text)
    })
    sorteioMega.append({
        'concursoPremiado': int(extractionMega[idx-1].text),
        # bola1..bola6 follow the date cell directly
        **{'bola%d' % n: int(extractionMega[idx+n].text) for n in range(1, 7)}
    })
    # This draw's end date becomes the next contest's start date
    dataInicio = extractionMega[idx].text
| 49.216749
| 92
| 0.504554
| 898
| 9,991
| 5.571269
| 0.216036
| 0.180092
| 0.179892
| 0.03358
| 0.80012
| 0.754347
| 0.754347
| 0.754347
| 0.739956
| 0.722766
| 0
| 0.044327
| 0.354219
| 9,991
| 202
| 93
| 49.460396
| 0.731091
| 0.09969
| 0
| 0.721854
| 0
| 0
| 0.12864
| 0.007364
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013245
| false
| 0
| 0.02649
| 0
| 0.059603
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8897f7cff5dc5a18284333e7e9c76d0eef06e6d8
| 47
|
py
|
Python
|
Orest Lab1/2.py
|
PetroSidliar/HelloPython
|
7df40e339d5615e7b305ae8c189a5061607adc92
|
[
"MIT"
] | null | null | null |
Orest Lab1/2.py
|
PetroSidliar/HelloPython
|
7df40e339d5615e7b305ae8c189a5061607adc92
|
[
"MIT"
] | null | null | null |
Orest Lab1/2.py
|
PetroSidliar/HelloPython
|
7df40e339d5615e7b305ae8c189a5061607adc92
|
[
"MIT"
] | null | null | null |
# Demonstrates float (true) division versus truncating to an integer.
m = 35.0 / 8.0  # true division -> 4.375
# int() truncates the true-division result toward zero -> 4
n = int(35/8)
print(m)
print(n)
| 9.4
| 14
| 0.531915
| 13
| 47
| 1.923077
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216216
| 0.212766
| 47
| 4
| 15
| 11.75
| 0.459459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
ee28bf426b84f8291fe79aa934cb696fe7bc722a
| 3,302
|
py
|
Python
|
home/migrations/0007_auto_20210221_2103.py
|
SmithJesko/volny-films
|
7c50713eb1d2c2d5984700a5de20a12e4045e1b9
|
[
"MIT"
] | 1
|
2021-02-23T00:12:43.000Z
|
2021-02-23T00:12:43.000Z
|
home/migrations/0007_auto_20210221_2103.py
|
SmithJesko/volny-films
|
7c50713eb1d2c2d5984700a5de20a12e4045e1b9
|
[
"MIT"
] | null | null | null |
home/migrations/0007_auto_20210221_2103.py
|
SmithJesko/volny-films
|
7c50713eb1d2c2d5984700a5de20a12e4045e1b9
|
[
"MIT"
] | 1
|
2021-02-23T06:04:13.000Z
|
2021-02-23T06:04:13.000Z
|
# Generated by Django 2.2 on 2021-02-22 03:03
from django.db import migrations, models
class Migration(migrations.Migration):
    """Set every `movie` text column to max_length=512, allowing blank/NULL
    on all optional columns."""

    dependencies = [
        ('home', '0006_auto_20210214_2205'),
    ]

    # One AlterField per column, in the order Django generated them.
    # The required columns (language, movie_id, popularity, release_date,
    # title) keep a plain CharField; every other column becomes blank/null.
    operations = [
        migrations.AlterField(
            model_name='movie',
            name=column,
            field=(models.CharField(max_length=512)
                   if column in ('language', 'movie_id', 'popularity',
                                 'release_date', 'title')
                   else models.CharField(blank=True, max_length=512,
                                         null=True)),
        )
        for column in ('actors', 'added', 'awards', 'director', 'genre',
                       'imdb_id', 'imdb_rating', 'language', 'media_type',
                       'movie_id', 'plot', 'popularity', 'poster', 'rated',
                       'release_date', 'runtime', 'title', 'writer')
    ]
| 31.75
| 74
| 0.54361
| 319
| 3,302
| 5.489028
| 0.172414
| 0.097658
| 0.256996
| 0.298115
| 0.850371
| 0.850371
| 0.828669
| 0.828669
| 0.828669
| 0.828669
| 0
| 0.038251
| 0.334949
| 3,302
| 103
| 75
| 32.058252
| 0.759107
| 0.013022
| 0
| 0.742268
| 1
| 0
| 0.07553
| 0.007062
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010309
| 0
| 0.041237
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
ee5d40b596189c355b7d5bc64488ac6d717c2ce5
| 282
|
py
|
Python
|
wordcount/forms.py
|
saurabht16/wordcount
|
7207f35d97c988a8320b9096eb432c03571a4c79
|
[
"Apache-2.0"
] | null | null | null |
wordcount/forms.py
|
saurabht16/wordcount
|
7207f35d97c988a8320b9096eb432c03571a4c79
|
[
"Apache-2.0"
] | null | null | null |
wordcount/forms.py
|
saurabht16/wordcount
|
7207f35d97c988a8320b9096eb432c03571a4c79
|
[
"Apache-2.0"
] | null | null | null |
from django import forms
class HomeForm(forms.Form):
    """Home-page form: a single large free-text field to be processed."""
    your_text = forms.CharField(
        label='Text To Process',
        max_length=2000,
        widget=forms.Textarea,
    )
class CountForm(forms.Form):
    """Word-count form: same single free-text field as HomeForm."""
    your_text = forms.CharField(
        label='Text To Process',
        max_length=2000,
        widget=forms.Textarea,
    )
| 35.25
| 98
| 0.758865
| 40
| 282
| 5.25
| 0.475
| 0.085714
| 0.12381
| 0.161905
| 0.771429
| 0.771429
| 0.771429
| 0.771429
| 0.771429
| 0.771429
| 0
| 0.03252
| 0.12766
| 282
| 8
| 99
| 35.25
| 0.821138
| 0
| 0
| 0.4
| 0
| 0
| 0.106007
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
c9d6d8906084a9c393728c6e2f49a184475e24cf
| 96
|
py
|
Python
|
stable_baselines/poar/__init__.py
|
BillChan226/Robotic
|
a6a8052e105369656d34fffc4f7ca4475dcc38df
|
[
"MIT"
] | 3
|
2021-09-17T13:53:06.000Z
|
2022-02-08T03:33:20.000Z
|
stable_baselines/poar/__init__.py
|
BillChan226/POAR-SRL-4-Robot
|
a6a8052e105369656d34fffc4f7ca4475dcc38df
|
[
"MIT"
] | null | null | null |
stable_baselines/poar/__init__.py
|
BillChan226/POAR-SRL-4-Robot
|
a6a8052e105369656d34fffc4f7ca4475dcc38df
|
[
"MIT"
] | null | null | null |
from stable_baselines.poar.poar import POAR
from stable_baselines.poar.poar_mmd import POAR_MMD
| 32
| 51
| 0.875
| 16
| 96
| 5
| 0.375
| 0.25
| 0.475
| 0.575
| 0.675
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 96
| 3
| 51
| 32
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a00c1446db76d5517821c150dd2cf33f3b5ea61f
| 93
|
py
|
Python
|
pymontecarlo/results/__init__.py
|
pymontecarlo/pymontecarlo
|
87050041724feb17f1ccff5794e9830c3209244e
|
[
"Apache-2.0"
] | 5
|
2018-04-10T07:15:06.000Z
|
2021-07-01T15:40:29.000Z
|
pymontecarlo/results/__init__.py
|
pymontecarlo/pymontecarlo
|
87050041724feb17f1ccff5794e9830c3209244e
|
[
"Apache-2.0"
] | 73
|
2015-09-04T09:48:29.000Z
|
2022-01-03T17:49:01.000Z
|
pymontecarlo/results/__init__.py
|
pymontecarlo/pymontecarlo
|
87050041724feb17f1ccff5794e9830c3209244e
|
[
"Apache-2.0"
] | 4
|
2016-05-17T12:57:20.000Z
|
2021-01-31T10:55:24.000Z
|
from pymontecarlo.results.photonintensity import *
from pymontecarlo.results.kratio import *
| 31
| 50
| 0.849462
| 10
| 93
| 7.9
| 0.6
| 0.405063
| 0.582278
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086022
| 93
| 2
| 51
| 46.5
| 0.929412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4e4cf3022b65204dca56954e099aba6f5990a8d6
| 3,904
|
py
|
Python
|
silo/benchmarks/results/istc11-3-14-13.py
|
anshsarkar/TailBench
|
25845756aee9a892229c25b681051591c94daafd
|
[
"MIT"
] | 274
|
2015-01-23T16:24:09.000Z
|
2022-02-22T03:16:14.000Z
|
silo/benchmarks/results/istc11-3-14-13.py
|
anshsarkar/TailBench
|
25845756aee9a892229c25b681051591c94daafd
|
[
"MIT"
] | 3
|
2015-03-17T11:52:36.000Z
|
2019-07-22T23:04:25.000Z
|
silo/benchmarks/results/istc11-3-14-13.py
|
anshsarkar/TailBench
|
25845756aee9a892229c25b681051591c94daafd
|
[
"MIT"
] | 94
|
2015-01-07T06:55:36.000Z
|
2022-01-22T08:14:15.000Z
|
# Benchmark sweep results: (config, (value1, value2)) pairs for the 'ycsb'
# and 'tpcc' benchmarks, comparing 'kvdb' against 'ndb-proto2' while scaling
# threads and scale_factor together.  The exact semantics of the two numeric
# values come from the silo benchmark harness — verify there before reuse.
RESULTS = [
    ({'scale_factor': 1000, 'threads': 1, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (559456.0, 0.0)),
    ({'scale_factor': 1000, 'threads': 1, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (208605.0, 0.0)),
    ({'scale_factor': 4000, 'threads': 4, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (1772190.0, 0.0)),
    ({'scale_factor': 4000, 'threads': 4, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (869710.0, 0.0)),
    ({'scale_factor': 8000, 'threads': 8, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (3070970.0, 0.0)),
    ({'scale_factor': 8000, 'threads': 8, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (1703070.0, 0.0)),
    ({'scale_factor': 12000, 'threads': 12, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (3899520.0, 0.0)),
    ({'scale_factor': 12000, 'threads': 12, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (2412830.0, 0.0)),
    ({'scale_factor': 16000, 'threads': 16, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (4281320.0, 0.0)),
    ({'scale_factor': 16000, 'threads': 16, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (2834350.0, 0.0)),
    ({'scale_factor': 20000, 'threads': 20, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (4407900.0, 0.0)),
    ({'scale_factor': 20000, 'threads': 20, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (3180140.0, 0.0)),
    ({'scale_factor': 24000, 'threads': 24, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (4469700.0, 0.0)),
    ({'scale_factor': 24000, 'threads': 24, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (3413230.0, 0.0)),
    ({'scale_factor': 28000, 'threads': 28, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (4511650.0, 0.0)),
    ({'scale_factor': 28000, 'threads': 28, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (4222430.0, 0.0)),
    ({'scale_factor': 32000, 'threads': 32, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'ycsb'}, (4969290.0, 0.0)),
    ({'scale_factor': 32000, 'threads': 32, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'ycsb'}, (3812400.0, 0.0166666)),
    ({'scale_factor': 1, 'threads': 1, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (28786.0, 0.0)),
    ({'scale_factor': 1, 'threads': 1, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (11423.1, 0.0)),
    ({'scale_factor': 4, 'threads': 4, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (101236.0, 0.0)),
    ({'scale_factor': 4, 'threads': 4, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (43745.5, 0.583332)),
    ({'scale_factor': 8, 'threads': 8, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (200612.0, 0.0)),
    ({'scale_factor': 8, 'threads': 8, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (88838.5, 1.24999)),
    ({'scale_factor': 12, 'threads': 12, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (287726.0, 0.0)),
    ({'scale_factor': 12, 'threads': 12, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (124150.0, 1.63333)),
    ({'scale_factor': 16, 'threads': 16, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (373223.0, 0.0)),
    ({'scale_factor': 16, 'threads': 16, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (165641.0, 2.25)),
    ({'scale_factor': 20, 'threads': 20, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (469325.0, 0.0)),
    ({'scale_factor': 20, 'threads': 20, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (203462.0, 2.63333)),
    ({'scale_factor': 24, 'threads': 24, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (542229.0, 0.0)),
    ({'scale_factor': 24, 'threads': 24, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (237899.0, 2.84999)),
    ({'scale_factor': 28, 'threads': 28, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (604902.0, 0.0)),
    ({'scale_factor': 28, 'threads': 28, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (268282.0, 3.06666)),
    ({'scale_factor': 32, 'threads': 32, 'txn_flags': 1, 'db': 'kvdb', 'bench': 'tpcc'}, (649687.0, 0.0)),
    ({'scale_factor': 32, 'threads': 32, 'txn_flags': 1, 'db': 'ndb-proto2', 'bench': 'tpcc'}, (245272.0, 2.76666)),
]
| 1,952
| 3,903
| 0.570184
| 595
| 3,904
| 3.620168
| 0.132773
| 0.050139
| 0.150418
| 0.183844
| 0.861653
| 0.848654
| 0.848654
| 0.815227
| 0.706592
| 0.31662
| 0
| 0.172622
| 0.111168
| 3,904
| 1
| 3,904
| 3,904
| 0.448127
| 0
| 0
| 0
| 0
| 0
| 0.42418
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
14d4a9bc25a9241cc0fb462e2d7e038d54897c48
| 22,366
|
py
|
Python
|
plotterapp.py
|
martinlink00/plotterapp
|
247262f8e84921c593d8cce441babad02cc64171
|
[
"MIT"
] | null | null | null |
plotterapp.py
|
martinlink00/plotterapp
|
247262f8e84921c593d8cce441babad02cc64171
|
[
"MIT"
] | null | null | null |
plotterapp.py
|
martinlink00/plotterapp
|
247262f8e84921c593d8cce441babad02cc64171
|
[
"MIT"
] | null | null | null |
###############################################################################
"""This is the main GUI app used to plot and analyse data from the database"""
###############################################################################
#Note that no camera needs to be connected to the computer, in order to use this app.
#All it does is extract data from the influxdb database and plot it.
#This was done intentionally, so that maybe the influx Database can be run on a remote server.
from datetime import datetime as dt
from datetime import timedelta
import time
import numpy as np
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotter_interface as im
import os
###############################################################################
# Dash boilerplate: external stylesheet and app object.
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
# Default export destination pre-filled into the GUI path inputs.
EXPORTPATH=os.getcwd()
# Interface to the influxdb database; exposes which beam/temperature
# measurements exist (.beamsindb / .tempindb) and the plotting helpers.
guiintplot=im.Guiinterfaceplotter()
def _build_layout(beam_options, temp_options):
    """Build the app layout: a beam panel and a temperature panel side by side.

    Each *options* argument is the list of measurement names found in the
    database; an empty/falsy list renders that panel's dropdown disabled.
    Replaces the original four near-identical ~90-line if/elif branches, which
    differed only in which dropdowns were populated.  Also fixes one branch
    that inconsistently omitted initial_visible_month on the temperature
    date picker.
    """
    inline = {'width': '95%', 'height': '95%', 'display': 'inline-block'}

    def selector(element_id, options):
        # Populated dropdown when measurements exist, disabled placeholder
        # otherwise — same component id either way, so callbacks still bind.
        if options:
            return dcc.Dropdown(
                id=element_id,
                options=[{'label': i, 'value': i} for i in options],
                value=options[0],
            )
        return dcc.Dropdown(id=element_id, disabled=True)

    def range_picker(element_id):
        return dcc.DatePickerRange(
            id=element_id,
            clearable=True,
            min_date_allowed=dt(2020, 4, 1),
            initial_visible_month=dt.fromtimestamp(time.time()),
            max_date_allowed=dt(2050, 1, 1),
        )

    def export_controls(suffix):
        # Export-to-.dat region shared by both panels; component ids are
        # suffixed so the two regions stay independently addressable.
        return html.Div([
            html.H3('Export to ".dat" file:'),
            dcc.Input(
                id="path-input_" + suffix,
                type='text',
                value=EXPORTPATH,
                placeholder="Export destination",
                style={'width': '100%'}),
            html.Button(id='export-button_' + suffix, n_clicks=0,
                        children='Submit'),
            html.Div(id="button-div_" + suffix),
        ])

    beam_panel = html.Div([
        html.H3('Beam Data Analysis'),
        html.Div([
            selector('beam-selection', beam_options),
            html.Div('Select time range: (the last day is portrayed if nothing is selected)'),
            range_picker('rangepicker_cam'),
            dcc.Graph(
                id='beamgraph'
            ),
            dcc.Checklist(
                id='field-selection',
                options=[{'label': 'Horizontal position', 'value': 'hcenter'},
                         {'label': 'Vertical position', 'value': 'vcenter'},
                         {'label': 'Waist (large axis)', 'value': 'largewaist'},
                         {'label': 'Waist (small axis)', 'value': 'smallwaist'},
                         {'label': 'Angle', 'value': 'angle'}],
                value=['hcenter', 'vcenter', 'largewaist', 'smallwaist', 'angle'],
                labelStyle={'display': 'inline-block'}
            ),
            export_controls('cam'),
            dcc.Interval(
                id='interval-beam',
                interval=5 * 1000,  # in milliseconds
                n_intervals=0
            )
        ], style=inline),
    ], style=inline)

    temp_panel = html.Div([
        html.H3('Temperature Data Analysis'),
        html.Div([
            selector('temp-selection', temp_options),
            html.Div('Select time range: (the last day is portrayed if nothing is selected)'),
            range_picker('rangepicker_temp'),
            dcc.Graph(
                id='tempgraph'
            ),
            export_controls('temp'),
        ], style=inline)
    ], className="six columns", style=inline)

    return html.Div([
        html.Div([beam_panel, temp_panel], className="row", style={
            'borderBottom': 'thin lightgrey solid',
            'backgroundColor': 'rgb(250, 250, 250)',
            'padding': '10px 5px',
            'columnCount': 2
        }),
    ], style={'columnCount': 1})


app.layout = _build_layout(guiintplot.beamsindb, guiintplot.tempindb)
@app.callback(
    [dash.dependencies.Output('beamgraph', 'figure'),
     dash.dependencies.Output('tempgraph', 'figure')],
    [dash.dependencies.Input('beam-selection', 'value'),
     dash.dependencies.Input('temp-selection', 'value'),
     dash.dependencies.Input('field-selection', 'value'),
     dash.dependencies.Input('interval-beam', 'n_intervals'),
     dash.dependencies.Input('rangepicker_cam', 'start_date'),
     dash.dependencies.Input('rangepicker_cam', 'end_date'),
     dash.dependencies.Input('rangepicker_temp', 'start_date'),
     dash.dependencies.Input('rangepicker_temp', 'end_date')])
def update_graphs(beamselection, tempselection, fieldselection, n,
                  startdatecam, enddatecam, startdatetemp, enddatetemp):
    """Redraw both graphs on any selection change, date change, or interval tick."""
    guiintplot.selectbeam(beamselection)
    guiintplot.selecttemp(tempselection)

    def quoted(date, extend=False):
        # Wrap the date in single quotes for the query; '+1d' on the end
        # date was necessary in order to include the whole 'end date'.
        if date is None:
            return None
        return "'" + date + "'" + ("+1d" if extend else "")

    beam_range = [quoted(startdatecam), quoted(enddatecam, extend=True)]
    temp_range = [quoted(startdatetemp), quoted(enddatetemp, extend=True)]
    return (guiintplot.plotbeamgraph(fieldselection, beam_range),
            guiintplot.plottempgraph(temp_range))
@app.callback(
    dash.dependencies.Output('button-div_cam', 'children'),
    [dash.dependencies.Input('export-button_cam', 'n_clicks'),
     dash.dependencies.Input('field-selection', 'value'),
     dash.dependencies.Input('rangepicker_cam', 'start_date'),
     dash.dependencies.Input('rangepicker_cam', 'end_date')],
    [dash.dependencies.State('path-input_cam', 'value')]
)
def onexportbuttoncam(n_clicks, fieldselection, startdate, enddate, path):
    """Export the camera traces to DAT files when the button was pressed.

    The export only runs when the click count has advanced past the
    counter kept in `guiintplot`, so date-picker changes alone do not
    trigger a file write.
    """
    if guiintplot.camexportcounter < n_clicks:
        start_quoted = None if startdate is None else "'" + startdate + "'"
        # "+1d" was necessary in order to include the whole 'end date' in the query
        end_quoted = None if enddate is None else "'" + enddate + "'" + "+1d"
        guiintplot.writedatfile_cam(path, fieldselection, [start_quoted, end_quoted])
    return "Pressing the button will export one DAT-File for each trace you can see in the plot above."
@app.callback(
    dash.dependencies.Output('button-div_temp', 'children'),
    [dash.dependencies.Input('export-button_temp', 'n_clicks'),
     dash.dependencies.Input('rangepicker_temp', 'start_date'),
     dash.dependencies.Input('rangepicker_temp', 'end_date')],
    [dash.dependencies.State('path-input_temp', 'value')]
)
def onexportbuttontemp(n_clicks, startdate, enddate, path):
    """Export the temperature traces to DAT files when the button was pressed.

    Mirrors `onexportbuttoncam`: the write only happens when the click
    count exceeds the export counter kept in `guiintplot`.
    """
    if guiintplot.tempexportcounter < n_clicks:
        start_quoted = None if startdate is None else "'" + startdate + "'"
        # "+1d" was necessary in order to include the whole 'end date' in the query
        end_quoted = None if enddate is None else "'" + enddate + "'" + "+1d"
        guiintplot.writedatfile_temp(path, [start_quoted, end_quoted])
    return "Pressing the button will export one DAT-File for each trace you can see in the plot above."
if __name__ == '__main__':
    # Start the Dash development server on port 8051.
    app.run_server(port=8051, debug=True)
| 39.868093
| 124
| 0.440892
| 1,902
| 22,366
| 5.109359
| 0.123554
| 0.034575
| 0.037045
| 0.029636
| 0.855526
| 0.838341
| 0.829903
| 0.816732
| 0.816732
| 0.806339
| 0
| 0.023983
| 0.422069
| 22,366
| 560
| 125
| 39.939286
| 0.727835
| 0.034964
| 0
| 0.852097
| 0
| 0
| 0.219798
| 0
| 0.00883
| 0
| 0
| 0
| 0
| 1
| 0.006623
| false
| 0
| 0.019868
| 0
| 0.033113
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
14e8acbb532c5a118d0f47e7fc4ee1186d2a3560
| 20,132
|
py
|
Python
|
tasks-deploy/hexandxor/generate.py
|
irdkwmnsb/lkshl-ctf
|
e5c0200ddc8ba73df5f321b87b9763fb1bbaba57
|
[
"MIT"
] | 3
|
2021-03-30T06:27:58.000Z
|
2021-04-03T17:56:35.000Z
|
tasks-deploy/hexandxor/generate.py
|
irdkwmnsb/lkshl-ctf
|
e5c0200ddc8ba73df5f321b87b9763fb1bbaba57
|
[
"MIT"
] | null | null | null |
tasks-deploy/hexandxor/generate.py
|
irdkwmnsb/lkshl-ctf
|
e5c0200ddc8ba73df5f321b87b9763fb1bbaba57
|
[
"MIT"
] | null | null | null |
# Task title shown to participants.
TITLE = "Hex and Xor"

# Task statement (in Russian — user-facing text, deliberately not translated).
# It shows the XOR-chain encryption snippet and asks the participant to
# recover the flag; `{0}` is filled with a participant-specific ciphertext
# by generate() below.
STATEMENT_TEMPLATE = '''
Дан код, шифрующий флаг, и результат его работы. Получите флаг.
`flag = 'some string'
key = '' # one byte key was removed for security reasons
output = ""
for character in flag:
temp = ord(character) ^ ord(key)
output += (hex(temp))[2:] + ' '
key = chr(temp)
print(output)`
stdout:
```{0}```
'''
def generate(context):
    """Build the task statement for one participant.

    The ciphertext is picked deterministically from `tokens` via the
    participant id, so the same participant always gets the same token.
    """
    participant = context['participant']
    ciphertext = tokens[participant.id % len(tokens)]
    return TaskStatement(TITLE, STATEMENT_TEMPLATE.format(ciphertext))
tokens = ['9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c f 7f 1b 6d 5e 6b 1b 4a 37 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 8 5d 1a 52 38 40 32 64 19 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 36 59 6b 19 63 a 78 10 6d ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c a 7a 3 6e 4 49 2e 47 3a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 4 70 21 57 65 e 62 34 49 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7c b 6a 2f 65 3f 5a 1f 62 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 29 1a 6f 8 69 2a 53 64 19 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3e 75 39 57 15 41 24 55 28 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2d 43 2b 1f 67 1d 2a 1a 67 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3c 79 2b 7a 3d 5c 28 6c 11 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 26 41 19 5c 18 6c 39 78 5 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 38 6f 26 7e 12 43 34 42 3f ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 74 16 54 27 6d 20 4a 1b 66 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 79 2a 7f 34 71 17 55 65 18 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7d 39 7b 18 60 38 5c 39 44 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1a 56 30 5b 16 79 0 69 14 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2b 60 30 7e 12 26 11 46 3b ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2a 46 24 6a 3d 7e 13 55 28 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 26 72 33 7a f 3f 5b 34 49 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3 47 31 79 29 78 1c 6f 12 ', '9 42 e 75 1d 2e 56 9 
3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 29 1f 47 35 59 1f 26 61 1c ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1d 72 47 10 76 31 0 68 15 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3f 7c 4b 7d 8 4f 21 49 34 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2a 66 a 5e 3d 72 16 61 1c ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3d f 7a 2e 57 3a 40 15 68 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 22 45 33 79 1c 64 20 47 3a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2a 48 2a 52 1c 2f 69 1c 61 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c d 4f 1 43 28 40 78 15 68 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 74 1 47 15 41 33 5e 6d 10 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7a f 39 5d 2b 71 1 6a 17 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c d 45 24 6a 7 4c 75 1d 60 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3c 8 3b 5a 2f 41 38 5a 27 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c f 4a 25 49 28 42 23 57 2a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 78 2f 18 2a 62 29 43 7 7a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7e 2a 6d 26 43 20 4a 7a 7 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7 50 34 6d 39 78 0 6e 13 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7c 14 78 1c 56 0 46 24 59 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3e 6 69 1d 55 17 5c 2d 50 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c f 62 7 51 28 5f 67 a 77 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3d 74 2d 5d 27 73 17 20 5d ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 
13 4c 20 42 c 3c 4d 39 73 30 4d ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7e 13 5e 12 63 a 64 1 7c ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7b c 55 25 4d 3b 6c 35 48 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2e 42 1a 5e 16 5b 30 5a 27 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1 30 69 59 3f 47 2e 77 a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c d 47 26 1e 6a 5b e 3b 46 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 15 22 47 3f 5c 2a 47 f 72 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c b 78 36 5d 11 7b 19 73 e ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7e 39 7f 4e f 3b 71 38 45 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7a 4b 5 57 6e 1c 69 1a 67 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3b 79 4e 14 22 46 32 5f 22 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 16 46 77 26 74 2c 68 11 6c ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 34 6 62 23 76 37 67 3f 42 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 26 74 4d c 3d 67 55 32 4f ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c a 39 51 26 17 25 44 2e 53 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 79 20 6a 0 45 2b 72 1e 63 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1c 75 11 63 35 4c 7c 2e 53 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7a 2f 6a 8 49 30 55 10 6d ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 35 4d 37 44 72 2 5b 1d 60 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3 30 6a 32 5d 1e 4b 7e 3 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1e 4c 6 4f 76 4e 0 34 49 ', '9 42 
e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 0 41 4 3d 45 3d 4e 19 64 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2e 48 30 48 3 36 5e 17 6a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7d d 3e 78 4b 3f 68 5a 27 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3e 53 1e 29 4a 23 72 7 7a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 78 13 47 76 1f 27 6a 5a 27 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7 64 c 7c 48 21 52 2a 57 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7a 4e 77 11 4b 28 4f 37 4a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 0 30 5c 5 50 17 72 19 64 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 14 42 29 1c 59 15 7d 1b 66 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 27 51 1e 51 34 77 35 5f 22 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 20 55 5 42 34 60 23 53 2e ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7 52 3f 50 7 69 19 20 5d ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2 43 13 62 6 6d 54 1f 62 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2f 5f 6f 19 2d 40 e 62 1f ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2a 69 51 1c 71 2 30 66 1b ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7a 35 4d 8 50 1b 2b 13 6e ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7d 5 31 70 12 67 32 59 24 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 19 7a 4d 38 61 3 47 29 54 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7c 2e 56 c 4f 1d 57 31 4c ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3d e 6d 1c 4e b 7b 33 4e ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 
4b 78 27 53 63 13 4c 36 50 15 2c 67 55 62 57 2a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 5 55 37 7a 38 57 d 3d 40 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1a 55 1a 4c 3b 4b 9 42 3f ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 27 14 46 22 67 20 10 7f 2 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2f 62 53 1f 6a 21 4a 10 6d ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3f d 45 3f 54 63 28 6b 16 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7c a 47 c 59 1e 2c 18 65 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 34 56 1c 6c 2b 60 24 6e 13 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 14 73 0 6d 2c 5a 6e f 72 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 36 67 8 43 2f 6e 3f 4d 30 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 5 34 43 6 7f 48 9 7c 1 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 35 6 5f 10 51 7 46 8 75 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 26 42 2a 6e 2c 4e 18 28 55 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7f 35 4c 1c 66 2f 58 3a 47 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 36 66 30 66 9 48 4 6e 13 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 8 40 d 46 1e 53 31 7 7a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 22 7b 39 8 6e 5c 3a 78 5 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 36 6 4d 79 1d 78 1c 53 2e ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1e 6b 29 5b 21 10 46 3c 41 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 14 4e 3f 6d 3f 79 37 4 79 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 74 17 50 5 33 5e 37 5a 
27 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 19 6d 21 75 0 75 26 52 2f ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c d 5c 3b 4b c 7e b 66 1b ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 75 2 73 45 21 62 23 44 39 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 0 49 2c 1b 77 22 46 21 5c ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3c 8 7b 2f 65 b 7e 18 65 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 74 37 7a 2d 7a f 64 34 49 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 25 77 4 46 2 56 3d 57 2a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 26 6a 2d 74 1e 49 21 68 15 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 4 6d 15 61 35 5 41 75 8 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 18 6d 3a 9 3e 6c 1c 55 28 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 25 52 15 67 e 54 63 37 4a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c f 5b 8 47 2b 5a 34 5c 21 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1f 70 3 46 8 6a 3a 7f 2 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7e 2b 78 48 38 49 4 56 2b ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1e 74 1 57 65 54 62 38 45 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c b 66 1c 52 61 2 45 8 75 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 19 28 11 24 7c 31 5b 29 54 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 18 6f 29 7c 12 68 39 6e 13 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7c 29 5f 34 7d 38 f 6e 13 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7b 4e 18 2a 5b b 73 38 45 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 
52 d 39 4b 78 27 53 63 13 4c 14 7d 44 32 68 b 7c 1f 62 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1e 54 61 25 5f c 58 35 48 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 16 4e 3e 5b 1a 7e 4f 19 64 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 15 42 10 4a 0 74 11 43 3e ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7c 1e 6b 26 7e 9 70 1c 61 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 79 17 2e 6f 22 64 8 5d 20 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c f 44 71 45 30 67 3f 9 74 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c d 46 1e 64 3c 4f 19 58 25 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 75 1a 70 1b 49 22 49 78 5 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7a 4e 7d 13 6a 1e 46 35 48 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 75 d 6c 5f 15 7e 4b 5 78 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 5 69 10 7d 19 58 2e 69 14 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2b 49 3b 4d 2c 43 15 59 24 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2 64 36 40 6 73 3d c 71 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7 37 75 36 47 2b 6e f 72 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7d 32 56 12 4b 25 17 71 c ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 18 69 25 51 37 4e 27 73 e ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 8 5a 14 4d 2c 43 73 35 48 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7c 2f 6a 24 6e 23 6c 59 24 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 9 3c 7f 3a 4f 23 5b 6a 17 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1f 7d 28 70 
13 76 17 54 29 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1 38 48 4 63 8 4d 34 49 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3d 49 25 16 2e 7e 14 44 39 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 21 4b 72 41 74 10 60 d 70 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 75 1a 56 21 68 5c 4 46 3b ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2a 4d 6 4d 38 48 22 17 6a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 27 4f 0 61 7 40 3a 4b 36 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 8 6e 37 76 4e 7b 11 6b 16 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3c 5 6c 36 72 7 65 4 79 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7f d 3f 54 30 66 35 5b 26 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1c 79 a 32 41 77 2e 7e 3 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 39 7e 1f 6b 32 40 25 76 b ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c d 49 7b 30 79 2e 79 35 48 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 5 4d b 41 27 16 27 4e 33 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 8 47 1e 5d 29 4d 7a d 70 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 20 17 66 34 c 7c a 65 18 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c d 40 18 53 3c 79 34 42 3f ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 25 50 35 d 43 8 3b 6e 13 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3d 4 62 17 47 28 51 3d 40 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7c 4c 1a 6f 3b 7a 2 56 2b ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 28 7b 1f 2a 42 38 71 7 7a ', '9 42 e 75 1d 2e 56 9 3d 53 37 
68 10 20 52 d 39 4b 78 27 53 63 13 4c 1 4d 3c 70 1e 5b 6b 6 7b ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c a 3d 64 1c 4a 7c 30 42 3f ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3 6c 2a 68 3c 58 6c 1 7c ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 0 70 6 4a c 7c 4c 5 78 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 25 5f d 5c 24 57 63 e 73 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 26 62 51 1b 50 5 40 3a 47 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 24 69 1e 4e 3e 44 3c 50 2d ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c b 5b 1a 4a 5 76 17 4f 32 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1b 50 26 15 6f 3 56 1b 66 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3f e 43 16 6c 6 75 4d 30 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 38 53 3a 50 65 2c 40 c 71 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3f 6d 6 43 30 7 5e 66 1b ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3e 4e 18 5a 3f 7d 4d 17 6a ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2b 46 1 72 25 13 41 72 f ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 2 76 1b 7e 6 77 43 9 74 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1f 48 3d 50 2 7b 23 66 1b ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 29 7a 9 5e 33 71 44 d 70 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 14 47 6 53 9 30 9 45 38 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 8 30 65 f 3e 8 67 3e 43 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c f 65 f 4c 21 78 b 68 15 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1c 74 3a 73 19 5e 
35 40 3d ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c d 6a 33 49 1 63 12 4b 36 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 29 4a 7c 49 a 61 0 6f 12 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1c 4f 19 54 39 71 36 72 f ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 8 7e 4b 7b 31 64 10 5b 26 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 3 5a 1c 71 43 e 67 15 68 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 79 20 48 23 48 32 47 a 77 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 36 6f 5b 3c 69 2c 4f 4 79 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7c 31 66 12 48 1e 7f f 72 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 9 4d 8 49 b 4d 22 78 5 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7b 11 73 9 7f 4e 7e 10 6d ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 21 11 21 42 7b 32 60 d 70 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 20 4f 19 52 19 70 5 66 1b ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 1a 7c 3d 6b 22 51 3d 58 25 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 34 43 8 6f 1c 2c 46 72 f ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 7 57 6f 19 75 42 70 3d 40 ', '9 42 e 75 1d 2e 56 9 3d 53 37 68 10 20 52 d 39 4b 78 27 53 63 13 4c 0 71 30 6a 3e 9 3a 3 7e ']
| 875.304348
| 19,562
| 0.621746
| 6,669
| 20,132
| 1.876593
| 0.025941
| 0.048182
| 0.063923
| 0.095885
| 0.779305
| 0.779305
| 0.779305
| 0.779305
| 0.779305
| 0.779305
| 0
| 0.761724
| 0.344377
| 20,132
| 23
| 19,562
| 875.304348
| 0.186454
| 0
| 0
| 0
| 0
| 10.526316
| 0.949132
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0
| 0
| 0.105263
| 0.052632
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
090c20f68cb380e6a2a1ec40d43f1f3f1b9079b3
| 1,769
|
py
|
Python
|
djh_app/migrations/0015_auto_20210509_2025.py
|
asvbkr/djh-box
|
a3b3919900a15efefb387380931affd76b0d942a
|
[
"Apache-2.0"
] | 1
|
2020-05-22T10:40:52.000Z
|
2020-05-22T10:40:52.000Z
|
djh_app/migrations/0015_auto_20210509_2025.py
|
asvbkr/djh-box
|
a3b3919900a15efefb387380931affd76b0d942a
|
[
"Apache-2.0"
] | 5
|
2021-04-06T17:59:29.000Z
|
2022-02-10T07:35:35.000Z
|
djh_app/migrations/0015_auto_20210509_2025.py
|
asvbkr/djh-box
|
a3b3919900a15efefb387380931affd76b0d942a
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.2.2 on 2021-05-09 17:25
from django.db import migrations, models
class Migration(migrations.Migration):
    """Recreate every implicit ``id`` primary key as a ``BigAutoField``.

    Auto-generated after the Django 3.2 upgrade; all seven models get the
    identical AlterField, so the operations list is built from one tuple
    of model names instead of seven copy-pasted entries.
    """

    dependencies = [
        ('djh_app', '0014_auto_20201221_2136'),
    ]

    # Models whose auto-created primary key is widened to BigAutoField.
    _ALTERED_MODELS = (
        'ttbdjchatavailable',
        'ttbdjlimitedbuttons',
        'ttbdjsubscriber',
        'ttbdjsubscriberproperty',
        'ttbprevstep',
        'ttbuser',
        'ttbuserproperty',
    )

    operations = [
        migrations.AlterField(
            model_name=model_name,
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True,
                                      serialize=False, verbose_name='ID'),
        )
        for model_name in _ALTERED_MODELS
    ]
| 36.102041
| 111
| 0.614471
| 177
| 1,769
| 5.960452
| 0.259887
| 0.079621
| 0.165877
| 0.192417
| 0.72891
| 0.72891
| 0.72891
| 0.72891
| 0.72891
| 0.72891
| 0
| 0.023938
| 0.267948
| 1,769
| 48
| 112
| 36.854167
| 0.790734
| 0.025438
| 0
| 0.666667
| 1
| 0
| 0.0964
| 0.026713
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0959210a0d1290d1d5504f7ce2a2b580078c3805
| 36,680
|
py
|
Python
|
__init__.py
|
rocketbot-cl/MercadoPago
|
2bf71bb28626afbfe10e83c630503be4f1150396
|
[
"MIT"
] | null | null | null |
__init__.py
|
rocketbot-cl/MercadoPago
|
2bf71bb28626afbfe10e83c630503be4f1150396
|
[
"MIT"
] | null | null | null |
__init__.py
|
rocketbot-cl/MercadoPago
|
2bf71bb28626afbfe10e83c630503be4f1150396
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Base template for developing external Rocketbot modules.

To get the module/function being invoked:
    GetParams("module")

To read the variables sent from a Rocketbot form/command:
    var = GetParams(variable)

The "variable" names are defined in the forms section of package.json.

To modify a Rocketbot variable:
    SetVar(Variable_Rocketbot, "dato")

To read a Rocketbot variable:
    var = GetVar(Variable_Rocketbot)

To get the selected option:
    opcion = GetParams("option")

To install libraries, open a terminal in the "libs" folder and run:
    pip install <package> -t .
"""
import datetime
import os
import sys

# NOTE(review): `tmp_global_obj` is not defined in this file — presumably
# injected by the Rocketbot runtime, with "basepath" pointing at the
# Rocketbot installation directory.  TODO confirm against the runtime docs.
base_path = tmp_global_obj["basepath"]
# Make the module's bundled "libs" folder importable (see docstring above).
cur_path = base_path + 'modules' + os.sep + \
    'mercadopago' + os.sep + 'libs' + os.sep
if cur_path not in sys.path:
    sys.path.append(cur_path)

# Imported from the "libs" folder added to sys.path above.
import mercadopago

# `GetParams` is injected by Rocketbot; `module` selects which command
# section below runs.
module = GetParams("module")
# State shared across invocations of this script within one Rocketbot session.
global items, sdk, testkey, payments_id
items = []
if module == "login":
    # Initialise the Mercado Pago SDK with the access token from the form.
    # `testkey` and `sdk` are kept as globals for the other commands
    # (get_invoice / search_payments reuse `testkey` for raw HTTP calls).
    try:
        testkey = GetParams("testkey")
        sdk = mercadopago.SDK(testkey)
    except Exception as exc:
        print("\x1B[" + "31;40mError\x1B[" + "0m")
        PrintException()
        raise exc
if module == "add_recipient":
    # Create a Mercado Pago customer from the form fields.
    email = GetParams("email")
    name = GetParams("name")
    phone = GetParams("phone")
    try:
        customer_data = {
            "email": email,
            "phone": phone,
            "description": name
        }
        customer_response = sdk.customer().create(customer_data)
        # NOTE(review): `customer` is extracted but never used or exposed via
        # SetVar — confirm whether returning the new customer id was intended.
        customer = customer_response["response"]
    except Exception as e:
        # Red "Error" marker for the Rocketbot console, then re-raise.
        print("\x1B[" + "31;40mError\x1B[" + "0m")
        PrintException()
        raise e
if module == "add_item":
    # Append one line item to the pending order kept in the global `items`
    # list; the running list is printed for feedback in the console.
    amount = GetParams("amount")
    quantity = GetParams("quantity")
    item = GetParams("item")
    amount = int(amount)
    try:
        items.append({"title": item, "quantity": quantity, "unit_price": amount})
        print(items)
    except Exception as exc:
        print("\x1B[" + "31;40mError\x1B[" + "0m")
        PrintException()
        raise exc
if module == "create_invoice":
    # Create a preference from the accumulated `items`, then charge a payment.
    total = GetParams("total")
    payment_name = GetParams("payment_name")
    payment_method = GetParams("payment_method")
    email = GetParams("email")
    total = int(total)
    try:
        preference_data = {
            "items": items
        }
        preference_response = sdk.preference().create(preference_data)
        # NOTE(review): `preference` is created but never used afterwards —
        # the payment below is charged directly from the form's `total`,
        # not from the preference.  Confirm this is intentional.
        preference = preference_response["response"]
        payment_data = {
            "transaction_amount": total,
            "description": payment_name,
            "payment_method_id": payment_method,
            "payer": {
                "email": email
            }
        }
        payment_response = sdk.payment().create(payment_data)
        payment = payment_response["response"]
        # Payment result is only printed, not stored in a Rocketbot variable.
        print(payment)
    except Exception as e:
        print("\x1B[" + "31;40mError\x1B[" + "0m")
        PrintException()
        raise e
if module == "get_invoice":
    # Fetch a single payment by id via the raw REST API and store the JSON
    # response in the Rocketbot variable named by `var`.
    id = GetParams("id")  # NOTE(review): shadows the builtin `id`
    var = GetParams("var")
    try:
        # Uses the access token saved by the "login" command.
        auth = 'Bearer ' + testkey
        headers = {
            'Authorization': auth,
        }
        url = 'https://api.mercadopago.com/v1/payments/' + id
        # NOTE(review): `requests` is not imported in this file's visible
        # imports — presumably provided by the Rocketbot runtime; verify,
        # otherwise this raises NameError at runtime.
        response = requests.get(url, headers=headers)
        resp = response.json()
        SetVar(var, resp)
    except Exception as e:
        print("\x1B[" + "31;40mError\x1B[" + "0m")
        PrintException()
        raise e
if module == "search_payments":
    # Search payments (optionally filtered by external_reference == id) and
    # store the list of matching payment ids in the Rocketbot variable `var`.
    id = GetParams("id")  # NOTE(review): shadows the builtin `id`
    criteria = GetParams("criteria")
    sort = GetParams("sort")
    var = GetParams("var")
    try:
        # Fall back to sensible defaults when the form fields are empty.
        if id is None:
            id = ""
        if not criteria:
            criteria = "desc"
        if not sort:
            sort = "date_created"
        auth = 'Bearer ' + testkey
        headers = {
            'Authorization': auth,
        }
        url = "https://api.mercadopago.com/v1/payments/search?sort=" + sort + "&criteria=" + criteria + "&external_reference=" + id
        # NOTE(review): `requests` is not imported in this file's visible
        # imports — presumably provided by the Rocketbot runtime; verify.
        response = requests.get(url, headers=headers)
        res = response.json()
        # Keep only the payment ids from the search results.
        payments_id = [result["id"] for result in res["results"]]
        SetVar(var, payments_id)
    except Exception as e:
        print("\x1B[" + "31;40mError\x1B[" + "0m")
        PrintException()
        raise e
"""{
"en": {
"title": "Create Invoice",
"description": "Create an invoice for the customer",
"title_options": null,
"options": null
},
"es": {
"title": "Crear factura",
"description": "Crea una factura para el cliente",
"title_options": null,
"options": null
},
"form": {
"css": "modal-lg",
"inputs": [
{
"type": "input",
"placeholder": {
"es": " ",
"en": " "
},
"title": {
"es": "Total:",
"en": "Total:"
},
"help": {
"es": " ",
"en": " "
},
"id": "total",
"css": "col-lg-6"
},
{
"type": "input",
"placeholder": {
"es": " ",
"en": " "
},
"title": {
"es": "Nombre de pago",
"en": "Payment Name:"
},
"help": {
"es": " ",
"en": " "
},
"id": "payment_name",
"css": "col-lg-6"
},
{
"type": "select",
"placeholder": {
"es": " ",
"en": " "
},
"title": {
"es": "Método de pago",
"en": "Payment Method:"
},
"options": [
{
"title": "credit card",
"value": "credit_card"
},
{
"title": "debit card",
"value": "debit_card"
}
],
"help": {
"es": " ",
"en": " "
},
"id": "payment_method",
"css": "col-lg-6"
},
{
"type": "input",
"placeholder": {
"es": " ",
"en": " "
},
"title": {
"es": "Correo electrónico:",
"en": "Email:"
},
"help": {
"es": " ",
"en": " "
},
"id": "email",
"css": "col-lg-6"
}
]
},
"video_youtube": "",
"icon": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAA0gAAAHVCAMAAAD8XitdAAAANlBMVEX///9+kMVieLlDXaoONpgoRp7N1OkAnuMAI4zm7fa1v96aqNIur+gAgtDC6PgAXbWQ1fNfwu6g3K/TAAAYvklEQVR42uzc0VLzOAyG4dqWjGRPt/d/t5ukUONu/9A6/84k5X2GIygZDvIhRbF9AgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAe+K5lDCJqYkhhFpKPgFY56WGlMTWaYqhZD8BuJdrSCpiTxETEY2hkCbgi9eQHkZIevYAaQImucbUZ0TkcrmcJx/nj870jdn007vPawrlBPxWXkIS6RI052fVeQnUxxIo+R6mWClM+IXmFKm0DC016BXnc58m0VQZ6eF3KbGl6HJZ2rgxS5qkZYm6hF8j11uKpDVzw7o+T1Nk+IDfoMSvFPWPRNvDJLR4+CW8RpXW0K0az5ImZuJ4Y16T3jq6j//BrccTpcPDm/Ka5OcUbc8SUcIbyzXpQIxGzGWJKOEdeYvRx4DBsiQaGDvgfXiJYzHaXpUSUcK7yCGNxmh7VaK/w3vwEmVDjLZXJfo7vIEcdFuMzovLefZ6lJYMs3AIB3ctRzKaoylAE/l0mUxpGogST0o4NN9Sjs7dxqNur8XrRSmyYwmHlYMsORocFYj9QcvSC0WJmQOOyUuS0Tewt60RoppSXKSkbQPTlKUXQrn8HTwo4Yi8JDO7RunFLH1byB1rydm/zuiqMXU7MF67Hg9KOB6vutQTm7yYpbN8pigU/8NpQy1KT75PWvrESJJwMNccxRr6nbDPl4+VQxi8xKTtms9urRCShKPxIGYS/POuF/t5Q+ztjCCbSCq+ntNWlm7XXNk4KxprFLNEknAgtxy13Xxiq03euRvTafAn3lC1fPbnd92fMiSicynyQJJwKMstq60167LUqkjffzWSyukJnmP6dk25NCJ2I59NIknC0VTtctRnqYXpcjvw0b6TmJ+fDF6v2dxd6u4AB5KEA6mtHt3JoR2reu/6g9YRPsnL42vKpBv7tZrE+yQcQVEzqWs3fqsi3VGpVe3xb/rVSphqTKpyo5pCvQtRq0mBJGH/fI5D9R+qyPfxg2mc+6+s1n7zrn+bheynVTnnMn+tfi6aGWscsH/Jnvuf77nUMKklnyZfOfrvREHsixb/K3+fsIIVexfELPrpFWs5CmLfaT5t9Y+aKQMH7FuRsdvU06McVbWmjSK2yXPUae6wZ65jjVOOD3LkUWwmsWbPNdki5e1Fk8ck7FuwoaGYR5vEU+czOW2Qnq/B0rI17YnmDruWx17TeLBJ9D6Tavcnl3jWv9LeZWUGjj2ba0gZXJkX/UFbl4o/iFz77KAoTO6wX2XoMd6r2H0hK1H6ctR/WFLeWJLYUoH9SmaSh1ZC9Mnwqq0c3btuvNWydRWT1BOwR4MFKdyP4jyqTWL2P0z4pM0gBnmiJGGnPC4FaSRIErtpnaxPFHKQtmdpUBBTnpKwR6MvOms7obudt6/VfzouT7eUlJwY3GGXlhbtn8F72kRTnKSkNos/7TTXtnFpuCQlShL2x9UsDZ/b1XYkrR97348cNkShKOMG7FFe1viM8ByS2I2ulqN+QD5+PL4nVtzhX/bOQLmRVIeiBgktgvLz+/+v3WmSsSxCU9hJvO3kntmt1HS12z1VnAgEiAPScgYP9OxsZx6lDQpVTgtY8g59O/CDUJr17Na2J5Vaiuh9B8akIOjbgZ+DcM7h9FT0Uyb9D3k7cEBKzvz/03PRSp9I3kXMyYLD0ZLfsYqenkohbsk7DJLAz6Ata8iteM/zZLLVrUXvH5FFzgmDJ
HAshHLm98KmT5VJwr3JOy01BErtdcMJgCMh6V2k58pkpz1XXY1E10kriAQOh+ScXc3tTabnuKQ1rW2b3SSKKRuYkgVHo4nUzk++lYlCFX1S8i4HmVtUQmJX0RjVhMDh2EQ6X4/0v5EpxVpUT99LaSaRzFIL6VaiVsD/ApHA0ZCc+ezPJGdz6dvjkhDnPM5mq4qzyM5xhkjgcLxHJMN389J3jpds5R0XPXk2i/g2FN28JLp24HCYSF6mq0r8zWFJI39I3qmUmLpQZKBrB47HW7LBGLmUwreqFNidZqFawq1FHzQ/IyKBwyE5sxPJu2SVu9WdePQ5us9Xtoy2SiUeWoSuHTgwFpHmLnHQa2EGvh7u+phUJZTuQmomyUklpA8WISKB47OJxP/M+KtSKmrD/yibViFSDEXvL7zfrwyS9mCy/MKbRRAJvApzkazl+gOTfaMPevegqP+MUjbMoiEXiAQOh+Q/TDUylTYohkC5I8n9K857k6LTaMYFS4TA4RD2+e+5Slx1eI4Y322SueCTd82iORcsWgWHQ5PPNsxN2jy6msSxqKo0AZLKxl0mkfhrlZde5YxtFOB4KG1KLIoUxfbVWuVgaVbx9v9yEVUNg5VBkS0iTV8E1U/A0dCYbbXdjIs7QqmmGwskX+HFyt6q0Uxye6POCy+CHbLgaGhoI5M1kSzclFidjPefuizhoxBkIk0DEmo2gMNRWuu/VyQV9c/gUMv7ArlFk9RMulMkVBECx0M4L+XtzpZr2FCfQ6eielKVyOuHLkvoApgmE2kmNOrageMhlJeTZSR7D9FrmLljtlSDP1c2WLJh9hrINYADojFbumFxHlRrmXi555vEKN0VZ1JNZvT0LXDUGDggJa+HJKYqJ62Rw+Tg8b12XhKPDjtvDxWpxHkxIGXCEAkcDuE7QlJORJQ474tU015yWuPHEqkSrg9deotLxhAJHBOlvJoBd3Xl7j8sQqhVhOyvGSspO8wigYNSc16dlL1cRdJJRGIKtehIsoFJZWXRtwUk9OzAUVFeMclKdl14NpETOb8tFqoyDFe9SZI2hXxhhnlIZPTswBHR+B4Q/lnhra5cqrOOYoNjGRdX9SYJbw6fz2vL/ZCzA8dF/AhlKTCQTMZBKbWwRCsmVTc+m3JBuWJwZJQWTbLIYKeEifSTRaGWGsjuEXUJb24muWTDedlfzMaCA1PyG5d7TKIqepIauwAhamdNcNRNFdfHE2dSiZsay1+asc4OHBilvDxMstpCKYYQU2bdP2silWYpOZMiv1dwVanU/L3HI67o2YGjUnJv0vqUEsv+SjqOb7kMqtqZxBRjJF736JIRkMDRUcr3JhyspnHdn5rN1Pp2XRwplI1Fdy8ZAQkcn8Jm0iKXC2/cJPBUtE+rJ2nieAE05CuXVY8QkMArELMzaXVKqfW4qGjzo0bp+3ZJ/Dys7ZdgZpuFXfEIc0jgBZA0NWmeA0ihiJTIOehQpBbwLCZpbBlCm4VdGh9hUQN4BcojJtmyHaLtZxyI1Ag3MUlrsi9Z9gir7MBrENnG/3fGpCsk/R4/vTlxgquoioTUAtIDHiV4BA6P0sI67MlJStdTjtyuc+8pxUgpm0eLniJjB14I69wtN3RbFL7hCp9oy9VFUats8sDjz28eYZEdeCFqmu+xWyrdWPT26P+cquiGVHan8K0vobhC8Ai8BpEnE6XLGTxR1RLZBIghErdnNu4bHmGABF4Pb9L5AZM2OL3/bD8ejnLnM8Mj8JIouYZ/ecwk86b/u7EYjgxsngAvhMZ8y/1B6eIOr7RRDl/u1YjhEXhdlLLjgZHS5cJem/P5/Hhow2Y+8JJYTLL+3fOwXh3iEXhtNAxyBE/EenVYqQpeGTPp6SqZRsjXgZdHa8pPV6nXCAtVwetTKPfw5T/RiLEuCLwuKpEHKp2/1aKzTeBinSr4IbTuXQ9/j0u2hLyHCjwCr42WODBpU+n8JIsyBwyPwOsjIeURVmnhyyzinBGOwE/F1nB38
Fe5dDaLelIQeAR+CFIpj2GT6XGJrEfXwxHhCPwgtPXvpjKdH3PIhyIPU8XoCPwstATKeS7TptN5TaGZQ6YRenXg59FUmsIbl41zo7encdngN4egEfiNbCpxdsyF8vCbQEtPiNAI/GC0VOL83aSIsRH46UiN3+oSU4BG4DegJcT0TRYl9OnAL0JqoMRfb1HAtBH4XajU8JV9vESwCPxSpIT4FYEpJYoVFoFfjDaZHreJE0EiABpaaiBKifkuhVKiGCARALeI1BCImlD7RjE3gagphPwcAPs+1RADbSQP/SHGUGEQAHeg4sAsKwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPAD0FJDjAGbbgF4HK0xJWZO2IALwMNITLkBkQB4GI2cIRIAn6SmDJEA+CwhQyQAPotGiAQARALgCAhEAgARCYBDgIgEACISeBlUpGyI7l1f+ngR0YXvandrf1lFFh7Rbqq1zu9UtfceRiT/6nUDp1+ALzmacsNOxXPXqV2fLQal1KAYquhMonbzdndx7f56eXoikpQaKbVj/7h9WRGd/3va64wjkj2Qc4Nxsib4BFJjykYK8leQyO560aFGIWUH+Rv9vXR9ZLltztGficn2jNGLzm/UcnsbU1UvUn+nJ+GoZ/AApouRqm7N1q6bYrNPGynajbvNtl6vV7PL4Fh0/lX2Vup068XmKDIUScLOAzGMAvd7RPkDHPQko+u9IFpTHpLMgr0GXobXvc+zFzWoDBN0RiwDkVQo7xBhElhlbkLYabYk7tOB8x61M8naso9IPvB5gt58PuVdqAw9MoicSHsPRGoP3M+8Ne1c56h+b8Iu7E36KGYxjxZMEsoTogw8mjsiaXoXxklgGWue63C1T1ee3lmmHuXqXZw/Q8Pcjmq3rYmklKdEmATWiZzvJck8mhkkbkJ0GJHC4jO8iLwxavclrb5/zA57nnkOwBp998Ya0+Qyx9M75O6hGCMlHnfMRmOp2jf8No8TibkLgP0DEtUqpbqMOcnAV24MRRL23xtqiORuZYQk8EhA4hRDIM49iUKIaTB8qPwxnafV2SW+Y+cpXVxgKoOkdPxgCMkg1ZHaZ4vTg2oRKSH1IvXGcZDRAKvCJLCGkjNhlIejqpZ79kGCbu8aJ/KC2v5UZ+yG+IDEwQIlZ2eIM5G0+0VggyQN4+EVfxBJcv8NDa2MkATupqZBKy7OIxmklePbfezUMqpvtH1Kg4oTwb3B6B3qzpv6y6l2gY/Fu+3fKTqPDK3ZQAU8sEbkQZvR2IvQpxZSL4F9un9AtfY+8EBTHgUan1ILul+7pHQiZcN9T3Ai2RfPX54gEliChr+VZdzINPpBUupijFH6B0SXqTBKJ5hRu0GShL+44BfYgqL3JXVZfi+S+Jhr+GEWRAIrmAtxHA7YGVJdr0nZp57HgYbUN+Mk3oM9GYWuXJfQvv8xtLKbSJLYBSQjOJE0ONV3nSsnAO5JfgfnQRw3+8ImkvvNXfd30VG7lVxAMuJERuM0QqWGxNmLRN0IyajJiTSRxUl2AuBxkYJpMOyIsbjOF8ca6u1/wTVolynwztEjbfZt85NNEJlI5caW6VZzdVoZfceUTgB8gUhR90UKft7T43/hB3ZjMSPl3rA5WgKlxLyzcqnwrgLqRfJDJI9AJPBEkWJeQpxIJOPvz2lBJK2UOO+QqguSYVppVWdL6iRBJPA8kSgvURZEWotIflvfXKT8qEg+VcInAI4h0pdFpEKcl0XirxEpnwA4hkjlsyLNVutRsDTGl0UkQUQCxxPpiyKSxOx5qyBU0rdGpHQC4GnJhhTDPvo1Y6TK2UibQ6JqT+hFynEq0jxrx0g2gP8i/U1F9zntiuSH9WE5IKWbWnZdRNpXQDGPBI4pUnXLpyd4kRYnZLVcEXUjpFR0uHx9NiHrH6BY2QCOI1JZT13XBZGidA0//f0T1FT0octvo3C25MkSodN0rZ0SdpuDJ4qk7CR4KCL1O1yNrqWHn
fsi7y1ajd6O6PtrZWn1N6MmF/h2kU5pT4Jlkfw2Ct0LC3qb2SAZ35b6bRSy90XR/xZg7EcC/6lIkfOeH/WKTERyIngbS3c9jAUp3TaMyuO8tnz4nrhTE1YDY4cseKpIhceVjLVQulKnIpmMvoOo1AkRnQej20hMmP7lJXIfZko2yEzSmjJ6duCJIvlUAVOVU0NdJe80j0i++AkVfTcx9uW4wqDSq5bYRw+t2eDYEn4qrkQ/B+dgg6ro33M1ULIBPFmkfpK0Hf1VQ2TXFociGd25Me0RrjVH6bMSTdnmRzZSHRSOTTFUeyF7g0HhrlD/EIgzAhJ4pkhmgcEp8aBM6lQkSdnB3SNSsey17SP8Q6RR7W+tnKdY9jzmIZhEAk8XSSjnlcLd4V/27mi5jRyHwrA2lt0LC4UC3/9lN2orxgKmMD0TJVNU/i93MszOBU+RptjkNEjxs/Yg/u5o/Dog9YUxMYxtFRVLdvjNQYqBok9BH6T+EP1Ymu5Hmtejl1a8lOPFuNcF/3aQdpeXPgV9kOqSWnWux1P2T9pJzUc9EzlcuvuROEAIvyFI7et2cd1eG6TIyGs/zsRmubYuxqTzvXldScflbuULOcLvCVIUv56brtgFqW/iHK8odZE976vmydu8vZS3NnNnbr5E1Qfp/N/9Xw3S58c5SD/KywX732q/PZdOe/l8TgpS7vrnpoX0nHIe/0Vm7b28nuu9LdNH1/NU9lL2qqIx62/fbt7LMSM3F5mXy9db+1/P5z0r3yPwWq8jf//8xUhHIZdvL3sbewMRj9LOrSbK3u4f3PXt9dNLc+P/+/7gm2spszr8e+T6Vewelcub/PM2du9vTfavNbsDz5G3D0ce/J/rf/3yzpo3AAAAAAAAAAAAAAAAAOBXEvOdyalnZu5m9tetjTHcc+G8cowfpcDCxIdqGPfCJJYKh9u07Fa1bZtu0V7z4E23vVrHIEtYlbnqlqi6zHt9KdPxtdDG1+ZqPmqDQYkSlmSu21eqXgt9zAprlCQ11+bDx51S3obAcmzoNqWHAlI7vqTm2iR9NsighOWZbncNmXT7Pkni213qTY4ykoS12NgaLpPAtRHxtsxyjhrM7rCSlKMuINIXDktxa8uisjNOwDLG1lOL8aPlMbHr+PEn+wlYRBkWdHyns8mdjFTmV6nSJdrLzek8mHn4GR8NMrnDkkYeA8xELPdntS+JG3YLl40yaRPPzYns7c1mbLqF4XZrMD2aIQmLkNSdbboM4PWjMV2AGFbipjb9O0zl47P04PkSuzIkYQ2uuTvXgERufL7yNvLHtoXcXP3TZ5q4OqaxBI41eO7gIXXnEqTTnSDlFIwc2DLCSXlwMGXhDqspA0+Q2vN9/OCpLAdppPgFGSUdXh8cnLkdVpPiEvqeHszzBM2iPb2fjrLI4bVRZW6HteQBJZF2t46ImKXluBqkcUpqxNKDMxms22EtedEtyxEJHxHyEQvVUeV3IyAjBUlq6yG3wtwOC7C6ph3mY4aYeySoC5LVIKVC6SaOxneyWEuaihW550++L+2DdCo8FVo36BirDViLRS687fnxvSpBAn4qSK4bQQIOBKmZ2s1zpPoLgiQECWs5HqQoDTpcxs8HicUGrO5vrNp5zZBJjVsqkjaXxqodnsjhIEVl1Pb56Je/T83XveIECWuxwzsbfDuw+7uZKloJUrMNSAZbhLAWO7TXziUFxpsZ24j0leY0Co29dngu+bWjZjtr2eET7u/+vjvKqLH7G8/FtHsfaZ6Q0WzWS+NYritBki3wPhIWZ/VwhvqDflt3HVhSnUoakIJa+4bs4A1ZrKac+ePTz13y1K4ZPaIuJclGeoy1ZzZszOywGtN64rbEWeBpJjZdbLDJGSkpmBZH9NcgiaZK+YwcpwhhOaZbprv0kctssSDylkcVGX1z8ftWMjx8DNWNAQnr2YPUU/s60sRhjmHI0SY1Ipftv8lfSFjPgSC5TLZ+q+q9M73FjwVp/nAmdlhR05fLQ
CMHzto/mCQ1bqPAczGdvRDRHb46p17Wuvv3LaJNcoRnYNGTPXp1c5Zwk6NgPqkYJUg7V+4ZwxP4/yCJa9ub68/ronlXqi6eg9TfuqmcHoSVpN3aUnr1cCnxGAduYo6A5KSlIAWb3WruDEdYSgrSHpWhqrdL/+0UotdruvJ/f79vRmxv66OhsjVCauW18LNJd2M4wlpSkHZmftXk48PYi0pVrbUosbTHKBMx/8FIEdZTg3ScnKqeK1u68bRSkB5B7JNwnjf+FA8Pko2ht39D5st4ykoCns3jg6TTc7aM+yzxzB4eJBnb5M5m54ZlPLVHBSl4+ULoaui2MbPDE3t4kOpB+6pD8wes2eH5pCA9usnAgITnZpreg3gE8a2h/IWEJ2T6+LcWZJAj/GFMf8FbC6K8ZIQ/iw29ig3cv3RMUnIEZD1X5nXAzxNX3tUDHsB8RIoGMQI6PfuOV4wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPhfe3BIAAAAACDo/2tvGAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAhwDWjgbLhu6a/gAAAABJRU5ErkJggg==",
"module": "create_invoice",
"module_name": "MercadoPago",
"visible": true,
"options": false,
"father": "module",
"group": "scripts",
"linux": true,
"windows": true,
"mac": true,
"docker": true
},
{
"en": {
"title": "Add Item",
"description": "Add an item to the invoice",
"title_options": null,
"options": null
},
"es": {
"title": "Añadir artículo",
"description": "Agregar un artículo a la factura\n",
"title_options": null,
"options": null
},
"form": {
"css": "modal-lg",
"inputs": [
{
"type": "input",
"placeholder": {
"es": " ",
"en": " "
},
"title": {
"es": "Nombre del árticulo",
"en": "Item Name:"
},
"help": {
"es": " ",
"en": " "
},
"id": "item",
"css": "col-lg-6"
},
{
"type": "input",
"placeholder": {
"es": " ",
"en": " "
},
"title": {
"es": "Precio:",
"en": "Price:"
},
"help": {
"es": " ",
"en": " "
},
"id": "amount",
"css": "col-lg-6"
},
{
"type": "input",
"placeholder": {
"es": " ",
"en": " "
},
"title": {
"es": "Cantidad:",
"en": "Quantity:"
},
"help": {
"es": " ",
"en": " "
},
"id": "quantity",
"css": "col-lg-6"
}
]
},
"video_youtube": "",
"icon": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAA0gAAAHVCAMAAAD8XitdAAAANlBMVEX///9+kMVieLlDXaoONpgoRp7N1OkAnuMAI4zm7fa1v96aqNIur+gAgtDC6PgAXbWQ1fNfwu6g3K/TAAAYvklEQVR42uzc0VLzOAyG4dqWjGRPt/d/t5ukUONu/9A6/84k5X2GIygZDvIhRbF9AgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAe+K5lDCJqYkhhFpKPgFY56WGlMTWaYqhZD8BuJdrSCpiTxETEY2hkCbgi9eQHkZIevYAaQImucbUZ0TkcrmcJx/nj870jdn007vPawrlBPxWXkIS6RI052fVeQnUxxIo+R6mWClM+IXmFKm0DC016BXnc58m0VQZ6eF3KbGl6HJZ2rgxS5qkZYm6hF8j11uKpDVzw7o+T1Nk+IDfoMSvFPWPRNvDJLR4+CW8RpXW0K0az5ImZuJ4Y16T3jq6j//BrccTpcPDm/Ka5OcUbc8SUcIbyzXpQIxGzGWJKOEdeYvRx4DBsiQaGDvgfXiJYzHaXpUSUcK7yCGNxmh7VaK/w3vwEmVDjLZXJfo7vIEcdFuMzovLefZ6lJYMs3AIB3ctRzKaoylAE/l0mUxpGogST0o4NN9Sjs7dxqNur8XrRSmyYwmHlYMsORocFYj9QcvSC0WJmQOOyUuS0Tewt60RoppSXKSkbQPTlKUXQrn8HTwo4Yi8JDO7RunFLH1byB1rydm/zuiqMXU7MF67Hg9KOB6vutQTm7yYpbN8pigU/8NpQy1KT75PWvrESJJwMNccxRr6nbDPl4+VQxi8xKTtms9urRCShKPxIGYS/POuF/t5Q+ztjCCbSCq+ntNWlm7XXNk4KxprFLNEknAgtxy13Xxiq03euRvTafAn3lC1fPbnd92fMiSicynyQJJwKMstq60167LUqkjffzWSyukJnmP6dk25NCJ2I59NIknC0VTtctRnqYXpcjvw0b6TmJ+fDF6v2dxd6u4AB5KEA6mtHt3JoR2reu/6g9YRPsnL42vKpBv7tZrE+yQcQVEzqWs3fqsi3VGpVe3xb/rVSphqTKpyo5pCvQtRq0mBJGH/fI5D9R+qyPfxg2mc+6+s1n7zrn+bheynVTnnMn+tfi6aGWscsH/Jnvuf77nUMKklnyZfOfrvREHsixb/K3+fsIIVexfELPrpFWs5CmLfaT5t9Y+aKQMH7FuRsdvU06McVbWmjSK2yXPUae6wZ65jjVOOD3LkUWwmsWbPNdki5e1Fk8ck7FuwoaGYR5vEU+czOW2Qnq/B0rI17YnmDruWx17TeLBJ9D6Tavcnl3jWv9LeZWUGjj2ba0gZXJkX/UFbl4o/iFz77KAoTO6wX2XoMd6r2H0hK1H6ctR/WFLeWJLYUoH9SmaSh1ZC9Mnwqq0c3btuvNWydRWT1BOwR4MFKdyP4jyqTWL2P0z4pM0gBnmiJGGnPC4FaSRIErtpnaxPFHKQtmdpUBBTnpKwR6MvOms7obudt6/VfzouT7eUlJwY3GGXlhbtn8F72kRTnKSkNos/7TTXtnFpuCQlShL2x9UsDZ/b1XYkrR97348cNkShKOMG7FFe1viM8ByS2I2ulqN+QD5+PL4nVtzhX/bOQLmRVIeiBgktgvLz+/+v3WmSsSxCU9hJvO3kntmt1HS12z1VnAgEiAPScgYP9OxsZx6lDQpVTgtY8g59O/CDUJr17Na2J5Vaiuh9B8akIOjbgZ+DcM7h9FT0Uyb9D3k7cEBKzvz/03PRSp9I3kXMyYLD0ZLfsYqenkohbsk7DJLAz6Ata8iteM/zZLLVrUXvH5FFzgmDJ
HAshHLm98KmT5VJwr3JOy01BErtdcMJgCMh6V2k58pkpz1XXY1E10kriAQOh+ScXc3tTabnuKQ1rW2b3SSKKRuYkgVHo4nUzk++lYlCFX1S8i4HmVtUQmJX0RjVhMDh2EQ6X4/0v5EpxVpUT99LaSaRzFIL6VaiVsD/ApHA0ZCc+ezPJGdz6dvjkhDnPM5mq4qzyM5xhkjgcLxHJMN389J3jpds5R0XPXk2i/g2FN28JLp24HCYSF6mq0r8zWFJI39I3qmUmLpQZKBrB47HW7LBGLmUwreqFNidZqFawq1FHzQ/IyKBwyE5sxPJu2SVu9WdePQ5us9Xtoy2SiUeWoSuHTgwFpHmLnHQa2EGvh7u+phUJZTuQmomyUklpA8WISKB47OJxP/M+KtSKmrD/yibViFSDEXvL7zfrwyS9mCy/MKbRRAJvApzkazl+gOTfaMPevegqP+MUjbMoiEXiAQOh+Q/TDUylTYohkC5I8n9K857k6LTaMYFS4TA4RD2+e+5Slx1eI4Y322SueCTd82iORcsWgWHQ5PPNsxN2jy6msSxqKo0AZLKxl0mkfhrlZde5YxtFOB4KG1KLIoUxfbVWuVgaVbx9v9yEVUNg5VBkS0iTV8E1U/A0dCYbbXdjIs7QqmmGwskX+HFyt6q0Uxye6POCy+CHbLgaGhoI5M1kSzclFidjPefuizhoxBkIk0DEmo2gMNRWuu/VyQV9c/gUMv7ArlFk9RMulMkVBECx0M4L+XtzpZr2FCfQ6eielKVyOuHLkvoApgmE2kmNOrageMhlJeTZSR7D9FrmLljtlSDP1c2WLJh9hrINYADojFbumFxHlRrmXi555vEKN0VZ1JNZvT0LXDUGDggJa+HJKYqJ62Rw+Tg8b12XhKPDjtvDxWpxHkxIGXCEAkcDuE7QlJORJQ474tU015yWuPHEqkSrg9deotLxhAJHBOlvJoBd3Xl7j8sQqhVhOyvGSspO8wigYNSc16dlL1cRdJJRGIKtehIsoFJZWXRtwUk9OzAUVFeMclKdl14NpETOb8tFqoyDFe9SZI2hXxhhnlIZPTswBHR+B4Q/lnhra5cqrOOYoNjGRdX9SYJbw6fz2vL/ZCzA8dF/AhlKTCQTMZBKbWwRCsmVTc+m3JBuWJwZJQWTbLIYKeEifSTRaGWGsjuEXUJb24muWTDedlfzMaCA1PyG5d7TKIqepIauwAhamdNcNRNFdfHE2dSiZsay1+asc4OHBilvDxMstpCKYYQU2bdP2silWYpOZMiv1dwVanU/L3HI67o2YGjUnJv0vqUEsv+SjqOb7kMqtqZxBRjJF736JIRkMDRUcr3JhyspnHdn5rN1Pp2XRwplI1Fdy8ZAQkcn8Jm0iKXC2/cJPBUtE+rJ2nieAE05CuXVY8QkMArELMzaXVKqfW4qGjzo0bp+3ZJ/Dys7ZdgZpuFXfEIc0jgBZA0NWmeA0ihiJTIOehQpBbwLCZpbBlCm4VdGh9hUQN4BcojJtmyHaLtZxyI1Ag3MUlrsi9Z9gir7MBrENnG/3fGpCsk/R4/vTlxgquoioTUAtIDHiV4BA6P0sI67MlJStdTjtyuc+8pxUgpm0eLniJjB14I69wtN3RbFL7hCp9oy9VFUats8sDjz28eYZEdeCFqmu+xWyrdWPT26P+cquiGVHan8K0vobhC8Ai8BpEnE6XLGTxR1RLZBIghErdnNu4bHmGABF4Pb9L5AZM2OL3/bD8ejnLnM8Mj8JIouYZ/ecwk86b/u7EYjgxsngAvhMZ8y/1B6eIOr7RRDl/u1YjhEXhdlLLjgZHS5cJem/P5/Hhow2Y+8JJYTLL+3fOwXh3iEXhtNAxyBE/EenVYqQpeGTPp6SqZRsjXgZdHa8pPV6nXCAtVwetTKPfw5T/RiLEuCLwuKpEHKp2/1aKzTeBinSr4IbTuXQ9/j0u2hLyHCjwCr42WODBpU+n8JIsyBwyPwOsjIeURVmnhyyzinBGOwE/F1nB38
Fe5dDaLelIQeAR+CFIpj2GT6XGJrEfXwxHhCPwgtPXvpjKdH3PIhyIPU8XoCPwstATKeS7TptN5TaGZQ6YRenXg59FUmsIbl41zo7encdngN4egEfiNbCpxdsyF8vCbQEtPiNAI/GC0VOL83aSIsRH46UiN3+oSU4BG4DegJcT0TRYl9OnAL0JqoMRfb1HAtBH4XajU8JV9vESwCPxSpIT4FYEpJYoVFoFfjDaZHreJE0EiABpaaiBKifkuhVKiGCARALeI1BCImlD7RjE3gagphPwcAPs+1RADbSQP/SHGUGEQAHeg4sAsKwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPAD0FJDjAGbbgF4HK0xJWZO2IALwMNITLkBkQB4GI2cIRIAn6SmDJEA+CwhQyQAPotGiAQARALgCAhEAgARCYBDgIgEACISeBlUpGyI7l1f+ngR0YXvandrf1lFFh7Rbqq1zu9UtfceRiT/6nUDp1+ALzmacsNOxXPXqV2fLQal1KAYquhMonbzdndx7f56eXoikpQaKbVj/7h9WRGd/3va64wjkj2Qc4Nxsib4BFJjykYK8leQyO560aFGIWUH+Rv9vXR9ZLltztGficn2jNGLzm/UcnsbU1UvUn+nJ+GoZ/AApouRqm7N1q6bYrNPGynajbvNtl6vV7PL4Fh0/lX2Vup068XmKDIUScLOAzGMAvd7RPkDHPQko+u9IFpTHpLMgr0GXobXvc+zFzWoDBN0RiwDkVQo7xBhElhlbkLYabYk7tOB8x61M8naso9IPvB5gt58PuVdqAw9MoicSHsPRGoP3M+8Ne1c56h+b8Iu7E36KGYxjxZMEsoTogw8mjsiaXoXxklgGWue63C1T1ee3lmmHuXqXZw/Q8Pcjmq3rYmklKdEmATWiZzvJck8mhkkbkJ0GJHC4jO8iLwxavclrb5/zA57nnkOwBp998Ya0+Qyx9M75O6hGCMlHnfMRmOp2jf8No8TibkLgP0DEtUqpbqMOcnAV24MRRL23xtqiORuZYQk8EhA4hRDIM49iUKIaTB8qPwxnafV2SW+Y+cpXVxgKoOkdPxgCMkg1ZHaZ4vTg2oRKSH1IvXGcZDRAKvCJLCGkjNhlIejqpZ79kGCbu8aJ/KC2v5UZ+yG+IDEwQIlZ2eIM5G0+0VggyQN4+EVfxBJcv8NDa2MkATupqZBKy7OIxmklePbfezUMqpvtH1Kg4oTwb3B6B3qzpv6y6l2gY/Fu+3fKTqPDK3ZQAU8sEbkQZvR2IvQpxZSL4F9un9AtfY+8EBTHgUan1ILul+7pHQiZcN9T3Ai2RfPX54gEliChr+VZdzINPpBUupijFH6B0SXqTBKJ5hRu0GShL+44BfYgqL3JXVZfi+S+Jhr+GEWRAIrmAtxHA7YGVJdr0nZp57HgYbUN+Mk3oM9GYWuXJfQvv8xtLKbSJLYBSQjOJE0ONV3nSsnAO5JfgfnQRw3+8ImkvvNXfd30VG7lVxAMuJERuM0QqWGxNmLRN0IyajJiTSRxUl2AuBxkYJpMOyIsbjOF8ca6u1/wTVolynwztEjbfZt85NNEJlI5caW6VZzdVoZfceUTgB8gUhR90UKft7T43/hB3ZjMSPl3rA5WgKlxLyzcqnwrgLqRfJDJI9AJPBEkWJeQpxIJOPvz2lBJK2UOO+QqguSYVppVWdL6iRBJPA8kSgvURZEWotIflvfXKT8qEg+VcInAI4h0pdFpEKcl0XirxEpnwA4hkjlsyLNVutRsDTGl0UkQUQCxxPpiyKSxOx5qyBU0rdGpHQC4GnJhhTDPvo1Y6TK2UibQ6JqT+hFynEq0jxrx0g2gP8i/U1F9zntiuSH9WE5IKWbWnZdRNpXQDGPBI4pUnXLpyd4kRYnZLVcEXUjpFR0uHx9NiHrH6BY2QCOI1JZT13XBZGidA0//f0T1FT0octvo3C25MkSodN0rZ0SdpuDJ4qk7CR4KCL1O1yNrqWHn
fsi7y1ajd6O6PtrZWn1N6MmF/h2kU5pT4Jlkfw2Ct0LC3qb2SAZ35b6bRSy90XR/xZg7EcC/6lIkfOeH/WKTERyIngbS3c9jAUp3TaMyuO8tnz4nrhTE1YDY4cseKpIhceVjLVQulKnIpmMvoOo1AkRnQej20hMmP7lJXIfZko2yEzSmjJ6duCJIvlUAVOVU0NdJe80j0i++AkVfTcx9uW4wqDSq5bYRw+t2eDYEn4qrkQ/B+dgg6ro33M1ULIBPFmkfpK0Hf1VQ2TXFociGd25Me0RrjVH6bMSTdnmRzZSHRSOTTFUeyF7g0HhrlD/EIgzAhJ4pkhmgcEp8aBM6lQkSdnB3SNSsey17SP8Q6RR7W+tnKdY9jzmIZhEAk8XSSjnlcLd4V/27mi5jRyHwrA2lt0LC4UC3/9lN2orxgKmMD0TJVNU/i93MszOBU+RptjkNEjxs/Yg/u5o/Dog9YUxMYxtFRVLdvjNQYqBok9BH6T+EP1Ymu5Hmtejl1a8lOPFuNcF/3aQdpeXPgV9kOqSWnWux1P2T9pJzUc9EzlcuvuROEAIvyFI7et2cd1eG6TIyGs/zsRmubYuxqTzvXldScflbuULOcLvCVIUv56brtgFqW/iHK8odZE976vmydu8vZS3NnNnbr5E1Qfp/N/9Xw3S58c5SD/KywX732q/PZdOe/l8TgpS7vrnpoX0nHIe/0Vm7b28nuu9LdNH1/NU9lL2qqIx62/fbt7LMSM3F5mXy9db+1/P5z0r3yPwWq8jf//8xUhHIZdvL3sbewMRj9LOrSbK3u4f3PXt9dNLc+P/+/7gm2spszr8e+T6Vewelcub/PM2du9vTfavNbsDz5G3D0ce/J/rf/3yzpo3AAAAAAAAAAAAAAAAAOBXEvOdyalnZu5m9tetjTHcc+G8cowfpcDCxIdqGPfCJJYKh9u07Fa1bZtu0V7z4E23vVrHIEtYlbnqlqi6zHt9KdPxtdDG1+ZqPmqDQYkSlmSu21eqXgt9zAprlCQ11+bDx51S3obAcmzoNqWHAlI7vqTm2iR9NsighOWZbncNmXT7Pkni213qTY4ykoS12NgaLpPAtRHxtsxyjhrM7rCSlKMuINIXDktxa8uisjNOwDLG1lOL8aPlMbHr+PEn+wlYRBkWdHyns8mdjFTmV6nSJdrLzek8mHn4GR8NMrnDkkYeA8xELPdntS+JG3YLl40yaRPPzYns7c1mbLqF4XZrMD2aIQmLkNSdbboM4PWjMV2AGFbipjb9O0zl47P04PkSuzIkYQ2uuTvXgERufL7yNvLHtoXcXP3TZ5q4OqaxBI41eO7gIXXnEqTTnSDlFIwc2DLCSXlwMGXhDqspA0+Q2vN9/OCpLAdppPgFGSUdXh8cnLkdVpPiEvqeHszzBM2iPb2fjrLI4bVRZW6HteQBJZF2t46ImKXluBqkcUpqxNKDMxms22EtedEtyxEJHxHyEQvVUeV3IyAjBUlq6yG3wtwOC7C6ph3mY4aYeySoC5LVIKVC6SaOxneyWEuaihW550++L+2DdCo8FVo36BirDViLRS687fnxvSpBAn4qSK4bQQIOBKmZ2s1zpPoLgiQECWs5HqQoDTpcxs8HicUGrO5vrNp5zZBJjVsqkjaXxqodnsjhIEVl1Pb56Je/T83XveIECWuxwzsbfDuw+7uZKloJUrMNSAZbhLAWO7TXziUFxpsZ24j0leY0Co29dngu+bWjZjtr2eET7u/+vjvKqLH7G8/FtHsfaZ6Q0WzWS+NYritBki3wPhIWZ/VwhvqDflt3HVhSnUoakIJa+4bs4A1ZrKac+ePTz13y1K4ZPaIuJclGeoy1ZzZszOywGtN64rbEWeBpJjZdbLDJGSkpmBZH9NcgiaZK+YwcpwhhOaZbprv0kctssSDylkcVGX1z8ftWMjx8DNWNAQnr2YPUU/s60sRhjmHI0SY1Ipftv8lfSFjPgSC5TLZ+q+q9M73FjwVp/nAmdlhR05fLQ
CMHzto/mCQ1bqPAczGdvRDRHb46p17Wuvv3LaJNcoRnYNGTPXp1c5Zwk6NgPqkYJUg7V+4ZwxP4/yCJa9ub68/ronlXqi6eg9TfuqmcHoSVpN3aUnr1cCnxGAduYo6A5KSlIAWb3WruDEdYSgrSHpWhqrdL/+0UotdruvJ/f79vRmxv66OhsjVCauW18LNJd2M4wlpSkHZmftXk48PYi0pVrbUosbTHKBMx/8FIEdZTg3ScnKqeK1u68bRSkB5B7JNwnjf+FA8Pko2ht39D5st4ykoCns3jg6TTc7aM+yzxzB4eJBnb5M5m54ZlPLVHBSl4+ULoaui2MbPDE3t4kOpB+6pD8wes2eH5pCA9usnAgITnZpreg3gE8a2h/IWEJ2T6+LcWZJAj/GFMf8FbC6K8ZIQ/iw29ig3cv3RMUnIEZD1X5nXAzxNX3tUDHsB8RIoGMQI6PfuOV4wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPhfe3BIAAAAACDo/2tvGAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAhwDWjgbLhu6a/gAAAABJRU5ErkJggg==",
"module": "add_item",
"module_name": "MercadoPago",
"visible": true,
"options": false,
"father": "module",
"group": "scripts",
"linux": true,
"windows": true,
"mac": true,
"docker": true
},
{
"en": {
"title": "Add Customer",
"description": "Add a recipient to the invoice",
"title_options": null,
"options": null
},
"es": {
"title": " ",
"description": " ",
"title_options": null,
"options": null
},
"form": {
"css": "modal-lg",
"inputs": [
{
"type": "input",
"placeholder": {
"es": " ",
"en": " "
},
"title": {
"es": "Nombre del cliente",
"en": "Customer Name:"
},
"help": {
"es": " ",
"en": " "
},
"id": "name",
"css": "col-lg-6"
},
{
"type": "input",
"placeholder": {
"es": " ",
"en": " "
},
"title": {
"es": "Correo electrónico:",
"en": "Email:"
},
"help": {
"es": " ",
"en": " "
},
"id": "email",
"css": "col-lg-6"
},
{
"type": "input",
"placeholder": {
"es": " ",
"en": " "
},
"title": {
"es": "Teléfono:",
"en": "Phone:"
},
"help": {
"es": " ",
"en": " "
},
"id": "phone",
"css": "col-lg-6"
}
]
},
"video_youtube": "",
"icon": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAA0gAAAHVCAMAAAD8XitdAAAANlBMVEX///9+kMVieLlDXaoONpgoRp7N1OkAnuMAI4zm7fa1v96aqNIur+gAgtDC6PgAXbWQ1fNfwu6g3K/TAAAYvklEQVR42uzc0VLzOAyG4dqWjGRPt/d/t5ukUONu/9A6/84k5X2GIygZDvIhRbF9AgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAe+K5lDCJqYkhhFpKPgFY56WGlMTWaYqhZD8BuJdrSCpiTxETEY2hkCbgi9eQHkZIevYAaQImucbUZ0TkcrmcJx/nj870jdn007vPawrlBPxWXkIS6RI052fVeQnUxxIo+R6mWClM+IXmFKm0DC016BXnc58m0VQZ6eF3KbGl6HJZ2rgxS5qkZYm6hF8j11uKpDVzw7o+T1Nk+IDfoMSvFPWPRNvDJLR4+CW8RpXW0K0az5ImZuJ4Y16T3jq6j//BrccTpcPDm/Ka5OcUbc8SUcIbyzXpQIxGzGWJKOEdeYvRx4DBsiQaGDvgfXiJYzHaXpUSUcK7yCGNxmh7VaK/w3vwEmVDjLZXJfo7vIEcdFuMzovLefZ6lJYMs3AIB3ctRzKaoylAE/l0mUxpGogST0o4NN9Sjs7dxqNur8XrRSmyYwmHlYMsORocFYj9QcvSC0WJmQOOyUuS0Tewt60RoppSXKSkbQPTlKUXQrn8HTwo4Yi8JDO7RunFLH1byB1rydm/zuiqMXU7MF67Hg9KOB6vutQTm7yYpbN8pigU/8NpQy1KT75PWvrESJJwMNccxRr6nbDPl4+VQxi8xKTtms9urRCShKPxIGYS/POuF/t5Q+ztjCCbSCq+ntNWlm7XXNk4KxprFLNEknAgtxy13Xxiq03euRvTafAn3lC1fPbnd92fMiSicynyQJJwKMstq60167LUqkjffzWSyukJnmP6dk25NCJ2I59NIknC0VTtctRnqYXpcjvw0b6TmJ+fDF6v2dxd6u4AB5KEA6mtHt3JoR2reu/6g9YRPsnL42vKpBv7tZrE+yQcQVEzqWs3fqsi3VGpVe3xb/rVSphqTKpyo5pCvQtRq0mBJGH/fI5D9R+qyPfxg2mc+6+s1n7zrn+bheynVTnnMn+tfi6aGWscsH/Jnvuf77nUMKklnyZfOfrvREHsixb/K3+fsIIVexfELPrpFWs5CmLfaT5t9Y+aKQMH7FuRsdvU06McVbWmjSK2yXPUae6wZ65jjVOOD3LkUWwmsWbPNdki5e1Fk8ck7FuwoaGYR5vEU+czOW2Qnq/B0rI17YnmDruWx17TeLBJ9D6Tavcnl3jWv9LeZWUGjj2ba0gZXJkX/UFbl4o/iFz77KAoTO6wX2XoMd6r2H0hK1H6ctR/WFLeWJLYUoH9SmaSh1ZC9Mnwqq0c3btuvNWydRWT1BOwR4MFKdyP4jyqTWL2P0z4pM0gBnmiJGGnPC4FaSRIErtpnaxPFHKQtmdpUBBTnpKwR6MvOms7obudt6/VfzouT7eUlJwY3GGXlhbtn8F72kRTnKSkNos/7TTXtnFpuCQlShL2x9UsDZ/b1XYkrR97348cNkShKOMG7FFe1viM8ByS2I2ulqN+QD5+PL4nVtzhX/bOQLmRVIeiBgktgvLz+/+v3WmSsSxCU9hJvO3kntmt1HS12z1VnAgEiAPScgYP9OxsZx6lDQpVTgtY8g59O/CDUJr17Na2J5Vaiuh9B8akIOjbgZ+DcM7h9FT0Uyb9D3k7cEBKzvz/03PRSp9I3kXMyYLD0ZLfsYqenkohbsk7DJLAz6Ata8iteM/zZLLVrUXvH5FFzgmDJ
HAshHLm98KmT5VJwr3JOy01BErtdcMJgCMh6V2k58pkpz1XXY1E10kriAQOh+ScXc3tTabnuKQ1rW2b3SSKKRuYkgVHo4nUzk++lYlCFX1S8i4HmVtUQmJX0RjVhMDh2EQ6X4/0v5EpxVpUT99LaSaRzFIL6VaiVsD/ApHA0ZCc+ezPJGdz6dvjkhDnPM5mq4qzyM5xhkjgcLxHJMN389J3jpds5R0XPXk2i/g2FN28JLp24HCYSF6mq0r8zWFJI39I3qmUmLpQZKBrB47HW7LBGLmUwreqFNidZqFawq1FHzQ/IyKBwyE5sxPJu2SVu9WdePQ5us9Xtoy2SiUeWoSuHTgwFpHmLnHQa2EGvh7u+phUJZTuQmomyUklpA8WISKB47OJxP/M+KtSKmrD/yibViFSDEXvL7zfrwyS9mCy/MKbRRAJvApzkazl+gOTfaMPevegqP+MUjbMoiEXiAQOh+Q/TDUylTYohkC5I8n9K857k6LTaMYFS4TA4RD2+e+5Slx1eI4Y322SueCTd82iORcsWgWHQ5PPNsxN2jy6msSxqKo0AZLKxl0mkfhrlZde5YxtFOB4KG1KLIoUxfbVWuVgaVbx9v9yEVUNg5VBkS0iTV8E1U/A0dCYbbXdjIs7QqmmGwskX+HFyt6q0Uxye6POCy+CHbLgaGhoI5M1kSzclFidjPefuizhoxBkIk0DEmo2gMNRWuu/VyQV9c/gUMv7ArlFk9RMulMkVBECx0M4L+XtzpZr2FCfQ6eielKVyOuHLkvoApgmE2kmNOrageMhlJeTZSR7D9FrmLljtlSDP1c2WLJh9hrINYADojFbumFxHlRrmXi555vEKN0VZ1JNZvT0LXDUGDggJa+HJKYqJ62Rw+Tg8b12XhKPDjtvDxWpxHkxIGXCEAkcDuE7QlJORJQ474tU015yWuPHEqkSrg9deotLxhAJHBOlvJoBd3Xl7j8sQqhVhOyvGSspO8wigYNSc16dlL1cRdJJRGIKtehIsoFJZWXRtwUk9OzAUVFeMclKdl14NpETOb8tFqoyDFe9SZI2hXxhhnlIZPTswBHR+B4Q/lnhra5cqrOOYoNjGRdX9SYJbw6fz2vL/ZCzA8dF/AhlKTCQTMZBKbWwRCsmVTc+m3JBuWJwZJQWTbLIYKeEifSTRaGWGsjuEXUJb24muWTDedlfzMaCA1PyG5d7TKIqepIauwAhamdNcNRNFdfHE2dSiZsay1+asc4OHBilvDxMstpCKYYQU2bdP2silWYpOZMiv1dwVanU/L3HI67o2YGjUnJv0vqUEsv+SjqOb7kMqtqZxBRjJF736JIRkMDRUcr3JhyspnHdn5rN1Pp2XRwplI1Fdy8ZAQkcn8Jm0iKXC2/cJPBUtE+rJ2nieAE05CuXVY8QkMArELMzaXVKqfW4qGjzo0bp+3ZJ/Dys7ZdgZpuFXfEIc0jgBZA0NWmeA0ihiJTIOehQpBbwLCZpbBlCm4VdGh9hUQN4BcojJtmyHaLtZxyI1Ag3MUlrsi9Z9gir7MBrENnG/3fGpCsk/R4/vTlxgquoioTUAtIDHiV4BA6P0sI67MlJStdTjtyuc+8pxUgpm0eLniJjB14I69wtN3RbFL7hCp9oy9VFUats8sDjz28eYZEdeCFqmu+xWyrdWPT26P+cquiGVHan8K0vobhC8Ai8BpEnE6XLGTxR1RLZBIghErdnNu4bHmGABF4Pb9L5AZM2OL3/bD8ejnLnM8Mj8JIouYZ/ecwk86b/u7EYjgxsngAvhMZ8y/1B6eIOr7RRDl/u1YjhEXhdlLLjgZHS5cJem/P5/Hhow2Y+8JJYTLL+3fOwXh3iEXhtNAxyBE/EenVYqQpeGTPp6SqZRsjXgZdHa8pPV6nXCAtVwetTKPfw5T/RiLEuCLwuKpEHKp2/1aKzTeBinSr4IbTuXQ9/j0u2hLyHCjwCr42WODBpU+n8JIsyBwyPwOsjIeURVmnhyyzinBGOwE/F1nB38
Fe5dDaLelIQeAR+CFIpj2GT6XGJrEfXwxHhCPwgtPXvpjKdH3PIhyIPU8XoCPwstATKeS7TptN5TaGZQ6YRenXg59FUmsIbl41zo7encdngN4egEfiNbCpxdsyF8vCbQEtPiNAI/GC0VOL83aSIsRH46UiN3+oSU4BG4DegJcT0TRYl9OnAL0JqoMRfb1HAtBH4XajU8JV9vESwCPxSpIT4FYEpJYoVFoFfjDaZHreJE0EiABpaaiBKifkuhVKiGCARALeI1BCImlD7RjE3gagphPwcAPs+1RADbSQP/SHGUGEQAHeg4sAsKwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPAD0FJDjAGbbgF4HK0xJWZO2IALwMNITLkBkQB4GI2cIRIAn6SmDJEA+CwhQyQAPotGiAQARALgCAhEAgARCYBDgIgEACISeBlUpGyI7l1f+ngR0YXvandrf1lFFh7Rbqq1zu9UtfceRiT/6nUDp1+ALzmacsNOxXPXqV2fLQal1KAYquhMonbzdndx7f56eXoikpQaKbVj/7h9WRGd/3va64wjkj2Qc4Nxsib4BFJjykYK8leQyO560aFGIWUH+Rv9vXR9ZLltztGficn2jNGLzm/UcnsbU1UvUn+nJ+GoZ/AApouRqm7N1q6bYrNPGynajbvNtl6vV7PL4Fh0/lX2Vup068XmKDIUScLOAzGMAvd7RPkDHPQko+u9IFpTHpLMgr0GXobXvc+zFzWoDBN0RiwDkVQo7xBhElhlbkLYabYk7tOB8x61M8naso9IPvB5gt58PuVdqAw9MoicSHsPRGoP3M+8Ne1c56h+b8Iu7E36KGYxjxZMEsoTogw8mjsiaXoXxklgGWue63C1T1ee3lmmHuXqXZw/Q8Pcjmq3rYmklKdEmATWiZzvJck8mhkkbkJ0GJHC4jO8iLwxavclrb5/zA57nnkOwBp998Ya0+Qyx9M75O6hGCMlHnfMRmOp2jf8No8TibkLgP0DEtUqpbqMOcnAV24MRRL23xtqiORuZYQk8EhA4hRDIM49iUKIaTB8qPwxnafV2SW+Y+cpXVxgKoOkdPxgCMkg1ZHaZ4vTg2oRKSH1IvXGcZDRAKvCJLCGkjNhlIejqpZ79kGCbu8aJ/KC2v5UZ+yG+IDEwQIlZ2eIM5G0+0VggyQN4+EVfxBJcv8NDa2MkATupqZBKy7OIxmklePbfezUMqpvtH1Kg4oTwb3B6B3qzpv6y6l2gY/Fu+3fKTqPDK3ZQAU8sEbkQZvR2IvQpxZSL4F9un9AtfY+8EBTHgUan1ILul+7pHQiZcN9T3Ai2RfPX54gEliChr+VZdzINPpBUupijFH6B0SXqTBKJ5hRu0GShL+44BfYgqL3JXVZfi+S+Jhr+GEWRAIrmAtxHA7YGVJdr0nZp57HgYbUN+Mk3oM9GYWuXJfQvv8xtLKbSJLYBSQjOJE0ONV3nSsnAO5JfgfnQRw3+8ImkvvNXfd30VG7lVxAMuJERuM0QqWGxNmLRN0IyajJiTSRxUl2AuBxkYJpMOyIsbjOF8ca6u1/wTVolynwztEjbfZt85NNEJlI5caW6VZzdVoZfceUTgB8gUhR90UKft7T43/hB3ZjMSPl3rA5WgKlxLyzcqnwrgLqRfJDJI9AJPBEkWJeQpxIJOPvz2lBJK2UOO+QqguSYVppVWdL6iRBJPA8kSgvURZEWotIflvfXKT8qEg+VcInAI4h0pdFpEKcl0XirxEpnwA4hkjlsyLNVutRsDTGl0UkQUQCxxPpiyKSxOx5qyBU0rdGpHQC4GnJhhTDPvo1Y6TK2UibQ6JqT+hFynEq0jxrx0g2gP8i/U1F9zntiuSH9WE5IKWbWnZdRNpXQDGPBI4pUnXLpyd4kRYnZLVcEXUjpFR0uHx9NiHrH6BY2QCOI1JZT13XBZGidA0//f0T1FT0octvo3C25MkSodN0rZ0SdpuDJ4qk7CR4KCL1O1yNrqWHn
fsi7y1ajd6O6PtrZWn1N6MmF/h2kU5pT4Jlkfw2Ct0LC3qb2SAZ35b6bRSy90XR/xZg7EcC/6lIkfOeH/WKTERyIngbS3c9jAUp3TaMyuO8tnz4nrhTE1YDY4cseKpIhceVjLVQulKnIpmMvoOo1AkRnQej20hMmP7lJXIfZko2yEzSmjJ6duCJIvlUAVOVU0NdJe80j0i++AkVfTcx9uW4wqDSq5bYRw+t2eDYEn4qrkQ/B+dgg6ro33M1ULIBPFmkfpK0Hf1VQ2TXFociGd25Me0RrjVH6bMSTdnmRzZSHRSOTTFUeyF7g0HhrlD/EIgzAhJ4pkhmgcEp8aBM6lQkSdnB3SNSsey17SP8Q6RR7W+tnKdY9jzmIZhEAk8XSSjnlcLd4V/27mi5jRyHwrA2lt0LC4UC3/9lN2orxgKmMD0TJVNU/i93MszOBU+RptjkNEjxs/Yg/u5o/Dog9YUxMYxtFRVLdvjNQYqBok9BH6T+EP1Ymu5Hmtejl1a8lOPFuNcF/3aQdpeXPgV9kOqSWnWux1P2T9pJzUc9EzlcuvuROEAIvyFI7et2cd1eG6TIyGs/zsRmubYuxqTzvXldScflbuULOcLvCVIUv56brtgFqW/iHK8odZE976vmydu8vZS3NnNnbr5E1Qfp/N/9Xw3S58c5SD/KywX732q/PZdOe/l8TgpS7vrnpoX0nHIe/0Vm7b28nuu9LdNH1/NU9lL2qqIx62/fbt7LMSM3F5mXy9db+1/P5z0r3yPwWq8jf//8xUhHIZdvL3sbewMRj9LOrSbK3u4f3PXt9dNLc+P/+/7gm2spszr8e+T6Vewelcub/PM2du9vTfavNbsDz5G3D0ce/J/rf/3yzpo3AAAAAAAAAAAAAAAAAOBXEvOdyalnZu5m9tetjTHcc+G8cowfpcDCxIdqGPfCJJYKh9u07Fa1bZtu0V7z4E23vVrHIEtYlbnqlqi6zHt9KdPxtdDG1+ZqPmqDQYkSlmSu21eqXgt9zAprlCQ11+bDx51S3obAcmzoNqWHAlI7vqTm2iR9NsighOWZbncNmXT7Pkni213qTY4ykoS12NgaLpPAtRHxtsxyjhrM7rCSlKMuINIXDktxa8uisjNOwDLG1lOL8aPlMbHr+PEn+wlYRBkWdHyns8mdjFTmV6nSJdrLzek8mHn4GR8NMrnDkkYeA8xELPdntS+JG3YLl40yaRPPzYns7c1mbLqF4XZrMD2aIQmLkNSdbboM4PWjMV2AGFbipjb9O0zl47P04PkSuzIkYQ2uuTvXgERufL7yNvLHtoXcXP3TZ5q4OqaxBI41eO7gIXXnEqTTnSDlFIwc2DLCSXlwMGXhDqspA0+Q2vN9/OCpLAdppPgFGSUdXh8cnLkdVpPiEvqeHszzBM2iPb2fjrLI4bVRZW6HteQBJZF2t46ImKXluBqkcUpqxNKDMxms22EtedEtyxEJHxHyEQvVUeV3IyAjBUlq6yG3wtwOC7C6ph3mY4aYeySoC5LVIKVC6SaOxneyWEuaihW550++L+2DdCo8FVo36BirDViLRS687fnxvSpBAn4qSK4bQQIOBKmZ2s1zpPoLgiQECWs5HqQoDTpcxs8HicUGrO5vrNp5zZBJjVsqkjaXxqodnsjhIEVl1Pb56Je/T83XveIECWuxwzsbfDuw+7uZKloJUrMNSAZbhLAWO7TXziUFxpsZ24j0leY0Co29dngu+bWjZjtr2eET7u/+vjvKqLH7G8/FtHsfaZ6Q0WzWS+NYritBki3wPhIWZ/VwhvqDflt3HVhSnUoakIJa+4bs4A1ZrKac+ePTz13y1K4ZPaIuJclGeoy1ZzZszOywGtN64rbEWeBpJjZdbLDJGSkpmBZH9NcgiaZK+YwcpwhhOaZbprv0kctssSDylkcVGX1z8ftWMjx8DNWNAQnr2YPUU/s60sRhjmHI0SY1Ipftv8lfSFjPgSC5TLZ+q+q9M73FjwVp/nAmdlhR05fLQ
CMHzto/mCQ1bqPAczGdvRDRHb46p17Wuvv3LaJNcoRnYNGTPXp1c5Zwk6NgPqkYJUg7V+4ZwxP4/yCJa9ub68/ronlXqi6eg9TfuqmcHoSVpN3aUnr1cCnxGAduYo6A5KSlIAWb3WruDEdYSgrSHpWhqrdL/+0UotdruvJ/f79vRmxv66OhsjVCauW18LNJd2M4wlpSkHZmftXk48PYi0pVrbUosbTHKBMx/8FIEdZTg3ScnKqeK1u68bRSkB5B7JNwnjf+FA8Pko2ht39D5st4ykoCns3jg6TTc7aM+yzxzB4eJBnb5M5m54ZlPLVHBSl4+ULoaui2MbPDE3t4kOpB+6pD8wes2eH5pCA9usnAgITnZpreg3gE8a2h/IWEJ2T6+LcWZJAj/GFMf8FbC6K8ZIQ/iw29ig3cv3RMUnIEZD1X5nXAzxNX3tUDHsB8RIoGMQI6PfuOV4wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPhfe3BIAAAAACDo/2tvGAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAhwDWjgbLhu6a/gAAAABJRU5ErkJggg==",
"module": "add_recipient",
"module_name": "MercadoPago",
"visible": true,
"options": false,
"father": "module",
"group": "scripts",
"linux": true,
"windows": true,
"mac": true,
"docker": true
}"""
| 77.547569
| 8,651
| 0.803353
| 1,613
| 36,680
| 18.231246
| 0.271544
| 0.00272
| 0.005101
| 0.006801
| 0.917979
| 0.914986
| 0.914272
| 0.910532
| 0.910327
| 0.910327
| 0
| 0.117069
| 0.128571
| 36,680
| 473
| 8,652
| 77.547569
| 0.802935
| 0.017394
| 0
| 0.376068
| 0
| 0
| 0.175161
| 0
| 0
| 1
| 0
| 0.002114
| 0
| 1
| 0
| false
| 0
| 0.034188
| 0
| 0.034188
| 0.068376
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f502f8940f229ec87510fd54d1854e03d2eab714
| 16,838
|
py
|
Python
|
tests/util/test_filter_fileset.py
|
strawpants/cate
|
eeef7da204b2f5c6dab1a90cb240aa5158c44513
|
[
"MIT"
] | 34
|
2017-09-28T19:08:59.000Z
|
2022-02-09T14:53:26.000Z
|
tests/util/test_filter_fileset.py
|
strawpants/cate
|
eeef7da204b2f5c6dab1a90cb240aa5158c44513
|
[
"MIT"
] | 608
|
2017-09-25T20:29:52.000Z
|
2022-03-28T11:02:21.000Z
|
tests/util/test_filter_fileset.py
|
strawpants/cate
|
eeef7da204b2f5c6dab1a90cb240aa5158c44513
|
[
"MIT"
] | 14
|
2017-10-16T07:34:59.000Z
|
2021-02-22T15:52:37.000Z
|
from unittest import TestCase
from cate.util.misc import filter_fileset
class IncludeExcludeDataSourcesTest(TestCase):
    """Exercise filter_fileset() with include/exclude glob patterns."""

    def test_none(self):
        # With neither includes nor excludes, the very same object comes back.
        result = filter_fileset(ALL_DATA_SOURCES)
        self.assertIs(result, ALL_DATA_SOURCES)

    def test_include(self):
        result = filter_fileset(ALL_DATA_SOURCES,
                                includes=['esacci.CLOUD.mon.*.multi-platform.*', 'esacci.OZONE.*'])
        expected = [
            'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.AVHRR.multi-platform.AVHRR-AM.2-0.r1',
            'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.AVHRR.multi-platform.AVHRR-PM.2-0.r1',
            'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.multi-sensor.multi-platform.ATSR2-AATSR.2-0.r1',
            'esacci.OZONE.day.L3S.TC.multi-sensor.multi-platform.MERGED.fv0100.r1',
            'esacci.OZONE.mon.L3.NP.multi-sensor.multi-platform.MERGED.fv0002.r1',
        ]
        self.assertEqual(sorted(result), expected)

    def test_exclude(self):
        result = filter_fileset(ALL_DATA_SOURCES,
                                excludes=['*.day.*', '*.satellite-orbit-frequency.*',
                                          '*.multi-sensor.*'])
        print(repr(sorted(result)))
        expected = [
            'esacci.AEROSOL.5-days.L3C.AEX.GOMOS.Envisat.aergom.2-19.r1',
            'esacci.AEROSOL.mon.L3C.AER_PRODUCTS.AATSR.Envisat.ORAC.03-02.r1',
            'esacci.AEROSOL.mon.L3C.AER_PRODUCTS.AATSR.Envisat.SU.4-21.r1',
            'esacci.AEROSOL.mon.L3C.AER_PRODUCTS.ATSR-2.ERS-2.ORAC.03-02.r1',
            'esacci.AEROSOL.mon.L3C.AER_PRODUCTS.ATSR-2.ERS-2.SU.4-21.r1',
            'esacci.AEROSOL.mon.L3C.AOD.MERIS.Envisat.MERIS_ENVISAT.2-2.r1',
            'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.AVHRR.multi-platform.AVHRR-AM.2-0.r1',
            'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.AVHRR.multi-platform.AVHRR-PM.2-0.r1',
            'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.MODIS.Aqua.MODIS_AQUA.2-0.r1',
            'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.MODIS.Terra.MODIS_TERRA.2-0.r1',
            'esacci.LC.13-yrs.L4.WB.asar.Envisat.Map.4-0.r1',
        ]
        self.assertEqual(sorted(result), expected)

    def test_include_exclude(self):
        # Includes are applied first, then excludes are removed.
        result = filter_fileset(ALL_DATA_SOURCES,
                                includes=['esacci.CLOUD.mon.*.multi-platform.*', 'esacci.OZONE.*'],
                                excludes=['*.day.*', '*.ATSR2-AATSR.*'])
        print(repr(sorted(result)))
        expected = [
            'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.AVHRR.multi-platform.AVHRR-AM.2-0.r1',
            'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.AVHRR.multi-platform.AVHRR-PM.2-0.r1',
            'esacci.OZONE.mon.L3.NP.multi-sensor.multi-platform.MERGED.fv0002.r1',
        ]
        self.assertEqual(sorted(result), expected)
# Fixture: flat list of ESA CCI data-source identifiers consumed by the
# IncludeExcludeDataSourcesTest cases above.  Each identifier is a dotted
# string whose segments (ECV, frequency, level, product, sensor, platform,
# version, realization) the tests' wildcard patterns match against.
ALL_DATA_SOURCES = [
    'esacci.AEROSOL.5-days.L3C.AEX.GOMOS.Envisat.aergom.2-19.r1',
    'esacci.AEROSOL.climatology.L3.AAI.multi-sensor.multi-platform.ms_uvai.1-5-7.r1',
    'esacci.AEROSOL.day.L3.AAI.multi-sensor.multi-platform.ms_uvai.1-5-7.r1',
    'esacci.AEROSOL.day.L3C.AER_PRODUCTS.AATSR.Envisat.ORAC.03-02.r1',
    'esacci.AEROSOL.day.L3C.AER_PRODUCTS.AATSR.Envisat.SU.4-21.r1',
    'esacci.AEROSOL.day.L3C.AER_PRODUCTS.ATSR-2.ERS-2.ORAC.03-02.r1',
    'esacci.AEROSOL.day.L3C.AER_PRODUCTS.ATSR-2.ERS-2.SU.4-21.r1',
    'esacci.AEROSOL.day.L3C.AOD.MERIS.Envisat.MERIS_ENVISAT.2-2.r1',
    'esacci.AEROSOL.mon.L3.AAI.multi-sensor.multi-platform.ms_uvai.1-5-7.r1',
    'esacci.AEROSOL.mon.L3C.AER_PRODUCTS.AATSR.Envisat.ORAC.03-02.r1',
    'esacci.AEROSOL.mon.L3C.AER_PRODUCTS.AATSR.Envisat.SU.4-21.r1',
    'esacci.AEROSOL.mon.L3C.AER_PRODUCTS.ATSR-2.ERS-2.ORAC.03-02.r1',
    'esacci.AEROSOL.mon.L3C.AER_PRODUCTS.ATSR-2.ERS-2.SU.4-21.r1',
    'esacci.AEROSOL.mon.L3C.AOD.MERIS.Envisat.MERIS_ENVISAT.2-2.r1',
    'esacci.AEROSOL.satellite-orbit-frequency.L2P.AOD.MERIS.Envisat.MERIS_ENVISAT.2-2.r1',
    'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.AVHRR.multi-platform.AVHRR-AM.2-0.r1',
    'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.AVHRR.multi-platform.AVHRR-PM.2-0.r1',
    'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.MODIS.Aqua.MODIS_AQUA.2-0.r1',
    'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.MODIS.Terra.MODIS_TERRA.2-0.r1',
    'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.multi-sensor.Envisat.MERIS-AATSR.2-0.r1',
    'esacci.CLOUD.mon.L3C.CLD_PRODUCTS.multi-sensor.multi-platform.ATSR2-AATSR.2-0.r1',
    'esacci.FIRE.day.L4.BA.multi-sensor.multi-platform.MERIS.v4-1.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CH4.SCIAMACHY.Envisat.SCIAMACHY.v3-9.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CH4.SCIAMACHY.Envisat.SCIAMACHY.v7-1.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CH4.TANSO-FTS.GOSAT.GOSAT.v1.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CH4.TANSO-FTS.GOSAT.GOSAT.v2-3-7.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CH4.TANSO-FTS.GOSAT.GOSAT.v2-3-7.r2',
    'esacci.GHG.satellite-orbit-frequency.L2.CH4.TANSO-FTS.GOSAT.GOSAT.v6.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CH4.multi-sensor.multi-platform.VARIOUS.ch4_v1-0.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CO2.SCIAMACHY.Envisat.SCIAMACHY.v02-01-01.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CO2.SCIAMACHY.Envisat.SCIAMACHY.v3-9.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CO2.TANSO-FTS.GOSAT.GOSAT.v2-3-7.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CO2.TANSO-FTS.GOSAT.GOSAT.v6.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CO2.multi-sensor.multi-platform.VARIOUS.v2-1a.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CO2.multi-sensor.multi-platform.VARIOUS.v2-1b.r1',
    'esacci.GHG.satellite-orbit-frequency.L2.CO2.multi-sensor.multi-platform.VARIOUS.v2-1c.r1',
    'esacci.LC.13-yrs.L4.WB.asar.Envisat.Map.4-0.r1',
    'esacci.LC.5-yrs.L4.LCCS.multi-sensor.multi-platform.Map.1-6-1.r1',
    'esacci.OC.5-days.L3S.CHLOR_A.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.5-days.L3S.CHLOR_A.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.5-days.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.5-days.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.5-days.L3S.K_490.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.5-days.L3S.K_490.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.5-days.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.5-days.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.5-days.L3S.RRS.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.5-days.L3S.RRS.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.8-days.L3S.CHLOR_A.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.8-days.L3S.CHLOR_A.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.8-days.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.8-days.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.8-days.L3S.K_490.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.8-days.L3S.K_490.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.8-days.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.8-days.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.8-days.L3S.RRS.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.8-days.L3S.RRS.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.climatology.L3S.OC_PRODUCTS.multi-sensor.multi-platform.merged.2-0.0-25deg',
    'esacci.OC.climatology.L3S.OC_PRODUCTS.multi-sensor.multi-platform.merged.2-0.0-5deg',
    'esacci.OC.climatology.L3S.OC_PRODUCTS.multi-sensor.multi-platform.merged.2-0.1deg',
    'esacci.OC.climatology.L3S.OC_PRODUCTS.multi-sensor.multi-platform.merged.2-0.4km',
    'esacci.OC.day.L3S.CHLOR_A.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.day.L3S.CHLOR_A.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.1997-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.1998-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.1999-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2000-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2001-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2002-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2003-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2004-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2005-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2006-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2007-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2008-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2009-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2010-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2011-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2012-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.2013-r1',
    'esacci.OC.day.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.day.L3S.K_490.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.day.L3S.K_490.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.1997-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.1997-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.1998-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.1998-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.1999-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.1999-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2000-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2000-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2001-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2001-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2002-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2002-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2003-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2003-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2004-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2004-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2005-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2005-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2006-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2006-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2007-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2007-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2008-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2008-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2009-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2009-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2010-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2010-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2011-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2011-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2012-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2012-r2',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2013-r1',
    'esacci.OC.day.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.2013-r2',
    'esacci.OC.day.L3S.RRS.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.day.L3S.RRS.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.mon.L3S.CHLOR_A.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.mon.L3S.CHLOR_A.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.mon.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.mon.L3S.IOP.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.mon.L3S.K_490.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.mon.L3S.K_490.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.mon.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.mon.L3S.OC_PRODUCTS.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OC.mon.L3S.RRS.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.OC.mon.L3S.RRS.multi-sensor.multi-platform.MERGED.2-0.r2',
    'esacci.OZONE.day.L3S.TC.multi-sensor.multi-platform.MERGED.fv0100.r1',
    'esacci.OZONE.mon.L3.NP.multi-sensor.multi-platform.MERGED.fv0002.r1',
    'esacci.SEAICE.day.L4.SICONC.multi-sensor.multi-platform.AMSR_25kmEASE2.2-0.NH',
    'esacci.SEAICE.day.L4.SICONC.multi-sensor.multi-platform.AMSR_25kmEASE2.2-0.SH',
    'esacci.SEAICE.day.L4.SICONC.multi-sensor.multi-platform.AMSR_50kmEASE2.2-0.NH',
    'esacci.SEAICE.day.L4.SICONC.multi-sensor.multi-platform.AMSR_50kmEASE2.2-0.SH',
    'esacci.SEALEVEL.mon.IND.MSL.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.SEALEVEL.mon.IND.MSLAMPH.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.SEALEVEL.mon.IND.MSLTR.multi-sensor.multi-platform.MERGED.2-0.r1',
    'esacci.SOILMOISTURE.day.L3S.SSMS.multi-sensor.multi-platform.ACTIVE.03-2.r1',
    'esacci.SOILMOISTURE.day.L3S.SSMV.multi-sensor.multi-platform.COMBINED.03-2.r1',
    'esacci.SOILMOISTURE.day.L3S.SSMV.multi-sensor.multi-platform.PASSIVE.03-2.r1',
    'esacci.SST.day.L4.SSTdepth.multi-sensor.multi-platform.OSTIA.1-1.r1',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.AATSR.Envisat.AATSR.1-1.2002',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.AATSR.Envisat.AATSR.1-1.2003',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.AATSR.Envisat.AATSR.1-1.2004',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.AATSR.Envisat.AATSR.1-1.2005',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.AATSR.Envisat.AATSR.1-1.2006',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.AATSR.Envisat.AATSR.1-1.2007',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.AATSR.Envisat.AATSR.1-1.2008',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.AATSR.Envisat.AATSR.1-1.2009',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.AATSR.Envisat.AATSR.1-1.2010',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.AATSR.Envisat.AATSR.1-1.2011',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.AATSR.Envisat.AATSR.1-1.2012',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR-2.ERS-2.ATSR2.1-1.1995',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR-2.ERS-2.ATSR2.1-1.1996',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR-2.ERS-2.ATSR2.1-1.1997',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR-2.ERS-2.ATSR2.1-1.1998',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR-2.ERS-2.ATSR2.1-1.1999',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR-2.ERS-2.ATSR2.1-1.2000',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR-2.ERS-2.ATSR2.1-1.2001',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR-2.ERS-2.ATSR2.1-1.2002',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR-2.ERS-2.ATSR2.1-1.2003',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR.ERS-1.ATSR1.1-1.1991',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR.ERS-1.ATSR1.1-1.1992',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR.ERS-1.ATSR1.1-1.1993',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR.ERS-1.ATSR1.1-1.1994',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR.ERS-1.ATSR1.1-1.1995',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR.ERS-1.ATSR1.1-1.1996',
    'esacci.SST.satellite-orbit-frequency.L3U.SSTskin.ATSR.ERS-1.ATSR1.1-1.1997']
| 75.169643
| 114
| 0.70816
| 2,776
| 16,838
| 4.243156
| 0.064481
| 0.142372
| 0.161644
| 0.242465
| 0.943119
| 0.938365
| 0.938365
| 0.938365
| 0.937856
| 0.930045
| 0
| 0.080805
| 0.109217
| 16,838
| 223
| 115
| 75.506726
| 0.704514
| 0
| 0
| 0.186047
| 0
| 0.87907
| 0.810013
| 0.805678
| 0
| 0
| 0
| 0
| 0.018605
| 1
| 0.018605
| false
| 0.004651
| 0.009302
| 0
| 0.032558
| 0.009302
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
eeb3983f96cb34453fdf117ceaf927381072f908
| 46,655
|
py
|
Python
|
tests/test_blobxfer_operations_download.py
|
temporaer/blobxfer
|
8602006192c0f8f7bb078e3d6da20396c07f302a
|
[
"MIT"
] | null | null | null |
tests/test_blobxfer_operations_download.py
|
temporaer/blobxfer
|
8602006192c0f8f7bb078e3d6da20396c07f302a
|
[
"MIT"
] | null | null | null |
tests/test_blobxfer_operations_download.py
|
temporaer/blobxfer
|
8602006192c0f8f7bb078e3d6da20396c07f302a
|
[
"MIT"
] | null | null | null |
# coding=utf-8
"""Tests for download operations"""
# stdlib imports
import datetime
try:
import unittest.mock as mock
except ImportError: # noqa
import mock
import multiprocessing
try:
import pathlib2 as pathlib
except ImportError: # noqa
import pathlib
try:
import queue
except ImportError: # noqa
import Queue as queue
# non-stdlib imports
import azure.storage.blob
import dateutil.tz
import pytest
# local imports
import blobxfer.models.azure as azmodels
import blobxfer.models.download as models
import blobxfer.models.options as options
import blobxfer.operations.azure as azops
import blobxfer.util as util
# module under test
import blobxfer.operations.download as ops
@mock.patch('blobxfer.operations.azure.file.check_if_single_file')
@mock.patch('blobxfer.operations.azure.blob.check_if_single_blob')
def test_ensure_local_destination(patched_blob, patched_file, tmpdir):
    """Exercise Downloader.ensure_local_destination() across modes.

    The five Specification instances used by the original test differed
    only in storage mode and the rename flag, so their construction is
    factored into a local helper to remove the duplication.
    """
    downdir = tmpdir.join('down')
    downdir.mkdir()

    def _make_spec(mode, rename):
        # All other download options are held constant across the cases.
        return models.Specification(
            download_options=options.Download(
                check_file_md5=True,
                chunk_size_bytes=4194304,
                delete_extraneous_destination=False,
                delete_only=False,
                max_single_object_concurrency=8,
                mode=mode,
                overwrite=True,
                recursive=True,
                rename=rename,
                restore_file_properties=options.FileProperties(
                    attributes=False,
                    cache_control=None,
                    content_type=None,
                    lmt=False,
                    md5=None,
                ),
                rsa_private_key=None,
                strip_components=0,
            ),
            skip_on_options=mock.MagicMock(),
            local_destination_path=models.LocalDestinationPath(
                str(downdir)
            ),
        )

    # no spec sources: must raise
    ds = _make_spec(azmodels.StorageModes.Auto, rename=False)
    with pytest.raises(RuntimeError):
        ops.Downloader.ensure_local_destination(mock.MagicMock(), ds, False)
    # blob directory
    asp = azops.SourcePath()
    p = 'cont/remote/path'
    asp.add_path_with_storage_account(p, 'sa')
    ds.add_azure_source_path(asp)
    patched_blob.return_value = False
    ops.Downloader.ensure_local_destination(mock.MagicMock(), ds, False)
    assert ds.destination.is_dir
    # blob single file + rename: renaming onto a directory must raise
    ds = _make_spec(azmodels.StorageModes.Auto, rename=True)
    ds.add_azure_source_path(asp)
    patched_blob.return_value = True
    with pytest.raises(RuntimeError):
        ops.Downloader.ensure_local_destination(mock.MagicMock(), ds, False)
    # file directory
    ds = _make_spec(azmodels.StorageModes.File, rename=False)
    ds.add_azure_source_path(asp)
    patched_file.return_value = (False, None)
    ops.Downloader.ensure_local_destination(mock.MagicMock(), ds, True)
    assert ds.destination.is_dir
    # file single + rename: must raise
    ds = _make_spec(azmodels.StorageModes.File, rename=True)
    ds.add_azure_source_path(asp)
    patched_file.return_value = (True, mock.MagicMock())
    with pytest.raises(RuntimeError):
        ops.Downloader.ensure_local_destination(mock.MagicMock(), ds, False)
    # no read access on the storage account: must raise
    sa = mock.MagicMock()
    sa.can_read_object = False
    creds = mock.MagicMock()
    creds.get_storage_account.return_value = sa
    ds = _make_spec(azmodels.StorageModes.File, rename=False)
    ds.add_azure_source_path(asp)
    patched_file.return_value = (True, mock.MagicMock())
    with pytest.raises(RuntimeError):
        ops.Downloader.ensure_local_destination(creds, ds, False)
def test_check_download_conditions(tmpdir):
    """Exercise Downloader._check_download_conditions() skip decisions.

    The five Specification instances used by the original test differed
    only in the overwrite flag and the skip-on options, so construction
    is factored into a local helper to remove the duplication.
    """
    ap = tmpdir.join('a')
    ap.write('abc')
    ep = pathlib.Path(str(ap))       # existing local file
    nep = pathlib.Path(str(tmpdir.join('nep')))  # nonexistent local path

    def _make_spec(overwrite, filesize_match, lmt_ge, md5_match):
        # All other download options are held constant across the cases.
        return models.Specification(
            download_options=options.Download(
                check_file_md5=True,
                chunk_size_bytes=4194304,
                delete_extraneous_destination=False,
                delete_only=False,
                max_single_object_concurrency=8,
                mode=azmodels.StorageModes.Auto,
                overwrite=overwrite,
                recursive=True,
                rename=False,
                restore_file_properties=options.FileProperties(
                    attributes=False,
                    cache_control=None,
                    content_type=None,
                    lmt=False,
                    md5=None,
                ),
                rsa_private_key=None,
                strip_components=0,
            ),
            skip_on_options=options.SkipOn(
                filesize_match=filesize_match,
                lmt_ge=lmt_ge,
                md5_match=md5_match,
            ),
            local_destination_path=models.LocalDestinationPath('dest'),
        )

    # no overwrite: nonexistent path downloads, existing path skips
    ds = _make_spec(
        overwrite=False, filesize_match=True, lmt_ge=True, md5_match=True)
    d = ops.Downloader(mock.MagicMock(), mock.MagicMock(), ds)
    result = d._check_download_conditions(nep, mock.MagicMock())
    assert result == ops.DownloadAction.Download
    rfile = mock.MagicMock()
    rfile.vectored_io = None
    result = d._check_download_conditions(nep, rfile)
    assert result == ops.DownloadAction.Download
    result = d._check_download_conditions(ep, mock.MagicMock())
    assert result == ops.DownloadAction.Skip
    # overwrite + md5 skip-on: existing file triggers an md5 check
    ds = _make_spec(
        overwrite=True, filesize_match=True, lmt_ge=True, md5_match=True)
    d = ops.Downloader(mock.MagicMock(), mock.MagicMock(), ds)
    rfile = mock.MagicMock()
    rfile.md5 = 'abc'
    rfile._encryption = None
    result = d._check_download_conditions(ep, rfile)
    assert result == ops.DownloadAction.CheckMd5
    # overwrite, no skip-on options: always download
    ds = _make_spec(
        overwrite=True, filesize_match=False, lmt_ge=False, md5_match=False)
    d = ops.Downloader(mock.MagicMock(), mock.MagicMock(), ds)
    result = d._check_download_conditions(ep, mock.MagicMock())
    assert result == ops.DownloadAction.Download
    # filesize match only; page blobs compare page-aligned lengths
    ds = _make_spec(
        overwrite=True, filesize_match=True, lmt_ge=False, md5_match=False)
    d = ops.Downloader(mock.MagicMock(), mock.MagicMock(), ds)
    rfile = azmodels.StorageEntity('cont')
    rfile._size = util.page_align_content_length(ep.stat().st_size)
    rfile._mode = azmodels.StorageModes.Page
    rfile._encryption = None
    result = d._check_download_conditions(ep, rfile)
    assert result == ops.DownloadAction.Skip
    rfile._size = ep.stat().st_size
    rfile._mode = azmodels.StorageModes.Page
    result = d._check_download_conditions(ep, rfile)
    assert result == ops.DownloadAction.Download
    # lmt_ge only: download when the remote is newer, else skip
    ds = _make_spec(
        overwrite=True, filesize_match=False, lmt_ge=True, md5_match=False)
    d = ops.Downloader(mock.MagicMock(), mock.MagicMock(), ds)
    rfile = azmodels.StorageEntity('cont')
    rfile._lmt = datetime.datetime.now(dateutil.tz.tzutc()) + \
        datetime.timedelta(days=1)
    rfile._encryption = None
    result = d._check_download_conditions(ep, rfile)
    assert result == ops.DownloadAction.Download
    rfile._lmt = datetime.datetime.now(dateutil.tz.tzutc()) - \
        datetime.timedelta(days=1)
    result = d._check_download_conditions(ep, rfile)
    assert result == ops.DownloadAction.Skip
def test_pre_md5_skip_on_check():
    """_pre_md5_skip_on_check() must register each entity in the md5 map."""
    downloader = ops.Downloader(
        mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    downloader._md5_offload = mock.MagicMock()
    # encrypted entity with vectored io: keyed off pre-encrypted md5
    entity = azmodels.StorageEntity('cont')
    entity._encryption = mock.MagicMock()
    entity._encryption.blobxfer_extensions = mock.MagicMock()
    entity._encryption.blobxfer_extensions.pre_encrypted_content_md5 = 'abc'
    entity._client = mock.MagicMock()
    entity._client.primary_endpoint = 'ep'
    entity._name = 'name'
    entity._size = 32
    entity._vio = mock.MagicMock()
    entity._vio.offset_start = 0
    entity._vio.total_size = 32
    local_path = pathlib.Path('lpath')
    xfer_id = ops.Downloader.create_unique_transfer_operation_id(entity)
    downloader._pre_md5_skip_on_check(local_path, entity)
    assert xfer_id in downloader._md5_map
    # unencrypted entity with a plain md5 and no vectored io
    entity._name = 'name2'
    entity._vio = None
    local_path = 'lpath2'
    entity._encryption = None
    entity._md5 = 'abc'
    xfer_id = ops.Downloader.create_unique_transfer_operation_id(entity)
    downloader._pre_md5_skip_on_check(local_path, entity)
    assert xfer_id in downloader._md5_map
    assert len(downloader._md5_map) == 2
def test_post_md5_skip_on_check(tmpdir):
    """_post_md5_skip_on_check() must pop the md5 map and queue mismatches."""
    downloader = ops.Downloader(
        mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    downloader._general_options.dry_run = False
    downloader._download_total = 0
    downloader._download_bytes_total = 0
    downloader._md5_offload = mock.MagicMock()
    local_file = tmpdir.join('lpath').ensure(file=True)
    local_path = str(local_file)
    entity = azmodels.StorageEntity('cont')
    entity._md5 = 'abc'
    entity._client = mock.MagicMock()
    entity._client.primary_endpoint = 'ep'
    entity._name = 'name'
    entity._vio = None
    entity._size = 256
    downloader._pre_md5_skip_on_check(local_path, entity)
    xfer_id = ops.Downloader.create_unique_transfer_operation_id(entity)
    downloader._transfer_set.add(xfer_id)
    assert xfer_id in downloader._md5_map
    # md5 match under dry run: the entry is removed, nothing is queued
    downloader._general_options.dry_run = True
    downloader._post_md5_skip_on_check(xfer_id, local_path, None, 'abc', True)
    assert xfer_id not in downloader._md5_map
    # md5 mismatch with dry run off: the download is queued
    downloader._general_options.dry_run = False
    downloader._add_to_download_queue = mock.MagicMock()
    downloader._pre_md5_skip_on_check(local_path, entity)
    downloader._transfer_set.add(xfer_id)
    downloader._post_md5_skip_on_check(
        xfer_id, local_path, entity._size, 'labc', False)
    assert downloader._add_to_download_queue.call_count == 1
    # md5 mismatch under dry run: must not be queued again
    downloader._general_options.dry_run = True
    downloader._pre_md5_skip_on_check(local_path, entity)
    downloader._transfer_set.add(xfer_id)
    downloader._post_md5_skip_on_check(
        xfer_id, local_path, entity._size, 'labc', False)
    assert downloader._add_to_download_queue.call_count == 1
def test_check_for_downloads_from_md5():
    """Exercise Downloader._check_for_downloads_from_md5() result draining.

    Each scenario seeds _md5_offload.pop_done_queue with a canned sequence
    of results and controls the drain loop either via the real termination
    flags or by patching the termination_check_md5 property.
    """
    lpath = 'lpath'
    rfile = azmodels.StorageEntity('cont')
    rfile._md5 = 'abc'
    rfile._client = mock.MagicMock()
    rfile._client.primary_endpoint = 'ep'
    rfile._name = 'name'
    rfile._vio = None
    rfile._size = 256
    key = ops.Downloader.create_unique_transfer_operation_id(rfile)
    # scenario 1: terminate flag already set -> nothing gets queued
    d = ops.Downloader(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    d._general_options.dry_run = False
    d._md5_map[key] = rfile
    d._transfer_set.add(key)
    d._md5_offload = mock.MagicMock()
    d._md5_offload.done_cv = multiprocessing.Condition()
    d._md5_offload.pop_done_queue.side_effect = [
        None,
        (key, lpath, rfile._size, 'labc', False),
    ]
    d._add_to_download_queue = mock.MagicMock()
    d._all_remote_files_processed = False
    d._download_terminate = True
    d._check_for_downloads_from_md5()
    assert d._add_to_download_queue.call_count == 0
    # scenario 2: an md5-mismatch result is popped before termination
    # (side_effect False entries keep the loop alive) -> queued exactly once
    with mock.patch(
            'blobxfer.operations.download.Downloader.'
            'termination_check_md5',
            new_callable=mock.PropertyMock) as patched_tc:
        d = ops.Downloader(
            mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        d._general_options.dry_run = False
        d._md5_map[key] = (rfile, 'labc')
        d._transfer_set.add(key)
        d._md5_offload = mock.MagicMock()
        d._md5_offload.done_cv = multiprocessing.Condition()
        d._md5_offload.pop_done_queue.side_effect = [
            None,
            (key, lpath, rfile._size, 'labc', False),
        ]
        d._add_to_download_queue = mock.MagicMock()
        patched_tc.side_effect = [False, False, True]
        d._check_for_downloads_from_md5()
        assert d._add_to_download_queue.call_count == 1
    # scenario 3: termination fires before any result is popped
    # -> nothing gets queued
    with mock.patch(
            'blobxfer.operations.download.Downloader.'
            'termination_check_md5',
            new_callable=mock.PropertyMock) as patched_tc:
        d = ops.Downloader(
            mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        d._general_options.dry_run = False
        d._md5_map[key] = (rfile, 'labc')
        d._transfer_set.add(key)
        d._md5_offload = mock.MagicMock()
        d._md5_offload.done_cv = multiprocessing.Condition()
        d._md5_offload.pop_done_queue.side_effect = [None]
        d._add_to_download_queue = mock.MagicMock()
        patched_tc.side_effect = [False, True, True]
        d._check_for_downloads_from_md5()
        assert d._add_to_download_queue.call_count == 0
def test_check_for_crypto_done():
    """Exercise Downloader._check_for_crypto_done() result draining.

    Seeds _crypto_offload.pop_done_queue with canned (lpath, offsets)
    results and drives the drain loop via the termination flags or the
    patched termination_check property.
    """
    lpath = 'lpath'
    rfile = azmodels.StorageEntity('cont')
    rfile._md5 = 'abc'
    rfile._client = mock.MagicMock()
    rfile._client.primary_endpoint = 'ep'
    rfile._name = 'name'
    rfile._vio = None
    rfile._size = 256
    key = ops.Downloader.create_unique_transfer_operation_id(rfile)
    # terminate flag already set: popped results are never processed
    d = ops.Downloader(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    d._general_options.dry_run = False
    d._transfer_set.add(key)
    dd = mock.MagicMock()
    d._dd_map[lpath] = dd
    offsets = mock.MagicMock()
    offsets.range_start = 0
    d._crypto_offload = mock.MagicMock()
    d._crypto_offload.done_cv = multiprocessing.Condition()
    d._crypto_offload.pop_done_queue.side_effect = [
        None,
        (lpath, offsets)
    ]
    d._all_remote_files_processed = False
    d._download_terminate = True
    d._check_for_crypto_done()
    assert dd.perform_chunked_integrity_check.call_count == 0
    # check successful integrity check call
    with mock.patch(
            'blobxfer.operations.download.Downloader.termination_check',
            new_callable=mock.PropertyMock) as patched_tc:
        d = ops.Downloader(
            mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        d._general_options.dry_run = False
        d._transfer_set.add(key)
        dd = mock.MagicMock()
        dd.entity = rfile
        dd.final_path = lpath
        d._dd_map[lpath] = dd
        d._crypto_offload = mock.MagicMock()
        d._crypto_offload.done_cv = multiprocessing.Condition()
        d._crypto_offload.pop_done_queue.side_effect = [
            None,
            (lpath, offsets),
            None,
        ]
        patched_tc.side_effect = [False, False, False, True, True]
        d._complete_chunk_download = mock.MagicMock()
        d._check_for_crypto_done()
        assert dd.perform_chunked_integrity_check.call_count == 1
    # check KeyError on result
    # (dd is deliberately NOT inserted into _dd_map here, so the popped
    # result cannot be resolved and the integrity check is never called)
    with mock.patch(
            'blobxfer.operations.download.Downloader.termination_check',
            new_callable=mock.PropertyMock) as patched_tc:
        d = ops.Downloader(
            mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        d._general_options.dry_run = False
        d._transfer_set.add(key)
        dd = mock.MagicMock()
        dd.entity = rfile
        dd.final_path = lpath
        d._crypto_offload = mock.MagicMock()
        d._crypto_offload.done_cv = multiprocessing.Condition()
        d._crypto_offload.pop_done_queue.side_effect = [
            None,
            (lpath, offsets),
        ]
        patched_tc.side_effect = [False, False, True]
        d._complete_chunk_download = mock.MagicMock()
        d._check_for_crypto_done()
        assert dd.perform_chunked_integrity_check.call_count == 0
def test_add_to_download_queue(tmpdir):
    """Queueing a download registers the descriptor and enqueues one chunk."""
    local_file = tmpdir.join('a')
    local_path = pathlib.Path(str(local_file))
    # one-byte remote entity with mocked encryption metadata
    entity = azmodels.StorageEntity('cont')
    entity._size = 1
    entity._encryption = mock.MagicMock()
    entity._encryption.symmetric_key = b'abc'
    downloader = ops.Downloader(
        mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    downloader._general_options.dry_run = False
    downloader._spec.options.chunk_size_bytes = 1
    downloader._add_to_download_queue(local_path, entity)
    # exactly one chunk queued and the descriptor tracked by local path
    assert downloader._transfer_queue.qsize() == 1
    assert local_file in downloader._dd_map
def test_initialize_and_terminate_threads():
    """Transfer and disk thread pools spin up and terminate cleanly."""
    options = mock.MagicMock()
    options.concurrency.transfer_threads = 2
    options.concurrency.disk_threads = 2
    downloader = ops.Downloader(options, mock.MagicMock(), mock.MagicMock())
    downloader._general_options.dry_run = False
    # stub the worker target so spawned threads exit immediately
    downloader._worker_thread_transfer = mock.MagicMock()
    downloader._initialize_transfer_threads()
    assert len(downloader._transfer_threads) == 2
    downloader._wait_for_transfer_threads(terminate=True)
    assert downloader._download_terminate
    assert all(not t.is_alive() for t in downloader._transfer_threads)
    downloader._initialize_disk_threads()
    assert len(downloader._disk_threads) == 2
    downloader._wait_for_disk_threads(terminate=True)
    assert downloader._download_terminate
    assert all(not t.is_alive() for t in downloader._disk_threads)
def test_process_download_descriptor_vio(tmpdir):
    # A vectored-IO entity (total_slices=2) must be processed once per slice
    # before the local file is finalized.
    with mock.patch(
            'blobxfer.models.download.Descriptor.all_operations_completed',
            new_callable=mock.PropertyMock) as patched_aoc:
        d = ops.Downloader(
            mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        d._general_options.dry_run = False
        d._general_options.concurrency.transfer_threads = 1
        d._general_options.concurrency.disk_threads = 1
        opts = mock.MagicMock()
        opts.check_file_md5 = True
        opts.chunk_size_bytes = 16
        # file-mode entity split into two vectored-IO slices
        ase = azmodels.StorageEntity('cont')
        ase._mode = azmodels.StorageModes.File
        ase._size = 16
        ase._client = mock.MagicMock()
        ase._client.primary_endpoint = 'ep'
        ase._name = 'name'
        ase._vio = mock.MagicMock()
        ase._vio.total_slices = 2
        lp = pathlib.Path(str(tmpdir.join('b')))
        dd = models.Descriptor(lp, ase, opts, mock.MagicMock(), None)
        # no further chunks to fetch; all operations report complete
        dd.next_offsets = mock.MagicMock()
        dd.next_offsets.return_value = (None, None)
        patched_aoc.return_value = True
        dd.finalize_file = mock.MagicMock()
        key = ops.Downloader.create_unique_transfer_operation_id(ase)
        # first slice processed: finalize must NOT happen yet
        d._transfer_set.add(key)
        d._dd_map[str(lp)] = mock.MagicMock()
        d._process_download_descriptor(dd)
        assert dd.finalize_file.call_count == 0
        # second slice processed: the file is finalized exactly once
        d._transfer_set.add(key)
        d._dd_map[str(lp)] = mock.MagicMock()
        d._process_download_descriptor(dd)
        assert dd.finalize_file.call_count == 1
@mock.patch('blobxfer.operations.crypto.aes_cbc_decrypt_data')
@mock.patch('blobxfer.operations.azure.file.get_file_range')
@mock.patch('blobxfer.operations.azure.blob.get_blob_range')
def test_worker_thread_transfer(
        patched_gbr, patched_gfr, patched_acdd, tmpdir):
    # Exercises the transfer worker loop: disk-set backpressure,
    # terminate/processed early exits, exception capture, a full two-chunk
    # download, offloaded decryption, and in-line decryption.
    # test disk set > max set length
    with mock.patch(
            'blobxfer.operations.download.Downloader.termination_check',
            new_callable=mock.PropertyMock) as patched_tc:
        d = ops.Downloader(
            mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        d._general_options.dry_run = False
        d._process_download_descriptor = mock.MagicMock()
        d._general_options.concurrency.disk_threads = 1
        # saturate the disk set so the worker only backs off and never
        # pops a descriptor from the transfer queue
        d._disk_set.add(0)
        d._disk_set.add(1)
        d._disk_set.add(2)
        d._disk_set.add(3)
        d._disk_set.add(4)
        patched_tc.side_effect = [False, True]
        d._worker_thread_transfer()
        assert d._process_download_descriptor.call_count == 0
    # terminate flag set: worker exits without processing anything
    d = ops.Downloader(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    d._general_options.dry_run = False
    d._process_download_descriptor = mock.MagicMock()
    d._download_terminate = True
    d._general_options.concurrency.transfer_threads = 1
    d._general_options.concurrency.disk_threads = 1
    d._worker_thread_transfer()
    assert d._process_download_descriptor.call_count == 0
    # all remote files processed: worker also exits without processing
    d._download_terminate = False
    d._all_remote_files_processed = True
    d._worker_thread_transfer()
    assert d._process_download_descriptor.call_count == 0
    # descriptor processing raises: error is recorded, not propagated
    with mock.patch(
            'blobxfer.operations.download.Downloader.termination_check',
            new_callable=mock.PropertyMock) as patched_tc:
        patched_tc.side_effect = [False, False, True]
        ase = azmodels.StorageEntity('cont')
        ase._size = 16
        ase._encryption = mock.MagicMock()
        ase._encryption.symmetric_key = b'abc'
        lp = pathlib.Path(str(tmpdir.join('exc')))
        opts = mock.MagicMock()
        opts.check_file_md5 = False
        opts.chunk_size_bytes = 16
        dd = models.Descriptor(lp, ase, opts, mock.MagicMock(), None)
        # first get() raises Empty (loop continues), second yields dd
        d._transfer_queue = mock.MagicMock()
        d._transfer_queue.get.side_effect = [queue.Empty, dd]
        d._process_download_descriptor = mock.MagicMock()
        d._process_download_descriptor.side_effect = RuntimeError('oops')
        d._worker_thread_transfer()
        assert len(d._exceptions) == 1
        assert d._process_download_descriptor.call_count == 1
    # successful two-chunk download: descriptor is removed from the map,
    # file is finalized once, and progress counters advance
    with mock.patch(
            'blobxfer.operations.download.Downloader.termination_check',
            new_callable=mock.PropertyMock) as patched_tc:
        with mock.patch(
                'blobxfer.models.download.Descriptor.'
                'all_operations_completed',
                new_callable=mock.PropertyMock) as patched_aoc:
            d = ops.Downloader(
                mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
            d._general_options.dry_run = False
            d._general_options.concurrency.transfer_threads = 1
            d._general_options.concurrency.disk_threads = 1
            opts = mock.MagicMock()
            opts.check_file_md5 = False
            opts.chunk_size_bytes = 16
            ase = azmodels.StorageEntity('cont')
            ase._size = 16
            ase._client = mock.MagicMock()
            ase._client.primary_endpoint = 'ep'
            ase._name = 'name'
            ase._vio = None
            key = ops.Downloader.create_unique_transfer_operation_id(ase)
            ase._encryption = mock.MagicMock()
            ase._encryption.symmetric_key = b'abc'
            lp = pathlib.Path(str(tmpdir.join('a')))
            dd = models.Descriptor(lp, ase, opts, mock.MagicMock(), None)
            # two chunks of 1 and 2 bytes; completion flips on second pass
            dd.next_offsets = mock.MagicMock(
                side_effect=[(None, 1), (None, 2)])
            dd.finalize_integrity = mock.MagicMock()
            dd.finalize_file = mock.MagicMock()
            dd.perform_chunked_integrity_check = mock.MagicMock()
            dd.all_operations_completed.side_effect = [False, True]
            patched_aoc.side_effect = [False, True]
            patched_tc.side_effect = [False, False, False, True]
            d._dd_map[str(lp)] = dd
            d._transfer_set.add(key)
            d._transfer_queue = mock.MagicMock()
            d._transfer_queue.get.side_effect = [queue.Empty, dd, dd]
            d._worker_thread_transfer()
            # 1 file and 1+2=3 bytes accounted for
            assert str(lp) not in d._dd_map
            assert dd.finalize_file.call_count == 1
            assert d._download_sofar == 1
            assert d._download_bytes_sofar == 3
    # unencrypted file download: data lands on the disk queue and the
    # chunked integrity check runs during _process_data
    with mock.patch(
            'blobxfer.operations.download.Downloader.termination_check',
            new_callable=mock.PropertyMock) as patched_tc:
        d = ops.Downloader(
            mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        d._general_options.dry_run = False
        d._general_options.concurrency.transfer_threads = 1
        d._general_options.concurrency.disk_threads = 1
        opts = mock.MagicMock()
        opts.check_file_md5 = True
        opts.chunk_size_bytes = 16
        ase = azmodels.StorageEntity('cont')
        ase._mode = azmodels.StorageModes.File
        ase._size = 16
        ase._client = mock.MagicMock()
        ase._client.primary_endpoint = 'ep'
        ase._name = 'name'
        ase._vio = None
        key = ops.Downloader.create_unique_transfer_operation_id(ase)
        # mocked file-range read returns the full 16-byte payload
        patched_gfr.return_value = b'0' * ase._size
        lp = pathlib.Path(str(tmpdir.join('b')))
        dd = models.Descriptor(lp, ase, opts, mock.MagicMock(), None)
        dd.finalize_file = mock.MagicMock()
        dd.perform_chunked_integrity_check = mock.MagicMock()
        d._dd_map[str(lp)] = mock.MagicMock()
        d._transfer_cc[dd.entity.path] = 0
        d._transfer_set.add(key)
        d._transfer_queue = mock.MagicMock()
        d._transfer_queue.get.side_effect = [dd]
        patched_tc.side_effect = [False, True]
        d._spec.options.max_single_object_concurrency = 0
        d._worker_thread_transfer()
        assert len(d._disk_set) == 1
        # drain the disk queue and process the chunk as a disk thread would
        a, b, c = d._disk_queue.get()
        d._process_data(a, b, c)
        assert dd.perform_chunked_integrity_check.call_count == 1
    # encrypted download with crypto offload: decryption is delegated to
    # the offload process after unchecked hmac data is written
    with mock.patch(
            'blobxfer.operations.download.Downloader.termination_check',
            new_callable=mock.PropertyMock) as patched_tc:
        d = ops.Downloader(
            mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        d._general_options.dry_run = False
        d._general_options.concurrency.transfer_threads = 1
        d._general_options.concurrency.disk_threads = 1
        d._spec.options.max_single_object_concurrency = 8
        opts = mock.MagicMock()
        opts.check_file_md5 = False
        opts.chunk_size_bytes = 16
        ase = azmodels.StorageEntity('cont')
        ase._mode = azmodels.StorageModes.Auto
        ase._size = 32
        ase._encryption = mock.MagicMock()
        ase._encryption.symmetric_key = b'abc'
        ase._encryption.content_encryption_iv = b'0' * 16
        ase._client = mock.MagicMock()
        ase._client.primary_endpoint = 'ep'
        ase._name = 'name'
        ase._vio = None
        key = ops.Downloader.create_unique_transfer_operation_id(ase)
        patched_gfr.return_value = b'0' * ase._size
        lp = pathlib.Path(str(tmpdir.join('c')))
        dd = models.Descriptor(lp, ase, opts, mock.MagicMock(), None)
        dd.finalize_file = mock.MagicMock()
        dd.write_unchecked_hmac_data = mock.MagicMock()
        dd.perform_chunked_integrity_check = mock.MagicMock()
        d._crypto_offload = mock.MagicMock()
        d._crypto_offload.add_decrypt_chunk = mock.MagicMock()
        d._dd_map[str(lp)] = dd
        d._transfer_cc[dd.entity.path] = 0
        d._transfer_set.add(key)
        d._transfer_queue = mock.MagicMock()
        d._transfer_queue.get.side_effect = [dd]
        patched_tc.side_effect = [False, True]
        d._worker_thread_transfer()
        assert len(d._disk_set) == 1
        a, b, c = d._disk_queue.get()
        d._process_data(a, b, c)
        # chunk handed to the crypto offloader, not decrypted in-line
        assert d._crypto_offload.add_decrypt_chunk.call_count == 1
        assert dd.write_unchecked_hmac_data.call_count == 1
    # encrypted download without crypto processes: decryption happens
    # in-line via aes_cbc_decrypt_data
    with mock.patch(
            'blobxfer.operations.download.Downloader.termination_check',
            new_callable=mock.PropertyMock) as patched_tc:
        d = ops.Downloader(
            mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        d._general_options.dry_run = False
        d._general_options.concurrency.crypto_processes = 0
        d._general_options.concurrency.transfer_threads = 1
        d._general_options.concurrency.disk_threads = 1
        d._spec.options.max_single_object_concurrency = 8
        opts = mock.MagicMock()
        opts.check_file_md5 = False
        opts.chunk_size_bytes = 16
        ase = azmodels.StorageEntity('cont')
        ase._mode = azmodels.StorageModes.Auto
        ase._size = 32
        ase._encryption = mock.MagicMock()
        ase._encryption.symmetric_key = b'abc'
        ase._encryption.content_encryption_iv = b'0' * 16
        ase._client = mock.MagicMock()
        ase._client.primary_endpoint = 'ep'
        ase._name = 'name'
        ase._vio = None
        key = ops.Downloader.create_unique_transfer_operation_id(ase)
        patched_gfr.return_value = b'0' * ase._size
        lp = pathlib.Path(str(tmpdir.join('d')))
        dd = models.Descriptor(lp, ase, opts, mock.MagicMock(), None)
        # advance past the first offsets before queueing
        dd.next_offsets()
        dd.write_unchecked_hmac_data = mock.MagicMock()
        dd.perform_chunked_integrity_check = mock.MagicMock()
        dd.mark_unchecked_chunk_decrypted = mock.MagicMock()
        patched_acdd.return_value = b'0' * 16
        d._dd_map[str(lp)] = mock.MagicMock()
        d._transfer_cc[dd.entity.path] = 0
        d._transfer_set.add(key)
        d._transfer_queue = mock.MagicMock()
        d._transfer_queue.get.side_effect = [dd, dd]
        patched_tc.side_effect = [False, True]
        d._worker_thread_transfer()
        assert len(d._disk_set) == 1
        a, b, c = d._disk_queue.get()
        d._process_data(a, b, c)
        # in-line decrypt path: one decrypt call plus integrity check
        assert patched_acdd.call_count == 1
        assert dd.write_unchecked_hmac_data.call_count == 1
        assert dd.perform_chunked_integrity_check.call_count == 1
def test_worker_thread_disk():
    """Disk worker pops queued items and dispatches them to _process_data."""
    tc_target = 'blobxfer.operations.download.Downloader.termination_check'

    def _make_downloader(queue_side_effect):
        # downloader with a mocked disk queue and a stubbed data processor
        dl = ops.Downloader(
            mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        dl._general_options.dry_run = False
        dl._general_options.concurrency.disk_threads = 1
        dl._disk_queue = mock.MagicMock()
        dl._disk_queue.get.side_effect = queue_side_effect
        dl._process_data = mock.MagicMock()
        return dl

    # normal path: one work item popped and processed
    with mock.patch(tc_target, new_callable=mock.PropertyMock) as term:
        term.side_effect = [False, True]
        dl = _make_downloader(
            [(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())])
        dl._worker_thread_disk()
        assert dl._process_data.call_count == 1
    # processing failure is captured in _exceptions, not raised
    with mock.patch(tc_target, new_callable=mock.PropertyMock) as term:
        term.side_effect = [False, True]
        dl = _make_downloader(
            [(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())])
        dl._process_data.side_effect = Exception()
        dl._worker_thread_disk()
        assert len(dl._exceptions) == 1
    # empty queue: the loop spins without processing anything
    with mock.patch(tc_target, new_callable=mock.PropertyMock) as term:
        term.side_effect = [False, True]
        dl = _make_downloader(queue.Empty())
        dl._worker_thread_disk()
        assert dl._process_data.call_count == 0
def test_cleanup_temporary_files(tmpdir):
    """Temp-file cleanup honors the resume-file setting and survives errors."""
    def _make_descriptor(name):
        # descriptor with disk space already allocated under tmpdir
        opts = mock.MagicMock()
        opts.check_file_md5 = False
        opts.chunk_size_bytes = 16
        entity = azmodels.StorageEntity('cont')
        entity._size = 16
        desc = models.Descriptor(
            pathlib.Path(str(tmpdir.join(name))), entity, opts,
            mock.MagicMock(), None)
        desc._allocate_disk_space()
        return desc

    def _make_downloader(resume_file):
        dl = ops.Downloader(
            mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
        dl._general_options.dry_run = False
        dl._general_options.resume_file = resume_file
        return dl

    # resume file configured: cleanup failure leaves the allocated file
    desc = _make_descriptor('a')
    desc.cleanup_all_temporary_files = mock.MagicMock()
    desc.cleanup_all_temporary_files.side_effect = Exception
    dl = _make_downloader(pathlib.Path('abc'))
    dl._dd_map[0] = desc
    dl._cleanup_temporary_files()
    assert desc.final_path.exists()
    # no resume file: real cleanup removes the allocated file
    desc = _make_descriptor('b')
    dl = _make_downloader(None)
    dl._dd_map[0] = desc
    dl._cleanup_temporary_files()
    assert not desc.final_path.exists()
    # no resume file but cleanup raises: the file is left behind
    desc = _make_descriptor('c')
    desc.cleanup_all_temporary_files = mock.MagicMock()
    desc.cleanup_all_temporary_files.side_effect = Exception
    dl = _make_downloader(None)
    dl._dd_map[0] = desc
    dl._cleanup_temporary_files()
    assert desc.final_path.exists()
def test_catalog_local_files_for_deletion(tmpdir):
    """Local files are cataloged only when extraneous deletion is enabled."""
    dl = ops.Downloader(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    dl._general_options.dry_run = False
    dl._spec.options.delete_extraneous_destination = False
    dl._spec.options.delete_only = False
    # deletion disabled: nothing gets cataloged
    dl._catalog_local_files_for_deletion()
    assert len(dl._delete_after) == 0
    # enable deletion against a destination directory with one local file
    local_file = tmpdir.join('a')
    local_file.write('abc')
    dl._spec.destination.path = tmpdir
    dl._spec.options.delete_extraneous_destination = True
    dl._spec.destination.is_dir = True
    dl._catalog_local_files_for_deletion()
    assert len(dl._delete_after) == 1
    assert pathlib.Path(str(local_file)) in dl._delete_after
def test_delete_extraneous_files(tmpdir):
    """Dry run preserves the file; a real run deletes it idempotently."""
    target = tmpdir.join('a')
    target.write('abc')
    target_path = pathlib.Path(str(target))
    dl = ops.Downloader(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    dl._general_options.dry_run = False
    dl._spec.options.delete_extraneous_destination = True
    dl._spec.options.delete_only = False
    dl._spec.destination.is_dir = True
    dl._delete_after.add(target_path)
    # dry run: the file must survive
    dl._general_options.dry_run = True
    dl._delete_extraneous_files()
    assert target_path.exists()
    # real run: the file is removed
    dl._general_options.dry_run = False
    dl._delete_extraneous_files()
    assert not target_path.exists()
    # deleting an already-missing file must not raise
    dl._delete_extraneous_files()
def _create_downloader_for_start(td):
    """Build a Downloader wired with mocks suitable for start() tests."""
    dl = ops.Downloader(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    dl._general_options.dry_run = False
    dl._cleanup_temporary_files = mock.MagicMock()
    dl._download_start = datetime.datetime.now(tz=dateutil.tz.tzlocal())
    dl._initialize_disk_threads = mock.MagicMock()
    dl._initialize_transfer_threads = mock.MagicMock()
    # single-worker concurrency across the board
    dl._general_options.concurrency.crypto_processes = 1
    dl._general_options.concurrency.md5_processes = 1
    dl._general_options.concurrency.disk_threads = 1
    dl._general_options.concurrency.transfer_threads = 1
    dl._general_options.resume_file = pathlib.Path(str(td.join('rf')))
    # spec: plain auto-mode overwrite download into the temp dir
    dl._spec.sources = []
    dl._spec.options = mock.MagicMock()
    dl._spec.options.chunk_size_bytes = 1
    dl._spec.options.mode = azmodels.StorageModes.Auto
    dl._spec.options.overwrite = True
    dl._spec.options.rename = False
    dl._spec.options.strip_components = 0
    dl._spec.options.delete_only = False
    dl._spec.skip_on = mock.MagicMock()
    dl._spec.skip_on.md5_match = False
    dl._spec.skip_on.lmt_ge = False
    dl._spec.skip_on.filesize_match = False
    dl._spec.destination = mock.MagicMock()
    dl._spec.destination.path = pathlib.Path(str(td))
    dl._download_start_time = util.datetime_now()
    dl._pre_md5_skip_on_check = mock.MagicMock()
    dl._check_download_conditions = mock.MagicMock()
    dl._all_remote_files_processed = False
    # one remote source path on storage account 'sa'
    remote_path = '/cont/remote/path'
    source = azops.SourcePath()
    source.add_path_with_storage_account(remote_path, 'sa')
    dl._spec.sources.append(source)
    return dl
@mock.patch('blobxfer.operations.md5.LocalFileMd5Offload')
@mock.patch('blobxfer.operations.azure.blob.list_blobs')
@mock.patch(
    'blobxfer.operations.download.Downloader.ensure_local_destination',
    return_value=True
)
@mock.patch(
    'blobxfer.operations.download.Downloader.'
    'create_unique_transfer_operation_id',
    return_value='id'
)
@mock.patch(
    'blobxfer.operations.download.Downloader._wait_for_transfer_threads',
    return_value=None
)
@mock.patch(
    'blobxfer.operations.download.Downloader._wait_for_disk_threads',
    return_value=None
)
@mock.patch(
    'blobxfer.operations.crypto.CryptoOffload', return_value=mock.MagicMock())
def test_start(
        patched_crypto, patched_wdt, patched_wtt, patched_cutoi, patched_eld,
        patched_lb, patched_lfmo, tmpdir):
    # Drives Downloader.start() end-to-end against a mocked blob listing,
    # covering skip, delete-only, dry-run, md5-check, download, strip
    # components, and exception propagation paths.
    patched_lfmo._check_thread = mock.MagicMock()
    b = azure.storage.blob.models.Blob(name='remote/path/name')
    b.properties.content_length = 1
    b.metadata = {}
    patched_lb.side_effect = [[b]]
    d = _create_downloader_for_start(tmpdir)
    # skip action: no md5 pre-check should occur
    d._check_download_conditions.return_value = ops.DownloadAction.Skip
    d._download_sofar = 0
    d._download_bytes_sofar = 0
    d.start()
    assert d._pre_md5_skip_on_check.call_count == 0
    # test delete only
    patched_lb.side_effect = [[b]]
    d = _create_downloader_for_start(tmpdir)
    d._spec.options.delete_extraneous_destination = True
    d._spec.options.delete_only = True
    d.start()
    # delete-only mode never queues transfers
    assert d._transfer_queue.qsize() == 0
    d._spec.options.delete_extraneous_destination = False
    d._spec.options.delete_only = False
    # dry run combined with skip: still no md5 pre-check
    patched_lb.side_effect = [[b]]
    d = _create_downloader_for_start(tmpdir)
    d._check_download_conditions.return_value = ops.DownloadAction.Skip
    d._general_options.dry_run = True
    d.start()
    assert d._pre_md5_skip_on_check.call_count == 0
    # md5-check action: pre-check is invoked; negative progress counter
    # makes start() raise at its final consistency check
    patched_lb.side_effect = [[b]]
    d = _create_downloader_for_start(tmpdir)
    d._check_download_conditions.return_value = ops.DownloadAction.CheckMd5
    d._download_sofar = -1
    with pytest.raises(RuntimeError):
        d.start()
    d._download_terminate = True
    assert d._pre_md5_skip_on_check.call_count == 1
    # download action on a zero-length blob: descriptor is queued with the
    # remote directory structure preserved in its final path
    b.properties.content_length = 0
    patched_lb.side_effect = [[b]]
    d = _create_downloader_for_start(tmpdir)
    d._check_download_conditions.return_value = ops.DownloadAction.Download
    with pytest.raises(RuntimeError):
        d.start()
    d._download_terminate = True
    assert d._transfer_queue.qsize() == 1
    dd = d._transfer_queue.get()
    assert 'remote' in dd.final_path.parts
    # strip_components=1 removes the leading 'remote' path component
    b.properties.content_length = 0
    patched_lb.side_effect = [[b]]
    d = _create_downloader_for_start(tmpdir)
    d._check_download_conditions.return_value = ops.DownloadAction.Download
    d._spec.options.strip_components = 1
    with pytest.raises(RuntimeError):
        d.start()
    d._download_terminate = True
    assert d._transfer_queue.qsize() == 1
    dd = d._transfer_queue.get()
    assert 'remote' not in dd.final_path.parts
    # dry run with download action: nothing is queued
    b.properties.content_length = 0
    patched_lb.side_effect = [[b]]
    d = _create_downloader_for_start(tmpdir)
    d._general_options.dry_run = True
    d._check_download_conditions.return_value = ops.DownloadAction.Download
    d.start()
    assert d._transfer_queue.qsize() == 0
    # test exception count
    b = azure.storage.blob.models.Blob(name='name')
    b.metadata = {}
    b.properties.content_length = 1
    patched_lb.side_effect = [[b]]
    d = _create_downloader_for_start(tmpdir)
    d._spec.destination.is_dir = False
    d._spec.options.rename = True
    d._check_download_conditions.return_value = ops.DownloadAction.Skip
    # a pre-seeded exception must cause start() to re-raise
    d._exceptions = [RuntimeError('oops')]
    with pytest.raises(RuntimeError):
        d.start()
    d._download_terminate = True
    assert d._pre_md5_skip_on_check.call_count == 0
def test_start_exception():
    """A failing _run still triggers thread shutdown and temp cleanup."""
    dl = ops.Downloader(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    dl._general_options.dry_run = False
    dl._general_options.resume_file = None
    dl._run = mock.MagicMock(side_effect=RuntimeError('oops'))
    dl._wait_for_disk_threads = mock.MagicMock()
    dl._wait_for_transfer_threads = mock.MagicMock()
    dl._cleanup_temporary_files = mock.MagicMock()
    dl._md5_offload = mock.MagicMock()
    # _run raises: the error propagates after each cleanup step ran once
    with pytest.raises(RuntimeError):
        dl.start()
    assert dl._wait_for_disk_threads.call_count == 1
    assert dl._wait_for_transfer_threads.call_count == 1
    assert dl._cleanup_temporary_files.call_count == 1
    # transfer-thread shutdown itself raising still runs remaining cleanup
    with pytest.raises(RuntimeError):
        dl._wait_for_transfer_threads = mock.MagicMock(
            side_effect=RuntimeError('oops'))
        dl.start()
    assert dl._wait_for_disk_threads.call_count == 2
    assert dl._wait_for_transfer_threads.call_count == 1
    assert dl._cleanup_temporary_files.call_count == 2
def test_start_keyboard_interrupt():
    """Ctrl-C during _run propagates after threads are reaped and cleaned."""
    dl = ops.Downloader(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())
    dl._general_options.dry_run = False
    dl._general_options.resume_file = None
    dl._run = mock.MagicMock(side_effect=KeyboardInterrupt)
    dl._wait_for_disk_threads = mock.MagicMock()
    dl._wait_for_transfer_threads = mock.MagicMock()
    dl._cleanup_temporary_files = mock.MagicMock()
    dl._md5_offload = mock.MagicMock()
    with pytest.raises(KeyboardInterrupt):
        dl.start()
    assert dl._wait_for_transfer_threads.call_count == 1
    assert dl._cleanup_temporary_files.call_count == 1
| 36.852291
| 78
| 0.657464
| 5,645
| 46,655
| 5.1093
| 0.052967
| 0.104119
| 0.036891
| 0.057694
| 0.874905
| 0.844636
| 0.811352
| 0.785764
| 0.75976
| 0.74388
| 0
| 0.009999
| 0.241153
| 46,655
| 1,265
| 79
| 36.881423
| 0.804655
| 0.008595
| 0
| 0.788411
| 0
| 0
| 0.04017
| 0.032426
| 0
| 0
| 0
| 0
| 0.071115
| 1
| 0.015803
| false
| 0
| 0.017559
| 0
| 0.034241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e11bccc33b30658fd1c4eb343192fee1a95e51f9
| 12,429
|
py
|
Python
|
Code/neighbors.py
|
mehran66/surface-area-computations
|
b933565961c743d34e50db0ec680110faaade2cf
|
[
"MIT"
] | 1
|
2018-07-26T15:48:48.000Z
|
2018-07-26T15:48:48.000Z
|
Code/neighbors.py
|
mehran66/surface-area-computations
|
b933565961c743d34e50db0ec680110faaade2cf
|
[
"MIT"
] | null | null | null |
Code/neighbors.py
|
mehran66/surface-area-computations
|
b933565961c743d34e50db0ec680110faaade2cf
|
[
"MIT"
] | 1
|
2020-06-28T06:14:02.000Z
|
2020-06-28T06:14:02.000Z
|
import numpy as np
'''Zone
Each pixel is divided into 4 parts as coded in the following
--------------------
| | |
| (1) | (2) |
| | |
|________|__________|
| | |
| (4) | (3) |
| | |
|________|__________|
'''
# Look-up table mapping (m, zone) to the (row, col) cells of the 5x5 raster
# block that form the requested neighborhood.  The zone-symmetric
# configurations (m in {5, 8, 9, 17}) are keyed with zone=None; m == 1 and
# m == 25 are handled directly in neibr().
_NEIBR_INDICES = {
    (3, 1): [(2, 2), (2, 1), (1, 2)],
    (3, 2): [(2, 2), (1, 2), (2, 3)],
    (3, 3): [(2, 2), (2, 3), (3, 2)],
    (3, 4): [(2, 2), (2, 1), (3, 2)],
    (4, 1): [(2, 2), (2, 1), (1, 1), (1, 2)],
    (4, 2): [(2, 2), (1, 3), (1, 2), (2, 3)],
    (4, 3): [(2, 2), (2, 3), (3, 2), (3, 3)],
    (4, 4): [(2, 2), (2, 1), (3, 1), (3, 2)],
    (5, None): [(2, 2), (2, 1), (1, 2), (2, 3), (3, 2)],
    (8, None): [(2, 1), (1, 2), (2, 3), (3, 2), (1, 1),
                (1, 3), (3, 1), (3, 3)],
    (9, None): [(2, 2), (1, 2), (2, 3), (3, 2), (1, 1),
                (1, 3), (2, 1), (3, 1), (3, 3)],
    (16, 1): [(2, 2), (0, 1), (0, 2), (0, 3),
              (1, 0), (1, 1), (1, 2), (1, 3),
              (2, 0), (2, 1), (0, 0), (2, 3),
              (3, 0), (3, 1), (3, 2), (3, 3)],
    (16, 2): [(2, 2), (0, 1), (0, 2), (0, 3),
              (1, 4), (1, 1), (1, 2), (1, 3),
              (2, 4), (2, 1), (0, 4), (2, 3),
              (3, 4), (3, 1), (3, 2), (3, 3)],
    (16, 3): [(2, 2), (4, 1), (4, 2), (4, 3),
              (1, 4), (1, 1), (1, 2), (1, 3),
              (2, 4), (2, 1), (4, 4), (2, 3),
              (3, 4), (3, 1), (3, 2), (3, 3)],
    (16, 4): [(2, 2), (4, 1), (4, 2), (4, 3),
              (1, 0), (1, 1), (1, 2), (1, 3),
              (2, 0), (2, 1), (4, 0), (2, 3),
              (3, 0), (3, 1), (3, 2), (3, 3)],
    (17, None): [(2, 2), (1, 2), (2, 3), (3, 2), (1, 1),
                 (1, 3), (3, 1), (3, 3), (0, 0), (0, 2), (0, 4),
                 (2, 0), (2, 1), (2, 4), (4, 0), (4, 2), (4, 4)],
}


def neibr(rasterBlock_x, rasterBlock_y, rasterBlock_elev, zone, m):
    '''Return the m-point neighborhood of the center cell of a 5x5 block.

    Parameters
    ----------
    rasterBlock_x, rasterBlock_y, rasterBlock_elev : 2-D arrays (5x5)
        X coordinate, Y coordinate and elevation rasters; the cell of
        interest is at index [2, 2].
    zone : int
        Quadrant of the pixel (1-4, see the module docstring); only
        consulted for m in {3, 4, 16}.
    m : int
        Neighborhood size: one of 1, 3, 4, 5, 8, 9, 16, 17, 25.

    Returns
    -------
    (xCoor, yCoor, elev)
        Scalars for m == 1, the flattened 25-element arrays for m == 25,
        otherwise 1-D numpy arrays of the m selected cells (center first,
        except for m == 8 which excludes the center, as before).

    Raises
    ------
    ValueError
        If (m, zone) is not a supported combination.  The original
        implementation fell through all branches and crashed with an
        UnboundLocalError at the return statement instead.
    '''
    if m == 1:
        # Center cell only; scalars kept for backward compatibility.
        return rasterBlock_x[2, 2], rasterBlock_y[2, 2], rasterBlock_elev[2, 2]
    if m == 25:
        return (rasterBlock_x.flatten(), rasterBlock_y.flatten(),
                rasterBlock_elev.flatten())
    key = (m, zone) if m in (3, 4, 16) else (m, None)
    try:
        cells = _NEIBR_INDICES[key]
    except KeyError:
        raise ValueError(
            'unsupported neighborhood: m={}, zone={}'.format(m, zone))
    xCoor = np.array([rasterBlock_x[r, c] for r, c in cells])
    yCoor = np.array([rasterBlock_y[r, c] for r, c in cells])
    elev = np.array([rasterBlock_elev[r, c] for r, c in cells])
    return xCoor, yCoor, elev
| 79.165605
| 165
| 0.594416
| 1,854
| 12,429
| 3.748652
| 0.025351
| 0.231367
| 0.124317
| 0.03223
| 0.946475
| 0.946475
| 0.930935
| 0.903309
| 0.903309
| 0.903309
| 0
| 0.087909
| 0.246762
| 12,429
| 157
| 166
| 79.165605
| 0.654454
| 0.006437
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007937
| false
| 0
| 0.007937
| 0
| 0.02381
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0127ee2566a7231f66823aaa9bb1addb31d7cbf2
| 5,534
|
py
|
Python
|
tests/test_gui_weight_frame.py
|
rbotter/pyDEA
|
2c8b4a70e8c071d580eff26a040efc22fc264045
|
[
"MIT"
] | 29
|
2017-10-22T03:03:20.000Z
|
2022-03-21T09:15:22.000Z
|
tests/test_gui_weight_frame.py
|
rbotter/pyDEA
|
2c8b4a70e8c071d580eff26a040efc22fc264045
|
[
"MIT"
] | 6
|
2018-07-18T01:40:43.000Z
|
2021-04-11T00:38:30.000Z
|
tests/test_gui_weight_frame.py
|
rbotter/pyDEA
|
2c8b4a70e8c071d580eff26a040efc22fc264045
|
[
"MIT"
] | 20
|
2018-01-23T05:50:29.000Z
|
2022-02-22T05:04:56.000Z
|
from tkinter import Tk, StringVar, END
import pytest
from pyDEA.core.gui_modules.weight_frame_gui import WeightFrame
from pyDEA.core.data_processing.parameters import Parameters
from tests.test_gui_data_tab_frame import ParamsFrameMock
@pytest.fixture
def weight_frame(request):
    """Build a WeightFrame hosted in a throwaway Tk root window.

    The root is destroyed through a finalizer after the test completes so
    no Tk resources leak between tests.
    """
    root = Tk()
    categories = ['I1', 'I2', 'I3']
    params = ParamsFrameMock(root)
    frame = WeightFrame(params, categories, Parameters(),
                        StringVar(master=root))
    request.addfinalizer(root.destroy)
    return frame
def test_add_given_weights(weight_frame):
    """Valid absolute restrictions go into the abs-weights editor only."""
    params = weight_frame.params
    params.update_parameter('INPUT_CATEGORIES', 'I1; I2; I3')
    params.update_parameter('ABS_WEIGHT_RESTRICTIONS',
                            'I1 >= 0.1; 0.5 >= I2')
    weight_frame._add_given_weights(weight_frame.abs_weights,
                                    'ABS_WEIGHT_RESTRICTIONS')
    # the tab title gains a '*' to flag unsaved changes
    assert weight_frame.parent.weight_frame_name == 'Weights editor*'
    abs_text = weight_frame.abs_weights.text.get('1.0', END)
    assert 'I1 >= 0.1' in abs_text
    assert '0.5 >= I2' in abs_text
    # the other two editors remain empty
    assert weight_frame.virtual_weights.text.get('1.0', END) == '\n'
    assert weight_frame.price_ratio_weights.text.get('1.0', END) == '\n'
def test_add_given_weights_invalid(weight_frame):
    """A restriction that cannot be parsed ('I1 = 0.1') is dropped."""
    params = weight_frame.params
    params.update_parameter('INPUT_CATEGORIES', 'I1; I2; I3')
    params.update_parameter('VIRTUAL_WEIGHT_RESTRICTIONS',
                            'I1 = 0.1; 0.5 >= I2')
    weight_frame._add_given_weights(weight_frame.virtual_weights,
                                    'VIRTUAL_WEIGHT_RESTRICTIONS')
    assert weight_frame.parent.weight_frame_name == 'Weights editor*'
    vir_text = weight_frame.virtual_weights.text.get('1.0', END)
    # only the parseable half survives
    assert 'I1 = 0.1' not in vir_text
    assert '0.5 >= I2' in vir_text
    assert weight_frame.abs_weights.text.get('1.0', END) == '\n'
    assert weight_frame.price_ratio_weights.text.get('1.0', END) == '\n'
def test_add_given_weights_all_invalid(weight_frame):
    """When every restriction is invalid nothing is added to any editor.

    The original test never invoked _add_given_weights, so its asserts
    only checked the initial (empty) state of the editors and could not
    fail; the call under test is now actually made.
    """
    weight_frame.params.update_parameter('INPUT_CATEGORIES', 'I1; I2; I3')
    # 'I1 = 0.1' is unparseable and 'O2' is not a known category
    weight_frame.params.update_parameter(
        'VIRTUAL_WEIGHT_RESTRICTIONS', 'I1 = 0.1; 0.5 >= O2')
    weight_frame._add_given_weights(
        weight_frame.virtual_weights, 'VIRTUAL_WEIGHT_RESTRICTIONS')
    assert weight_frame.virtual_weights.text.get('1.0', END) == '\n'
    assert weight_frame.abs_weights.text.get('1.0', END) == '\n'
    assert weight_frame.price_ratio_weights.text.get('1.0', END) == '\n'
def test_add_weights(weight_frame):
    """add_weights() routes each restriction type to its own editor."""
    params = weight_frame.params
    params.update_parameter('INPUT_CATEGORIES', 'I1; I2; I3')
    params.update_parameter('ABS_WEIGHT_RESTRICTIONS',
                            'I1 >= 0.1; 0.5 >= I2')
    params.update_parameter('VIRTUAL_WEIGHT_RESTRICTIONS',
                            'I1 = 0.1; 0.5 >= I2')
    params.update_parameter('PRICE_RATIO_RESTRICTIONS', 'I1/I2 <=2')
    weight_frame.add_weights()
    assert weight_frame.parent.weight_frame_name == 'Weights editor*'
    abs_text = weight_frame.abs_weights.text.get('1.0', END)
    assert 'I1 >= 0.1' in abs_text
    assert '0.5 >= I2' in abs_text
    vir_text = weight_frame.virtual_weights.text.get('1.0', END)
    assert '0.5 >= I2' in vir_text
    pr_text = weight_frame.price_ratio_weights.text.get('1.0', END)
    assert 'I1/I2 <=2' in pr_text
def test_remove_all_weights(weight_frame):
    """remove_all_weights() clears the editors but not the Parameters."""
    params = weight_frame.params
    params.update_parameter('INPUT_CATEGORIES', 'I1; I2; I3')
    params.update_parameter('ABS_WEIGHT_RESTRICTIONS',
                            'I1 >= 0.1; 0.5 >= I2')
    params.update_parameter('VIRTUAL_WEIGHT_RESTRICTIONS',
                            'I1 = 0.1; 0.5 >= I2')
    params.update_parameter('PRICE_RATIO_RESTRICTIONS', 'I1/I2 <=2')
    weight_frame.remove_all_weights()
    # editors are cleared and the 'modified' star is gone from the title
    assert weight_frame.parent.weight_frame_name == 'Weights editor'
    for editor in (weight_frame.virtual_weights, weight_frame.abs_weights,
                   weight_frame.price_ratio_weights):
        assert editor.text.get('1.0', END) == '\n'
    # parameters are not updated by weight_frame
    assert params.get_parameter_value(
        'ABS_WEIGHT_RESTRICTIONS') == 'I1 >= 0.1; 0.5 >= I2'
    assert params.get_parameter_value(
        'VIRTUAL_WEIGHT_RESTRICTIONS') == 'I1 = 0.1; 0.5 >= I2'
    assert params.get_parameter_value(
        'PRICE_RATIO_RESTRICTIONS') == 'I1/I2 <=2'
    # parameters are updated on validation
    weight_frame.on_validate_weights()
    for name in ('ABS_WEIGHT_RESTRICTIONS', 'VIRTUAL_WEIGHT_RESTRICTIONS',
                 'PRICE_RATIO_RESTRICTIONS'):
        assert params.get_parameter_value(name) == ''
def test_on_validate_weights(weight_frame):
    """on_validate_weights() syncs editors into Parameters and reports errors."""
    params = weight_frame.params
    params.update_parameter('INPUT_CATEGORIES', 'I1; I2; I3')
    params.update_parameter('VIRTUAL_WEIGHT_RESTRICTIONS',
                            'I1 = 0.1; 0.5 >= I2')
    weight_frame.add_weights()
    weight_frame.on_validate_weights()
    # the invalid 'I1 = 0.1' part was already dropped by add_weights()
    assert params.get_parameter_value(
        'VIRTUAL_WEIGHT_RESTRICTIONS') == '0.5 >= I2'
    assert weight_frame.parent.weight_frame_name == 'Weights editor*'
    assert weight_frame.weights_status_str.get() == ''
    # an unparsable restriction surfaces in the status string
    weight_frame.abs_weights.insert_weight('I2 == 0')
    weight_frame.on_validate_weights()
    expected = ('Some of the weight restrictions cannot be parsed. '
                '\nFor error details, see Weights editor tab.')
    assert weight_frame.weights_status_str.get() == expected
    # deleting the bad text clears the status again
    weight_frame.abs_weights.text.delete(1.0, END)
    weight_frame.on_validate_weights()
    assert weight_frame.weights_status_str.get() == ''
| 45.735537
| 105
| 0.716299
| 782
| 5,534
| 4.763427
| 0.109974
| 0.221477
| 0.114094
| 0.098792
| 0.802148
| 0.791409
| 0.782819
| 0.777718
| 0.77557
| 0.77557
| 0
| 0.032313
| 0.155584
| 5,534
| 120
| 106
| 46.116667
| 0.764819
| 0.014275
| 0
| 0.54902
| 0
| 0
| 0.216434
| 0.087491
| 0
| 0
| 0
| 0
| 0.294118
| 1
| 0.068627
| false
| 0
| 0.04902
| 0
| 0.127451
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
013624528be63acc13cfeb61380af43ce489f159
| 3,901
|
py
|
Python
|
tests/intersim/envs/test_intersimple_rasterized.py
|
sisl/InteractionSimulator
|
a4f68349eb7fa55ed5855a94bb97d8242869149d
|
[
"MIT"
] | 3
|
2021-07-13T07:28:34.000Z
|
2021-07-29T12:37:20.000Z
|
tests/intersim/envs/test_intersimple_rasterized.py
|
sisl/InteractionSimulator
|
a4f68349eb7fa55ed5855a94bb97d8242869149d
|
[
"MIT"
] | 6
|
2021-08-30T15:51:19.000Z
|
2022-02-21T12:39:08.000Z
|
tests/intersim/envs/test_intersimple_rasterized.py
|
sisl/InteractionSimulator
|
a4f68349eb7fa55ed5855a94bb97d8242869149d
|
[
"MIT"
] | 1
|
2021-08-29T20:28:54.000Z
|
2021-08-29T20:28:54.000Z
|
from intersim.envs import IntersimpleRasterized, NRasterized
import numpy as np
import os
def test_obs_shape_dtype():
    """Reset yields a single uint8 frame of the requested height/width."""
    height, width = 152, 491
    obs = IntersimpleRasterized(height=height, width=width).reset()
    assert obs.shape == (1, height, width)
    assert obs.dtype == np.uint8
def test_agent51_frame0():
    """First observation for agent 51 matches the stored reference frame."""
    reference = np.load(os.path.join(os.path.dirname(__file__),
                                     'agent51frame0.npy'))
    env = IntersimpleRasterized()
    env._agent = 51
    assert np.array_equal(env.reset(), reference)
def test_image_observation_rendering(tmp_path):
    """render() records exactly one frame per call; close() writes the video."""
    env = IntersimpleRasterized()
    env._agent = 51
    obs = env.reset()
    # no frames recorded before the first render
    assert not env._observations
    assert np.array_equal(env._last_observation, obs)
    for expected_frames in (1, 2, 3):
        env.render()
        assert len(env._observations) == expected_frames
        assert np.array_equal(env._last_observation, obs)
        if expected_frames < 3:
            obs, _, _, _ = env.step(1)
            # stepping alone does not record a frame
            assert len(env._observations) == expected_frames
            assert np.array_equal(env._last_observation, obs)
    env.close(filestr=str(tmp_path / 'render'))
    assert len(env._observations) == 3
    assert np.array_equal(env._last_observation, obs)
    assert (tmp_path / 'render_observation.mp4').is_file()
def test_rollout(tmp_path):
    """Roll out a constant action until done (or 1000 steps), rendering each step."""
    env = NRasterized()
    env.reset()
    env.render()
    done = False
    steps = 0
    while not done and steps < 1000:
        _, _, done, _ = env.step(0.1)
        env.render()
        steps += 1
    env.close(filestr=str(tmp_path))
def test_n_rasterized_check_env():
    # Only verifies that NRasterized() constructs without raising.
    # NOTE(review): despite the name, no gym environment checker (e.g.
    # stable_baselines3's check_env) is invoked and nothing is asserted —
    # presumably a placeholder; confirm intent.
    env = NRasterized()
def test_n_rasterized_obs_shape_dtype():
    """Reset returns a uint8 stack of n_frames with the requested size."""
    frames, height, width = 10, 152, 491
    obs = NRasterized(n_frames=frames, height=height, width=width).reset()
    assert obs.shape == (frames, height, width)
    assert obs.dtype == np.uint8
def test_n_rasterized_agent51_frame0():
    # NOTE(review): despite the n_rasterized name this constructs
    # IntersimpleRasterized, whose observation has a single frame, so
    # obs.sum(0, keepdims=True) equals obs and this duplicates
    # test_agent51_frame0 — presumably NRasterized() was intended; confirm.
    env = IntersimpleRasterized()
    env._agent = 51
    obs = env.reset()
    expected = np.load(os.path.join(os.path.dirname(__file__), 'agent51frame0.npy'))
    # summing over the frame axis reduces an n-frame stack to one image
    assert np.array_equal(obs.sum(0, keepdims=True), expected)
def test_n_rasterized_observation_rendering(tmp_path):
    """render() records exactly one frame per call; close() writes the video."""
    env = NRasterized()
    env._agent = 51
    obs = env.reset()
    # no frames recorded before the first render
    assert not env._observations
    assert np.array_equal(env._last_observation, obs)
    for expected_frames in (1, 2, 3):
        env.render()
        assert len(env._observations) == expected_frames
        assert np.array_equal(env._last_observation, obs)
        if expected_frames < 3:
            obs, _, _, _ = env.step(1)
            # stepping alone does not record a frame
            assert len(env._observations) == expected_frames
            assert np.array_equal(env._last_observation, obs)
    env.close(filestr=str(tmp_path / 'render'))
    assert len(env._observations) == 3
    assert np.array_equal(env._last_observation, obs)
    assert (tmp_path / 'render_observation.mp4').is_file()
def test_n_rasterized_rollout(tmp_path):
    """Roll out a constant action until done (or 1000 steps), rendering each step."""
    env = NRasterized()
    env.reset()
    env.render()
    done = False
    steps = 0
    while not done and steps < 1000:
        _, _, done, _ = env.step(0.1)
        env.render()
        steps += 1
    env.close(filestr=str(tmp_path))
def test_agent0_step10_5frames():
    """Observation after 11 steps for agent 17 matches the stored 5-frame stack."""
    env = NRasterized(agent=17, n_frames=5, skip_frames=1)
    env.reset()
    env.render()
    for _ in range(10):
        env.step(0.1)
        env.render()
    obs, _, _, _ = env.step(0.1)
    reference = np.load(os.path.join(os.path.dirname(__file__),
                                     'agent17_step10_5frames.npy'))
    assert np.array_equal(obs, reference)
| 27.471831
| 93
| 0.668547
| 528
| 3,901
| 4.655303
| 0.149621
| 0.046379
| 0.089911
| 0.124491
| 0.845403
| 0.817738
| 0.810415
| 0.783157
| 0.773393
| 0.745728
| 0
| 0.032633
| 0.206614
| 3,901
| 141
| 94
| 27.666667
| 0.761551
| 0
| 0
| 0.756757
| 0
| 0
| 0.029736
| 0.017944
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.09009
| false
| 0
| 0.027027
| 0
| 0.117117
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
01578b2a028842aa221483f53868c624260f9127
| 1,677
|
py
|
Python
|
migrations/versions/e07abccfa1_.py
|
Encrylize/flask-blogger
|
c63982b7b97604c5092cb2532359401e8b9e7243
|
[
"MIT"
] | 1
|
2016-01-03T19:40:42.000Z
|
2016-01-03T19:40:42.000Z
|
migrations/versions/e07abccfa1_.py
|
Encrylize/yetanotherblog
|
c63982b7b97604c5092cb2532359401e8b9e7243
|
[
"MIT"
] | 1
|
2016-01-10T16:39:04.000Z
|
2016-05-04T15:00:30.000Z
|
migrations/versions/e07abccfa1_.py
|
Encrylize/yetanotherblog
|
c63982b7b97604c5092cb2532359401e8b9e7243
|
[
"MIT"
] | null | null | null |
"""empty message
Revision ID: e07abccfa1
Revises: 3890f70717a
Create Date: 2015-12-27 16:08:09.859981
"""
# revision identifiers, used by Alembic.
revision = 'e07abccfa1'
down_revision = '3890f70717a'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Make the listed VARCHAR columns NOT NULL."""
    ### commands auto generated by Alembic - please adjust! ###
    # (table, column, existing VARCHAR length) — same order as the
    # generated statements.
    columns = (
        ('role', 'name', 80),
        ('tag', 'name', 60),
        ('tag', 'slug', 80),
        ('user', 'email', 255),
        ('user', 'password', 255),
    )
    for table, column, length in columns:
        op.alter_column(table, column,
                        existing_type=sa.VARCHAR(length=length),
                        nullable=False)
    ### end Alembic commands ###
def downgrade():
    """Revert the NOT NULL constraints added in upgrade()."""
    ### commands auto generated by Alembic - please adjust! ###
    # Same columns as upgrade(), processed in reverse order.
    columns = (
        ('user', 'password', 255),
        ('user', 'email', 255),
        ('tag', 'slug', 80),
        ('tag', 'name', 60),
        ('role', 'name', 80),
    )
    for table, column, length in columns:
        op.alter_column(table, column,
                        existing_type=sa.VARCHAR(length=length),
                        nullable=True)
    ### end Alembic commands ###
| 30.490909
| 63
| 0.59034
| 188
| 1,677
| 5.154255
| 0.292553
| 0.072239
| 0.134159
| 0.216718
| 0.742002
| 0.737874
| 0.732714
| 0.732714
| 0.714138
| 0.714138
| 0
| 0.056338
| 0.280262
| 1,677
| 54
| 64
| 31.055556
| 0.746479
| 0.171735
| 0
| 0.833333
| 0
| 0
| 0.078909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0.055556
| 0.055556
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
6d8bbd86bf1ad9430ecfb711882e54ea4258a43d
| 15,759
|
py
|
Python
|
rlkit/samplers/data_collector/path_collector.py
|
YashBit/generalized-hindsight
|
2ba2e1ba0caa20b71d01c8ad70fbc7f47d61bd6c
|
[
"MIT"
] | null | null | null |
rlkit/samplers/data_collector/path_collector.py
|
YashBit/generalized-hindsight
|
2ba2e1ba0caa20b71d01c8ad70fbc7f47d61bd6c
|
[
"MIT"
] | null | null | null |
rlkit/samplers/data_collector/path_collector.py
|
YashBit/generalized-hindsight
|
2ba2e1ba0caa20b71d01c8ad70fbc7f47d61bd6c
|
[
"MIT"
] | null | null | null |
import numpy as np
from collections import deque, OrderedDict
from rlkit.core import logger
from rlkit.core.eval_util import create_stats_ordered_dict
from rlkit.samplers.rollout_functions import diayn_multitask_rollout_with_relabeler, rollout, multitask_rollout, multitask_rollout_with_relabeler
from rlkit.samplers.data_collector.base import PathCollector
from rlkit.torch.multitask.pointmass_rewards import PointMassBestRandomRelabeler
import os.path as osp
class MdpPathCollector(PathCollector):
    """Collects rollouts from ``env`` with ``policy`` and keeps the most
    recent epoch's paths in a bounded deque for diagnostics."""
    def __init__(
            self,
            env,
            policy,
            max_num_epoch_paths_saved=None,
            render=False,
            render_kwargs=None,
    ):
        """
        :param env: environment to roll out in
        :param policy: policy used to select actions
        :param max_num_epoch_paths_saved: cap on paths retained per epoch
            (None keeps all; used as the deque's maxlen)
        :param render: whether to render during rollouts
        :param render_kwargs: kwargs forwarded to env rendering
        """
        if render_kwargs is None:
            render_kwargs = {}
        self._env = env
        self._policy = policy
        self._max_num_epoch_paths_saved = max_num_epoch_paths_saved
        self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)
        self._render = render
        self._render_kwargs = render_kwargs
        # lifetime counters across all epochs
        self._num_steps_total = 0
        self._num_paths_total = 0

    def collect_new_paths(
            self,
            max_path_length,
            num_steps,
            discard_incomplete_paths,
    ):
        """Roll out until at least ``num_steps`` env steps are gathered.

        The final path may be truncated to not exceed ``num_steps``; if
        ``discard_incomplete_paths`` is set, a truncated, non-terminal
        final path is dropped instead of being kept.
        """
        paths = []
        num_steps_collected = 0
        while num_steps_collected < num_steps:
            max_path_length_this_loop = min(  # Do not go over num_steps
                max_path_length,
                num_steps - num_steps_collected,
            )
            path = rollout(
                self._env,
                self._policy,
                max_path_length=max_path_length_this_loop,
                render=self._render,
                render_kwargs=self._render_kwargs
            )
            path_len = len(path['actions'])
            # drop a path that was cut short by the step budget (not by the
            # env terminating) when incomplete paths are not wanted
            if (
                    path_len != max_path_length
                    and not path['terminals'][-1]
                    and discard_incomplete_paths
            ):
                break
            num_steps_collected += path_len
            paths.append(path)
        self._num_paths_total += len(paths)
        self._num_steps_total += num_steps_collected
        self._epoch_paths.extend(paths)
        return paths

    def get_epoch_paths(self):
        """Return the deque of paths collected this epoch."""
        return self._epoch_paths

    def end_epoch(self, epoch):
        """Discard the current epoch's paths (lifetime counters persist)."""
        self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)

    def get_diagnostics(self):
        """Return an OrderedDict of lifetime counters and path-length stats."""
        path_lens = [len(path['actions']) for path in self._epoch_paths]
        stats = OrderedDict([
            ('num steps total', self._num_steps_total),
            ('num paths total', self._num_paths_total),
        ])
        stats.update(create_stats_ordered_dict(
            "path length",
            path_lens,
            always_show_all_stats=True,
        ))
        return stats

    def get_snapshot(self):
        """Return the objects to persist in a checkpoint snapshot."""
        return dict(
            env=self._env,
            policy=self._policy,
        )
class GoalConditionedPathCollector(PathCollector):
    """Path collector for goal-conditioned policies.

    Identical bookkeeping to MdpPathCollector, but rollouts are produced
    with ``multitask_rollout`` using the configured observation and
    desired-goal dict keys.
    """
    def __init__(
            self,
            env,
            policy,
            max_num_epoch_paths_saved=None,
            render=False,
            render_kwargs=None,
            observation_key='observation',
            desired_goal_key='desired_goal',
    ):
        """
        :param env: goal-conditioned environment
        :param policy: policy used to select actions
        :param max_num_epoch_paths_saved: cap on paths retained per epoch
            (None keeps all; used as the deque's maxlen)
        :param render: whether to render during rollouts
        :param render_kwargs: kwargs forwarded to env rendering
        :param observation_key: key of the observation in the obs dict
        :param desired_goal_key: key of the desired goal in the obs dict
        """
        if render_kwargs is None:
            render_kwargs = {}
        self._env = env
        self._policy = policy
        self._max_num_epoch_paths_saved = max_num_epoch_paths_saved
        self._render = render
        self._render_kwargs = render_kwargs
        self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)
        self._observation_key = observation_key
        self._desired_goal_key = desired_goal_key
        # lifetime counters across all epochs
        self._num_steps_total = 0
        self._num_paths_total = 0

    def collect_new_paths(
            self,
            max_path_length,
            num_steps,
            discard_incomplete_paths,
    ):
        """Roll out until at least ``num_steps`` env steps are gathered.

        The final path may be truncated to not exceed ``num_steps``; if
        ``discard_incomplete_paths`` is set, a truncated, non-terminal
        final path is dropped instead of being kept.
        """
        paths = []
        num_steps_collected = 0
        while num_steps_collected < num_steps:
            # (leftover debug print of max_path_length removed)
            max_path_length_this_loop = min(  # Do not go over num_steps
                max_path_length,
                num_steps - num_steps_collected,
            )
            path = multitask_rollout(
                self._env,
                self._policy,
                max_path_length=max_path_length_this_loop,
                render=self._render,
                render_kwargs=self._render_kwargs,
                observation_key=self._observation_key,
                desired_goal_key=self._desired_goal_key,
                return_dict_obs=True,
            )
            path_len = len(path['actions'])
            # drop a path that was cut short by the step budget (not by the
            # env terminating) when incomplete paths are not wanted
            if (
                    path_len != max_path_length
                    and not path['terminals'][-1]
                    and discard_incomplete_paths
            ):
                break
            num_steps_collected += path_len
            paths.append(path)
        self._num_paths_total += len(paths)
        self._num_steps_total += num_steps_collected
        self._epoch_paths.extend(paths)
        return paths

    def get_epoch_paths(self):
        """Return the deque of paths collected this epoch."""
        return self._epoch_paths

    def end_epoch(self, epoch):
        """Discard the current epoch's paths (lifetime counters persist)."""
        self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)

    def get_diagnostics(self):
        """Return an OrderedDict of lifetime counters and path-length stats."""
        path_lens = [len(path['actions']) for path in self._epoch_paths]
        stats = OrderedDict([
            ('num steps total', self._num_steps_total),
            ('num paths total', self._num_paths_total),
        ])
        stats.update(create_stats_ordered_dict(
            "path length",
            path_lens,
            always_show_all_stats=True,
        ))
        return stats

    def get_snapshot(self):
        """Return the objects to persist in a checkpoint snapshot."""
        return dict(
            env=self._env,
            policy=self._policy,
            observation_key=self._observation_key,
            desired_goal_key=self._desired_goal_key,
        )
class DIAYNTaskConditionedPathCollector(PathCollector):
def __init__(
self,
env,
policy,
relabeler,
agent,
max_num_epoch_paths_saved=None,
render=False,
render_kwargs=None,
is_eval=False,
calculate_r_d=True,
hide_latent=False,
normalize_performance=False,
save_videos=False,
cfg = None
):
if render_kwargs is None:
render_kwargs = {}
self._env = env
self._policy = policy
self._relabeler = relabeler
self._max_num_epoch_paths_saved = max_num_epoch_paths_saved
self._render = render
self._render_kwargs = render_kwargs
self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)
self.is_eval = is_eval
self.calculate_r_d = calculate_r_d
self.hide_latent = hide_latent
self.normalize_performance = normalize_performance
self._num_steps_total = 0
self._num_paths_total = 0
self._epoch = 0
self.agent = agent
self.cfg = cfg
# for video saving
self.save_video = save_videos and not self.is_eval and hasattr(relabeler, 'to_save_video')
# for normalizing later
self.eval_traj_infos = {}
def collect_new_paths(
self,
max_path_length,
num_steps,
discard_incomplete_paths,
rollType,
):
paths = []
num_steps_collected = 0
if self.save_video and self._relabeler.to_save_video(self._epoch):
render = True
render_kwargs = dict(mode='rgb_array')
else:
render = self._render
render_kwargs = self._render_kwargs
#print(f"type of num_steps : {type(num_steps)}")
# if (isinstance(num_steps, str)):
# print(f"Num steps is : {num_steps}")
# num_steps = int(num_steps)
num_steps = 1000
#1000 PATHS, with each trajectory of 1000 steps.
# print(f"Type of num_steps_collected: {type(num_steps_collected)}, num_steps: {type(num_steps)}")
# print(f"NUM steps is: {num_steps}")
# print(f"I am in DIAYNTaskConditionedPathCollector")
while num_steps_collected < num_steps:
max_path_length_this_loop = min( # Do not go over num_steps
max_path_length,
num_steps - num_steps_collected,
)
# print(f"Max path length is: {max_path_length}")
# print(f"Num steps: {num_steps}, difference is: {num_steps - num_steps_collected}")
# print(f"Max path length with this loop is: {max_path_length_this_loop}")
path = diayn_multitask_rollout_with_relabeler(
self._env,
self.agent,
self._relabeler,
rollType=rollType,
max_path_length=max_path_length_this_loop,
render=render,
render_kwargs=render_kwargs,
return_dict_obs=False,
calculate_r_d=self.calculate_r_d,
hide_latent=self.hide_latent,
cfg = self.cfg
)
# print(f"Path keys in collect paths is : {path.keys()}")
path_len = len(path['actions'])
if (
path_len != max_path_length
and not path['terminals'][-1]
and discard_incomplete_paths
):
break
# print(f"Path_len in DIAYN-HUSK: {path_len}")
num_steps_collected += path_len
paths.append(path)
self._num_paths_total += len(paths)
self._num_steps_total += num_steps_collected
if self.normalize_performance:
for path in paths:
path['normalized_rewards'] = self._relabeler.get_normalized_path_rewards(path)
self._epoch_paths.extend(paths)
print(f"The len of paths: {len(paths)}")
return paths
def get_epoch_paths(self):
return self._epoch_paths
def end_epoch(self, epoch):
if self.is_eval:
epoch_path_info = []
for path in self._epoch_paths:
latent = path['latents'][0]
rewards = self._relabeler.calculate_path_reward(path, latent)
epoch_path_info.append((latent, rewards.sum()))
self.eval_traj_infos['epoch{}'.format(self._epoch)] = epoch_path_info
logger.save_extra_data(self.eval_traj_infos, 'eval_traj_infos.pkl')
self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)
self._epoch += 1
def get_diagnostics(self):
    """Return an OrderedDict of collection statistics for logging.

    Contains cumulative step/path totals plus summary statistics over the
    lengths of the paths collected during the current epoch.
    """
    path_lens = [len(path['actions']) for path in self._epoch_paths]
    stats = OrderedDict([
        ('num steps total', self._num_steps_total),
        ('num paths total', self._num_paths_total),
    ])
    stats.update(create_stats_ordered_dict(
        "path length",
        path_lens,
        always_show_all_stats=True,
    ))
    return stats
def get_snapshot(self):
    """Return the objects to persist in a checkpoint (env and policy)."""
    return dict(
        env=self._env,
        policy=self._policy,
    )
class TaskConditionedPathCollector(PathCollector):
    """Collects rollouts from a task/latent-conditioned policy.

    Paths are produced by ``multitask_rollout_with_relabeler`` and buffered in
    a bounded deque until ``end_epoch`` is called.  Leftover debug ``print``
    statements from the original implementation have been removed.
    """

    def __init__(
            self,
            env,
            policy,
            relabeler,
            max_num_epoch_paths_saved=None,
            render=False,
            render_kwargs=None,
            is_eval=False,
            calculate_r_d=True,
            hide_latent=False,
            normalize_performance=False,
            save_videos=False
    ):
        """
        Args:
            env: environment to roll out in.
            policy: latent/task-conditioned policy.
            relabeler: provides reward relabeling and (optionally) video-save
                scheduling via ``to_save_video``.
            max_num_epoch_paths_saved: cap on paths retained per epoch
                (``None`` means unbounded).
            render / render_kwargs: default rendering configuration.
            is_eval: if True, per-path eval info is logged in ``end_epoch``.
            calculate_r_d / hide_latent: forwarded to the rollout function.
            normalize_performance: if True, attach normalized rewards to each
                collected path.
            save_videos: enable relabeler-scheduled video rendering (training
                collectors only).
        """
        if render_kwargs is None:
            render_kwargs = {}
        self._env = env
        self._policy = policy
        self._relabeler = relabeler
        self._max_num_epoch_paths_saved = max_num_epoch_paths_saved
        self._render = render
        self._render_kwargs = render_kwargs
        self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)
        self.is_eval = is_eval
        self.calculate_r_d = calculate_r_d
        self.hide_latent = hide_latent
        self.normalize_performance = normalize_performance
        self._num_steps_total = 0
        self._num_paths_total = 0
        self._epoch = 0
        # Only train-time collectors whose relabeler knows how to schedule
        # video saving will ever render to video.
        self.save_video = save_videos and not self.is_eval and hasattr(relabeler, 'to_save_video')
        # Per-epoch eval info, kept for normalizing performance later.
        self.eval_traj_infos = {}

    def collect_new_paths(
            self,
            max_path_length,
            num_steps,
            discard_incomplete_paths,
    ):
        """Roll out until ``num_steps`` environment steps have been gathered.

        Args:
            max_path_length: cap on the length of any single rollout.
            num_steps: total step budget for this call.
            discard_incomplete_paths: drop a final path that was truncated by
                the step budget rather than terminated by the environment.

        Returns:
            list: the newly collected paths (also appended to the epoch buffer).
        """
        paths = []
        num_steps_collected = 0
        # Render to an RGB array when the relabeler requests a video this epoch.
        if self.save_video and self._relabeler.to_save_video(self._epoch):
            render = True
            render_kwargs = dict(mode='rgb_array')
        else:
            render = self._render
            render_kwargs = self._render_kwargs
        while num_steps_collected < num_steps:
            max_path_length_this_loop = min(  # Do not go over num_steps
                max_path_length,
                num_steps - num_steps_collected,
            )
            path = multitask_rollout_with_relabeler(
                self._env,
                self._policy,
                self._relabeler,
                max_path_length=max_path_length_this_loop,
                render=render,
                render_kwargs=render_kwargs,
                return_dict_obs=False,
                calculate_r_d=self.calculate_r_d,
                hide_latent=self.hide_latent
            )
            path_len = len(path['actions'])
            if (
                    path_len != max_path_length
                    and not path['terminals'][-1]
                    and discard_incomplete_paths
            ):
                # Truncated by the step budget, not by the env: drop it.
                break
            num_steps_collected += path_len
            paths.append(path)
        self._num_paths_total += len(paths)
        self._num_steps_total += num_steps_collected
        if self.normalize_performance:
            for path in paths:
                path['normalized_rewards'] = self._relabeler.get_normalized_path_rewards(path)
        self._epoch_paths.extend(paths)
        return paths

    def get_epoch_paths(self):
        """Return the deque of paths collected since the last ``end_epoch``."""
        return self._epoch_paths

    def end_epoch(self, epoch):
        """Finish the epoch: optionally log eval info, then reset the buffer.

        NOTE(review): the ``epoch`` argument is ignored; the internal
        ``self._epoch`` counter is used instead.
        """
        if self.is_eval:
            # Summarize each eval path as (latent, total relabeled reward).
            epoch_path_info = []
            for path in self._epoch_paths:
                latent = path['latents'][0]
                rewards = self._relabeler.calculate_path_reward(path, latent)
                epoch_path_info.append((latent, rewards.sum()))
            self.eval_traj_infos['epoch{}'.format(self._epoch)] = epoch_path_info
            logger.save_extra_data(self.eval_traj_infos, 'eval_traj_infos.pkl')
        self._epoch_paths = deque(maxlen=self._max_num_epoch_paths_saved)
        self._epoch += 1

    def get_diagnostics(self):
        """Return an OrderedDict of collection statistics for logging."""
        path_lens = [len(path['actions']) for path in self._epoch_paths]
        stats = OrderedDict([
            ('num steps total', self._num_steps_total),
            ('num paths total', self._num_paths_total),
        ])
        stats.update(create_stats_ordered_dict(
            "path length",
            path_lens,
            always_show_all_stats=True,
        ))
        return stats

    def get_snapshot(self):
        """Return the objects to persist in a checkpoint (env and policy)."""
        return dict(
            env=self._env,
            policy=self._policy,
        )
| 33.745182
| 145
| 0.585126
| 1,796
| 15,759
| 4.729399
| 0.084076
| 0.069696
| 0.047445
| 0.037674
| 0.87968
| 0.847069
| 0.834825
| 0.809866
| 0.808335
| 0.808335
| 0
| 0.003268
| 0.339869
| 15,759
| 466
| 146
| 33.817597
| 0.813227
| 0.054953
| 0
| 0.863636
| 0
| 0
| 0.048887
| 0.003766
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0
| 0.020202
| 0.020202
| 0.131313
| 0.017677
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6db4f527837059f1e960a7ce74561d98be705f1c
| 9,531
|
py
|
Python
|
tests/test_meal.py
|
coosoti/Osoti-Small
|
ccd147768aeae234eb45bf0a78257aafc3a8a16f
|
[
"MIT"
] | null | null | null |
tests/test_meal.py
|
coosoti/Osoti-Small
|
ccd147768aeae234eb45bf0a78257aafc3a8a16f
|
[
"MIT"
] | 2
|
2018-04-26T17:14:47.000Z
|
2018-04-30T12:04:14.000Z
|
tests/test_meal.py
|
coosoti/Osoti-Small
|
ccd147768aeae234eb45bf0a78257aafc3a8a16f
|
[
"MIT"
] | null | null | null |
import unittest
import json
from api import db
from api.models.models import Meal, User
from tests.main import MainTestCase
# Credentials for the sample caterer (admin) account shared by all tests below.
sample_admin = {'username': 'victorvenosa',
                'email': 'osoticharles@gmail.com',
                'designation': 'caterer',
                'password': 'kulundeng',
                'confirm_password': 'kulundeng'
                }
# Login payload matching the sample_admin account above.
login_admin = {
    'email': 'osoticharles@gmail.com',
    'password': 'kulundeng'
}
class TestMealEndpoint(MainTestCase):
    """Integration tests for the ``api/v2/meals`` endpoints.

    Every test registers the sample caterer, creates a meal with the token
    returned by registration, and (where needed) logs in again to obtain a
    fresh token for the operation under test.  The repeated register /
    create / login boilerplate from the original version is factored into
    private helpers, and the unused ``data = json.loads(...)`` locals were
    removed.
    """

    def _register_admin(self):
        """Register the sample caterer; returns the register response."""
        return self.client.post(
            'api/v2/auth/register',
            data=json.dumps(sample_admin),
            content_type='application/json',
        )

    def _login_admin(self):
        """Log the sample caterer in; returns the login response."""
        return self.client.post(
            'api/v2/auth/login',
            data=json.dumps(login_admin),
            content_type='application/json'
        )

    @staticmethod
    def _token(response):
        """Extract the auth token from an auth endpoint response."""
        return json.loads(response.data.decode())['auth_token']

    def _auth_headers(self, token):
        """Build the Authorization header dict for *token*."""
        return dict(Authorization='Bearer ' + token)

    def _post_meal(self, token, payload):
        """POST a meal *payload* using *token*; returns the response."""
        return self.client.post('api/v2/meals', data=json.dumps(payload),
                                headers=self._auth_headers(token))

    def _create_meal(self, token, title='Beef mink chapati', price='600.00'):
        """Create a meal and assert it succeeded; returns the response."""
        response = self._post_meal(token, {
            'title': title,
            'price': price
        })
        self.assertEqual(response.status_code, 201)
        self.assertIn(b'Meal has been successfully created', response.data)
        return response

    def test_create_meal(self):
        """Testing meal creation"""
        with self.client:
            token = self._token(self._register_admin())
            self._create_meal(token, title='Beef with chapati')

    def test_get_all_meals(self):
        """Testing retrieval of all meals"""
        with self.client:
            token = self._token(self._register_admin())
            self._create_meal(token)
            token = self._token(self._login_admin())
            response = self.client.get('api/v2/meals',
                                       headers=self._auth_headers(token))
            self.assertEqual(response.status_code, 200)

    def test_get_meal(self):
        """Test retrieve meal details
        """
        with self.client:
            token = self._token(self._register_admin())
            self._create_meal(token)
            token = self._token(self._login_admin())
            response = self.client.get('api/v2/meals/1',
                                       headers=self._auth_headers(token))
            self.assertEqual(response.status_code, 200)

    def test_duplicate_attempts(self):
        """Testing attempt to create a duplicate meal
        """
        with self.client:
            token = self._token(self._register_admin())
            self._create_meal(token)
            token = self._token(self._login_admin())
            # Re-submitting the same title must be rejected.
            response = self._post_meal(token, {
                'title': 'Beef mink chapati',
                'price': '600.00'
            })
            self.assertEqual(response.status_code, 400)
            self.assertIn(
                b'You have already submitted a meal with the same title', response.data)

    def test_invalid_or_empty_data_input(self):
        """Testing attempt to create meal with invalid data"""
        with self.client:
            token = self._token(self._register_admin())
            # Missing 'price' field -> validation failure.
            response = self._post_meal(token, {
                'title': 'Beef with Chicken'
            })
            self.assertEqual(response.status_code, 400)
            self.assertIn(b'Please fill in with valid data', response.data)

    def test_delete_meal(self):
        """Testing delete function
        """
        with self.client:
            token = self._token(self._register_admin())
            self._create_meal(token)
            token = self._token(self._login_admin())
            response = self.client.delete('api/v2/meals/1',
                                          headers=self._auth_headers(token))
            self.assertEqual(response.status_code, 202)
            self.assertIn(b'Meal has been successfully deleted', response.data)

    def test_update_meal(self):
        """Testing meal update function
        """
        new_data = {
            'title': 'Chicken with Ugali',
            'price': '1000.00'
        }
        with self.client:
            token = self._token(self._register_admin())
            self._create_meal(token)
            # user login
            token = self._token(self._login_admin())
            response = self.client.put('api/v2/meals/1',
                                       data=json.dumps(new_data),
                                       headers=self._auth_headers(token))
            self.assertEqual(response.status_code, 202)
            self.assertIn(
                b'The meal has been successfully updated', response.data)
# Allow running this test module directly: `python tests/test_meal.py`.
if __name__ == '__main__':
    unittest.main()
| 39.547718
| 88
| 0.509076
| 912
| 9,531
| 5.233553
| 0.122807
| 0.085481
| 0.090509
| 0.105594
| 0.818353
| 0.807459
| 0.807459
| 0.80264
| 0.80264
| 0.80264
| 0
| 0.017415
| 0.373413
| 9,531
| 240
| 89
| 39.7125
| 0.781815
| 0.028538
| 0
| 0.768519
| 0
| 0
| 0.170986
| 0.004771
| 0
| 0
| 0
| 0
| 0.101852
| 1
| 0.032407
| false
| 0.013889
| 0.023148
| 0
| 0.060185
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6db668dfed48dade1d099a7a2058d1217bd2a34b
| 7,503
|
py
|
Python
|
rsbook_code/utilities/search.py
|
patricknaughton01/RoboticSystemsBook
|
0fc67cbccee0832b5f9b00d848c55697fa69bedf
|
[
"Apache-2.0"
] | 116
|
2018-08-27T15:32:59.000Z
|
2022-02-28T10:41:37.000Z
|
rsbook_code/utilities/search.py
|
patricknaughton01/RoboticSystemsBook
|
0fc67cbccee0832b5f9b00d848c55697fa69bedf
|
[
"Apache-2.0"
] | 2
|
2021-05-04T12:56:40.000Z
|
2022-02-18T23:13:33.000Z
|
rsbook_code/utilities/search.py
|
patricknaughton01/RoboticSystemsBook
|
0fc67cbccee0832b5f9b00d848c55697fa69bedf
|
[
"Apache-2.0"
] | 29
|
2019-06-20T20:13:36.000Z
|
2022-02-20T14:01:34.000Z
|
"""
Includes Dijkstra's algorithm and two A* implementations.
"""
from __future__ import print_function,division
import heapq #for a fast priority queue implementation
def predecessor_traverse(p, s, g):
    """Reconstruct a start-to-goal node sequence from a predecessor map.

    Follows parent pointers in ``p`` from ``g`` back toward ``s`` (the walk
    stops when a node has no recorded predecessor), then returns the visited
    nodes in forward order.  Appending and reversing once is cheaper than
    repeatedly prepending.
    """
    chain = []
    node = g
    while node is not None:
        chain.append(node)
        node = p.get(node)
    chain.reverse()
    return chain
def dijkstras(G,s,g,cost=(lambda v,w:1),verbose=1):
    """Completes a shortest-path search on graph G.

    Args:
        G (AdjListGraph or networkx Graph/DiGraph): the graph to search
        s: the start node
        g: the goal node or a goal test
        cost (optional): a callback function c(v,w) that returns the edge cost
        verbose (optional): if nonzero, will print information about search
            progress.

    Returns:
        tuple: a triple (path,distances,predecessors) giving
        - path: a list or None: either the path of nodes from s to g with
          minimum cost, or None if no path exists.
        - distances: a dictionary mapping nodes to distances from start
        - predecessors: a dictionary mapping nodes to parent nodes
          that can be walked by ``predecessor_traverse`` to get the optimal
          path to any reached node.
    """
    if not callable(g):
        # Bind the goal through the default argument (the original compared
        # against the closed-over g, making the goal= default ineffective).
        gtest = lambda x,goal=g: x==goal
    else:
        gtest = g
    d = dict((v,float('inf')) for v in G.nodes())
    p = dict((v,None) for v in G.nodes())
    d[s] = 0
    Q = [(0,s)]   #each element is a tuple (c,v) with c=cost from start, v=vertex
    nnodes = 0
    while len(Q) > 0:
        c,v = heapq.heappop(Q)  #get the element in the queue with the least value of c
        nnodes += 1
        if gtest(v):
            #found a path
            if verbose: print("Dijkstra's succeeded in",nnodes,"iterations")
            return predecessor_traverse(p,s,v),d,p
        for w in G.neighbors(v):
            dcand = d[v] + cost(v,w)  #this is the cost of going through v to w
            if dcand < d[w]:
                #going through v is optimal
                #if the predecessor of w is not None, then we'll have to adjust the heap
                if p[w] is not None:
                    # Fixed: compare by equality, not identity. `x is not w`
                    # failed to remove stale entries for nodes that are equal
                    # but not the same object (e.g. tuple keys built on the fly).
                    Q = [(qc,x) for (qc,x) in Q if x != w]
                    heapq.heapify(Q)
                d[w] = dcand
                p[w] = v
                #put w on the queue
                heapq.heappush(Q,(dcand,w))
    #no path found
    if verbose: print("Dijkstra's failed in",nnodes,"iterations")
    return None,d,p
def astar(G,s,g,cost=(lambda v,w:1),heuristic=(lambda v:0),verbose=1):
    """Completes an A* search on graph G.

    Args:
        G (AdjListGraph, networkx Graph / DiGraph): the graph to search.
        s: the start node
        g: the goal node or goal test
        cost (optional): a callback function c(v,w) that returns the edge cost
        heuristic (optional): a callback function h(v) that returns the
            heuristic cost-to-go between v and g
        verbose (optional): if nonzero, will print information about search
            progress.

    Returns:
        tuple: a triple (path,distances,predecessors) giving
        - path: a list or None: either the path of nodes from s to g with
          minimum cost, or None if no path exists.
        - distances: a dictionary mapping nodes to distances from start
        - predecessors: a dictionary mapping nodes to parent nodes
          that can be walked by ``predecessor_traverse`` to get the optimal
          path to any reached node.
    """
    if not callable(g):
        # Bind the goal through the default argument (the original compared
        # against the closed-over g, making the goal= default ineffective).
        gtest = lambda x,goal=g: x==goal
    else:
        gtest = g
    d = dict((v,float('inf')) for v in G.nodes())
    p = dict((v,None) for v in G.nodes())
    d[s] = 0
    #each element is a tuple (f,-c,v) with f=c + heuristic(v), c=cost from start, v=vertex
    Q = [(0,0,s)]
    nnodes = 0
    while len(Q) > 0:
        f,minus_c,v = heapq.heappop(Q)  #get the element in the queue with the least value of f
        nnodes += 1
        if gtest(v):
            #found a path
            if verbose: print("A* succeeded in",nnodes,"iterations")
            return predecessor_traverse(p,s,v),d,p
        for w in G.neighbors(v):
            dcand = d[v] + cost(v,w)  #this is the cost of going through v to w
            if dcand < d[w]:
                #going through v is optimal
                #if the predecessor of w is not None, then we'll have to adjust the heap
                if p[w] is not None:
                    # Fixed: compare by equality, not identity. `x is not w`
                    # failed to remove stale entries for nodes that are equal
                    # but not the same object (e.g. tuple keys built on the fly).
                    Q = [(qf,qc,x) for (qf,qc,x) in Q if x != w]
                    heapq.heapify(Q)
                d[w] = dcand
                p[w] = v
                #put w back on the queue, with the heuristic value as its priority
                heapq.heappush(Q,(dcand+heuristic(w),-dcand,w))
    #no path found
    if verbose: print("A* failed in",nnodes,"iterations")
    return None,d,p
def astar_implicit(successors,s,g,cost=(lambda v,w:1),heuristic=(lambda v:0),verbose=1):
    """Completes an A* search on a large/infinite implicit graph.

    Args:
        successors: a callback function s(v) that returns a list of neighbors
            of a node v.
        s: the start node
        g: the goal node or goal test
        cost (optional): a callback function c(v,w) that returns the edge cost
        heuristic (optional): a callback function h(v) that returns the
            heuristic cost-to-go between v and g
        verbose (optional): if nonzero, will print information about search
            progress.

    Returns:
        tuple: a triple (path,distances,predecessors) giving
        - path: a list or None: either the path of nodes from s to g with
          minimum cost, or None if no path exists.
        - distances: a dictionary mapping reached nodes to distances from start
        - predecessors: a dictionary mapping reached nodes to parent nodes
          that can be walked by ``predecessor_traverse`` to get the optimal
          path to any reached node.
    """
    if not callable(g):
        # Bind the goal through the default argument (the original compared
        # against the closed-over g, making the goal= default ineffective).
        gtest = lambda x,goal=g: x==goal
    else:
        gtest = g
    inf = float('inf')
    d = dict()
    p = dict()
    d[s] = 0
    #each element is a tuple (f,-c,v) with f=c + heuristic(v), c=cost from start, v=vertex
    Q = [(0,0,s)]
    nnodes = 0
    while len(Q) > 0:
        f,minus_c,v = heapq.heappop(Q)  #get the element in the queue with the least value of f
        nnodes += 1
        if gtest(v):
            #found a path
            if verbose: print("A* succeeded in",nnodes,"iterations")
            return predecessor_traverse(p,s,v),d,p
        for w in successors(v):
            dcand = d[v] + cost(v,w)  #this is the cost of going through v to w
            if dcand < d.get(w,inf):  # use the precomputed inf (was an unused local)
                #going through v is optimal
                #if w was already queued, we'll have to adjust the heap
                if w in p:
                    # Fixed: compare by equality, not identity. `x is not w`
                    # failed to remove stale entries for nodes that are equal
                    # but not the same object (e.g. tuple keys built on the fly).
                    Q = [(qf,qc,x) for (qf,qc,x) in Q if x != w]
                    heapq.heapify(Q)
                d[w] = dcand
                p[w] = v
                #put w back on the queue, with the heuristic value as its priority
                heapq.heappush(Q,(dcand+heuristic(w),-dcand,w))
    #no path found
    if verbose: print("A* failed in",nnodes,"iterations")
    return None,d,p
| 41
| 106
| 0.572971
| 1,147
| 7,503
| 3.734961
| 0.133391
| 0.010504
| 0.012605
| 0.033613
| 0.865313
| 0.861111
| 0.857843
| 0.84057
| 0.829365
| 0.816527
| 0
| 0.005194
| 0.3328
| 7,503
| 182
| 107
| 41.225275
| 0.850579
| 0.523257
| 0
| 0.736264
| 0
| 0
| 0.051446
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043956
| false
| 0
| 0.021978
| 0
| 0.142857
| 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6df268c4e43d3bd438749da771de993e27c85205
| 79
|
py
|
Python
|
pipe_tools/generator/__init__.py
|
GlobalFishingWatch/pipe-tools
|
34dff591997bb2c25e018df86d13a9d42972032b
|
[
"Apache-2.0"
] | 1
|
2018-05-26T20:10:51.000Z
|
2018-05-26T20:10:51.000Z
|
pipe_tools/generator/__init__.py
|
GlobalFishingWatch/pipe-tools
|
34dff591997bb2c25e018df86d13a9d42972032b
|
[
"Apache-2.0"
] | 37
|
2017-10-22T12:00:59.000Z
|
2022-02-08T19:17:58.000Z
|
pipe_tools/generator/__init__.py
|
GlobalFishingWatch/pipe-tools
|
34dff591997bb2c25e018df86d13a9d42972032b
|
[
"Apache-2.0"
] | null | null | null |
from .generator import MessageGenerator
from .generator import GenerateMessages
| 39.5
| 39
| 0.886076
| 8
| 79
| 8.75
| 0.625
| 0.371429
| 0.542857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088608
| 79
| 2
| 40
| 39.5
| 0.972222
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
098ae5fe0946f6509dc81a45f3d8b96c1f397cba
| 127
|
py
|
Python
|
bireme/thesaurus/models.py
|
viniciusandrade/fi-admin-fork
|
5187af08d1424f275222422ed12f9e54c14caa5b
|
[
"MIT",
"Python-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 5
|
2016-04-08T19:45:25.000Z
|
2022-03-24T16:56:49.000Z
|
bireme/thesaurus/models.py
|
viniciusandrade/fi-admin-fork
|
5187af08d1424f275222422ed12f9e54c14caa5b
|
[
"MIT",
"Python-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 1,214
|
2015-03-10T14:47:10.000Z
|
2022-03-31T12:15:05.000Z
|
bireme/thesaurus/models.py
|
viniciusandrade/fi-admin-fork
|
5187af08d1424f275222422ed12f9e54c14caa5b
|
[
"MIT",
"Python-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 7
|
2015-06-01T17:58:22.000Z
|
2021-09-29T12:34:19.000Z
|
from thesaurus.models_thesaurus import *
from thesaurus.models_qualifiers import *
from thesaurus.models_descriptors import *
| 25.4
| 42
| 0.850394
| 15
| 127
| 7
| 0.4
| 0.371429
| 0.542857
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102362
| 127
| 4
| 43
| 31.75
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
09c431aaa931457d3ad0820a7199a1a80b9785a6
| 195
|
py
|
Python
|
tests/fixtures/basic.py
|
jparise/nosecomplete
|
b5105cd72923f6ef5746d6e763e4a8f422674f1f
|
[
"BSD-3-Clause"
] | 23
|
2015-02-07T14:56:48.000Z
|
2021-09-29T18:50:38.000Z
|
tests/fixtures/basic.py
|
jparise/nosecomplete
|
b5105cd72923f6ef5746d6e763e4a8f422674f1f
|
[
"BSD-3-Clause"
] | 7
|
2015-08-09T18:11:31.000Z
|
2020-08-24T21:08:28.000Z
|
tests/fixtures/basic.py
|
jparise/nosecomplete
|
b5105cd72923f6ef5746d6e763e4a8f422674f1f
|
[
"BSD-3-Clause"
] | 4
|
2015-08-08T02:19:37.000Z
|
2019-10-18T14:01:59.000Z
|
import unittest
# Module-level test function fixture (intentionally empty).
def test_red():
    pass
class AwesomeTestCase(unittest.TestCase):
    """Fixture test case holding two empty test methods."""

    def test_yellow(self):
        pass

    def test_green(self):
        pass
# Another module-level test function fixture (intentionally empty).
def test_blue():
    pass
| 10.833333
| 41
| 0.635897
| 24
| 195
| 5
| 0.541667
| 0.233333
| 0.183333
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.282051
| 195
| 17
| 42
| 11.470588
| 0.857143
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.4
| 0.1
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
61eaa3c598d3c68e625c211ed2a3e76dcddc12ce
| 12,742
|
py
|
Python
|
third_party/tests/YosysTests/architecture/synth_xilinx_srl/generate.py
|
parzival3/Surelog
|
cf126533ebfb2af7df321057af9e3535feb30487
|
[
"Apache-2.0"
] | 156
|
2019-11-16T17:29:55.000Z
|
2022-01-21T05:41:13.000Z
|
third_party/tests/YosysTests/architecture/synth_xilinx_srl/generate.py
|
parzival3/Surelog
|
cf126533ebfb2af7df321057af9e3535feb30487
|
[
"Apache-2.0"
] | 414
|
2021-06-11T07:22:01.000Z
|
2022-03-31T22:06:14.000Z
|
third_party/tests/YosysTests/architecture/synth_xilinx_srl/generate.py
|
parzival3/Surelog
|
cf126533ebfb2af7df321057af9e3535feb30487
|
[
"Apache-2.0"
] | 30
|
2019-11-18T16:31:40.000Z
|
2021-12-26T01:22:51.000Z
|
#!/usr/bin/python3
import re, glob
# Depth sweep upper bound: each test below generates designs of depth 1..N.
N = 131
def assert_static_area(fp, i, name):
    """Append a Yosys area-assertion module for a fixed-length shift register.

    Computes the expected primitive counts for a depth-``i`` static shift
    register after synth_xilinx SRL mapping: depths < 3 stay in flip-flops;
    otherwise full 32-bit runs become SRLC32E, a leftover of 2..17 bits uses
    one SRL16E plus flip-flops, and a larger remainder rounds up to another
    SRLC32E.  Writes a `__test` module carrying the select-based assertion
    string that the test harness evaluates.
    """
    if i < 3:
        # Short registers are kept as plain flip-flops.
        srl32,srl16,fd = (0,0,i)
    else:
        srl32 = i // 32
        if (i % 32) == 0:
            srl16 = 0
            fd = 0
        elif (i % 32) == 1:
            # A single leftover bit is cheaper as one flip-flop.
            srl16 = 0
            fd = 1
        elif (i % 32) <= 17:
            # Remainder fits one SRL16E (up to 16 bits) plus flip-flops.
            srl16 = 1
            fd = (i % 32) - 16
        else:
            # Remainder too big for SRL16E: round up to another SRLC32E.
            srl32 += 1
            srl16 = 0
            fd = 0
    fp.write('''
`ifndef _AUTOTB
module __test ;
wire [4095:0] assert_area = "cd; select t:FD* -assert-count {0}; select t:SRL16E -assert-count {1}; select t:SRLC32E -assert-count {2}; cd {3}_{4}; select t:BUFG t:FD* t:SRL16E t:SRLC32E %% %n t:* %i -assert-none";
endmodule
`endif
'''.format(fd, srl16, srl32, name, i))
def assert_dynamic_area(fp, i, name):
    """Append a Yosys area-assertion module for a variable-length shift register.

    Like ``assert_static_area`` but for dynamically addressed (var-len) SRLs,
    which additionally need read-mux logic: MUXF7/MUXF8 trees across the SRL
    outputs and LUT3/LUT5 cells for the widest configurations.
    """
    if i < 3:
        srl32,srl16,fd = (0,0,i)
        lut3 = 1 if i > 1 else 0
        lut5 = 0
    else:
        srl32 = i // 32
        if (i % 128) == 0 or (i % 32) == 0:
            srl16 = 0
            fd = 0
        elif (i % 128) == 1:
            srl16 = 0
            fd = 1
        elif (i % 32) <= 16:
            srl16 = 1
            fd = 0
        else:
            srl32 += 1
            srl16 = 0
            fd = 0
        # Output-mux LUTs kick in once the depth exceeds one/two MUXF8 levels.
        lut3 = 1 if i > 128 and i < 257 else 0
        lut5 = 1 if i > 256 else 0
    # One MUXF8 combines four SRL outputs; a remainder of 3 rounds up.
    muxf8 = (srl32+srl16) // 4
    if ((srl32 + srl16) % 4) == 0:
        muxf7 = muxf8 * 2
    elif ((srl32 + srl16) % 4) == 3:
        muxf8 += 1
        muxf7 = muxf8 * 2
    else:
        muxf7 = (srl32+srl16) // 2
    fp.write('''
`ifndef _AUTOTB
module __test ;
wire [4095:0] assert_area = "cd; select t:FD* -assert-count {0}; select t:SRL16E -assert-count {1}; select t:SRLC32E -assert-count {2}; select t:MUXF7 -assert-count {3}; select t:MUXF8 -assert-count {4}; select t:LUT3 -assert-count {5}; select t:LUT5 -assert-count {6}; cd {7}_{8}; select t:BUFG t:FD* t:SRL16E t:SRLC32E t:MUXF7 t:MUXF8 t:LUT3 t:LUT5 %% %n t:* %i -assert-none";
endmodule
`endif
'''.format(fd, srl16, srl32, muxf7, muxf8, lut3, lut5, name, i))
# Test 1: pos_clk_no_enable_no_init_not_inferred
for i in range(1,N+1):
with open('pos_clk_no_enable_no_init_not_inferred_%d.v' % i, 'w') as fp:
fp.write('''
(* top *)
module pos_clk_no_enable_no_init_not_inferred_{0} #(parameter width=1, depth={0}) (input clk, input [width-1:0] i, output [width-1:0] q);
generate
wire [depth:0] int [width-1:0];
genvar w, d;
for (w = 0; w < width; w=w+1) begin
assign int[w][0] = i[w];
for (d = 0; d < depth; d=d+1) begin
\$_DFFE_PP_ r(.C(clk), .D(int[w][d]), .E(1'b1), .Q(int[w][d+1]));
end
assign q[w] = int[w][depth];
end
endgenerate
endmodule
'''.format(i))
assert_static_area(fp, i, 'pos_clk_no_enable_no_init_not_inferred')
# Test 2: pos_clk_with_enable_no_init_not_inferred
for i in range(1,N+1):
with open('pos_clk_with_enable_no_init_not_inferred_%d.v' % i, 'w') as fp:
fp.write('''
(* top *)
module pos_clk_with_enable_no_init_not_inferred_{0} #(parameter width=1, depth={0}) (input clk, input [width-1:0] i, input e, output [width-1:0] q);
generate
wire [depth:0] int [width-1:0];
genvar w, d;
for (w = 0; w < width; w=w+1) begin
assign int[w][0] = i[w];
for (d = 0; d < depth; d=d+1) begin
\$_DFFE_PP_ r(.C(clk), .D(int[w][d]), .E(e), .Q(int[w][d+1]));
end
assign q[w] = int[w][depth];
end
endgenerate
endmodule
'''.format(i))
assert_static_area(fp, i, 'pos_clk_with_enable_no_init_not_inferred')
# Test 3: pos_clk_with_enable_with_init_inferred
for i in range(1,N+1):
with open('pos_clk_with_enable_with_init_inferred_%d.v' % i, 'w') as fp:
fp.write('''
(* top *)
module pos_clk_with_enable_with_init_inferred_{0} #(parameter width=1, depth={0}) (input clk, input [width-1:0] i, input e, output [width-1:0] q);
generate
reg [depth-1:0] int [width-1:0];
genvar w, d;
for (w = 0; w < width; w=w+1) begin
for (d = 0; d < depth; d=d+1)
initial int[w][d] <= ~((d+w) % 2);
if (depth == 1) begin
always @(posedge clk) if (e) int[w] <= i[w];
assign q[w] = int[w];
end
else begin
always @(posedge clk) if (e) int[w] <= {{ int[w][depth-2:0], i[w] }};
assign q[w] = int[w][depth-1];
end
end
endgenerate
endmodule
'''.format(i))
assert_static_area(fp, i, 'pos_clk_with_enable_with_init_inferred')
# Test 4: neg_clk_no_enable_no_init_not_inferred
for i in range(1,N+1):
with open('neg_clk_no_enable_no_init_not_inferred_%d.v' % i, 'w') as fp:
fp.write('''
(* top *)
module neg_clk_no_enable_no_init_not_inferred_{0} #(parameter width=1, depth={0}) (input clk, input [width-1:0] i, output [width-1:0] q);
generate
wire [depth:0] int [width-1:0];
genvar w, d;
for (w = 0; w < width; w=w+1) begin
assign int[w][0] = i[w];
for (d = 0; d < depth; d=d+1) begin
\$_DFFE_NP_ r(.C(clk), .D(int[w][d]), .E(1'b1), .Q(int[w][d+1]));
end
assign q[w] = int[w][depth];
end
endgenerate
endmodule
'''.format(i))
assert_static_area(fp, i, 'neg_clk_no_enable_no_init_not_inferred')
# Test 5: neg_clk_no_enable_no_init_inferred
for i in range(1,N+1):
with open('neg_clk_no_enable_no_init_inferred_%d.v' % i, 'w') as fp:
fp.write('''
(* top *)
module neg_clk_no_enable_no_init_inferred_{0} #(parameter width=1, depth={0}) (input clk, input [width-1:0] i, output [width-1:0] q);
generate
reg [depth-1:0] int [width-1:0];
genvar w, d;
for (w = 0; w < width; w=w+1) begin
if (depth == 1) begin
always @(negedge clk) int[w] <= i[w];
assign q[w] = int[w];
end
else begin
always @(negedge clk) int[w] <= {{ int[w][depth-2:0], i[w] }};
assign q[w] = int[w][depth-1];
end
end
endgenerate
endmodule
'''.format(i))
assert_static_area(fp, i, 'neg_clk_no_enable_no_init_inferred')
# Test 6: neg_clk_with_enable_with_init_inferred
for i in range(1,N+1):
with open('neg_clk_with_enable_with_init_inferred_%d.v' % i, 'w') as fp:
fp.write('''
(* top *)
module neg_clk_with_enable_with_init_inferred_{0} #(parameter width=1, depth={0}) (input clk, input [width-1:0] i, input e, output [width-1:0] q);
generate
reg [depth-1:0] int [width-1:0];
genvar w, d;
for (w = 0; w < width; w=w+1) begin
for (d = 0; d < depth; d=d+1)
initial int[w][d] <= ~((d+w) % 2);
if (depth == 1) begin
always @(negedge clk) if (e) int[w] <= i[w];
assign q[w] = int[w];
end
else begin
always @(negedge clk) if (e) int[w] <= {{ int[w][depth-2:0], i[w] }};
assign q[w] = int[w][depth-1];
end
end
endgenerate
endmodule
'''.format(i))
assert_static_area(fp, i, 'neg_clk_with_enable_with_init_inferred')
# Test 10: pos_clk_no_enable_no_init_not_inferred_var_len
for i in range(1,N+1):
with open('pos_clk_no_enable_no_init_not_inferred_var_len_%d.v' % i, 'w') as fp:
fp.write('''
(* top *)
module pos_clk_no_enable_no_init_not_inferred_var_len_{0} #(parameter width=1, depth={0}) (input clk, input [width-1:0] i, input [31:0] l, output [width-1:0] q);
generate
wire [depth:0] int [width-1:0];
genvar w, d;
for (w = 0; w < width; w=w+1) begin
assign int[w][0] = i[w];
for (d = 0; d < depth; d=d+1) begin
\$_DFFE_PP_ r(.C(clk), .D(int[w][d]), .E(1'b1), .Q(int[w][d+1]));
end
wire [depth-1:0] t;
assign t = int[w][depth:1];
assign q[w] = t[l];
end
endgenerate
endmodule
'''.format(i))
assert_dynamic_area(fp, i, 'pos_clk_no_enable_no_init_not_inferred_var_len')
# Test 11: neg_clk_with_enable_with_init_inferred_var_len
for i in range(1,N+1):
with open('neg_clk_with_enable_with_init_inferred_var_len_%d.v' % i, 'w') as fp:
fp.write('''
(* top *)
module neg_clk_with_enable_with_init_inferred_var_len_{0} #(parameter width=1, depth={0}) (input clk, input [width-1:0] i, input e, input [31:0] l, output [width-1:0] q);
generate
reg [depth-1:0] int [width-1:0];
genvar w, d;
for (w = 0; w < width; w=w+1) begin
for (d = 0; d < depth; d=d+1)
initial int[w][d] <= ~((d+w) % 2);
if (depth == 1) begin
always @(negedge clk) if (e) int[w] <= i[w];
assign q[w] = int[w];
end
else begin
always @(negedge clk) if (e) int[w] <= {{ int[w][depth-2:0], i[w] }};
assign q[w] = int[w][l];
end
end
endgenerate
endmodule
'''.format(i))
assert_dynamic_area(fp, i, 'neg_clk_with_enable_with_init_inferred_var_len')
import lfsr_area
re_lfsr = re.compile(r'lfsr_(\d+)\.v')
for fn in glob.glob('lfsr_*.v'):
m = re_lfsr.match(fn)
if not m: continue
W = int(m.group(1))
with open(fn, 'a') as f:
print('''
`ifndef _AUTOTB
module __test ;
wire [4095:0] assert_area = "%s";
endmodule
`endif
''' % lfsr_area.area[W], file=f)
# Test 15: pos_clk_no_enable_no_init_not_inferred
for i in range(128+1,128+N+1):
with open('pos_clk_no_enable_no_init_not_inferred_%d.v' % i, 'w') as fp:
fp.write('''
(* top *)
module pos_clk_no_enable_no_init_not_inferred_{0} #(parameter width=1, depth={0}) (input clk, input [width-1:0] i, output [width-1:0] q);
generate
wire [depth:0] int [width-1:0];
genvar w, d;
for (w = 0; w < width; w=w+1) begin
assign int[w][0] = i[w];
for (d = 0; d < depth; d=d+1) begin
\$_DFFE_PP_ r(.C(clk), .D(int[w][d]), .E(1'b1), .Q(int[w][d+1]));
end
assign q[w] = int[w][depth];
end
endgenerate
endmodule
'''.format(i))
assert_static_area(fp, i, 'pos_clk_no_enable_no_init_not_inferred')
# Test 16: neg_clk_with_enable_with_init_inferred_var_len
# Same module template as Test 11 but for depths 129..128+N, so the two
# ranges never collide on filenames.
for i in range(128 + 1, 128 + N + 1):
    src = '''
(* top *)
module neg_clk_with_enable_with_init_inferred_var_len_{0} #(parameter width=1, depth={0}) (input clk, input [width-1:0] i, input e, input [31:0] l, output [width-1:0] q);
generate
reg [depth-1:0] int [width-1:0];
genvar w, d;
for (w = 0; w < width; w=w+1) begin
for (d = 0; d < depth; d=d+1)
initial int[w][d] <= ~((d+w) % 2);
if (depth == 1) begin
always @(negedge clk) if (e) int[w] <= i[w];
assign q[w] = int[w];
end
else begin
always @(negedge clk) if (e) int[w] <= {{ int[w][depth-2:0], i[w] }};
assign q[w] = int[w][l];
end
end
endgenerate
endmodule
'''.format(i)
    with open('neg_clk_with_enable_with_init_inferred_var_len_%d.v' % i, 'w') as outf:
        outf.write(src)
        # Run-time selectable tap (input l) -> dynamic-area assertion.
        assert_dynamic_area(outf, i, 'neg_clk_with_enable_with_init_inferred_var_len')
# Test 18: neg_clk_with_enable_with_init_inferred2
# Emit one Verilog file per depth in 1..N: a depth-major inferred shift
# register (outer loop over depth, inner over width), negative-edge clock
# with enable and a fixed final tap.  NOTE(review): the per-bit initial
# statement is commented out in the template despite the "with_init" name.
for i in range(1, N + 1):
    src = '''
(* top *)
module neg_clk_with_enable_with_init_inferred2_{0} #(parameter width=1, depth={0}) (input clk, input [width-1:0] i, input e, output [width-1:0] q);
generate
reg [width-1:0] int [depth-1:0];
genvar w, d;
for (d = 0; d < depth; d=d+1) begin
for (w = 0; w < width; w=w+1) begin
//initial int[d][w] <= ~((d+w) % 2);
if (d == 0) begin
always @(negedge clk) if (e) int[d][w] <= i[w];
end
else begin
always @(negedge clk) if (e) int[d][w] <= int[d-1][w];
end
end
end
assign q = int[depth-1];
endgenerate
endmodule'''.format(i)
    with open('neg_clk_with_enable_with_init_inferred2_%d.v' % i, 'w') as outf:
        outf.write(src)
        # Fixed tap (depth-1), so the static-area assertion applies here.
        assert_static_area(outf, i, 'neg_clk_with_enable_with_init_inferred2')
# Test 19: pos_clk_with_enable_no_init_inferred2_var_len
# Emit one Verilog file per depth in 1..N: depth-major inferred shift
# register, positive-edge clock with enable, per-bit initial values, and a
# run-time selectable tap (input l).
for i in range(1, N + 1):
    src = '''
(* top *)
module pos_clk_with_enable_no_init_inferred2_var_len_{0} #(parameter width=1, depth={0}) (input clk, input [width-1:0] i, input e, input [31:0] l, output [width-1:0] q);
generate
reg [width-1:0] int [depth-1:0];
genvar w, d;
for (d = 0; d < depth; d=d+1) begin
for (w = 0; w < width; w=w+1) begin
initial int[d][w] <= ~((d+w) % 2);
if (d == 0) begin
always @(posedge clk) if (e) int[d][w] <= i[w];
end
else begin
always @(posedge clk) if (e) int[d][w] <= int[d-1][w];
end
end
end
assign q = int[l];
endgenerate
endmodule'''.format(i)
    with open('pos_clk_with_enable_no_init_inferred2_var_len_%d.v' % i, 'w') as outf:
        outf.write(src)
        # Run-time selectable tap -> dynamic-area assertion.
        assert_dynamic_area(outf, i, 'pos_clk_with_enable_no_init_inferred2_var_len')
| 33.096104
| 382
| 0.567336
| 2,205
| 12,742
| 3.084807
| 0.057143
| 0.028815
| 0.037048
| 0.038224
| 0.909732
| 0.906057
| 0.901206
| 0.874596
| 0.862834
| 0.828286
| 0
| 0.054227
| 0.264794
| 12,742
| 384
| 383
| 33.182292
| 0.671862
| 0.048423
| 0
| 0.778761
| 0
| 0.076696
| 0.738606
| 0.129293
| 0
| 0
| 0
| 0
| 0.050147
| 1
| 0.0059
| false
| 0
| 0.0059
| 0
| 0.011799
| 0.00295
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
111589e56487524f50631e9b1bc88f87aa5fd11a
| 94,425
|
py
|
Python
|
workflow/migrations/0001_initial.py
|
sannleen/TolaActivity
|
b47154339c3a45583063ecad43b0b16ae2f8f36e
|
[
"Apache-2.0"
] | null | null | null |
workflow/migrations/0001_initial.py
|
sannleen/TolaActivity
|
b47154339c3a45583063ecad43b0b16ae2f8f36e
|
[
"Apache-2.0"
] | null | null | null |
workflow/migrations/0001_initial.py
|
sannleen/TolaActivity
|
b47154339c3a45583063ecad43b0b16ae2f8f36e
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-19 08:07
from __future__ import unicode_literals
from decimal import Decimal
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('indicators', '0001_initial'),
('sites', '0002_alter_domain_unique'),
]
operations = [
migrations.CreateModel(
name='AdminLevelFour',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, verbose_name='Admin Boundary 4')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('name',),
'verbose_name': 'Admin Boundary 4',
'verbose_name_plural': 'Admin Boundary 4',
},
),
migrations.CreateModel(
name='AdminLevelOne',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, verbose_name='Admin Boundary 1')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('name',),
'verbose_name': 'Admin Boundary 1',
'verbose_name_plural': 'Admin Boundary 1',
},
),
migrations.CreateModel(
name='AdminLevelThree',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, verbose_name='Admin Boundary 3')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('name',),
'verbose_name': 'Admin Boundary 3',
'verbose_name_plural': 'Admin Boundary 3',
},
),
migrations.CreateModel(
name='AdminLevelTwo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, verbose_name='Admin Boundary 2')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('adminlevelone', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.AdminLevelOne', verbose_name='Admin Level 1')),
],
options={
'ordering': ('name',),
'verbose_name': 'Admin Boundary 2',
'verbose_name_plural': 'Admin Boundary 2',
},
),
migrations.CreateModel(
name='ApprovalType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, verbose_name='Name')),
('default_global', models.BooleanField(default=0)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('name', 'organization'),
},
),
migrations.CreateModel(
name='ApprovalWorkflow',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('note', models.TextField(blank=True, null=True)),
('date_assigned', models.DateTimeField(blank=True, null=True)),
('date_approved', models.DateTimeField(blank=True, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('status', models.CharField(choices=[('open', 'Open'), ('awaiting_approval', 'Awaiting Approval'), ('tracking', 'Tracking'), ('awaiting_verification', 'Awaiting Verification'), ('closed', 'Closed')], default='open', max_length=50)),
('section', models.CharField(choices=[('workflowlevel1', 'Workflow Level 1'), ('workflowlevel2', 'Workflow Level 2'), ('workflowlevel3', 'Workflow Level 3'), ('sites', 'Sites'), ('stakeholders', 'Stakeholders'), ('documents', 'Documents')], default='workflowlevel1', max_length=50)),
('approval_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='approval_type', to='workflow.ApprovalType')),
],
options={
'ordering': ('approval_type',),
},
),
migrations.CreateModel(
name='Award',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=100, null=True, verbose_name='Award Name/Title')),
('amount', models.IntegerField(blank=True, default=0, verbose_name='Amount')),
('status', models.CharField(choices=[('open', 'Open'), ('funded', 'Funded'), ('awaiting', 'Awaiting Funding'), ('closed', 'Closed')], default='Open', max_length=50)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('name',),
},
),
migrations.CreateModel(
name='Budget',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('contributor', models.CharField(blank=True, max_length=135, null=True)),
('account_code', models.CharField(blank=True, max_length=135, null=True, verbose_name='Accounting Code')),
('cost_center', models.CharField(blank=True, max_length=135, null=True, verbose_name='Cost Center')),
('donor_code', models.CharField(blank=True, max_length=135, null=True, verbose_name='Donor Code')),
('description_of_contribution', models.CharField(blank=True, max_length=255, null=True)),
('proposed_value', models.IntegerField(blank=True, default=0, null=True, verbose_name='Budget')),
('actual_value', models.IntegerField(blank=True, default=0, null=True, verbose_name='Actual')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('contributor',),
},
),
migrations.CreateModel(
name='Checklist',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, default='Checklist', max_length=255, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('workflowlevel2',),
},
),
migrations.CreateModel(
name='ChecklistItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('item', models.CharField(max_length=255)),
('in_file', models.BooleanField(default=False)),
('not_applicable', models.BooleanField(default=False)),
('global_item', models.BooleanField(default=False)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('checklist', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.Checklist')),
],
options={
'ordering': ('item',),
},
),
migrations.CreateModel(
name='CodedField',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, null=True, verbose_name='Field Name')),
('label', models.CharField(blank=True, max_length=255, null=True, verbose_name='Field Label')),
('is_required', models.BooleanField(default=0, verbose_name='Required Field?')),
('is_universal', models.BooleanField(default=0, verbose_name='Available in Every Level 2 Form?')),
('type', models.CharField(blank=True, max_length=255, null=True, verbose_name='Field Type')),
('default_value', models.CharField(blank=True, max_length=255, null=True, verbose_name='Field Default Value')),
('api_url', models.CharField(blank=True, max_length=255, null=True, verbose_name='Associated API URL')),
('api_token', models.CharField(blank=True, max_length=255, null=True, verbose_name='Associated API Token')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('name', 'type'),
'verbose_name_plural': 'CodedFields',
},
),
migrations.CreateModel(
name='CodedFieldValues',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('value', models.CharField(blank=True, max_length=255, null=True, verbose_name='Value')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('coded_field', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.CodedField')),
],
options={
'ordering': ('value', 'coded_field', 'workflowlevel2__name'),
'verbose_name_plural': 'CodedFields',
},
),
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, null=True, verbose_name='Name')),
('title', models.CharField(blank=True, max_length=255, null=True, verbose_name='Title')),
('city', models.CharField(blank=True, max_length=255, null=True, verbose_name='City/Town')),
('address', models.TextField(blank=True, max_length=255, null=True, verbose_name='Address')),
('email', models.CharField(blank=True, max_length=255, null=True, verbose_name='Email')),
('phone', models.CharField(blank=True, max_length=255, null=True, verbose_name='Phone')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('name', 'country', 'title'),
'verbose_name_plural': 'Contact',
},
),
migrations.CreateModel(
name='Country',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('country', models.CharField(blank=True, max_length=255, verbose_name='Country Name')),
('code', models.CharField(blank=True, max_length=4, verbose_name='2 Letter Country Code')),
('description', models.TextField(blank=True, max_length=765, verbose_name='Description/Notes')),
('latitude', models.CharField(blank=True, max_length=255, null=True, verbose_name='Latitude')),
('longitude', models.CharField(blank=True, max_length=255, null=True, verbose_name='Longitude')),
('zoom', models.IntegerField(default=5, verbose_name='Zoom')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('country',),
'verbose_name_plural': 'Countries',
},
),
migrations.CreateModel(
name='Currency',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('source_currency', models.CharField(blank=True, max_length=255, verbose_name='Source Currency Name')),
('target_currency', models.CharField(blank=True, max_length=255, verbose_name='Target Currency Name')),
('current_rate', models.IntegerField(blank=True, null=True, verbose_name='Conversion Rate')),
('conversion_date', models.DateTimeField(blank=True, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('source_currency',),
},
),
migrations.CreateModel(
name='Documentation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('document_uuid', models.CharField(blank=True, default=uuid.uuid4, max_length=255, unique=True, verbose_name='Document UUID')),
('name', models.CharField(blank=True, max_length=255, null=True, verbose_name='Name of Document')),
('url', models.CharField(blank=True, max_length=255, null=True, verbose_name='URL (Link to document or document repository)')),
('description', models.CharField(blank=True, max_length=255, null=True)),
('file_field', models.FileField(blank=True, null=True, upload_to='uploads')),
('file_type', models.CharField(blank=True, max_length=255, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('name',),
'verbose_name_plural': 'Documentation',
},
),
migrations.CreateModel(
name='FormGuidance',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('form', models.CharField(blank=True, max_length=135, null=True)),
('guidance_link', models.URLField(blank=True, null=True)),
('guidance', models.TextField(blank=True, null=True)),
('default_global', models.BooleanField(default=0)),
('create_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('create_date',),
},
),
migrations.CreateModel(
name='FundCode',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, verbose_name='Fund Code')),
('default_global', models.BooleanField(default=0)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('name',),
},
),
migrations.CreateModel(
name='HistoricalBudget',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('contributor', models.CharField(blank=True, max_length=135, null=True)),
('account_code', models.CharField(blank=True, max_length=135, null=True, verbose_name='Accounting Code')),
('cost_center', models.CharField(blank=True, max_length=135, null=True, verbose_name='Cost Center')),
('donor_code', models.CharField(blank=True, max_length=135, null=True, verbose_name='Donor Code')),
('description_of_contribution', models.CharField(blank=True, max_length=255, null=True)),
('proposed_value', models.IntegerField(blank=True, default=0, null=True, verbose_name='Budget')),
('actual_value', models.IntegerField(blank=True, default=0, null=True, verbose_name='Actual')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('donor_currency', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.Currency')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('local_currency', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.Currency')),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical budget',
},
),
migrations.CreateModel(
name='HistoricalIssueRegister',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, null=True)),
('type', models.CharField(blank=True, max_length=135, null=True)),
('impact', models.CharField(blank=True, max_length=255, null=True)),
('rating', models.CharField(blank=True, max_length=255, null=True)),
('status', models.IntegerField(blank=True, default=0, null=True)),
('cause', models.CharField(blank=True, max_length=255, null=True)),
('date_opened', models.DateTimeField(blank=True, null=True)),
('date_resolved', models.DateTimeField(blank=True, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical issue register',
},
),
migrations.CreateModel(
name='HistoricalRiskRegister',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, null=True)),
('type', models.CharField(blank=True, max_length=135, null=True)),
('impact', models.CharField(blank=True, max_length=255, null=True)),
('likelihood', models.CharField(blank=True, max_length=255, null=True)),
('rating', models.CharField(blank=True, max_length=255, null=True)),
('status', models.IntegerField(blank=True, default=0, null=True)),
('category', models.CharField(blank=True, max_length=255, null=True)),
('contingency_plan', models.CharField(blank=True, max_length=255, null=True)),
('mitigation_plan', models.CharField(blank=True, max_length=255, null=True)),
('post_mitigation_status', models.CharField(blank=True, max_length=255, null=True)),
('action_by', models.DateTimeField(blank=True, null=True)),
('action_when', models.DateTimeField(blank=True, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical risk register',
},
),
migrations.CreateModel(
name='HistoricalSiteProfile',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('site_uuid', models.CharField(db_index=True, default=uuid.uuid4, max_length=255, verbose_name='Site UUID')),
('name', models.CharField(max_length=255, verbose_name='Site Name')),
('contact_leader', models.CharField(blank=True, max_length=255, null=True, verbose_name='Contact Name')),
('date_of_firstcontact', models.DateTimeField(blank=True, null=True, verbose_name='Date of First Contact')),
('contact_number', models.CharField(blank=True, max_length=255, null=True, verbose_name='Contact Number')),
('num_members', models.CharField(blank=True, max_length=255, null=True, verbose_name='Number of Members')),
('info_source', models.CharField(blank=True, max_length=255, null=True, verbose_name='Data Source')),
('total_num_households', models.IntegerField(blank=True, null=True, verbose_name='Total # Households')),
('avg_household_size', models.DecimalField(blank=True, decimal_places=14, default=Decimal('0.00'), max_digits=25, null=True, verbose_name='Average Household Size')),
('total_population', models.IntegerField(blank=True, null=True)),
('total_male', models.IntegerField(blank=True, null=True)),
('total_female', models.IntegerField(blank=True, null=True)),
('total_land', models.IntegerField(blank=True, help_text='In hectares/jeribs', null=True, verbose_name='Total Land')),
('total_agricultural_land', models.IntegerField(blank=True, help_text='In hectares/jeribs', null=True, verbose_name='Total Agricultural Land')),
('total_rainfed_land', models.IntegerField(blank=True, help_text='In hectares/jeribs', null=True, verbose_name='Total Rain-fed Land')),
('total_horticultural_land', models.IntegerField(blank=True, help_text='In hectares/jeribs', null=True, verbose_name='Total Horticultural Land')),
('total_literate_peoples', models.IntegerField(blank=True, null=True, verbose_name='Total Literate People')),
('literate_males', models.IntegerField(blank=True, help_text='%', null=True, verbose_name='% of Literate Males')),
('literate_females', models.IntegerField(blank=True, help_text='%', null=True, verbose_name='% of Literate Females')),
('literacy_rate', models.IntegerField(blank=True, help_text='%', null=True, verbose_name='Literacy Rate (%)')),
('populations_owning_land', models.IntegerField(blank=True, help_text='(%)', null=True, verbose_name='Households Owning Land')),
('avg_landholding_size', models.DecimalField(decimal_places=14, default=Decimal('0.00'), help_text='In hectares/jeribs', max_digits=25, verbose_name='Average Landholding Size')),
('households_owning_livestock', models.IntegerField(blank=True, help_text='(%)', null=True, verbose_name='Households Owning Livestock')),
('animal_type', models.CharField(blank=True, help_text='List Animal Types', max_length=255, null=True, verbose_name='Animal Types')),
('latitude', models.DecimalField(decimal_places=16, default=Decimal('0.00'), max_digits=25, verbose_name='Latitude (Decimal Coordinates)')),
('longitude', models.DecimalField(decimal_places=16, default=Decimal('0.00'), max_digits=25, verbose_name='Longitude (Decimal Coordinates)')),
('status', models.BooleanField(default=True, verbose_name='Site Active')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('admin_level_three', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.AdminLevelThree')),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical site profile',
},
),
migrations.CreateModel(
name='HistoricalWorkflowLevel2',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('level2_uuid', models.CharField(blank=True, db_index=True, default=uuid.uuid4, max_length=255, verbose_name='WorkflowLevel2 UUID')),
('short', models.BooleanField(default=True, verbose_name='Short Form (recommended)')),
('parent_workflowlevel2', models.IntegerField(blank=True, default=0, verbose_name='Parent')),
('date_of_request', models.DateTimeField(blank=True, null=True, verbose_name='Date of Request')),
('name', models.CharField(max_length=255, verbose_name='Name')),
('description', models.TextField(blank=True, null=True, verbose_name='Description')),
('short_name', models.CharField(blank=True, max_length=20, null=True, verbose_name='Code')),
('staff_responsible', models.CharField(blank=True, max_length=255, null=True, verbose_name='Staff Responsible')),
('effect_or_impact', models.TextField(blank=True, null=True, verbose_name='What is the anticipated Outcome or Goal?')),
('expected_start_date', models.DateTimeField(blank=True, null=True, verbose_name='Expected starting date')),
('expected_end_date', models.DateTimeField(blank=True, null=True, verbose_name='Expected ending date')),
('total_estimated_budget', models.DecimalField(blank=True, decimal_places=2, default=Decimal('0.00'), help_text='In USD', max_digits=12, verbose_name='Total Project Budget')),
('justification_background', models.TextField(blank=True, null=True, verbose_name='General Background and Problem Statement')),
('risks_assumptions', models.TextField(blank=True, null=True, verbose_name='Risks and Assumptions')),
('description_of_government_involvement', models.TextField(blank=True, null=True)),
('description_of_community_involvement', models.TextField(blank=True, null=True)),
('actual_start_date', models.DateTimeField(blank=True, null=True)),
('actual_end_date', models.DateTimeField(blank=True, null=True)),
('actual_duration', models.CharField(blank=True, max_length=255, null=True)),
('on_time', models.BooleanField(default=True)),
('no_explanation', models.TextField(blank=True, null=True, verbose_name='If not on time explain delay')),
('actual_cost', models.DecimalField(blank=True, decimal_places=2, default=Decimal('0.00'), help_text='What was the actual final cost? This should match any financial documentation you have in the file. It should be completely documented and verifiable by finance and any potential audit', max_digits=20, verbose_name='Actual Cost')),
('actual_cost_date', models.DateTimeField(blank=True, null=True)),
('budget_variance', models.CharField(blank=True, max_length=255, null=True, verbose_name='Budget versus Actual variance')),
('explanation_of_variance', models.CharField(blank=True, max_length=255, null=True, verbose_name='Explanation of variance')),
('total_cost', models.DecimalField(blank=True, decimal_places=2, default=Decimal('0.00'), help_text='In USD', max_digits=12, verbose_name='Estimated Budget for Organization')),
('agency_cost', models.DecimalField(blank=True, decimal_places=2, default=Decimal('0.00'), help_text='In USD', max_digits=12, verbose_name='Actual Cost for Organization')),
('community_handover', models.BooleanField(default=False, help_text='Check box if it was completed', verbose_name='CommunityHandover/Sustainability Maintenance Plan')),
('capacity_built', models.TextField(blank=True, max_length=755, null=True, verbose_name='Describe how sustainability was ensured for this project?')),
('quality_assured', models.TextField(blank=True, max_length=755, null=True, verbose_name='How was quality assured for this project')),
('issues_and_challenges', models.TextField(blank=True, null=True, verbose_name='List any issues or challenges faced (include reasons for delays)')),
('lessons_learned', models.TextField(blank=True, null=True, verbose_name='Lessons learned')),
('sort', models.IntegerField(blank=True, default=0)),
('create_date', models.DateTimeField(blank=True, null=True, verbose_name='Date Created')),
('edit_date', models.DateTimeField(blank=True, null=True, verbose_name='Last Edit Date')),
('status', models.CharField(blank=True, choices=[('green', 'Green'), ('yellow', 'Yellow'), ('orange', 'Orange'), ('red', 'Red')], default='green', max_length=50)),
('progress', models.CharField(blank=True, choices=[('open', 'Open'), ('awaitingapproval', 'Awaiting Approval'), ('tracking', 'Tracking'), ('closed', 'Closed')], default='open', max_length=50)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('donor_currency', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.Currency')),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('local_currency', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.Currency')),
],
options={
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
'verbose_name': 'historical workflow level2',
},
),
migrations.CreateModel(
name='Industry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, default='TolaData', max_length=255, verbose_name='Industry Name')),
('description', models.TextField(blank=True, max_length=765, null=True, verbose_name='Description/Notes')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('name',),
'verbose_name_plural': 'Organizations',
},
),
migrations.CreateModel(
name='Internationalization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('language', models.CharField(blank=True, max_length=100, null=True, verbose_name='Language')),
('language_file', django.contrib.postgres.fields.jsonb.JSONField()),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('language',),
},
),
# IssueRegister: free-text issue log (type/impact/rating/cause) with
# opened/resolved dates; all descriptive fields are optional.
migrations.CreateModel(
name='IssueRegister',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, null=True)),
('type', models.CharField(blank=True, max_length=135, null=True)),
('impact', models.CharField(blank=True, max_length=255, null=True)),
('rating', models.CharField(blank=True, max_length=255, null=True)),
# Integer status code, defaulting to 0; meaning of the codes is defined elsewhere.
('status', models.IntegerField(blank=True, default=0, null=True)),
('cause', models.CharField(blank=True, max_length=255, null=True)),
('date_opened', models.DateTimeField(blank=True, null=True)),
('date_resolved', models.DateTimeField(blank=True, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('type', 'name'),
},
),
# LandType: lookup table for land classification (e.g. Rural/Urban/Peri-Urban).
migrations.CreateModel(
name='LandType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('classify_land', models.CharField(blank=True, help_text='Rural, Urban, Peri-Urban', max_length=100, verbose_name='Land Classification')),
# NOTE(review): default=0 on a BooleanField works (0 is falsy) but the
# idiomatic form is default=False; a models.py cleanup would be cosmetic only.
('default_global', models.BooleanField(default=0)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('classify_land',),
},
),
# Milestone: named date-ranged milestone; name is the only required field.
migrations.CreateModel(
name='Milestone',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('description', models.TextField(blank=True, null=True)),
('milestone_start_date', models.DateTimeField(blank=True, null=True)),
('milestone_end_date', models.DateTimeField(blank=True, null=True)),
# default=0 on a BooleanField -- functional but unidiomatic (see LandType note).
('is_global', models.BooleanField(default=0)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('name',),
},
),
# Office: office name/code, always tied to a Country (CASCADE on delete).
migrations.CreateModel(
name='Office',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, verbose_name='Office Name')),
('code', models.CharField(blank=True, max_length=255, verbose_name='Office Code')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
# Required FK: deleting a Country removes its Offices.
('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.Country', verbose_name='Country')),
],
options={
'ordering': ('name',),
},
),
# Organization: tenant-level entity; carries the customizable labels used for
# the four workflow levels and an M2M link to Industry.
migrations.CreateModel(
name='Organization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
# NOTE(review): UUID stored as CharField (not models.UUIDField); the uuid.uuid4
# callable default still yields a fresh value per row. Same pattern recurs on
# Partner/SiteProfile/Stakeholder/TolaUser and the WorkflowLevel models below.
('organization_uuid', models.CharField(default=uuid.uuid4, max_length=255, unique=True, verbose_name='Organization UUID')),
('name', models.CharField(blank=True, default='TolaData', max_length=255, verbose_name='Organization Name')),
('description', models.TextField(blank=True, max_length=765, null=True, verbose_name='Description/Notes')),
('organization_url', models.CharField(blank=True, max_length=255, null=True)),
# Per-organization display labels for workflow levels 1-4 (Program/Project/...).
('level_1_label', models.CharField(blank=True, default='Program', max_length=255, verbose_name='Project/Program Organization Level 1 label')),
('level_2_label', models.CharField(blank=True, default='Project', max_length=255, verbose_name='Project/Program Organization Level 2 label')),
('level_3_label', models.CharField(blank=True, default='Component', max_length=255, verbose_name='Project/Program Organization Level 3 label')),
('level_4_label', models.CharField(blank=True, default='Activity', max_length=255, verbose_name='Project/Program Organization Level 4 label')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('industry', models.ManyToManyField(blank=True, to='workflow.Industry')),
],
options={
'ordering': ('name',),
'verbose_name_plural': 'Organizations',
},
),
# Partner: partner organization with approval workflow and contact links,
# scoped to an Organization (default pk=1) and optionally a Country.
migrations.CreateModel(
name='Partner',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('partners_uuid', models.CharField(default=uuid.uuid4, max_length=255, unique=True, verbose_name='Partner UUID')),
('name', models.CharField(blank=True, max_length=255, null=True, verbose_name='Partner/Organization Name')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('approval', models.ManyToManyField(blank=True, to='workflow.ApprovalWorkflow')),
# NOTE(review): max_length is meaningless on a ManyToManyField (silently
# ignored by Django) -- candidate for removal in models.py.
('contact', models.ManyToManyField(blank=True, max_length=255, to='workflow.Contact')),
('country', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Country')),
# default=1 pins new rows to the first Organization unless set explicitly.
('organization', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization')),
],
options={
# NOTE(review): ordering references 'type', but no 'type' field is declared in
# this CreateModel -- presumably added by a later AddField operation outside
# this view; verify, otherwise queries ordering Partner will raise FieldError.
'ordering': ('country', 'name', 'type'),
'verbose_name_plural': 'Partners',
},
),
# Portfolio: named grouping with country links, optionally owned by an Organization.
migrations.CreateModel(
name='Portfolio',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('description', models.TextField(blank=True, null=True)),
# default=0 on a BooleanField -- functional but unidiomatic (see LandType note).
('is_global', models.BooleanField(default=0)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('country', models.ManyToManyField(blank=True, to='workflow.Country')),
('organization', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization')),
],
options={
'ordering': ('name',),
},
),
# ProfileType: per-organization lookup for site profile types (used by SiteProfile.type).
migrations.CreateModel(
name='ProfileType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('profile', models.CharField(blank=True, max_length=255, verbose_name='Profile Type')),
('default_global', models.BooleanField(default=0)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('organization', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization')),
],
options={
'ordering': ('profile',),
},
),
# ProjectType: per-organization lookup of activity types; name and description
# are required (no blank/null).
migrations.CreateModel(
name='ProjectType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=135, verbose_name='Type of Activity')),
('description', models.CharField(max_length=765)),
('default_global', models.BooleanField(default=0)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('organization', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization')),
],
options={
'ordering': ('name',),
},
),
# RiskRegister: free-text risk log (likelihood/impact/rating plus mitigation
# and contingency plans); mirrors IssueRegister's shape with all fields optional.
migrations.CreateModel(
name='RiskRegister',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, null=True)),
('type', models.CharField(blank=True, max_length=135, null=True)),
('impact', models.CharField(blank=True, max_length=255, null=True)),
('likelihood', models.CharField(blank=True, max_length=255, null=True)),
('rating', models.CharField(blank=True, max_length=255, null=True)),
('status', models.IntegerField(blank=True, default=0, null=True)),
('category', models.CharField(blank=True, max_length=255, null=True)),
('contingency_plan', models.CharField(blank=True, max_length=255, null=True)),
('mitigation_plan', models.CharField(blank=True, max_length=255, null=True)),
('post_mitigation_status', models.CharField(blank=True, max_length=255, null=True)),
# NOTE(review): 'action_by' is a DateTimeField despite the name suggesting a
# person; presumably it means "act by this date" -- confirm against models.py.
('action_by', models.DateTimeField(blank=True, null=True)),
('action_when', models.DateTimeField(blank=True, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('type', 'name'),
},
),
# Sector: per-organization sector lookup; reverse accessor on Organization is
# 'org_specific_sector'.
migrations.CreateModel(
name='Sector',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sector', models.CharField(blank=True, max_length=255, verbose_name='Sector Name')),
('default_global', models.BooleanField(default=0)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('organization', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='org_specific_sector', to='workflow.Organization')),
],
options={
'ordering': ('sector',),
},
),
# SectorRelated: ordered self-relation between two Sectors ('sector' and its
# related 'sector_related'), scoped to an Organization.
migrations.CreateModel(
name='SectorRelated',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
# Two independent sort positions: global order and per-organization order.
('order', models.PositiveIntegerField(default=0)),
('org_order', models.PositiveIntegerField(default=0)),
('organization', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization')),
('sector', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.Sector')),
('sector_related', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sector_related', to='workflow.Sector')),
],
options={
'ordering': ('order',),
},
),
# SiteProfile: geographic site with demographic/land statistics, lat/long
# coordinates, and FKs into the four administrative levels plus Office/LandType/
# ProfileType lookups.
migrations.CreateModel(
name='SiteProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('site_uuid', models.CharField(default=uuid.uuid4, max_length=255, unique=True, verbose_name='Site UUID')),
('name', models.CharField(max_length=255, verbose_name='Site Name')),
# Contact / membership details (all optional free text).
('contact_leader', models.CharField(blank=True, max_length=255, null=True, verbose_name='Contact Name')),
('date_of_firstcontact', models.DateTimeField(blank=True, null=True, verbose_name='Date of First Contact')),
('contact_number', models.CharField(blank=True, max_length=255, null=True, verbose_name='Contact Number')),
('num_members', models.CharField(blank=True, max_length=255, null=True, verbose_name='Number of Members')),
('info_source', models.CharField(blank=True, max_length=255, null=True, verbose_name='Data Source')),
# Demographic statistics.
('total_num_households', models.IntegerField(blank=True, null=True, verbose_name='Total # Households')),
('avg_household_size', models.DecimalField(blank=True, decimal_places=14, default=Decimal('0.00'), max_digits=25, null=True, verbose_name='Average Household Size')),
('total_population', models.IntegerField(blank=True, null=True)),
('total_male', models.IntegerField(blank=True, null=True)),
('total_female', models.IntegerField(blank=True, null=True)),
# Land statistics, measured in hectares/jeribs per the help_text.
('total_land', models.IntegerField(blank=True, help_text='In hectares/jeribs', null=True, verbose_name='Total Land')),
('total_agricultural_land', models.IntegerField(blank=True, help_text='In hectares/jeribs', null=True, verbose_name='Total Agricultural Land')),
('total_rainfed_land', models.IntegerField(blank=True, help_text='In hectares/jeribs', null=True, verbose_name='Total Rain-fed Land')),
('total_horticultural_land', models.IntegerField(blank=True, help_text='In hectares/jeribs', null=True, verbose_name='Total Horticultural Land')),
# Literacy and ownership percentages (stored as whole-number IntegerFields).
('total_literate_peoples', models.IntegerField(blank=True, null=True, verbose_name='Total Literate People')),
('literate_males', models.IntegerField(blank=True, help_text='%', null=True, verbose_name='% of Literate Males')),
('literate_females', models.IntegerField(blank=True, help_text='%', null=True, verbose_name='% of Literate Females')),
('literacy_rate', models.IntegerField(blank=True, help_text='%', null=True, verbose_name='Literacy Rate (%)')),
('populations_owning_land', models.IntegerField(blank=True, help_text='(%)', null=True, verbose_name='Households Owning Land')),
('avg_landholding_size', models.DecimalField(decimal_places=14, default=Decimal('0.00'), help_text='In hectares/jeribs', max_digits=25, verbose_name='Average Landholding Size')),
('households_owning_livestock', models.IntegerField(blank=True, help_text='(%)', null=True, verbose_name='Households Owning Livestock')),
('animal_type', models.CharField(blank=True, help_text='List Animal Types', max_length=255, null=True, verbose_name='Animal Types')),
# Decimal coordinates; NOT NULL with a 0.00 default, so "no location" is (0, 0).
('latitude', models.DecimalField(decimal_places=16, default=Decimal('0.00'), max_digits=25, verbose_name='Latitude (Decimal Coordinates)')),
('longitude', models.DecimalField(decimal_places=16, default=Decimal('0.00'), max_digits=25, verbose_name='Longitude (Decimal Coordinates)')),
('status', models.BooleanField(default=True, verbose_name='Site Active')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
# Administrative hierarchy: province=level 1, district=level 2,
# admin_level_three=level 3, village=level 4 (also an AdminLevelThree FK).
('admin_level_three', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='site_level3', to='workflow.AdminLevelThree', verbose_name='Administrative Level 3')),
('approval', models.ManyToManyField(blank=True, to='workflow.ApprovalWorkflow')),
('classify_land', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.LandType')),
('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.Country')),
('district', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.AdminLevelTwo', verbose_name='Administrative Level 2')),
('office', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Office')),
('province', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.AdminLevelOne', verbose_name='Administrative Level 1')),
('type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.ProfileType')),
('village', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.AdminLevelThree', verbose_name='Administrative Level 4')),
],
options={
'ordering': ('name',),
'verbose_name_plural': 'Site Profiles',
},
),
# Stakeholder: stakeholder organization with role/contribution text, approval
# and contact links, sectors, and an optional formal relationship document.
migrations.CreateModel(
name='Stakeholder',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('stakeholder_uuid', models.CharField(default=uuid.uuid4, max_length=255, unique=True, verbose_name='Stakeholder UUID')),
('name', models.CharField(blank=True, max_length=255, null=True, verbose_name='Stakeholder/Organization Name')),
('role', models.CharField(blank=True, max_length=255, null=True, verbose_name='Role')),
('contribution', models.CharField(blank=True, max_length=255, null=True, verbose_name='Contribution')),
('stakeholder_register', models.BooleanField(default=0, verbose_name='Has this partner been added to stakeholder register?')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('approval', models.ManyToManyField(blank=True, to='workflow.ApprovalWorkflow')),
# NOTE(review): max_length is a no-op on ManyToManyField (same quirk as Partner.contact).
('contact', models.ManyToManyField(blank=True, max_length=255, to='workflow.Contact')),
('country', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Country')),
('formal_relationship_document', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='relationship_document', to='workflow.Documentation', verbose_name='Formal Written Description of Relationship')),
('organization', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization')),
('sectors', models.ManyToManyField(blank=True, to='workflow.Sector')),
],
options={
# NOTE(review): ordering references 'type' which is not declared in this
# CreateModel -- presumably added by a later AddField (e.g. a StakeholderType
# FK) outside this view; verify.
'ordering': ('country', 'name', 'type'),
'verbose_name_plural': 'Stakeholders',
},
),
# StakeholderType: per-organization lookup of stakeholder types.
migrations.CreateModel(
name='StakeholderType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, null=True, verbose_name='Stakeholder Type')),
('default_global', models.BooleanField(default=0)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('organization', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization')),
],
options={
'ordering': ('name',),
'verbose_name_plural': 'Stakeholder Types',
},
),
# TolaBookmarks: saved bookmark (name + URL + serialized filter string).
# No user FK is declared here -- presumably added by a later AddField; verify.
migrations.CreateModel(
name='TolaBookmarks',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, null=True)),
('bookmark_url', models.CharField(blank=True, max_length=255, null=True)),
('filter', models.CharField(blank=True, max_length=255, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ('name',),
},
),
# TolaSites: per-site deployment configuration (service URLs, API credentials,
# privacy text), tied 1:N to django.contrib.sites.Site.
migrations.CreateModel(
name='TolaSites',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, null=True)),
('agency_name', models.CharField(blank=True, max_length=255, null=True)),
('agency_url', models.CharField(blank=True, max_length=255, null=True)),
('tola_report_url', models.CharField(default='https://report.toladata.io', max_length=255, null=True)),
# NOTE(review): tola_tables_url defaults to the same host as front_end_url
# ('activity.toladata.io'); if Tables is a separate service this looks like a
# copy-paste default -- confirm the intended host.
('tola_tables_url', models.CharField(default='https://activity.toladata.io', max_length=255, null=True)),
('front_end_url', models.CharField(default='https://activity.toladata.io', max_length=255, null=True)),
('tola_tables_user', models.CharField(blank=True, max_length=255, null=True)),
('tola_tables_token', models.CharField(blank=True, max_length=255, null=True)),
('privacy_disclaimer', models.TextField(blank=True, null=True)),
# Uses created/updated instead of the create_date/edit_date pair used elsewhere.
('created', models.DateTimeField(blank=True, null=True)),
('updated', models.DateTimeField(blank=True, null=True)),
('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='sites.Site')),
],
options={
'verbose_name_plural': 'Tola Sites',
},
),
# TolaUser: profile extension of the auth user (OneToOne, reverse accessor
# 'tola_user') carrying org/country scope, API tokens, and a JSON filter blob.
migrations.CreateModel(
name='TolaUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tola_user_uuid', models.CharField(default=uuid.uuid4, max_length=255, unique=True, verbose_name='TolaUser UUID')),
('title', models.CharField(blank=True, choices=[('mr', 'Mr.'), ('mrs', 'Mrs.'), ('ms', 'Ms.')], max_length=3, null=True)),
('name', models.CharField(blank=True, max_length=100, null=True, verbose_name='Given Name')),
('employee_number', models.IntegerField(blank=True, null=True, verbose_name='Employee Number')),
('position_description', models.CharField(blank=True, max_length=255, null=True)),
('contact_info', models.CharField(blank=True, max_length=255, null=True)),
('tables_api_token', models.CharField(blank=True, max_length=255, null=True)),
('activity_api_token', models.CharField(blank=True, max_length=255, null=True)),
('privacy_disclaimer_accepted', models.BooleanField(default=False)),
('filter', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
# 'countries' = set of accessible countries; 'country' = the user's home country.
('countries', models.ManyToManyField(blank=True, related_name='countries', to='workflow.Country', verbose_name='Accessible Countries')),
('country', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Country')),
('organization', models.ForeignKey(blank=True, default=1, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='tola_user', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('name',),
},
),
# TolaUserFilter: per-user saved dashboard filter (date range plus country and
# sector M2Ms); workflowlevel1/2 filter M2Ms are attached via AddField below.
migrations.CreateModel(
name='TolaUserFilter',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('start_date_filter', models.DateField(blank=True, null=True)),
('end_date_filter', models.DateField(blank=True, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('country_filter', models.ManyToManyField(blank=True, related_name='filter_country', to='workflow.Country')),
('sector_filter', models.ManyToManyField(blank=True, related_name='filter_sector', to='workflow.Sector')),
# FK to TolaUser (the profile model above), not directly to auth's user.
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='filter_user', to='workflow.TolaUser')),
],
options={
'ordering': ('user',),
},
),
# WorkflowLevel1: top-level workflow entity ("Program" by default label),
# linking awards, countries, fund codes, milestones, sectors, and user access.
migrations.CreateModel(
name='WorkflowLevel1',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('level1_uuid', models.CharField(default=uuid.uuid4, max_length=255, unique=True, verbose_name='WorkflowLevel1 UUID')),
# External/human-readable identifier, distinct from the pk and the uuid.
('unique_id', models.CharField(blank=True, max_length=255, null=True, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=255, verbose_name='Name')),
('funding_status', models.CharField(blank=True, max_length=255, verbose_name='Funding Status')),
('cost_center', models.CharField(blank=True, max_length=255, null=True, verbose_name='Fund Code')),
('description', models.TextField(blank=True, max_length=765, null=True, verbose_name='Description')),
('public_dashboard', models.BooleanField(default=False, verbose_name='Enable Public Dashboard')),
('start_date', models.DateTimeField(blank=True, null=True)),
('end_date', models.DateTimeField(blank=True, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('sort', models.IntegerField(default=0)),
('award', models.ManyToManyField(blank=True, to='workflow.Award')),
('country', models.ManyToManyField(blank=True, to='workflow.Country')),
('fund_code', models.ManyToManyField(blank=True, to='workflow.FundCode')),
('milestone', models.ManyToManyField(blank=True, to='workflow.Milestone')),
('organization', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization')),
('portfolio', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Portfolio')),
# Primary sectors vs. sub-sectors are two M2Ms to the same Sector model,
# disambiguated by related_name on the latter.
('sector', models.ManyToManyField(blank=True, to='workflow.Sector')),
('sub_sector', models.ManyToManyField(blank=True, related_name='sub_sector', to='workflow.Sector')),
('user_access', models.ManyToManyField(blank=True, to='workflow.TolaUser')),
],
options={
'ordering': ('name',),
'verbose_name_plural': 'WorkflowLevel1',
},
),
# WorkflowLevel1Sector: join model pairing a WorkflowLevel1 with one primary
# Sector and a set of sub-sectors.
migrations.CreateModel(
name='WorkflowLevel1Sector',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('sort', models.IntegerField(default=0)),
('sector', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='level1_primary_sector', to='workflow.Sector')),
('sub_sector', models.ManyToManyField(blank=True, related_name='level1_sub_sector', to='workflow.Sector')),
('workflowlevel1', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='level1_sectors', to='workflow.WorkflowLevel1')),
],
options={
'ordering': ('create_date',),
'verbose_name_plural': 'WorkflowLevel1 Sectors',
},
),
# WorkflowLevel2: project-level workflow entity under a WorkflowLevel1, with
# planning (dates/budget), closeout (actuals/lessons), status/progress choices,
# and links to sites, stakeholders, sectors, currencies, and indicators.
# This is the live counterpart of the HistoricalWorkflowLevel2 model created
# earlier in this migration.
migrations.CreateModel(
name='WorkflowLevel2',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('level2_uuid', models.CharField(blank=True, default=uuid.uuid4, max_length=255, unique=True, verbose_name='WorkflowLevel2 UUID')),
('short', models.BooleanField(default=True, verbose_name='Short Form (recommended)')),
# NOTE(review): parent link stored as a plain IntegerField (0 = no parent)
# rather than a self-FK -- no referential integrity on this column.
('parent_workflowlevel2', models.IntegerField(blank=True, default=0, verbose_name='Parent')),
('date_of_request', models.DateTimeField(blank=True, null=True, verbose_name='Date of Request')),
('name', models.CharField(max_length=255, verbose_name='Name')),
('description', models.TextField(blank=True, null=True, verbose_name='Description')),
('short_name', models.CharField(blank=True, max_length=20, null=True, verbose_name='Code')),
('staff_responsible', models.CharField(blank=True, max_length=255, null=True, verbose_name='Staff Responsible')),
('effect_or_impact', models.TextField(blank=True, null=True, verbose_name='What is the anticipated Outcome or Goal?')),
# Planning fields.
('expected_start_date', models.DateTimeField(blank=True, null=True, verbose_name='Expected starting date')),
('expected_end_date', models.DateTimeField(blank=True, null=True, verbose_name='Expected ending date')),
('total_estimated_budget', models.DecimalField(blank=True, decimal_places=2, default=Decimal('0.00'), help_text='In USD', max_digits=12, verbose_name='Total Project Budget')),
('justification_background', models.TextField(blank=True, null=True, verbose_name='General Background and Problem Statement')),
('risks_assumptions', models.TextField(blank=True, null=True, verbose_name='Risks and Assumptions')),
('description_of_government_involvement', models.TextField(blank=True, null=True)),
('description_of_community_involvement', models.TextField(blank=True, null=True)),
# Closeout / actuals fields.
('actual_start_date', models.DateTimeField(blank=True, null=True)),
('actual_end_date', models.DateTimeField(blank=True, null=True)),
('actual_duration', models.CharField(blank=True, max_length=255, null=True)),
('on_time', models.BooleanField(default=True)),
('no_explanation', models.TextField(blank=True, null=True, verbose_name='If not on time explain delay')),
('actual_cost', models.DecimalField(blank=True, decimal_places=2, default=Decimal('0.00'), help_text='What was the actual final cost? This should match any financial documentation you have in the file. It should be completely documented and verifiable by finance and any potential audit', max_digits=20, verbose_name='Actual Cost')),
('actual_cost_date', models.DateTimeField(blank=True, null=True)),
('budget_variance', models.CharField(blank=True, max_length=255, null=True, verbose_name='Budget versus Actual variance')),
('explanation_of_variance', models.CharField(blank=True, max_length=255, null=True, verbose_name='Explanation of variance')),
('total_cost', models.DecimalField(blank=True, decimal_places=2, default=Decimal('0.00'), help_text='In USD', max_digits=12, verbose_name='Estimated Budget for Organization')),
('agency_cost', models.DecimalField(blank=True, decimal_places=2, default=Decimal('0.00'), help_text='In USD', max_digits=12, verbose_name='Actual Cost for Organization')),
('community_handover', models.BooleanField(default=False, help_text='Check box if it was completed', verbose_name='CommunityHandover/Sustainability Maintenance Plan')),
('capacity_built', models.TextField(blank=True, max_length=755, null=True, verbose_name='Describe how sustainability was ensured for this project?')),
('quality_assured', models.TextField(blank=True, max_length=755, null=True, verbose_name='How was quality assured for this project')),
('issues_and_challenges', models.TextField(blank=True, null=True, verbose_name='List any issues or challenges faced (include reasons for delays)')),
('lessons_learned', models.TextField(blank=True, null=True, verbose_name='Lessons learned')),
('sort', models.IntegerField(blank=True, default=0)),
('create_date', models.DateTimeField(blank=True, null=True, verbose_name='Date Created')),
('edit_date', models.DateTimeField(blank=True, null=True, verbose_name='Last Edit Date')),
# Constrained choice fields: RAG-style status and workflow progress stage.
('status', models.CharField(blank=True, choices=[('green', 'Green'), ('yellow', 'Yellow'), ('orange', 'Orange'), ('red', 'Red')], default='green', max_length=50)),
('progress', models.CharField(blank=True, choices=[('open', 'Open'), ('awaitingapproval', 'Awaiting Approval'), ('tracking', 'Tracking'), ('closed', 'Closed')], default='open', max_length=50)),
('approval', models.ManyToManyField(blank=True, to='workflow.ApprovalWorkflow')),
('donor_currency', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='donor_project', to='workflow.Currency')),
('indicators', models.ManyToManyField(blank=True, to='indicators.Indicator')),
('local_currency', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='local_project', to='workflow.Currency')),
('milestone', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Milestone')),
('office', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Office', verbose_name='Office')),
# NOTE(review): 'partners' is a ForeignKey despite the plural name/verbose_name
# ("Partners") -- only one Partner can be linked; confirm intent in models.py.
('partners', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Partner', verbose_name='Partners')),
('sector', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='workflow2_sector', to='workflow.Sector', verbose_name='Sector')),
('site', models.ManyToManyField(blank=True, to='workflow.SiteProfile')),
('stakeholder', models.ManyToManyField(blank=True, to='workflow.Stakeholder', verbose_name='Stakeholders')),
('sub_sector', models.ManyToManyField(blank=True, related_name='workflowlevel2_sub_sector', to='workflow.Sector', verbose_name='Sub-Sector')),
('workflowlevel1', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workflowlevel2', to='workflow.WorkflowLevel1', verbose_name='Program')),
],
options={
'ordering': ('name',),
'verbose_name_plural': 'WorkflowLevel2',
# Custom permission consumed by the approval workflow.
'permissions': (('can_approve', 'Can approve initiation'),),
},
),
# WorkflowLevel2Sort: persisted sort order for WorkflowLevel2 rows within a
# WorkflowLevel1, stored as a JSON array.
migrations.CreateModel(
name='WorkflowLevel2Sort',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
# Plain integer reference to the sorted row (not a FK -- no integrity check).
('workflowlevel2_id', models.IntegerField(default=0, verbose_name='ID to be Sorted')),
('sort_array', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('workflowlevel1', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel1')),
# NOTE(review): FK field named with an '_id' suffix -- the actual DB column
# becomes 'workflowlevel2_parent_id_id'; consider renaming in models.py.
('workflowlevel2_parent_id', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel2')),
],
options={
'ordering': ('workflowlevel1', 'workflowlevel2_id'),
'verbose_name_plural': 'WorkflowLevel Sort',
},
),
# WorkflowLevel3: activity/task under a WorkflowLevel2 ("Project"), tracking
# completion percentage, dates, budget vs. cost, and an optional site.
migrations.CreateModel(
name='WorkflowLevel3',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('level3_uuid', models.CharField(default=uuid.uuid4, max_length=255, unique=True, verbose_name='WorkflowLevel3 UUID')),
('percent_complete', models.IntegerField(blank=True, null=True, verbose_name='% complete')),
('percent_cumulative', models.IntegerField(blank=True, null=True, verbose_name='% cumulative completion')),
('est_start_date', models.DateTimeField(blank=True, null=True)),
('est_end_date', models.DateTimeField(blank=True, null=True)),
('actual_start_date', models.DateTimeField(blank=True, null=True)),
('actual_end_date', models.DateTimeField(blank=True, null=True)),
# Whole-unit amounts (IntegerField), unlike WorkflowLevel2's DecimalFields.
('budget', models.IntegerField(blank=True, null=True, verbose_name='Estimated Budget')),
('cost', models.IntegerField(blank=True, null=True, verbose_name='Actual Cost')),
('description', models.CharField(blank=True, max_length=255, verbose_name='Description')),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('site', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.SiteProfile')),
('workflowlevel2', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel2', verbose_name='Project')),
],
options={
'ordering': ('description',),
'verbose_name_plural': 'WorkflowLevel3',
},
),
# WorkflowModules: which optional module (approval/budget/stakeholders/...)
# is enabled for a given WorkflowLevel2.
migrations.CreateModel(
name='WorkflowModules',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
# NOTE(review): default='open' is NOT one of the declared choices (it looks
# copy-pasted from WorkflowLevel2.progress) -- any form/full_clean validation
# of an untouched default will fail. Fix in models.py and add a migration.
('modules', models.CharField(choices=[('approval', 'Approval'), ('budget', 'Budget'), ('stakeholders', 'Stakeholders'), ('documents', 'Documents'), ('sites', 'Sites'), ('indicators', 'Indicators')], default='open', max_length=50)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('workflowlevel2', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel2')),
],
options={
'ordering': ('modules',),
},
),
migrations.CreateModel(
name='WorkflowTeam',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('salary', models.CharField(blank=True, max_length=255, null=True)),
('start_date', models.DateTimeField(blank=True, null=True)),
('end_date', models.DateTimeField(blank=True, null=True)),
('status', models.CharField(blank=True, max_length=255, null=True)),
('budget_limit', models.IntegerField(blank=True, null=True)),
('create_date', models.DateTimeField(blank=True, null=True)),
('edit_date', models.DateTimeField(blank=True, null=True)),
('country', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Country')),
('partner_org', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization')),
('role', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='auth.Group')),
('workflow_user', models.ForeignKey(blank=True, help_text='User', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='auth_approving', to='workflow.TolaUser')),
('workflowlevel1', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel1')),
],
options={
'ordering': ('workflow_user',),
'verbose_name_plural': 'WorkflowTeam',
},
),
migrations.AddField(
model_name='tolauserfilter',
name='workflowlevel1_filter',
field=models.ManyToManyField(blank=True, related_name='filter_level1', to='workflow.WorkflowLevel1'),
),
migrations.AddField(
model_name='tolauserfilter',
name='workflowlevel2_filter',
field=models.ManyToManyField(blank=True, related_name='filter_level2', to='workflow.WorkflowLevel2'),
),
migrations.AddField(
model_name='tolabookmarks',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tolabookmark', to='workflow.TolaUser'),
),
migrations.AddField(
model_name='tolabookmarks',
name='workflowlevel1',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel1'),
),
migrations.AddField(
model_name='stakeholder',
name='type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.StakeholderType'),
),
migrations.AddField(
model_name='stakeholder',
name='vetting_document',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='vetting_document', to='workflow.Documentation', verbose_name='Vetting/ due diligence statement'),
),
migrations.AddField(
model_name='stakeholder',
name='workflowlevel1',
field=models.ManyToManyField(blank=True, to='workflow.WorkflowLevel1'),
),
migrations.AddField(
model_name='sector',
name='sector_nearest',
field=models.ManyToManyField(related_name='nearest', through='workflow.SectorRelated', to='workflow.Sector'),
),
migrations.AddField(
model_name='riskregister',
name='workflowlevel2',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel2'),
),
migrations.AddField(
model_name='partner',
name='sectors',
field=models.ManyToManyField(blank=True, to='workflow.Sector'),
),
migrations.AddField(
model_name='partner',
name='type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='stakeholder_partner', to='workflow.StakeholderType'),
),
migrations.AddField(
model_name='partner',
name='workflowlevel1',
field=models.ManyToManyField(blank=True, to='workflow.WorkflowLevel1'),
),
migrations.AddField(
model_name='organization',
name='sector',
field=models.ManyToManyField(blank=True, related_name='org_sector', to='workflow.Sector'),
),
migrations.AddField(
model_name='milestone',
name='organization',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization'),
),
migrations.AddField(
model_name='landtype',
name='organization',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization'),
),
migrations.AddField(
model_name='issueregister',
name='assigned',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.TolaUser'),
),
migrations.AddField(
model_name='issueregister',
name='organization',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization'),
),
migrations.AddField(
model_name='issueregister',
name='workflowlevel2',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel2'),
),
migrations.AddField(
model_name='historicalworkflowlevel2',
name='milestone',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.Milestone'),
),
migrations.AddField(
model_name='historicalworkflowlevel2',
name='office',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.Office'),
),
migrations.AddField(
model_name='historicalworkflowlevel2',
name='partners',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.Partner'),
),
migrations.AddField(
model_name='historicalworkflowlevel2',
name='sector',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.Sector'),
),
migrations.AddField(
model_name='historicalworkflowlevel2',
name='workflowlevel1',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.WorkflowLevel1'),
),
migrations.AddField(
model_name='historicalsiteprofile',
name='classify_land',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.LandType'),
),
migrations.AddField(
model_name='historicalsiteprofile',
name='country',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.Country'),
),
migrations.AddField(
model_name='historicalsiteprofile',
name='district',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.AdminLevelTwo'),
),
migrations.AddField(
model_name='historicalsiteprofile',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicalsiteprofile',
name='office',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.Office'),
),
migrations.AddField(
model_name='historicalsiteprofile',
name='province',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.AdminLevelOne'),
),
migrations.AddField(
model_name='historicalsiteprofile',
name='type',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.ProfileType'),
),
migrations.AddField(
model_name='historicalsiteprofile',
name='village',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.AdminLevelThree'),
),
migrations.AddField(
model_name='historicalriskregister',
name='workflowlevel2',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.WorkflowLevel2'),
),
migrations.AddField(
model_name='historicalissueregister',
name='assigned',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.TolaUser'),
),
migrations.AddField(
model_name='historicalissueregister',
name='history_user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicalissueregister',
name='organization',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.Organization'),
),
migrations.AddField(
model_name='historicalissueregister',
name='workflowlevel2',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.WorkflowLevel2'),
),
migrations.AddField(
model_name='historicalbudget',
name='workflowlevel2',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='workflow.WorkflowLevel2'),
),
migrations.AddField(
model_name='fundcode',
name='organization',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization'),
),
migrations.AddField(
model_name='formguidance',
name='organization',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization'),
),
migrations.AddField(
model_name='documentation',
name='workflowlevel1',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel1'),
),
migrations.AddField(
model_name='documentation',
name='workflowlevel2',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='doc_workflowlevel2', to='workflow.WorkflowLevel2'),
),
migrations.AddField(
model_name='contact',
name='country',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.Country'),
),
migrations.AddField(
model_name='contact',
name='organization',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization'),
),
migrations.AddField(
model_name='contact',
name='workflowlevel1',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel1'),
),
migrations.AddField(
model_name='codedfieldvalues',
name='workflowlevel2',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel2'),
),
migrations.AddField(
model_name='codedfield',
name='organization',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization'),
),
migrations.AddField(
model_name='codedfield',
name='workflowlevel1',
field=models.ManyToManyField(blank=True, to='workflow.WorkflowLevel1'),
),
migrations.AddField(
model_name='codedfield',
name='workflowlevel2',
field=models.ManyToManyField(blank=True, to='workflow.WorkflowLevel2'),
),
migrations.AddField(
model_name='checklistitem',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.TolaUser'),
),
migrations.AddField(
model_name='checklist',
name='country',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Country'),
),
migrations.AddField(
model_name='checklist',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.TolaUser'),
),
migrations.AddField(
model_name='checklist',
name='workflowlevel2',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel2', verbose_name='Project Initiation'),
),
migrations.AddField(
model_name='budget',
name='donor_currency',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='donor', to='workflow.Currency'),
),
migrations.AddField(
model_name='budget',
name='local_currency',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='local', to='workflow.Currency'),
),
migrations.AddField(
model_name='budget',
name='workflowlevel2',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='workflow.WorkflowLevel2'),
),
migrations.AddField(
model_name='award',
name='award_currency',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='award_currency', to='workflow.Currency'),
),
migrations.AddField(
model_name='award',
name='countries',
field=models.ManyToManyField(blank=True, related_name='countries_award', to='workflow.Country', verbose_name='Countries'),
),
migrations.AddField(
model_name='award',
name='currency',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.Currency'),
),
migrations.AddField(
model_name='award',
name='donors',
field=models.ManyToManyField(blank=True, to='workflow.Stakeholder'),
),
migrations.AddField(
model_name='award',
name='organization',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization'),
),
migrations.AddField(
model_name='approvalworkflow',
name='assigned_to',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='to_approval', to='workflow.TolaUser'),
),
migrations.AddField(
model_name='approvalworkflow',
name='requested_from',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='from_approval', to='workflow.TolaUser'),
),
migrations.AddField(
model_name='approvaltype',
name='organization',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='workflow.Organization'),
),
migrations.AddField(
model_name='adminlevelthree',
name='adminleveltwo',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.AdminLevelTwo', verbose_name='Admin Level 2'),
),
migrations.AddField(
model_name='adminlevelone',
name='country',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='workflow.Country'),
),
migrations.AddField(
model_name='adminlevelfour',
name='adminlevelthree',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.AdminLevelThree'),
),
migrations.AddField(
model_name='adminlevelfour',
name='adminleveltwo',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.AdminLevelTwo', verbose_name='Admin Boundary 3'),
),
migrations.CreateModel(
name='TolaUserProxy',
fields=[
],
options={
'verbose_name': 'Report Tola User',
'proxy': True,
'verbose_name_plural': 'Report Tola Users',
'indexes': [],
},
bases=('workflow.tolauser',),
),
]
| 67.446429
| 352
| 0.616235
| 9,744
| 94,425
| 5.816708
| 0.050903
| 0.076696
| 0.053901
| 0.070486
| 0.880289
| 0.867427
| 0.827429
| 0.79807
| 0.777603
| 0.755866
| 0
| 0.011593
| 0.237204
| 94,425
| 1,399
| 353
| 67.494639
| 0.775306
| 0.00072
| 0
| 0.690151
| 1
| 0.001438
| 0.208375
| 0.033968
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005032
| 0
| 0.007908
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
113b98f6b47fccdbba85713feadc0af1a901b14e
| 19,829
|
py
|
Python
|
python/app/ui/resources_rc.py
|
nfa-vfxim/tk-desktop-libraryimporter
|
2a77755a779d79e8d6e2cb53d9b0bca97cca5851
|
[
"MIT"
] | 1
|
2021-12-24T19:37:55.000Z
|
2021-12-24T19:37:55.000Z
|
python/app/ui/resources_rc.py
|
nfa-vfxim/tk-desktop-libraryimporter
|
2a77755a779d79e8d6e2cb53d9b0bca97cca5851
|
[
"MIT"
] | null | null | null |
python/app/ui/resources_rc.py
|
nfa-vfxim/tk-desktop-libraryimporter
|
2a77755a779d79e8d6e2cb53d9b0bca97cca5851
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Resource object code
#
# by: The Resource Compiler for PySide (Qt v4.8.7)
#
# WARNING! All changes made in this file will be lost!
from tank.platform.qt import QtCore
qt_resource_data = b"\x00\x00\x19\x90\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00M\x00\x00\x00L\x08\x06\x00\x00\x00(U:\x15\x00\x00\x04$iCCPICC Profile\x00\x008\x11\x85U\xdfo\xdbT\x14>\x89oR\xa4\x16? XG\x87\x8a\xc5\xafUS[\xb9\x1b\x1a\xad\xc6\x06I\x93\xa5\xedJ\x16\xa5\xe9\xd8*$\xe4:7\x89\xa9\x1b\x07\xdb\xe9\xb6\xaaO{\x817\x06\xfc\x01@\xd9\x03\x0fH<!\x0d\x06b{\xd9\xf6\xc0\xb4IS\x87*\xaaIH{\xe8\xc4\x0f!&\xed\x05U\xe1\xbbvb'S\xc4\x5c\xf5\xfa\xcb9\xdf9\xe7;\xe7^\xdbD=_i\xb5\x9a\x19U\x88\x96\xab\xae\x9d\xcf$\x95\x93\xa7\x16\x94\x9eM\x8a\xd2\xb3\xd4K\x03\xd4\xab\xe9N-\x91\xcb\xcd\x12.\xc1\x15\xf7\xce\xeb\xe1\x1d\x8a\x08\xcb\xed\x91\xee\xfeN\xf6#\xbfz\x8b\xdc\xd1\x89\x22O\xc0n\x15\x1d}\x19\xf8\x0cQ\xcc\xd4k\xb6K\x14\xbf\x07\xfb\xf8i\xb7\x06\xdc\xf3\x1c\xf0\xd36\x04\x02\xab\x02\x97}\x9c\x12x\xd1\xc7'=N!?\x09\x8e\xd0*\xeb\x15\xad\x08\xbc\x06<\xbc\xd8f/\xb7a_\x03\x18\xc8\x93\xe1Un\x1b\xba\x22f\x91\xb3\xad\x92ar\xcf\xe1/\x8fq\xb71\xff\x0f.\x9bu\xf4\xec]\x83X\xfb\x9c\xa5\xb9c\xb8\x0f\x89\xde+\xf6T\xbe\x89?\xd7\xb5\xf4\x1c\xf0K\xb0_\xaf\xb9Ia\x7f\x05\xf8\x8f\xfa\xd2|\x02x\x1fQ\xf4\xa9\x92}t\xde\xe7G__\xad\x14\xde\x06\xde\x05{\xd1p\xa7\x0bM\xfbju1{\x1c\x18\xb1\xd1\xf5%\xeb\x98\xc8#8\xd7ug\x12\xb3\xa4\x17\x80\xefV\xf8\xb4\xd8c\xe8\x91\xa8\xc8Si\xe0a\xe0\xc1J}\xaa\x99_\x9aqV\xe6\x84\xdd\xcb\xb3Z\x99\xcc\x02#\x8fd\xbf\xa7\xcd\xe4\x80\x07\x80?\xb4\xad\xbc\xa8\x05\xcd\xd2:73\xa2\x16\xf2KWkn\xae\xa9A\xda\xae\x9aYQ\x0b\xfd2\x99;^\x8f\xc8)m\xbb\x95\xc2\x94\x1f\xcb\x0e\xbav\xa1\x19\xcb\x16J\xc6\xd1\xe9&\x7f\xadfzg\x11\xda\xd8\x05\xbb\x9e\x17\xda\x90\x9f\xdd\xd2\xect\x06\x18y\xd8?\xbc:/\xe6\x06\x1c\xdb]\xd4Rb\xb6\xa3\xc0G\xe8DD#N\x16-b\xd5\xa9J;\xa4P\x9e2\x94\xc4\xbdF6<%2\xc8\x84\x85\xc3\xcba1\x22O\xd2\x12l\xddy9\x8f\xe3\xc7\x84\x8c\xb2\x17\xbd-\xa2Q\xa5;\xc7\xafp\xbf\xc9\xb1X?S\xd9\x01\xfc\x1fb\xb3\xec0\x1bg\x13\xa4\xb07\xd8\x9b\xec\x08K\xc1:\xc1\x0e\x05\x0arm:\x15*\xd3}(\xf5\xf3\xbcOuT\x16:NP\xf2\xfc@}(\x88Q\xce\xd9\xef\x1a\xfa\xcd\x8f\xfe\x86\x9a\xb0K+\xf4#O\xd014[\xed\x13 
\x07\x13hu7\xd2>\xa7kk?\xec\x0e<\xca\x06\xbb\xf8\xce\xed\xbekkt\xfcq\xf3\x8d\xdd\x8bm\xc76\xb0n\xc6\xb6\xc2\xf8\xd8\xaf\xb1-\xfcmR\x02;`z\x8a\x96\xa1\xca\xf0v\xc2\x09x#\x1d=\x5c\x01\xd3%\x0d\xebo\xe0Y\xd0\xdaR\xda\xb1\xa3\xa5\xea\xf9\x81\xd0#&\xc1?\xc8>\xcc\xd2\xb9\xe1\xd0\xaa\xfe\xa2\xfe\xa9n\xa8_\xa8\x17\xd4\xdf;j\x84\x19;\xa6$}*}+\xfd(}'}/\xfdL\x8atY\xba\x22\xfd$]\x95\xbe\x91.\x059\xbb\xef\xbd\x9f%\xd8{\xaf_a\x13\xdd\x8a]h\xd5k\x9f5'SN\xca{\xe4\x17\xe5\x94\xfc\xbc\xfc\xb2<\x1b\xb0\x14\xb9_\x1e\x93\xa7\xe4\xbd\xf0\xec\x09\xf6\xcd\x0c\xfd\x1d\xbd\x18t\x0a\xb3jM\xb5{-\xf1\x04\x184\x0f%\x06\x9d\x06\xd7\xc6T\xc5\x84\xabt\x16Y\xdb\x9f\x93\xa6R6\xc8\xc6\xd8\xf4#\xa7v\x5c\x9c\xe5\x96\x8ax:\x9e\x8a'H\x89\xef\x8bO\xc4\xc7\xe23\x02\xb7\x9e\xbc\xf8^\xf8&\xb0\xa6\x03\xf5\xfe\x93\x130::\xe0m,L%\xc83\xe2\x9d:qVE\xf4\x0at\x9b\xd0\xcd]~\x06\xdf\x0e\xa2I\xabv\xd66\xca\x15W\xd9\xaf\xaa\xaf)\x09|\xca\xb82]\xd5G\x87\x15\xcd4\x15\xcf\xe5(6w\xb8\xbd\xc2\x8b\xa3$\xbe\x83\x22\x8e\xe8A\xde\xfb\xbeEv\xdd\x0cm\xee[D\x87\xff\xc2;\xebVh[\xa8\x13}\xed\x10\xf5\xbf\x1a\xda\x86\xf0N|\xe63\xa2\x8b\x07\xf5\xba\xbd\xe2\xe7\xa3H\xe4\x06\x91S:\xb0\xdf\xfb\x1d\xe9K\xe2\xddt\xb7\xd1x\x80\xf7U\xcf'D;\x1f7\x1a\xff\xae7\x1a;_\x22\xff\x16\xd1e\xf3?Y\x00qxl\x1b+\x13\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x15\x12IDATx\x01\xed\x9c\x09\x94\x15ev\xc7\xab\xea\xed\x8f^\xe8f\x07em\x14e\x11\x0f\x02\x01\xa1\xbb\x05\x141\xa8d\x94\x83\xa8\x18\x97\x13\x8f\x991:\xe8\x19\x97\x99\xe38N\x9cq\xd4q\x99hLBfF4\xca\x8c\xc11\x9e@tp\x90\xd3\x0b\x88\x08\x18$\x8a\x86\x1ddm\x18\xb6^\xde^\x95\xdf\xad~\xf5\xac\xb7\xf5{\xaf\x17\x14Lq\x8a\xfa\xea\xfb\xeew\xbf{\xff\xdf\xbd\xf7\xdb\xea\xb5j\x18\x86\xf2\xffWa\x088\x0b#\xef|\xea}\xfb\xf6\xf9v\xee\xdcY\x01\xe7~\xdcE\x1a\xb7\xae\xeb>M\xd3\x02\xba\xa249\x0c\xa31\xa6\xaa\x87\x86\x0e\x1d\xba\xfd\x9cs\xce\x09t\xbe\x04\x85sTO\xa7\xa5\x09@\xbb\xb7o\x9f\xa88\x9dUF,6\x05\x1b\x1f\xa1\xaa\xea\x00dPs\x89\x0e\x1dd\xc6~EU?\x07\xd05J4Z;\xb8\xa2b\xdd\xd7\x01d\x97\x83\
xb6y\xf3\xe6n\xa7\x8e\x1f\xbf\x06\x80nD\xe1+P\xdcm\x07\x080\xc2\x94\xed$\xefK\x05\xab\x82\xa6\xc9\xb42\xac\x8d\xf7\x22\xde\x8b);\x17T\x87f\xaa\x0b\xcd{\x94-))+\xfb\xcf1c\xc64\xdbywU\xba\xcb@\xab\xab\xab\xbb\x00\xe6?0Tu\x1e\x8a\xf9E\x01\x00\xd2\x01h\x93\xaa\xebu1E\xa9\xf5\xfb\xfd\x9fL\x980a\x0fExb\xceK\xfb\xe8\xa3\x8f\x06\xb5\xb4\xb4\x5c\xe4P\x94*C\xd3*\x01k,@\xe2\xd1\xe6\xd5\xcc\xfb\xbf\xd3\xde\xd3\x95\x95\x95\x9f\xe7\xe4\xd6\x01\x82N\x07m\xf5\xea\xd5\x13\xf5X\xec\x87\x00t5\x0a\xf1\xa0\x09E\xf9\x80\xf4\x12,h\xe9\xd4\xa9S\x8ft@\xde\xa4\xaa\xf5\xf5\xf5\xbd\x88\x7fsi\xe3F\x00\x9b\x9ch\xcf0\x96i\x0e\xc7\xcf\xa7L\x99\xb2.\xa9B'\xbdt\x1ah\xeb\xd7\xaf\xef\x1bliy\x0as\xbaY\x84G\xbe\x00\x16\xf6[\x7fQ\xd1\xb3\x97\x5cr\x89\xb8_\x97^\x1b6l\x18\xda\xd2\xd4t\x1f\xed\xdfNC>\xe9,\xda\x7f\xcd\xeb\xf7?0~\xfc\xf8C\x9d\xd9xg\x80\xa6\xd2\xe3\xf7\x1a\xba\xfeS\x04\x93\xf8\x13\x04\xb1g\x15M{\xbe3\xad*_\xa5\xc5\xfa\x14]_\x88u/\xa4\x8e\x97\xbbQ\xd5\xb4\x1f#\xcb\xafH\x93\xdd\xf1\xabC\xa0}\xf0\xc1\x07\xbd\xa3\xd1\xe8\xbf\xd1\xa3W\x88(H\xf4\xb6\xcf\xe7\xbb\x8f8\xb5\xab\xe3\xa2u\x8c\x03\xf1oH \x10x\x96\x0e\x9ccrR\xd5\xf7\x9cN\xe7\x82\xc9\x93'7t\x8c3\xb1\x19Wj\x17\x0f\x02\xfd4\xc0z\x9d\xca}q\x85#\xb8\xc5m\xf4\xe6\x7f\xb5\x8bY\x17V\x22\xc6\xce\xc6\x0b~\x8b\x9e\xbdh\xe6\x10r\xde\xc4@\xb1\xaa#MZ#OA<\x10d\x01\x80\xad\xa0\x92\x00\xb6\x0aA.\xfa&\x02&J1\x18,\x17\xf9L9\x91W\xe46\xe5/H\xe3d\xe2\x82-\x8d\x98\xf1\x03\x1a~\xd2\x0c\xf6\xaa\xfa\x0bz\xedG\xb0\xccg\xca\x90\xdc\xf2\xe9\x7f\xd3\xf0\x8e\x9f!\xfbC\xe6 
\xa1\xaa\x0f\xd2\xd1O\xb7G\x8c\x82,mMm\xedc\x98\xfaS4$\xc3\xe3=\x00\xf60\xe93\x010\xc1F\x17yEn\x91_\xf4\x10}\xa4\xa0\xd0+oK\xa3\x97\xbeGS/\xd2@\x14s\xbf\x19\x01\xde(\xb4\xb1o\x0a=\xba\xc8\x84\xfb5\xe4q\xa2\xcb\xdd\xe8\xf2\x8f\x85\xc8\x96\x17h\xf555\xd73\x85x\x03\x97\xd4h\xe4V\x1ay\xa5\x90F\xbe\x89\xb4\x00\xf7\xd7\x00\xb7\x18W\xd5\x99\xa2\xcc\x9bZ]\xfdf\xber\xe6\x04\x8d\x186\x06S\x96\x99\xb5WS\xd5\x87\xa6TV>\x99/\xf3o:\xdd\xea\xba\xba\x07u\xc3\xf8\x05r\x06\x99\xcbM$\xc6m\xceG\xe66c\x9a,\xb6\xb1.qC/\xb1\xe0_\xcf&\xc0\x04\x1c\xd1G\xf4\x12\xfdDO\xd1W\xf2s]m\x82v\xf2\xf8\xf1\x170\xe1\x11\xcc\xe4>\x1d2l\xd8\xbd\xb9\x98\x9d\x89\xe5\xa2\x97\xe8'z\x9a\xfa\xe6\xa1DV\xd0\x98\xcb\x5c\x09\xb3\xdb\x88a-,\xb4\xe7}\x1d\xfbVy\xc8\xdfa\x12\xd1K\xf4\x13=E_\xd1;\x17\xd3\x8c\xa0m\xd9\xb2\xc5\xcdN\x85\xac\xd5\x14\xcd0\x1e\xc1\xd7\xb7\xe4bt&\x97\x8b~\xa2\xa7\xe8 z\x8b\xfem\xe9\x93\x11\xb4?\x1f9\xf2}*\x9d\xc7\xc8\xf2\xb9\xaf\xa8\xe8\x85\xb6\x18\x9c-e\xa2\xa7\xe8+z\xc7\xf5\xcf\xaaZ\xda\xe8\xb9r\xe5\xcaR\xb7\xcb\xb5\x97\x1a%\xecI]\xce2de\xd6\xdagY\x01\xae9\x03K\xfb\x13j\x9d\x0aG\x22\x03g\xcc\x98q2\x93\x8ai\x96\xe6v\xbb\xff\x16\xc2\x12F\x95\x95\xdf&\xc0\x04\x1c\xd1W\xf4\x16\xfd\xe38Hv\xda\x95\x04\xda\xf6\xed\xdb=\x8c\x22\xe6(\xa9:\x1cg\xcd|,M\xeb62\x12z\x83\x83\x89G\x06\xda$\xd0\x0e\x1c8p\x034}\x19I>\xfe\xb6Y\x99\x85\x8d\xa97\xfa\x0b\x0eq<\xac\xa2\xc43\xe9\xdc\x93\x00w3\xc3.+&\xad\xa0\xb5X\x82[<QSS\xe3<r\xe4\x88\xb7W\xaf^\xdeH$\xe2.**\xc2\xea\xd9\xff\x0e\x04\x14W$\xa2\x87\x9d\xcep\xbf~\xfdB\xbc\x87\xc6\x8d\x1b\x17I\xad_\xc0\xbb\xc6H\xe7?z\xf4h\x99\xcb\xe5\xea\xc7\x86h/6\x1a]R?\x16\x0a\x05\x1d\x1e\xcfa\x92\x0d\x1c\x1d\x9e\x9c;wn3iQ/\xe7\xe5\xd0\xf5\x978k\xfd5\x847q\xbf\x92Z!1\x10\xac[\xb7\xaeO0\x10\xd8\x0f`\xb1\x98\xae\xf7\xa9\xae\xae>\x91J\xdc\xd6\xfb\xc6\x8d\x1b]\x9c\x14]\xecP\xd5\xcb@}T$\x14\x1a\xe8t8J\x14\x87\xc3\x85\xcb\xab\xa4\x15\xf8\xb2o\x1f\xe3\x9f\x1a\x86W\xa3\x11\x8d\xeew\xba\x5c\x1b5\xa7\xf3\xad\x15+V\xec}\xf4\xd1G\xf3\xda1\x81\xb6\xbc\xb8\xb8xR,\x16\x9b\xce\x12o,\x03W\xdf\x98a\xf4
\xa4\x83\xca\x1d\x9a\xc6a\x15\xbb\xab\x9a\x16\xc2\x22\x8es\xb4s\x0c\xa6\x879\x80\xf9\xcc\xe1p\xd4\xb8\xc2\xe15\xe3\xab\xab\x05\xcc\xac\x00\xd2\xe9\xdd\xe1#u\x1c^\x9fo\xc0\xc4\x89\x13\x85>q%@cQ~7\x0d\xbc\x00\xa7\xb7\xab\xaa\xaa\xfe*A\x91#\xf1\xd8c\x8fi3g\xce\x1c\x88\x02wbNW9\x9c\xce>\x8c@%\xf4\xba\x87!\xdcT \x95\x05K\x16pUu\xf4\x0b\x12\x1fN\xd2\xeeZ\xd6\x80\xafb\x91+s\x9c]:V\xadZUE\x90\xbe\x09\x00&\x22|\xffH4Zb\xb6\x03\xcfT\x14\xc87\x9b\xe6\x19\xc5\x18\x9ay6@\xbf\x13+\xbf\xb6\xa2\xa2\x22\x94*\x97\xfd\xbd\xb6\xb6\xf6?\xa8=\x876\xfe\x8e\xc5\xbc\xec\xee$\xae\x84{\x22\xf8\xe5\x92\x8b0o%Js'4v<\x86\xd3#\x8f\xbb\x1c\x8ej\x86\xe9\x9eF8\x9c\x10^\xc0\xc9vat\x9a\xa1G\xfd\xb8\x81\x1f\x85f\x11G\xcf9y\xf2d1\x16\xfb\x16.\xdb\x92Zo\xe9\xd2\xa5E\xbd{\xf7^\x00\xb0\x0bb\xd1\xe8(=\x1a-\x82\xbb\x89J\xb6v\xac|\x9eN\xeeRx\x96Ra\xb0\xd7\xebM\x8a\xe5\xa9m\xc9\xbb\xe0@\xe7\xcf\x89\xe3\x92\x114:A\x9d*\x8d\xf0\xac\xcd\xc4$S\xde\xc6\x9a\x9ar\x98\xdf\x87)_E\x0f\xfa\xa5\xbe\x1d&\xc0\x90\xf8(\x02\x98\xd5\xb1>vat\xbc\xb5\x95\xca\xfc\x9f4y>\xa7\xa6]\xcc\xbc\xf0\xf6`0(\xae \xc3~\x82\x15\xee\xe2\xed\xd3\xa7\xcf]^\x8f\xe7\xf6h(4\x1c\xabt&\x0a3\x09\x96!\xcfj\x93\x223\xe6e 
I\xca\xb2p\x10\x5c(\x90\xceI4i\x22\xce\xf6\xcfh\x98\x96A\xb0\xfb\xd2K/\xdd\x9bT;\xcb\xcb\xbb\xef\xbe\xebidj\x03(\xb7\x00\x86_\xc0\xb0_\xf0\x92\xd7cF$\xb2<\x14\x0e\xbf\x18\x0d\x87_\xa6\xe7>&?\x028vR\x13\xc4\xa8\xae\xbb\xe01\x059n`\x929\xc4F\xa0\x11\xfbfz\x5c\xae\xbb\x89Y#\x88]\x19\x01\x93\x8e\xf1\xb8\xdd\x0a\xae\xcb\xa7\x22NEb\xa8\xa4=\x1e\x8fB\xc8P\xd8\xd6\xb2\xb1\xcc\x9d\x14\x1c\x04\x0f\xc1E\xf0\xb1\xd70\xdd\x93\xd0?\x1e7\x91\xfc5\xf6\xc2\xb6\xd2\xbeP\xc8\xe7\xe9\xde\xbd\x0a\x0b\xf1\x06C\xc9\xe1A8\x01\xcc)\xe2\xdc\xe3\x98\xdak<#\xaa\xd3\xa9J\xbcC\x88\x9b\x89\x13\xf79\xddn\x9f\xf0\x17p\xc9\x935\x9f\xc2\x1c\xc9\xc9\x87-\xe3\xa2\x91\xc8X\x8avJ9\x9b\x85#=~\xff|\x06\x91s\x015Ms\xe9\x00\xf8\x09\x8f]\xe1Ph#f\xbd\x09_<\xa6\xb8\x5cJ4\x16\xeb\x89\x1b\x0f\xa1\x8d\x91X\xe7\x08@,\xe6\x99\xc6C\xda\xc9r\x09\x1e\x83\x05\x1f\x9e\x89\xbd6\x13\xb4\x98\xa6\x0d\xa7U\xb1\xc1/\xb2TN\xcb\x0ex<.w4Z\xa1#\x5c\xda\xd5\xda\x01\x81\xe6\xe6\xe6\xd5W^ye\xe23\x04\x06\x8d\x93\x97]v\xd9\xeb(\xd9\x87\x18x\x01\xf1\xaf\x81\xb6\x1b\x18q\xf7ciG\xc9?\x8a55`)[\x85\xe7\xee\xdd\xbb\xbd(<\x1d\xd9\xaa\x01,-\x0e\x89E\x81\xfan\x00[\x0c`o\x97\x94\x94\xec\xff\xe4\x93OB\x17]t\x11\x9f\x8a0\xd7hhp\x94\x97\x97;eP\x82f\x18\xfe5\x8d\xe7,&\xad\x06\xbb\x1bB\xd2\xe6%x\x88O\x9a\xf8\xd8([\x07\x02\xc3\x18.y8\xd86[Y\x9bI\x02\xb2\xd0\xbb\x92\x1d-^\x85\x0e@\xd1\x12>p\xb9u]}}\xc8WZ\xbaCF\xc5\xf8\x94b\xeb\x8a\xa5K\x1f\xec>p\xa0+\xa8\xeb1gcc\xecH,\x16\x1d0`@\xac\xb4\xb44\xc6`\xa0/_\xbe<\xc6\xce\x83r`\xf7\xee\x0a\xa7\xc73\x19^\xbdQ6I\x9ex\x9c\xdc\xa4\xc4b?mln\xae\xfd\xf0\xc3\x0fO\x08\x7f\x06\x91\x04\x9d=M\x5c<\xca\xdcqs\x8f\x1e=~\xc3t*\xd95\x125\x92\x13\x09<\xe2\xf8X\xa5&h *\xbd 
A{\xbbU\x90\xeb\x89\x05HT\xdf'\x81>\xf5\x12^X\x86\x8f\x01\xe2oB\xb1\xd8\xbc\xd0\xf1\xe3{\x89S\xbb\xa2zt\xaf\x1a3\xbe\x04\xb0\x1d\xe1px\xd7\x89\x13'\xf6\xce\x993'm>h)\xcb<a\xa4S\xd7%\x9e$\xb9\x94\xb8$\xfc\x0f2\xf8\xfcd\xd3\xa6M\xef-\x5c\xb80\xc0\xb4'U\x8c\xa4w\x80\x8a\x92A\x186\xef\xa4\xb2l/\x82\x87\x196\xc0\xc7N\xd3ji\x8a\xd2]2!:j/l+-\x9bw\xcc\xb4\xeb\x98f\xdc\x91\x89.n\x19.x\xf6@\xc9r\xace\xb4\xdf\xe3\x13<\x09Oz\x8c\xf7p\xef\x1e=\xf6\xad\xae\xaf\xdfD\xe6\x1b\xb8\xda\xdaI\x93&\x1d\xb3xa\x19^\xe2\xe5`\x88\xfb\xa5\x06qyg\x92\xf7\xa6_U7\x0a`V\x9d\xce~\x0a\x1e\x02\x1a\x97\x89\x8f\xc5\xdf4\x13\x82c\x91d x\x93U\x90\xeb9d\xc8\x90\x10\xd6VC\x97\xd7\xc9(\x95\xe9\x12\xe0\xe4&\x18\xf3I\x9a\xee\xc4\xba\x98\x90\x87\xbd\xc4\x98n\xd4-#\xf0\x8fd\xf9s\x033\xfaW\x89e\x8b\xd7\xacYs\xb9\xb5\x01\xd8\xd4\xd4TD\x9c\x93\x13\xf1n\xcc\xfa\x13\xecQDPo\x82G=h\xe5\xdd\xc9\x09\x06\x05$,<,|\xac\xaa&h\x08b\x82F\xd0\xcc\x1b4\xc00\x98$\x1eD\x9d\x1f\x93\xdc%C;@X|\x93\x9eb^\x02^\xea\x8d\xe2\x1a`\xb9p\xb3r\x97\xd3y\x15\xf7/\x89;\xf3d:C\xec\x91\x15E\x11\xb7\xe5\x0d\xad<\xa5\x0d\xc38\x0e\xaf\x83\xf9\xc6\xa6$a\x0ax\xb1\xf0\xb0\xf0\xb1\xaa\x9a\x02\xa1\x94\x09\x9e\x04a\xab \x9f'\xb1'\x82e\xac=t\xe8\xd0\xad\x00\xf8\xf7\xb8a%.\xa5D0i\xb1\x0e\x01)\xd7e\xd2@\x87\xf5\x10\x02\xb5QX\xed]XeC(\x14\xfa\x0c+4\xf9\xd8yH\xb70K\xd7=\x10\xd9\xf3S\xd3,\x83fy|\x9e\xd1\x91\x80,A\x93/\xbe\x99{ZdO\xceM\x7f\xb3\xf0\xb0\xf0\xb1(L\x86\x08\xd2DA\x19\xbb\x0ebq\xc7\xad\xc2|\x9e\x17^xa\x98\xbbn\xd1\xa2E\xb3xN\x0fG\xa3\xb2\xf4\xf8\x0bx\x0ee\xb2\xe9%P\x9a\x8a3W\x93\xe0\x9d\x15H\x817\x12\x0e\xcba\xf4D\xb7\xa6\xfdeX\xd7w\xe3\xd6\xa7\xe0\x15\xa3G\x13\x83\xb4i\xad\xb1XY\xd8\xed\xee\xc1\xd2\xca\xc5\xee\x85,\xfe\xd3.,\xf4z\xaf\xcb3\x9f\xd9\xa1/\xb5\xf3X]<G\x85\x9c\xa0\xc5\xf1\x90Q(\xc9\x03[-\xcd0$\xb3\x0c\x17+\x184K\xda;\xef\xbcS\xd6\x8b\xcb\xe4^\xbeh\x91\xbfd\xc4\x88a\xa1Hd\x9cC5\xc6\xc6te4J\xc8\x08\xd4\x0fK2\x03 
\xb1\xcd\xaa\x9ax\x0ap\xcc\xe4\x1d\x04\xd7\x91Z8\xdc\x9f\xd7\xc3\xcc\xe1\x84o\xb1\xa5\xb8\x00O\xac)\x01\xcc\x89\xb8\xcfGB\xc3\x9d~\xe9:S\xb7\x10\x0b\x92H\xd6\x8eJ\xaf\x94\x9c#x\x04ZZ\xa4~F\xd0N\xa1\x94\xc2\xba\xaf\x8cj_&W-\xfcmv+\x80\xffCM\xb9\x15\xd9\x09\xc1\x0a\xbb\xf7\xef\xdf\x7f(\xab\x87\xc9\xb8\xe1w\xc0f*\xd6g\x86\x05{\x0b\x02\x0e\xb2\xf4\xd2t\xbd\x9c\xc4\x01\xb6x\x04\x94b\xe6c\x092\x01\x17+\xbc\x11\x1e\xb2F\x95\xc1\xe0\xab\xc2\x04U\xc7\x13q<\x04\xb4Svn\xa6\xa51\x84\xefB\x90\x91qkH,\x17\xec\x84\xf64t\x12Z\x94w\xdey\xc7\xcd\xbe\x96|\xb9]\xa6\x12\xcc\xe1\xd1\x13\xb7\xeaG\x04\xefkx\x5c\x1e\xd5P\x17\xb1\x86;\x10\x9f\xd4\xcatB\xee\x0d\xef\xbf\xff\xfe\x1f\xfd\x1e\xcf/1\xba\xab3Y\x1c~\xec`J\xc1\xa9\x9a\xf1\x051\xf2\x0b\xde+\xa8\x97\xb8\xc4\xd5\xddN\xe7\x10\x80\xfd\x09\xf3\xbf\xfb\x0f\x1e<\xb8\x097\xedt\xe0\x04\x0f\xe9 \xc1'\xd18\x09\x134\xe2\xce6\x04c\x7f0f\xae\x0c\xec\x04\x99\xd2\x9f}\xf6\x99\x8bQ\xee5\x16\xd1\xa3X\x88\x97\xeb\xb2\xbd\xa3(~\xcd\xedv\xc8\x82\x12\xf71\xa7/|8\x1cd\xab\xe7\x99\xd4\xa0\x0b\xd0\x11|\xe7P\x9a\x99QWz\x03ae\xc2{\x02\xba-\x8d\x8d\x8dk\xe0w)yeb\x85\xd6E\xec\x14W\xae\xc2}_\xc1\x82_d\x9fm\x19\x9f\xae\x1e\xa3#\xa2\xd41X\x92}ElU*\xf0\x99\xc0C\xf0\xb1]&h\xc4\x89m\x22,q\xe2<[Y\xd6$k:\x1d\x7fG\x19\xc7u\xec:h\xfc\x14\xc7\x8c\x1b\xecs\x99u\x98J\x88\xe2E\xf4\xd0\xed\x04\xd3\x03\xec\xb4.\xef\xd9\xb3g\x90\x91Ve\xb9\xd4\x1d#\xba\x86A\xe2\x1a\x16\xd4Im\x88\x0c\xcc\x89\x0c&u;y\xec\x02\xec\x16&\xb9+q\xc3\xc9\xb8\xe7\xech\xcab[\xeaS6\x92j/\xd2\x81\xdfe\x22Z\x87K\xad\x07dq\xa7\xc1\x94;\x10,\xa9\x8dB^L<\xa8/\xf8\xd8\xeb\x99\xa01\xb4obgAzy\x92\xbd0[\x9a\xf9Q\x0c\xb7XF\x9d;\x10z4\xd3\x854R\xe9h\x1a\x1d\x0e@\xff\xd4\xcd\xef\xbf\x07\xf0\xf6\x0c\x1a4\xc8\x01\x90C\xc8\x1b\x018r\x8a\x9fT\x8f\xf8%\xef\x07\x22\xba\xfe\x01V\xb6G^h\xeb\xbf\x99>\xfc\xce\xe7\xf5N\xd0#\x91>2\x10\xd8/qU:H\xa6+\xa3\xb1\xbc\xd1L\x88\xcd\xf9\x93\xd0\xa5\xd2\xda\xeb\xe5\x93\x16<\x04r\xc1\xc7NoJ\x89\xd5l\xa0e\x19\xa5.\xa0g{\xda\x09\xb2\xa4\x0d\xc0:\x88k<Ky\xc0\xdc\x9e\xc90\xb1\x05\x18\x09\xd8~\xca\x
c71q\xfd\x8e\xc7\xe9\xbc\x16`\xc6\xa0L\x12`ba\x02\x18\x80\x86\xa1\xfd#@\xbcgs\xe9\x98\xd1\xd0\xb0,\x10\x0c\xbe\x85\x05\x1d\xe3N3\x1d\xe9 \x01\x8f};E\xac],P\xda\xee\xc8\x15\xc7\xe1\x02\xc1\xc5\xc4\xc7\xc6\xcc\x04M\x04\xc4%\xd6\x8ag\xd0c\x95\xb6\xf2\xacI\xd6\x89\x81\x1e\xbd{\xff\x81\xde\x7f\x12\xa2\x00 bX\xaa\xf4\xba\x19\x97\xac\x8a\x22\xba(\x22\x01_\xe2\x90X\x97()\x97\x09\x16\xf4\x1c\xac\x18XI\x0b\x8a\xbe\x8dk\xbf4m\xda\xb4\x1d&A\xfc\xbf\xea\xb9se\xd9\xf4\x08\xeb\xdc_;U\xf5 m\xb1\xfd\xd7\xda\x96\x9d\xae\xad\xb4E\x8f~m\x91%\xca\x04\x07\xc1Cp\xb1u\xa0Y\x9e\xe0\xa0;\xd4\xf7\xe35\xaeM\xd4\xcc\x918\xff\xfc\xf3\x1b}\xc5\xc5O\xc3\xfc{X\xc9F\x82\xf0q\x14\x0a2\xb0\x98JIuKX\xfbS\xf2\x11J\x8e\xa8\x22\xac;\x1b)\xdb\x14\x0a\x06\x1f\xa7\xc7\x1e`K\xe8c)O\xbd\xa6O\x9f\xfeg\x96V\x8f4\x87B\xf7\xd2\xde*\xda:\x8cU\x86h\xd7\xec\xacTzy\xb7\xb5\xc9O\xa98\x9dr:O\xb2R\xd9J\x07\xe4c\x86&\x0e6\x5c\x12M$\x06\x19F\x9fa\xf4\xb6l\x0d\x9d\x1a8hP\x9f\xc1\x83\x07\x07\x13T9\x122\x0fcsq\x00\x82\xcd\x82\xb4\x0ak\x92\x8fgz\xb3S\xeb\xc1\x9a\xd0\xcdav\x8e4F\xbc\x92\x8f\x84#\xb2\xe8\xc6\xbdw\x93\xb5\x1e\xad\x97\xe0^[\x89_\xad#I\x8e\xf6j\xdey\xa7\xaf\xab\xb4t&Vp5|\xc6\xba\xdc\xee2\xda\xecFZ&\xceb\xc0\x02\x98\x84\x90\x00\xe8\xb40\x0a\x9e`\xc0\xfa_\xe4{7t\xea\xd4\x9fV\xaf_\xbf\xbd\xad\xe3B\xd9\xfc\xdc\xbbg\x8f\xcc\x0fKp\xf5\x8aT\xcbO\x80&\x0d\xb1\xb5\xbc\x0e\xdf\x99@\xe6\xdcB\xbeA\x95\xba\xd6%\x00\xce\x9e=\xdb{\xf8\xf0\xe1\x12\x02h1kR\x1f=K\xd2%#\x9d\xcet@\x96/\xcd\xa4Oq\x94\xd6(\xcb0\xabn\xa1O9k\x85_\x0fx\x0f\x06\xfc1\x80s\x0e\xaen\x0en\xb1H$\x0cH;\xe9\x90O\x09\x0d\xfb\xb1\xd4\xbc'\xc1\xf2\x8d1\xf5\x96\x82\xfcG\x9c\xb6ML\x95\xabu\x9e\xf6U\xee\x12\x92\x13\xb0\xe5;y\xbe\xf9Uv\xfe\xa9x\x0f\xca\xa0\x22w\xa7\xfe\x90+U\x8ax\xac\x916\xe4\xfe0\xb5\xbc\xbd\xef\xa6\xfe\xadqW\xf0H\xbb\x121MJ\x18\xd5^\xe1\xd1\x88\xb5]\xbev\xed\xda\x8b\xd3\xa8\xbf\x05\x19\xa6\xde\xe8/8\xc4\xf1H\xd3:\x094b\xca\x09Lr\x91PE\xc3\xa1\x07\xd2\xa8\xbf\x05\x19\x9c\xab>(j\x12\x13\xff\xc5\xc4#\x83\xceI\xa0I9\xf1\xe19*\x84\xf9\xeab\xae|\x0e\x9f\xa1\
xceY\x9b%\xfa2u\xbe^\xf4\xe7\x1b\x93\xe7\xb3)\x9a\x06\x1a\xf3\xaf\xfd\xf8\xe9KT\x94\xd3\x8b\x17\xb2U<+\xf3\xd17\xae\xf7K&\x0eY\x94L\x03M\xe8B\x9c\xf2P\xb9\x81\xe1\xbarum\xed\xfc,u\xcf\xaal\xd1S\xf4\x15\xbdE\xff\xb6\x94\xcb\x08\x9a|k\xcaD\xf3!\xa9\xc8\xfc\xe69\xf9\x0c\xab-&gz\x99\xe8'z\x8a\x1e\xa2w\xb6om-=3\x82&\x85|\x11\xb8\x98Y\xe2J\x90\xef\x13\x0e\x06_\x13~V\xa5\xb3\xec\xa9\x8a~\xa2\xa7\xe8+z\xe7\xd2/+hT4\xf8\xd1\xfc\x82\xb8\x9b\xce\xa8\xad\xaf5-/\x17\xc33\xad\x5c\xf4\xc2-g\x88\x9e\xa2\xaf\xe8\x9dK\x87\xb6@S\xcc\xbf2\xa0\xaa\xb7\xc0\x90\x8f\xc9\xd4\xc7\xf9\xcd\xfau\xb9\x18\x9eI\xe5\xa2\x8f\xe8%\xfa1\xc7\xb8%\xdf\xbf\xaa\xd0&h\x02\x00\x0b\xe8\x15@\xffC\x96)\x1a\xbb\x15\xaf\xaf\xae\xa9\xa9>\x93\x80\xc9&\xab\xe8!\xfa\x88^\xa2\x9f\xe8\x99\x8d65?i\xed\x99Zh\x7fg]\xfa+V\x0a\xf7\xd0\xc0I\x8e\xd8\xae\x984u\xaa\x9c\x04\x9d\x91\xd7\xda\xfa\xfa\x09\x9ce\xc8\x9f\xdb)\xc5\xc2\xfe\x81\xf5\xe5\xbd\x85(\x927h0U\xebkk\x96`\xc77\x90nf\x94\xb9\xae\x90\xde)D\xa8\xae\xa4e\x02;\x93]\x96?\xd0F7\xbe\x8c\xfb\xfd\xd4\xaa\xea\x1bI\xe7\x8ccv\x99r\xba\xa7\x8d\xd8\xa0\x81\x05\xf4\xceb\xf2\xe4w\xa0\xcb\xd8\xf2\x96\xc0y\xc6\x5c\x22\xaf\xc8\x8d\xc0\x00\xa6\xbc,\xfa\x90.\x080Q\xb6\x10K\x13z\xf3\xc2U\x9f\xc0U[GSU}i\xe0\xc0\x81\xf7\x17\xb2\xfff\xf19]Os\x7fl\xef\xdeg\x90\xf9\xbbf\x9b\xad\x7f\xe5\xe1\xe1\xf6\xb6\xdf.\xd0\xa41\xcc\xfc.\x84x\x9e\x9e\x93\x0fU6\xb3\x018\x8f\x05\xee\x17\xed\x15\xa4\xab\xea\xb1\xd7?\x82S\x17\xf9\xfd\xfd\x18\xe4\x0c\x11\xc3\xbeOX\xf9\xe7\x8e\xb4\xd7n\xd0\xa4Q\x86\xec\xb1\x9cH\xbdARvjC\x80\xf8\x0c\x1f\x97\xfc\x8c}.\xd9K\xfbZ/6(\xfd\x9c\xbb\xfe\x08\x90\xee\x97\x8eE\x98\xad,\xc2\xe7\xf1gs\x92N\x96\xda#d\x87@\x93\x069\xcb,\xe2\xe0\xf89\xf6\xea\xef@8B\x05\x9f5\xa8\xea\xc3\x8cH\xbf'\x1d\x13\x9a\xd3|9\x08\x1f7\xd0\x81O\xd0\xee\xb92\x07c\x83\xfd7\xfc\xe4h!\xbb\xc4I\xdfd\xb4W\xae\x0e\x83f5L\x90\x95\xbf\x9b\xf6\x22\xef\x97\xc4\xf3v\x12a\x9f\x196l\xd8\xcb\xa7\xe3\xa7\xdc\xf2\xa7\x13w\xec\xd8q\x1b\xbdv?\xed\x0f\x8d\xcb\xb0\x81\x03\x8a\xbbY\x1a\xad\x8b\xbfw\xca\xa3\xd3@\x8bK\xa3\x11\xebn
aH\x97 {^<O\xbe\xdfx\x93)\xca\x12bI=\xe9\xe4\xd3\xde8Q;\x1f\xd2\xdeT\xda\x93i\xc3\xf5\xdc\xe5q>[i\xef\x09\xda{\x95\xf7\xcel\xcfd\xdf\xd9\xa0\xc5eV4\x5cd\x0e/\x0f\xe0&\x89\x83\x09\x5c\xe5\x00\xef\xefs~WG\xba\x0e\x0b\x90\xe3\xfeB\x86|\x15\x8b\x1eN\x18\xa8d\xaf\xaf\x9200\x9dt\x7f\xabQ\xde\xc5\xa2\x9e\x224\xbc\xcd\xb3\xd3\xc1\xb2\xda\xe9*\xd0,\xfe\x0a\xdf\xd1\x8e\xe4xn>`\xcd'\xd3r\x9b\xd6rN\xafq\xa7\xed\x00 \xe0}\xc9\x81F\x13\x025\x92\x0ep\xfb8\x11*\xe6\x98N\xbe\x99;\x17\xa0\x87\x83n\x05|\xe4+%\xfb\xb5\x13\xb0~G'\x88%o\xb1\x17tU\xba\xcbA\xb3\x0b\xceh;\x8a\xe3\xbc*\x14\xacB\xf9K\x93\xac\xc4N\xd8F\xda\xb4VU]C\xddZN\xebj\x19\x0d?m\x83\xbcK\x8aN+h\xa9\x1a\xf0\xcb\x91\x12\xbe\xd7\x15w\x93O\xbc\xfa\x22L\x91X\x17\xa7@\xe6\x1f\x07\x16\xab\xe3\xbd\x89\xb2C\x80\xb5\xado\xdf\xbe\xdb\xf8\xc9a\xd2\x07v\xa9<O\xc7\xfb\xd7\x0a\xda\xe9P\xb0+\xda\xf8?^\xbfL2\xf4\xd4!\x91\x00\x00\x00\x00IEND\xaeB`\x82"
# rcc-generated resource name table; the UTF-16-BE characters visible in the
# bytes spell the path segments "res" and "sg_logo.png".
qt_resource_name = b"\x00\x03\x00\x00x\xc3\x00r\x00e\x00s\x00\x0b\x05U\xfb'\x00s\x00g\x00_\x00l\x00o\x00g\x00o\x00.\x00p\x00n\x00g"
# rcc-generated resource tree structure (node layout consumed by Qt).
qt_resource_struct = b"\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00"
def qInitResources():
    """Register the embedded resource data with Qt's resource system."""
    # 0x01 is the rcc data format version passed to qRegisterResourceData.
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    """Unregister the embedded resource data from Qt's resource system."""
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()  # register the resources as soon as this module is imported
| 944.238095
| 19,044
| 0.741591
| 4,495
| 19,829
| 3.259622
| 0.273415
| 0.01638
| 0.012285
| 0.005733
| 0.014947
| 0.014537
| 0.014537
| 0.013718
| 0.011876
| 0.011876
| 0
| 0.238624
| 0.004791
| 19,829
| 20
| 19,045
| 991.45
| 0.503851
| 0.007514
| 0
| 0
| 0
| 0.333333
| 0.980989
| 0.979718
| 0
| 0
| 0.000407
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3a00c3668f7c6f8a14b5ca71fe36deb2b55d5283
| 7,312
|
py
|
Python
|
commitlog/tests.py
|
k1000/django-commitlog
|
89c544a63898870c7d29beda4505c1da6a64029d
|
[
"MIT"
] | 1
|
2015-11-05T15:17:06.000Z
|
2015-11-05T15:17:06.000Z
|
commitlog/tests.py
|
k1000/django-commitlog
|
89c544a63898870c7d29beda4505c1da6a64029d
|
[
"MIT"
] | null | null | null |
commitlog/tests.py
|
k1000/django-commitlog
|
89c544a63898870c7d29beda4505c1da6a64029d
|
[
"MIT"
] | null | null | null |
import unittest
from django.test import TestCase, Client
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
# Fixture arguments used to reverse() the commitlog URLs in the tests below.
REPO_DATA = ("local", "master")  # (repository name, branch)
COMMIT_SHA = ("78a325da2d00c5071ddddc8b35dfb0e1241660b1",)  # a known commit
PATH = ("plantilla.html",)  # a file tracked in the test repository
class TestCommitViews(TestCase):
    """Smoke-test the commit-related views as plain and AJAX requests."""

    def setUp(self):
        self.u = User.objects.create_user('test', 'test', 'test')
        self.u.is_active = True
        self.u.save()
        self.client.login(username='test', password='test')

    def _assert_no_error(self, url):
        # GET the url once normally and once as an XMLHttpRequest; the
        # rendered page must not contain the string 'error' either way.
        response = self.client.get(url)
        self.assertNotContains(response, 'error')
        response = self.client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertNotContains(response, 'error')

    def test_log(self):
        """The commit log view renders without errors."""
        self._assert_no_error(reverse('commitlog-log', args=REPO_DATA))

    def test_commit_view(self):
        """A single commit's detail view renders without errors."""
        self._assert_no_error(
            reverse('commitlog-commit-view', args=REPO_DATA + COMMIT_SHA))

    def test_file_history(self):
        """The per-file history view renders without errors."""
        self._assert_no_error(
            reverse('commitlog-history-file', args=REPO_DATA + PATH))

    def test_undo(self):
        """The undo view renders without errors."""
        self._assert_no_error(reverse('commitlog-undo', args=REPO_DATA))
class TestFileViews(TestCase):
    """Smoke-test the file views (edit/new/upload/delete/rename/view)."""

    def setUp(self):
        self.u = User.objects.create_user('test', 'test', 'test')
        self.u.is_active = True
        self.u.save()
        self.client.login(username='test', password='test')

    def _assert_no_error(self, url):
        # GET the url once normally and once as an XMLHttpRequest; the
        # rendered page must not contain the string 'error' either way.
        response = self.client.get(url)
        self.assertNotContains(response, 'error')
        response = self.client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertNotContains(response, 'error')

    def test_edit(self):
        """The file edit view renders without errors."""
        self._assert_no_error(
            reverse('commitlog-edit-file', args=REPO_DATA + PATH))

    def test_new(self):
        """The new-file view renders without errors."""
        # BUG FIX: the URL name was misspelled 'ccommitlog-new-file' (double
        # 'c'), which makes reverse() raise NoReverseMatch before the view is
        # ever exercised.
        self._assert_no_error(
            reverse('commitlog-new-file', args=REPO_DATA + PATH))

    def test_upload(self):
        """The upload view renders without errors."""
        # NOTE(review): this reverses 'commitlog-history-file', not an upload
        # URL — looks like a copy/paste leftover; confirm against urls.py.
        self._assert_no_error(
            reverse('commitlog-history-file', args=REPO_DATA + PATH))

    def test_delete(self):
        """The delete-file view renders without errors."""
        self._assert_no_error(
            reverse('commitlog-delete-file', args=REPO_DATA + PATH))

    def test_rename(self):
        """The rename view renders without errors."""
        # NOTE(review): reverses 'commitlog-delete-file' — presumably there
        # is a dedicated rename URL; confirm against urls.py.
        self._assert_no_error(
            reverse('commitlog-delete-file', args=REPO_DATA + PATH))

    def test_view(self):
        """The file view renders without errors."""
        # NOTE(review): reverses 'commitlog-delete-file' with an extra commit
        # sha argument — presumably should target a file-view URL; confirm.
        self._assert_no_error(
            reverse('commitlog-delete-file', args=REPO_DATA + PATH + COMMIT_SHA))
class TestTreeViews(TestCase):
    """Smoke-test the tree browsing views as plain and AJAX requests."""

    def setUp(self):
        self.u = User.objects.create_user('test', 'test', 'test')
        self.u.is_active = True
        self.u.save()
        self.client.login(username='test', password='test')

    def _assert_no_error(self, url):
        # GET the url once normally and once as an XMLHttpRequest; the
        # rendered page must not contain the string 'error' either way.
        response = self.client.get(url)
        self.assertNotContains(response, 'error')
        response = self.client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertNotContains(response, 'error')

    def test_tree_view(self):
        """The working tree view renders without errors."""
        self._assert_no_error(
            reverse('commitlog-tree-view', args=REPO_DATA + PATH))

    def test_commit_tree_view(self):
        """The per-commit tree view renders without errors."""
        self._assert_no_error(
            reverse('commitlog-commit-tree-view', args=REPO_DATA + PATH))
class TestMetaViews(TestCase):
    """Smoke-test search and repository-listing views."""

    def setUp(self):
        self.u = User.objects.create_user('test', 'test', 'test')
        self.u.is_active = True
        self.u.save()
        self.client.login(username='test', password='test')

    def _assert_no_error(self, url):
        # GET the url once normally and once as an XMLHttpRequest; the
        # rendered page must not contain the string 'error' either way.
        response = self.client.get(url)
        self.assertNotContains(response, 'error')
        response = self.client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertNotContains(response, 'error')

    def test_search(self):
        """The search view renders without errors."""
        self._assert_no_error(reverse('commitlog-search', args=REPO_DATA + PATH))

    def test_repos(self):
        """The repository list view renders without errors."""
        self._assert_no_error(reverse('commitlog-repos'))
| 41.545455
| 74
| 0.611734
| 818
| 7,312
| 5.363081
| 0.089242
| 0.072943
| 0.114885
| 0.134032
| 0.909733
| 0.901299
| 0.901299
| 0.901299
| 0.901299
| 0.901299
| 0
| 0.004526
| 0.274754
| 7,312
| 176
| 75
| 41.545455
| 0.822742
| 0.103255
| 0
| 0.737589
| 0
| 0
| 0.115616
| 0.029906
| 0
| 0
| 0
| 0
| 0.198582
| 1
| 0.12766
| false
| 0.028369
| 0.028369
| 0
| 0.184397
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
28acbc5c721e6a2aa0bd764872fdc71523414008
| 2,487
|
py
|
Python
|
Bugscan_exploits-master/exp_list/exp-1408.py
|
csadsl/poc_exp
|
e3146262e7403f19f49ee2db56338fa3f8e119c9
|
[
"MIT"
] | 11
|
2020-05-30T13:53:49.000Z
|
2021-03-17T03:20:59.000Z
|
Bugscan_exploits-master/exp_list/exp-1408.py
|
csadsl/poc_exp
|
e3146262e7403f19f49ee2db56338fa3f8e119c9
|
[
"MIT"
] | 6
|
2020-05-13T03:25:18.000Z
|
2020-07-21T06:24:16.000Z
|
Bugscan_exploits-master/exp_list/exp-1408.py
|
csadsl/poc_exp
|
e3146262e7403f19f49ee2db56338fa3f8e119c9
|
[
"MIT"
] | 6
|
2020-05-30T13:53:51.000Z
|
2020-12-01T21:44:26.000Z
|
#!/usr/bin/python
#-*- encoding:utf-8 -*-
#__author__ = '1c3z'
import socket
socket.setdefaulttimeout(30)  # cap every socket operation (incl. recv) at 30s
def assign(service, arg):
    """Bugscan entry point: accept only 'ip'-type targets.

    Returns ``(True, arg)`` for the 'ip' service, ``None`` otherwise.
    """
    if service != "ip":
        return None
    return True, arg
def poc(ip):
    """Probe the DVR service on UDP/6605 with a canned login datagram.

    The payload carries the client version string 'V3.0.1.2 120109', the
    username 'demo' and the md5 hex digest of the default password '000000'.
    Returns True when the device answers with a matching login response,
    a falsy value otherwise.  (Python 2 code: str/bytes are interchangeable.)
    """
    addr = (ip, 6605)
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    data = '#\x08\x01\x00\x00\x01\x01\x00\xd4\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00V3.0.1.2 120109\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00demo\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00670b14728ad9902aecba32e22fa4f6bd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    try:
        s.sendto(data, addr)
        res = s.recv(1024)
        # A valid login response echoes the '#\x08' header, is at least
        # 100 bytes long, and contains the probed IP.
        if not res or len(res) < 100:
            return False
        if res[0:2] != '#\x08' or ip not in res:
            return False
        return True
    except socket.error:
        # Unreachable host / timeout: treat as not vulnerable (was a bare
        # 'except: pass', which also swallowed programming errors).
        return False
    finally:
        # BUG FIX: the socket used to be closed only on the success path,
        # leaking the descriptor on every early return and exception.
        s.close()
def audit(arg):
    """Run the probe and report the weak default credential on success."""
    if not poc(arg):
        return
    security_warning("demo:000000")
if __name__ == '__main__':
    # Standalone run: pull in the Bugscan stub helpers (security_warning,
    # etc.) and probe a sample host.
    from dummy import *
    audit(assign('ip', '122.115.41.8')[1])
| 75.363636
| 1,730
| 0.682348
| 519
| 2,487
| 3.240848
| 0.127168
| 1.423306
| 2.102854
| 2.768133
| 0.722354
| 0.722354
| 0.722354
| 0.722354
| 0.722354
| 0.722354
| 0
| 0.402609
| 0.106152
| 2,487
| 33
| 1,731
| 75.363636
| 0.354026
| 0.022919
| 0
| 0.076923
| 0
| 0.038462
| 0.732888
| 0.715776
| 0
| 1
| 0
| 0
| 0
| 1
| 0.115385
| false
| 0.038462
| 0.076923
| 0
| 0.346154
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
e93505d76ae62dff23162d4e8f5ff36b5a0356d6
| 37,991
|
py
|
Python
|
tests/quara/protocol/qtomography/standard/test_standard_qmpt.py
|
tknrsgym/quara
|
8f3337af83cdd02bb85632bb1e297902b1fff8fb
|
[
"Apache-2.0"
] | 3
|
2021-05-19T11:44:30.000Z
|
2022-03-30T07:13:49.000Z
|
tests/quara/protocol/qtomography/standard/test_standard_qmpt.py
|
tknrsgym/quara
|
8f3337af83cdd02bb85632bb1e297902b1fff8fb
|
[
"Apache-2.0"
] | 2
|
2021-06-02T01:24:59.000Z
|
2021-06-02T12:20:31.000Z
|
tests/quara/protocol/qtomography/standard/test_standard_qmpt.py
|
tknrsgym/quara
|
8f3337af83cdd02bb85632bb1e297902b1fff8fb
|
[
"Apache-2.0"
] | 1
|
2021-10-14T13:21:27.000Z
|
2021-10-14T13:21:27.000Z
|
import itertools
import numpy as np
import numpy.testing as npt
import pytest
from quara.objects.composite_system import CompositeSystem
from quara.objects.composite_system_typical import generate_composite_system
from quara.objects.elemental_system import ElementalSystem
from quara.objects.matrix_basis import get_normalized_pauli_basis
from quara.objects.mprocess import MProcess
from quara.objects.povm import (
get_x_povm,
get_y_povm,
get_z_povm,
)
from quara.objects.qoperation_typical import (
generate_qoperation,
generate_qoperation_object,
)
from quara.objects.tester_typical import (
generate_tester_states,
generate_tester_povms,
)
from quara.protocol.qtomography.standard.linear_estimator import LinearEstimator
from quara.protocol.qtomography.standard.standard_qmpt import (
cqpt_to_cqmpt,
StandardQmpt,
)
from quara.protocol.qtomography.standard.loss_minimization_estimator import (
LossMinimizationEstimator,
)
from quara.loss_function.standard_qtomography_based_weighted_relative_entropy import (
StandardQTomographyBasedWeightedRelativeEntropy,
StandardQTomographyBasedWeightedRelativeEntropyOption,
)
from quara.loss_function.standard_qtomography_based_weighted_probability_based_squared_error import (
StandardQTomographyBasedWeightedProbabilityBasedSquaredError,
StandardQTomographyBasedWeightedProbabilityBasedSquaredErrorOption,
)
from quara.minimization_algorithm.projected_gradient_descent_backtracking import (
ProjectedGradientDescentBacktracking,
ProjectedGradientDescentBacktrackingOption,
)
class TestStandardQmpt:
    """Tests for StandardQmpt construction and experiment validation."""

    @staticmethod
    def _build_qmpt():
        # Build a 1-qubit StandardQmpt from the standard tester objects with
        # an x-type1 mprocess as the true object (shared fixture).
        c_sys = generate_composite_system(mode="qubit", num=1)
        tester_states = [
            generate_qoperation_object(
                mode="state", object_name="state", name=name, c_sys=c_sys
            )
            for name in ["x0", "y0", "z0", "z1"]
        ]
        tester_povms = [
            generate_qoperation_object(
                mode="povm", object_name="povm", name=name, c_sys=c_sys
            )
            for name in ["x", "y", "z"]
        ]
        true_object = generate_qoperation(mode="mprocess", name="x-type1", c_sys=c_sys)
        return StandardQmpt(
            states=tester_states,
            povms=tester_povms,
            num_outcomes=true_object.num_outcomes,
            on_para_eq_constraint=True,
            schedules="all",
        )

    def test_testers(self):
        """4 tester states + 3 tester POVMs yield 7 testers."""
        qmpt = self._build_qmpt()
        assert len(qmpt.testers) == 7

    def test_is_valid_experiment(self):
        """is_valid_experiment() flags POVMs on mismatched composite systems."""
        qmpt = self._build_qmpt()
        assert qmpt.is_valid_experiment() == True

        # Replace the experiment's POVMs with ones built on two *different*
        # elemental systems; the experiment must now be reported invalid.
        e_sys0 = ElementalSystem(0, get_normalized_pauli_basis())
        c_sys0 = CompositeSystem([e_sys0])
        e_sys1 = ElementalSystem(1, get_normalized_pauli_basis())
        c_sys1 = CompositeSystem([e_sys1])
        qmpt.experiment.povms = [
            get_x_povm(c_sys1),
            get_y_povm(c_sys0),
            get_z_povm(c_sys0),
        ]
        assert qmpt.is_valid_experiment() == False
def test_cqpt_to_cqmpt():
    """cqpt_to_cqmpt places the QPT constraint row block-wise over the QMPT
    variables; with the parameter-equality constraint the last outcome's
    block is eliminated and substituted into the final row."""
    c_qpt = np.array(list(range(1, 17)))
    dim = 2
    m = 3
    block = dim ** 4  # 16 QPT variables per mprocess outcome

    # Case 1: on_para_eq_constraint=False — purely block-diagonal layout:
    # row k carries c_qpt in columns [16k, 16(k+1)), zeros elsewhere.
    actual_a_qmpt, actual_b_qmpt = cqpt_to_cqmpt(
        c_qpt=c_qpt, dim=dim, m_mprocess=m, on_para_eq_constraint=False
    )
    expected_a_qmpt = np.zeros((m, m * block))
    for k in range(m):
        expected_a_qmpt[k, k * block : (k + 1) * block] = c_qpt
    npt.assert_almost_equal(actual_a_qmpt, expected_a_qmpt, decimal=15)
    npt.assert_almost_equal(actual_b_qmpt, np.array([0, 0, 0.0]), decimal=15)

    # Case 2: on_para_eq_constraint=True — (m-1)*16 + 12 = 44 columns.
    # The first m-1 rows are block-diagonal as before; the last row encodes
    # the eliminated outcome: -c_qpt[:dim^2] under each earlier block's first
    # dim^2 columns, and c_qpt[dim^2:] in the trailing columns.
    actual_a_qmpt, actual_b_qmpt = cqpt_to_cqmpt(
        c_qpt=c_qpt, dim=dim, m_mprocess=m, on_para_eq_constraint=True
    )
    n_cols = (m - 1) * block + (block - dim ** 2)
    expected_a_qmpt = np.zeros((m, n_cols))
    for k in range(m - 1):
        expected_a_qmpt[k, k * block : (k + 1) * block] = c_qpt
        expected_a_qmpt[m - 1, k * block : k * block + dim ** 2] = -c_qpt[: dim ** 2]
    expected_a_qmpt[m - 1, (m - 1) * block :] = c_qpt[dim ** 2 :]
    npt.assert_almost_equal(actual_a_qmpt, expected_a_qmpt, decimal=15)
    npt.assert_almost_equal(actual_b_qmpt, np.array([0, 0, 1.0]), decimal=15)
def test_set_coeffs():
    """The coefficient matrix/vector shapes depend on on_para_eq_constraint."""
    c_sys = generate_composite_system(mode="qubit", num=1, ids_esys=[1])

    # Tester objects.
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=name, c_sys=c_sys
        )
        for name in ["x0", "y0", "z0", "z1"]
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=name, c_sys=c_sys
        )
        for name in ["x", "y", "z"]
    ]

    # Constrained parameterization: 28 variables; unconstrained: 32.
    for on_para_eq_constraint, expected_cols in [(True, 28), (False, 32)]:
        actual = StandardQmpt(
            tester_states,
            tester_povms,
            num_outcomes=2,
            on_para_eq_constraint=on_para_eq_constraint,
            seed_data=7,
        )
        assert actual.calc_matA().shape == (48, expected_cols)
        assert actual.calc_vecB().shape == (48,)
def calc_prob_dist_with_experiment(source_qmpt, qope):
    """Reference computation: splice ``qope`` into a copy of the qmpt's
    experiment at every schedule's target slot and recompute the
    probability distributions directly."""
    experiment = source_qmpt._experiment.copy()
    for schedule_index, _ in enumerate(experiment.schedules):
        target = source_qmpt._get_target_index(experiment, schedule_index)
        experiment.mprocesses[target] = qope
    return experiment.calc_prob_dists()
@pytest.mark.parametrize(("on_para_eq_constraint"), [(True), (False)])
def test_compare_prob_dist_1qubit(on_para_eq_constraint: bool):
    """StandardQmpt.calc_prob_dists matches the direct experiment computation
    on one qubit, for both parameterizations."""
    c_sys = generate_composite_system(mode="qubit", num=1, ids_esys=[1])

    # Tester objects.
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=name, c_sys=c_sys
        )
        for name in ["x0", "y0", "z0", "z1"]
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=name, c_sys=c_sys
        )
        for name in ["x", "y", "z"]
    ]

    qmpt = StandardQmpt(
        tester_states,
        tester_povms,
        num_outcomes=2,
        on_para_eq_constraint=on_para_eq_constraint,
        seed_data=7,
    )

    # True object; rebuild without the constraint when requested.
    true_object = generate_qoperation_object(
        mode="mprocess", object_name="mprocess", name="x-type1", c_sys=c_sys
    )
    if not on_para_eq_constraint:
        true_object = MProcess(
            hss=true_object.hss, on_para_eq_constraint=False, c_sys=c_sys
        )

    actual_list = qmpt.calc_prob_dists(true_object)
    expected_list = calc_prob_dist_with_experiment(qmpt, true_object)
    for actual, expected in zip(actual_list, expected_list):
        npt.assert_almost_equal(actual, expected, decimal=15)
@pytest.mark.qmpt_twoqubit
@pytest.mark.parametrize(("on_para_eq_constraint"), [(True), (False)])
def test_compare_prob_dist_2qubit(on_para_eq_constraint: bool):
    """2-qubit variant: calc_prob_dists matches the direct computation."""
    c_sys = generate_composite_system(mode="qubit", num=2, ids_esys=[1, 2])

    # Tester objects: all pairwise products of the 1-qubit names.
    state_names = ["x0", "y0", "z0", "z1"]
    povm_names = ["x", "y", "z"]
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=f"{a}_{b}", c_sys=c_sys
        )
        for a, b in itertools.product(state_names, state_names)
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=f"{a}_{b}", c_sys=c_sys
        )
        for a, b in itertools.product(povm_names, povm_names)
    ]

    # True object; rebuild without the constraint when requested.
    true_object = generate_qoperation_object(
        mode="mprocess", object_name="mprocess", name="x-type1_x-type1", c_sys=c_sys
    )
    if not on_para_eq_constraint:
        true_object = MProcess(
            hss=true_object.hss, on_para_eq_constraint=False, c_sys=c_sys
        )

    qmpt = StandardQmpt(
        tester_states,
        tester_povms,
        num_outcomes=true_object.num_outcomes,  # 4
        on_para_eq_constraint=on_para_eq_constraint,
        seed_data=7,
    )

    actual_list = qmpt.calc_prob_dists(true_object)
    expected_list = calc_prob_dist_with_experiment(qmpt, true_object)
    for actual, expected in zip(actual_list, expected_list):
        # decimal=15 is too tight for the 2-qubit case.
        npt.assert_almost_equal(actual, expected, decimal=14)
@pytest.mark.qmpt_onequtrit
@pytest.mark.parametrize(("on_para_eq_constraint"), [(True), (False)])
def test_compare_prob_dist_1qutrit(on_para_eq_constraint: bool):
    """1-qutrit variant: calc_prob_dists matches the direct computation."""
    c_sys = generate_composite_system(mode="qutrit", num=1, ids_esys=[1])

    # Tester objects for the qutrit case.
    state_names = [
        "01z0",
        "12z0",
        "02z1",
        "01x0",
        "01y0",
        "12x0",
        "12y0",
        "02x0",
        "02y0",
    ]
    povm_names = ["01x3", "01y3", "z3", "12x3", "12y3", "02x3", "02y3"]
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=name, c_sys=c_sys
        )
        for name in state_names
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=name, c_sys=c_sys
        )
        for name in povm_names
    ]

    qmpt = StandardQmpt(
        tester_states,
        tester_povms,
        num_outcomes=3,
        on_para_eq_constraint=on_para_eq_constraint,
        seed_data=7,
    )

    # True object; rebuild without the constraint when requested.
    true_object = generate_qoperation_object(
        mode="mprocess", object_name="mprocess", name="z3-type1", c_sys=c_sys
    )
    if not on_para_eq_constraint:
        true_object = MProcess(
            hss=true_object.hss, on_para_eq_constraint=False, c_sys=c_sys
        )

    actual_list = qmpt.calc_prob_dists(true_object)
    expected_list = calc_prob_dist_with_experiment(qmpt, true_object)
    for actual, expected in zip(actual_list, expected_list):
        npt.assert_almost_equal(actual, expected, decimal=15)
@pytest.mark.parametrize(
    ("true_object_name", "on_para_eq_constraint"),
    [("z-type1", True), ("z-type1", False)],
)
def test_calc_estimate_LinearEstimator_1qubit(
    true_object_name: str, on_para_eq_constraint: bool
):
    """LinearEstimator recovers the true 1-qubit mprocess exactly from
    noiseless empirical distributions."""
    c_sys = generate_composite_system(mode="qubit", num=1)

    # Tester objects.
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=name, c_sys=c_sys
        )
        for name in ["x0", "y0", "z0", "z1"]
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=name, c_sys=c_sys
        )
        for name in ["x", "y", "z"]
    ]

    # True object; rebuild without the constraint when requested.
    true_object = generate_qoperation(
        mode="mprocess", name=true_object_name, c_sys=c_sys
    )
    if not on_para_eq_constraint:
        true_object = MProcess(
            hss=true_object.hss, on_para_eq_constraint=False, c_sys=c_sys
        )

    qmpt = StandardQmpt(
        states=tester_states,
        povms=tester_povms,
        num_outcomes=true_object.num_outcomes,
        on_para_eq_constraint=on_para_eq_constraint,
        schedules="all",
    )

    # Noiseless empirical distributions from the exact probabilities.
    empi_dists = [(10, prob_dist) for prob_dist in qmpt.calc_prob_dists(true_object)]

    result = LinearEstimator().calc_estimate(
        qtomography=qmpt, empi_dists=empi_dists, is_computation_time_required=True
    )
    actual = result.estimated_qoperation
    for a, e in zip(actual.hss, true_object.hss):
        npt.assert_almost_equal(a, e, decimal=15)
@pytest.mark.qmpt_twoqubit
@pytest.mark.parametrize(
    ("true_object_name", "on_para_eq_constraint"),
    [
        ("x-type1_x-type1", True),
        ("x-type1_x-type1", False),
        ("bell-type1", True),
        ("bell-type1", False),
    ],
)
def test_calc_estimate_LinearEstimator_2qubit(
    true_object_name: str, on_para_eq_constraint: bool
):
    """LinearEstimator recovers the true 2-qubit mprocess from noiseless
    empirical distributions."""
    num_qubits = 2
    c_sys = generate_composite_system(mode="qubit", num=num_qubits)

    # Tester objects: all pairwise products of the 1-qubit names.
    state_names = ["x0", "y0", "z0", "z1"]
    povm_names = ["x", "y", "z"]
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=f"{a}_{b}", c_sys=c_sys
        )
        for a, b in itertools.product(state_names, repeat=num_qubits)
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=f"{a}_{b}", c_sys=c_sys
        )
        for a, b in itertools.product(povm_names, repeat=num_qubits)
    ]

    # True object; rebuild without the constraint when requested.
    true_object = generate_qoperation(
        mode="mprocess", name=true_object_name, c_sys=c_sys
    )
    if not on_para_eq_constraint:
        true_object = MProcess(
            hss=true_object.hss, on_para_eq_constraint=False, c_sys=c_sys
        )

    qmpt = StandardQmpt(
        states=tester_states,
        povms=tester_povms,
        num_outcomes=true_object.num_outcomes,
        on_para_eq_constraint=on_para_eq_constraint,
        schedules="all",
    )

    # Noiseless empirical distributions from the exact probabilities.
    empi_dists = [(10, prob_dist) for prob_dist in qmpt.calc_prob_dists(true_object)]

    result = LinearEstimator().calc_estimate(
        qtomography=qmpt, empi_dists=empi_dists, is_computation_time_required=True
    )
    actual = result.estimated_qoperation
    for a, e in zip(actual.hss, true_object.hss):
        npt.assert_almost_equal(a, e, decimal=14)
@pytest.mark.qmpt_onequtrit
@pytest.mark.parametrize(
    ("true_object_name", "on_para_eq_constraint"),
    [("z3-type1", True), ("z3-type1", False), ("z2-type1", True), ("z2-type1", False)],
)
def test_calc_estimate_LinearEstimator_1qutrit(
    true_object_name: str, on_para_eq_constraint: bool
):
    """LinearEstimator recovers the true 1-qutrit mprocess from noiseless
    empirical distributions."""
    c_sys = generate_composite_system(mode="qutrit", num=1)

    # Tester objects for the qutrit case.
    state_names = [
        "01z0",
        "12z0",
        "02z1",
        "01x0",
        "01y0",
        "12x0",
        "12y0",
        "02x0",
        "02y0",
    ]
    povm_names = ["01x3", "01y3", "z3", "12x3", "12y3", "02x3", "02y3"]
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=name, c_sys=c_sys
        )
        for name in state_names
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=name, c_sys=c_sys
        )
        for name in povm_names
    ]

    # True object; rebuild without the constraint when requested.
    true_object = generate_qoperation(
        mode="mprocess", name=true_object_name, c_sys=c_sys
    )
    if not on_para_eq_constraint:
        true_object = MProcess(
            hss=true_object.hss, on_para_eq_constraint=False, c_sys=c_sys
        )

    qmpt = StandardQmpt(
        states=tester_states,
        povms=tester_povms,
        num_outcomes=true_object.num_outcomes,
        on_para_eq_constraint=on_para_eq_constraint,
        schedules="all",
    )

    # Noiseless empirical distributions from the exact probabilities.
    empi_dists = [(10, prob_dist) for prob_dist in qmpt.calc_prob_dists(true_object)]

    result = LinearEstimator().calc_estimate(
        qtomography=qmpt, empi_dists=empi_dists, is_computation_time_required=True
    )
    actual = result.estimated_qoperation
    for a, e in zip(actual.hss, true_object.hss):
        npt.assert_almost_equal(a, e, decimal=14)
@pytest.mark.parametrize(
    ("true_object_name", "on_para_eq_constraint"),
    [("z-type1", True), ("z-type1", False)],
)
def test_calc_estimate_MLE_1qubit(true_object_name: str, on_para_eq_constraint: bool):
    """Weighted-relative-entropy MLE (projected gradient descent) recovers
    the true 1-qubit mprocess from noiseless empirical distributions."""
    c_sys = generate_composite_system(mode="qubit", num=1)

    # Tester objects.
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=name, c_sys=c_sys
        )
        for name in ["x0", "y0", "z0", "z1"]
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=name, c_sys=c_sys
        )
        for name in ["x", "y", "z"]
    ]

    # True object; rebuild without the constraint when requested.
    true_object = generate_qoperation(
        mode="mprocess", name=true_object_name, c_sys=c_sys
    )
    if not on_para_eq_constraint:
        true_object = MProcess(
            hss=true_object.hss, on_para_eq_constraint=False, c_sys=c_sys
        )

    qmpt = StandardQmpt(
        states=tester_states,
        povms=tester_povms,
        num_outcomes=true_object.num_outcomes,
        on_para_eq_constraint=on_para_eq_constraint,
        schedules="all",
    )

    # Noiseless empirical distributions from the exact probabilities.
    empi_dists = [(10, prob_dist) for prob_dist in qmpt.calc_prob_dists(true_object)]

    # MLE: weighted relative entropy minimized by projected gradient descent
    # with backtracking.
    estimator = LossMinimizationEstimator()
    loss = StandardQTomographyBasedWeightedRelativeEntropy()
    loss_option = StandardQTomographyBasedWeightedRelativeEntropyOption("identity")
    algo = ProjectedGradientDescentBacktracking()
    algo_option = ProjectedGradientDescentBacktrackingOption(
        mode_stopping_criterion_gradient_descent="sum_absolute_difference_variable",
        num_history_stopping_criterion_gradient_descent=1,
        eps=1e-9,
    )

    result = estimator.calc_estimate(
        qtomography=qmpt,
        empi_dists=empi_dists,
        loss=loss,
        loss_option=loss_option,
        algo=algo,
        algo_option=algo_option,
        is_computation_time_required=True,
    )
    actual = result.estimated_qoperation
    # Iterative optimization: only ~6 decimals of agreement are expected.
    for a, e in zip(actual.hss, true_object.hss):
        npt.assert_almost_equal(a, e, decimal=6)
@pytest.mark.qmpt_twoqubit
@pytest.mark.parametrize(
    ("true_object_name", "on_para_eq_constraint"),
    [
        ("x-type1_x-type1", True),
        ("x-type1_x-type1", False),
        ("bell-type1", True),
        ("bell-type1", False),
    ],
)
def test_calc_estimate_MLE_2qubit(true_object_name: str, on_para_eq_constraint: bool):
    """MLE-based QMPT on two qubits should approximate the true MProcess."""
    # Arrange: two-qubit composite system with product tester states/POVMs.
    num_qubits = 2
    c_sys = generate_composite_system(mode="qubit", num=num_qubits)
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=f"{first}_{second}", c_sys=c_sys
        )
        for first, second in itertools.product(
            ["x0", "y0", "z0", "z1"], repeat=num_qubits
        )
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=f"{first}_{second}", c_sys=c_sys
        )
        for first, second in itertools.product(["x", "y", "z"], repeat=num_qubits)
    ]
    # True object; re-parameterize when the equality constraint is disabled.
    true_object = generate_qoperation(
        mode="mprocess", name=true_object_name, c_sys=c_sys
    )
    if not on_para_eq_constraint:
        true_object = MProcess(
            hss=true_object.hss, on_para_eq_constraint=False, c_sys=c_sys
        )
    # Tomography setup over every schedule, with relaxed projection tolerances.
    qmpt = StandardQmpt(
        states=tester_states,
        povms=tester_povms,
        num_outcomes=true_object.num_outcomes,
        on_para_eq_constraint=on_para_eq_constraint,
        eps_proj_physical=1e-5,
        eps_truncate_imaginary_part=1e-12,
        schedules="all",
    )
    # Noiseless empirical distributions taken straight from the true probabilities.
    empi_dists = [(10, dist) for dist in qmpt.calc_prob_dists(true_object)]
    # Loss-minimization estimator with a relative-entropy loss (i.e. MLE).
    estimator = LossMinimizationEstimator()
    loss = StandardQTomographyBasedWeightedRelativeEntropy()
    loss_option = StandardQTomographyBasedWeightedRelativeEntropyOption("identity")
    algo = ProjectedGradientDescentBacktracking()
    algo_option = ProjectedGradientDescentBacktrackingOption(
        mode_stopping_criterion_gradient_descent="sum_absolute_difference_variable",
        num_history_stopping_criterion_gradient_descent=1,
        eps=1e-9,
    )
    # Act
    result = estimator.calc_estimate(
        qtomography=qmpt,
        empi_dists=empi_dists,
        loss=loss,
        loss_option=loss_option,
        algo=algo,
        algo_option=algo_option,
        is_computation_time_required=True,
    )
    # Assert: loose tolerance — the two-qubit optimization is much harder.
    for estimated_hs, true_hs in zip(result.estimated_qoperation.hss, true_object.hss):
        npt.assert_almost_equal(estimated_hs, true_hs, decimal=1)
@pytest.mark.qmpt_onequtrit
@pytest.mark.parametrize(
    ("true_object_name", "on_para_eq_constraint"),
    [("z3-type1", True), ("z3-type1", False), ("z2-type1", True), ("z2-type1", False)],
)
def test_calc_estimate_MLE_1qutrit(true_object_name: str, on_para_eq_constraint: bool):
    """MLE-based QMPT on one qutrit should approximate the true MProcess."""
    # Arrange: one-qutrit composite system and an informationally complete tester set.
    num_qubits = 1
    c_sys = generate_composite_system(mode="qutrit", num=num_qubits)
    state_names = [
        "01z0",
        "12z0",
        "02z1",
        "01x0",
        "01y0",
        "12x0",
        "12y0",
        "02x0",
        "02y0",
    ]
    povm_names = ["01x3", "01y3", "z3", "12x3", "12y3", "02x3", "02y3"]
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=state_name, c_sys=c_sys
        )
        for state_name in state_names
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=povm_name, c_sys=c_sys
        )
        for povm_name in povm_names
    ]
    # True object; re-parameterize when the equality constraint is disabled.
    true_object = generate_qoperation(
        mode="mprocess", name=true_object_name, c_sys=c_sys
    )
    if not on_para_eq_constraint:
        true_object = MProcess(
            hss=true_object.hss, on_para_eq_constraint=False, c_sys=c_sys
        )
    # Tomography setup over every schedule, with a relaxed projection tolerance.
    qmpt = StandardQmpt(
        states=tester_states,
        povms=tester_povms,
        num_outcomes=true_object.num_outcomes,
        on_para_eq_constraint=on_para_eq_constraint,
        eps_proj_physical=1e-5,
        schedules="all",
    )
    # Noiseless empirical distributions taken straight from the true probabilities.
    empi_dists = [(10, dist) for dist in qmpt.calc_prob_dists(true_object)]
    # Loss-minimization estimator with a relative-entropy loss (i.e. MLE).
    estimator = LossMinimizationEstimator()
    loss = StandardQTomographyBasedWeightedRelativeEntropy()
    loss_option = StandardQTomographyBasedWeightedRelativeEntropyOption("identity")
    algo = ProjectedGradientDescentBacktracking()
    algo_option = ProjectedGradientDescentBacktrackingOption(
        mode_stopping_criterion_gradient_descent="sum_absolute_difference_variable",
        num_history_stopping_criterion_gradient_descent=1,
        eps=1e-9,
    )
    # Act
    result = estimator.calc_estimate(
        qtomography=qmpt,
        empi_dists=empi_dists,
        loss=loss,
        loss_option=loss_option,
        algo=algo,
        algo_option=algo_option,
        is_computation_time_required=True,
    )
    # Assert: loose tolerance — the qutrit optimization converges slowly.
    for estimated_hs, true_hs in zip(result.estimated_qoperation.hss, true_object.hss):
        npt.assert_almost_equal(estimated_hs, true_hs, decimal=1)
@pytest.mark.parametrize(
    ("true_object_name", "on_para_eq_constraint"),
    [("z-type1", True), ("z-type1", False)],
)
def test_calc_estimate_LSE_1qubit(true_object_name: str, on_para_eq_constraint: bool):
    """Least-squares QMPT on one qubit should recover the true MProcess."""
    # Arrange: one-qubit composite system with standard tester states/POVMs.
    num_qubits = 1
    c_sys = generate_composite_system(mode="qubit", num=num_qubits)
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=state_name, c_sys=c_sys
        )
        for state_name in ["x0", "y0", "z0", "z1"]
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=povm_name, c_sys=c_sys
        )
        for povm_name in ["x", "y", "z"]
    ]
    # True object; re-parameterize when the equality constraint is disabled.
    true_object = generate_qoperation(
        mode="mprocess", name=true_object_name, c_sys=c_sys
    )
    if not on_para_eq_constraint:
        true_object = MProcess(
            hss=true_object.hss, on_para_eq_constraint=False, c_sys=c_sys
        )
    # Tomography setup over every schedule.
    qmpt = StandardQmpt(
        states=tester_states,
        povms=tester_povms,
        num_outcomes=true_object.num_outcomes,
        on_para_eq_constraint=on_para_eq_constraint,
        schedules="all",
    )
    # Noiseless empirical distributions taken straight from the true probabilities.
    empi_dists = [(10, dist) for dist in qmpt.calc_prob_dists(true_object)]
    # Loss-minimization estimator with a squared-error loss (LSE).
    estimator = LossMinimizationEstimator()
    loss = StandardQTomographyBasedWeightedProbabilityBasedSquaredError()
    loss_option = StandardQTomographyBasedWeightedProbabilityBasedSquaredErrorOption(
        "identity"
    )
    algo = ProjectedGradientDescentBacktracking()
    algo_option = ProjectedGradientDescentBacktrackingOption(
        mode_stopping_criterion_gradient_descent="sum_absolute_difference_variable",
        num_history_stopping_criterion_gradient_descent=1,
        eps=1e-9,
    )
    # Act
    result = estimator.calc_estimate(
        qtomography=qmpt,
        empi_dists=empi_dists,
        loss=loss,
        loss_option=loss_option,
        algo=algo,
        algo_option=algo_option,
        is_computation_time_required=True,
    )
    # Assert: every estimated HS matrix matches the true one to 7 decimals.
    for estimated_hs, true_hs in zip(result.estimated_qoperation.hss, true_object.hss):
        npt.assert_almost_equal(estimated_hs, true_hs, decimal=7)
@pytest.mark.qmpt_twoqubit
@pytest.mark.parametrize(
    ("true_object_name", "on_para_eq_constraint"),
    [
        ("x-type1_x-type1", True),
        ("x-type1_x-type1", False),
        ("bell-type1", True),
        ("bell-type1", False),
    ],
)
def test_calc_estimate_LSE_2qubit(true_object_name: str, on_para_eq_constraint: bool):
    """Least-squares QMPT on two qubits should approximate the true MProcess."""
    # Arrange: two-qubit composite system with product tester states/POVMs.
    num_qubits = 2
    c_sys = generate_composite_system(mode="qubit", num=num_qubits)
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=f"{first}_{second}", c_sys=c_sys
        )
        for first, second in itertools.product(
            ["x0", "y0", "z0", "z1"], repeat=num_qubits
        )
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=f"{first}_{second}", c_sys=c_sys
        )
        for first, second in itertools.product(["x", "y", "z"], repeat=num_qubits)
    ]
    # True object; re-parameterize when the equality constraint is disabled.
    true_object = generate_qoperation(
        mode="mprocess", name=true_object_name, c_sys=c_sys
    )
    if not on_para_eq_constraint:
        true_object = MProcess(
            hss=true_object.hss, on_para_eq_constraint=False, c_sys=c_sys
        )
    # Tomography setup over every schedule, with a relaxed projection tolerance.
    qmpt = StandardQmpt(
        states=tester_states,
        povms=tester_povms,
        num_outcomes=true_object.num_outcomes,
        on_para_eq_constraint=on_para_eq_constraint,
        eps_proj_physical=1e-5,
        schedules="all",
    )
    # Noiseless empirical distributions taken straight from the true probabilities.
    empi_dists = [(10, dist) for dist in qmpt.calc_prob_dists(true_object)]
    # Loss-minimization estimator with a squared-error loss (LSE).
    estimator = LossMinimizationEstimator()
    loss = StandardQTomographyBasedWeightedProbabilityBasedSquaredError()
    loss_option = StandardQTomographyBasedWeightedProbabilityBasedSquaredErrorOption(
        "identity"
    )
    algo = ProjectedGradientDescentBacktracking()
    algo_option = ProjectedGradientDescentBacktrackingOption(
        mode_stopping_criterion_gradient_descent="sum_absolute_difference_variable",
        num_history_stopping_criterion_gradient_descent=1,
        eps=1e-9,
    )
    # Act
    result = estimator.calc_estimate(
        qtomography=qmpt,
        empi_dists=empi_dists,
        loss=loss,
        loss_option=loss_option,
        algo=algo,
        algo_option=algo_option,
        is_computation_time_required=True,
    )
    # Assert: loose tolerance — the two-qubit optimization is much harder.
    for estimated_hs, true_hs in zip(result.estimated_qoperation.hss, true_object.hss):
        npt.assert_almost_equal(estimated_hs, true_hs, decimal=2)
@pytest.mark.qmpt_onequtrit
@pytest.mark.parametrize(
    ("true_object_name", "on_para_eq_constraint"),
    [("z3-type1", True), ("z3-type1", False), ("z2-type1", True), ("z2-type1", False)],
)
def test_calc_estimate_LSE_1qutrit(true_object_name: str, on_para_eq_constraint: bool):
    """Least-squares QMPT on one qutrit should approximate the true MProcess."""
    # Arrange: one-qutrit composite system and an informationally complete tester set.
    num_qubits = 1
    c_sys = generate_composite_system(mode="qutrit", num=num_qubits)
    state_names = [
        "01z0",
        "12z0",
        "02z1",
        "01x0",
        "01y0",
        "12x0",
        "12y0",
        "02x0",
        "02y0",
    ]
    povm_names = ["01x3", "01y3", "z3", "12x3", "12y3", "02x3", "02y3"]
    tester_states = [
        generate_qoperation_object(
            mode="state", object_name="state", name=state_name, c_sys=c_sys
        )
        for state_name in state_names
    ]
    tester_povms = [
        generate_qoperation_object(
            mode="povm", object_name="povm", name=povm_name, c_sys=c_sys
        )
        for povm_name in povm_names
    ]
    # True object; re-parameterize when the equality constraint is disabled.
    true_object = generate_qoperation(
        mode="mprocess", name=true_object_name, c_sys=c_sys
    )
    if not on_para_eq_constraint:
        true_object = MProcess(
            hss=true_object.hss, on_para_eq_constraint=False, c_sys=c_sys
        )
    # Tomography setup over every schedule, with a relaxed projection tolerance.
    qmpt = StandardQmpt(
        states=tester_states,
        povms=tester_povms,
        num_outcomes=true_object.num_outcomes,
        on_para_eq_constraint=on_para_eq_constraint,
        eps_proj_physical=1e-5,
        schedules="all",
    )
    # Noiseless empirical distributions taken straight from the true probabilities.
    empi_dists = [(10, dist) for dist in qmpt.calc_prob_dists(true_object)]
    # Loss-minimization estimator with a squared-error loss (LSE).
    estimator = LossMinimizationEstimator()
    loss = StandardQTomographyBasedWeightedProbabilityBasedSquaredError()
    loss_option = StandardQTomographyBasedWeightedProbabilityBasedSquaredErrorOption(
        "identity"
    )
    algo = ProjectedGradientDescentBacktracking()
    algo_option = ProjectedGradientDescentBacktrackingOption(
        mode_stopping_criterion_gradient_descent="sum_absolute_difference_variable",
        num_history_stopping_criterion_gradient_descent=1,
        eps=1e-9,
    )
    # Act
    result = estimator.calc_estimate(
        qtomography=qmpt,
        empi_dists=empi_dists,
        loss=loss,
        loss_option=loss_option,
        algo=algo,
        algo_option=algo_option,
        is_computation_time_required=True,
    )
    # Assert: loose tolerance — the qutrit optimization converges slowly.
    for estimated_hs, true_hs in zip(result.estimated_qoperation.hss, true_object.hss):
        npt.assert_almost_equal(estimated_hs, true_hs, decimal=1)
| 27.312006
| 101
| 0.568477
| 4,230
| 37,991
| 4.771631
| 0.05461
| 0.01744
| 0.02497
| 0.031906
| 0.908987
| 0.886692
| 0.877279
| 0.872473
| 0.866528
| 0.863654
| 0
| 0.03046
| 0.338054
| 37,991
| 1,390
| 102
| 27.331655
| 0.772149
| 0.032192
| 0
| 0.775221
| 1
| 0
| 0.052136
| 0.012113
| 0
| 0
| 0
| 0
| 0.020354
| 1
| 0.015044
| false
| 0
| 0.015929
| 0
| 0.032743
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3a5dbbe85400e757d6b9ffa5392c1cfc80f6d887
| 54,813
|
py
|
Python
|
turdshovel/_stubs/System/Collections/Generic.py
|
daddycocoaman/turdshovel
|
6f9d9b08734028fa819c590e8573ae49481dc769
|
[
"MIT"
] | 39
|
2021-10-30T06:34:21.000Z
|
2022-03-22T09:04:40.000Z
|
turdshovel/_stubs/System/Collections/Generic.py
|
daddycocoaman/turdshovel
|
6f9d9b08734028fa819c590e8573ae49481dc769
|
[
"MIT"
] | null | null | null |
turdshovel/_stubs/System/Collections/Generic.py
|
daddycocoaman/turdshovel
|
6f9d9b08734028fa819c590e8573ae49481dc769
|
[
"MIT"
] | 3
|
2021-10-30T03:56:16.000Z
|
2021-11-08T01:59:32.000Z
|
# encoding: utf-8
# module System.Collections.Generic calls itself Generic
# from mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089, System, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089, Microsoft.Bcl.AsyncInterfaces, Version=1.0.0.0, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51
# by generator 1.145
# no doc
# no imports
# no functions
# classes
class Comparer(object, IComparer, IComparer[T]):
    # Auto-generated stub mirroring CLR System.Collections.Generic.Comparer[T];
    # all bodies are placeholders (pass) — the real implementation lives in .NET.
    def Compare(self, x, y):
        """ Compare(self: Comparer[T], x: T, y: T) -> int """
        pass
    @staticmethod
    def Create(comparison):
        """ Create(comparison: Comparison[T]) -> Comparer[T] """
        pass
    def __cmp__(self, *args): #cannot find CLR method
        """ x.__cmp__(y) <==> cmp(x,y) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __repr__(self, *args): #cannot find CLR method
        """ __repr__(self: object) -> str """
        pass
class Dictionary(object, IDictionary[TKey, TValue], ICollection[KeyValuePair[TKey, TValue]], IEnumerable[KeyValuePair[TKey, TValue]], IEnumerable, IDictionary, ICollection, IReadOnlyDictionary[TKey, TValue], IReadOnlyCollection[KeyValuePair[TKey, TValue]], ISerializable, IDeserializationCallback):
    """
    Dictionary[TKey, TValue]()
    Dictionary[TKey, TValue](capacity: int)
    Dictionary[TKey, TValue](comparer: IEqualityComparer[TKey])
    Dictionary[TKey, TValue](capacity: int, comparer: IEqualityComparer[TKey])
    Dictionary[TKey, TValue](dictionary: IDictionary[TKey, TValue])
    Dictionary[TKey, TValue](dictionary: IDictionary[TKey, TValue], comparer: IEqualityComparer[TKey])
    """
    # Auto-generated stub for the CLR generic Dictionary[TKey, TValue];
    # all method bodies are placeholders — docstrings carry the CLR signatures.
    def Add(self, key, value):
        """ Add(self: Dictionary[TKey, TValue], key: TKey, value: TValue) """
        pass
    def Clear(self):
        """ Clear(self: Dictionary[TKey, TValue]) """
        pass
    def ContainsKey(self, key):
        """ ContainsKey(self: Dictionary[TKey, TValue], key: TKey) -> bool """
        pass
    def ContainsValue(self, value):
        """ ContainsValue(self: Dictionary[TKey, TValue], value: TValue) -> bool """
        pass
    def GetEnumerator(self):
        """ GetEnumerator(self: Dictionary[TKey, TValue]) -> Enumerator """
        pass
    def GetObjectData(self, info, context):
        """ GetObjectData(self: Dictionary[TKey, TValue], info: SerializationInfo, context: StreamingContext) """
        pass
    def OnDeserialization(self, sender):
        """ OnDeserialization(self: Dictionary[TKey, TValue], sender: object) """
        pass
    def Remove(self, key):
        """ Remove(self: Dictionary[TKey, TValue], key: TKey) -> bool """
        pass
    def TryGetValue(self, key, value):
        """ TryGetValue(self: Dictionary[TKey, TValue], key: TKey) -> (bool, TValue) """
        pass
    def __add__(self, *args): #cannot find CLR method
        """ x.__add__(y) <==> x+y """
        pass
    def __contains__(self, *args): #cannot find CLR method
        """
        __contains__(self: IDictionary[TKey, TValue], key: TKey) -> bool
        __contains__(self: IDictionary, key: object) -> bool
        """
        pass
    def __getitem__(self, *args): #cannot find CLR method
        """ x.__getitem__(y) <==> x[y] """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self, *args): #cannot find CLR method
        """ __iter__(self: IEnumerable) -> object """
        pass
    def __len__(self, *args): #cannot find CLR method
        """ x.__len__() <==> len(x) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, *__args):
        """
        __new__(cls: type)
        __new__(cls: type, capacity: int)
        __new__(cls: type, comparer: IEqualityComparer[TKey])
        __new__(cls: type, capacity: int, comparer: IEqualityComparer[TKey])
        __new__(cls: type, dictionary: IDictionary[TKey, TValue])
        __new__(cls: type, dictionary: IDictionary[TKey, TValue], comparer: IEqualityComparer[TKey])
        __new__(cls: type, info: SerializationInfo, context: StreamingContext)
        """
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __repr__(self, *args): #cannot find CLR method
        """
        __repr__(self: Dictionary[TKey, TValue]) -> str
        __repr__(self: Dictionary[K, V]) -> str
        """
        pass
    def __setitem__(self, *args): #cannot find CLR method
        """ x.__setitem__(i, y) <==> x[i]= """
        pass
    Comparer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Comparer(self: Dictionary[TKey, TValue]) -> IEqualityComparer[TKey]
    """
    Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Count(self: Dictionary[TKey, TValue]) -> int
    """
    Keys = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Keys(self: Dictionary[TKey, TValue]) -> KeyCollection
    """
    Values = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Values(self: Dictionary[TKey, TValue]) -> ValueCollection
    """
    # Nested CLR types; filled in by the runtime, None in the static stub.
    Enumerator = None
    KeyCollection = None
    ValueCollection = None
class EqualityComparer(object, IEqualityComparer, IEqualityComparer[T]):
    # Auto-generated stub for CLR EqualityComparer[T]; bodies are placeholders.
    def Equals(self, *__args):
        """ Equals(self: EqualityComparer[T], x: T, y: T) -> bool """
        pass
    def GetHashCode(self, obj=None):
        """ GetHashCode(self: EqualityComparer[T], obj: T) -> int """
        pass
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==y """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __repr__(self, *args): #cannot find CLR method
        """ __repr__(self: object) -> str """
        pass
class IAsyncEnumerable:
    # Auto-generated stub for CLR IAsyncEnumerable[T]; bodies are placeholders.
    def GetAsyncEnumerator(self, cancellationToken):
        """
        GetAsyncEnumerator(self: IAsyncEnumerable[T], cancellationToken: CancellationToken) -> IAsyncEnumerator[T]
        Returns an enumerator that iterates asynchronously through the collection.
        cancellationToken: A System.Threading.CancellationToken that may be used to cancel the asynchronous iteration.
        Returns: An enumerator that can be used to iterate asynchronously through the collection.
        """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
class IAsyncEnumerator(IAsyncDisposable):
    # Auto-generated stub for CLR IAsyncEnumerator[T]; bodies are placeholders.
    def MoveNextAsync(self):
        """
        MoveNextAsync(self: IAsyncEnumerator[T]) -> ValueTask[bool]
        Advances the enumerator asynchronously to the next element of the collection.
        Returns: A System.Threading.Tasks.ValueTask that will complete with a result of true if the enumerator
        was successfully advanced to the next element, or false if the enumerator
        has passed the end
        of the collection.
        """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    Current = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Gets the element in the collection at the current position of the enumerator.
    Get: Current(self: IAsyncEnumerator[T]) -> T
    """
class ICollection(IEnumerable[T], IEnumerable):
    # Auto-generated stub for CLR ICollection[T]; bodies are placeholders.
    def Add(self, item):
        """ Add(self: ICollection[T], item: T) """
        pass
    def Clear(self):
        """ Clear(self: ICollection[T]) """
        pass
    def Contains(self, item):
        """ Contains(self: ICollection[T], item: T) -> bool """
        pass
    def CopyTo(self, array, arrayIndex):
        """ CopyTo(self: ICollection[T], array: Array[T], arrayIndex: int) """
        pass
    def Remove(self, item):
        """ Remove(self: ICollection[T], item: T) -> bool """
        pass
    def __add__(self, *args): #cannot find CLR method
        """ x.__add__(y) <==> x+y """
        pass
    def __contains__(self, *args): #cannot find CLR method
        """ __contains__[T](enumerable: IEnumerable[T], value: T) -> bool """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self, *args): #cannot find CLR method
        """ __iter__(self: IEnumerable) -> object """
        pass
    Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Count(self: ICollection[T]) -> int
    """
    IsReadOnly = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: IsReadOnly(self: ICollection[T]) -> bool
    """
class IComparer:
    # Auto-generated stub for CLR IComparer[T]; bodies are placeholders.
    def Compare(self, x, y):
        """ Compare(self: IComparer[T], x: T, y: T) -> int """
        pass
    def __cmp__(self, *args): #cannot find CLR method
        """ x.__cmp__(y) <==> cmp(x,y) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
class IDictionary(ICollection[KeyValuePair[TKey, TValue]], IEnumerable[KeyValuePair[TKey, TValue]], IEnumerable):
    # Auto-generated stub for CLR IDictionary[TKey, TValue]; bodies are placeholders.
    def Add(self, key, value):
        """ Add(self: IDictionary[TKey, TValue], key: TKey, value: TValue) """
        pass
    def ContainsKey(self, key):
        """ ContainsKey(self: IDictionary[TKey, TValue], key: TKey) -> bool """
        pass
    def Remove(self, key):
        """ Remove(self: IDictionary[TKey, TValue], key: TKey) -> bool """
        pass
    def TryGetValue(self, key, value):
        """ TryGetValue(self: IDictionary[TKey, TValue], key: TKey) -> (bool, TValue) """
        pass
    def __add__(self, *args): #cannot find CLR method
        """ x.__add__(y) <==> x+y """
        pass
    def __contains__(self, *args): #cannot find CLR method
        """ __contains__(self: ICollection[KeyValuePair[TKey, TValue]], item: KeyValuePair[TKey, TValue]) -> bool """
        pass
    def __getitem__(self, *args): #cannot find CLR method
        """ x.__getitem__(y) <==> x[y] """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self, *args): #cannot find CLR method
        """ __iter__(self: IEnumerable) -> object """
        pass
    def __len__(self, *args): #cannot find CLR method
        """ x.__len__() <==> len(x) """
        pass
    def __setitem__(self, *args): #cannot find CLR method
        """ x.__setitem__(i, y) <==> x[i]= """
        pass
    Keys = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Keys(self: IDictionary[TKey, TValue]) -> ICollection[TKey]
    """
    Values = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Values(self: IDictionary[TKey, TValue]) -> ICollection[TValue]
    """
class IEnumerable(IEnumerable):
    # Auto-generated stub for CLR IEnumerable[T]; the self-referential base is
    # a generator artifact (the non-generic IEnumerable shadows the same name).
    def GetEnumerator(self):
        """ GetEnumerator(self: IEnumerable[T]) -> IEnumerator[T] """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
class IEnumerator(IDisposable, IEnumerator):
    # Auto-generated stub for CLR IEnumerator[T]; bodies are placeholders.
    def next(self, *args): #cannot find CLR method
        """ next(self: object) -> object """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self, *args): #cannot find CLR method
        """ __iter__[T](self: IEnumerator[T]) -> object """
        pass
    Current = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Current(self: IEnumerator[T]) -> T
    """
class IEqualityComparer:
    # Auto-generated stub for CLR IEqualityComparer[T]; bodies are placeholders.
    def Equals(self, x, y):
        """ Equals(self: IEqualityComparer[T], x: T, y: T) -> bool """
        pass
    def GetHashCode(self, obj):
        """ GetHashCode(self: IEqualityComparer[T], obj: T) -> int """
        pass
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==y """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
class IList(ICollection[T], IEnumerable[T], IEnumerable):
    # Auto-generated stub for CLR IList[T]; bodies are placeholders.
    def IndexOf(self, item):
        """ IndexOf(self: IList[T], item: T) -> int """
        pass
    def Insert(self, index, item):
        """ Insert(self: IList[T], index: int, item: T) """
        pass
    def RemoveAt(self, index):
        """ RemoveAt(self: IList[T], index: int) """
        pass
    def __contains__(self, *args): #cannot find CLR method
        """ __contains__(self: ICollection[T], item: T) -> bool """
        pass
    def __getitem__(self, *args): #cannot find CLR method
        """ x.__getitem__(y) <==> x[y] """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self, *args): #cannot find CLR method
        """ __iter__(self: IEnumerable) -> object """
        pass
    def __len__(self, *args): #cannot find CLR method
        """ x.__len__() <==> len(x) """
        pass
    def __setitem__(self, *args): #cannot find CLR method
        """ x.__setitem__(i, y) <==> x[i]= """
        pass
class IReadOnlyCollection(IEnumerable[T], IEnumerable):
    # Auto-generated stub for CLR IReadOnlyCollection[T]; bodies are placeholders.
    def __contains__(self, *args): #cannot find CLR method
        """ __contains__[T](enumerable: IEnumerable[T], value: T) -> bool """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self, *args): #cannot find CLR method
        """ __iter__(self: IEnumerable) -> object """
        pass
    Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Count(self: IReadOnlyCollection[T]) -> int
    """
class IReadOnlyDictionary(IReadOnlyCollection[KeyValuePair[TKey, TValue]], IEnumerable[KeyValuePair[TKey, TValue]], IEnumerable):
    # Auto-generated stub for CLR IReadOnlyDictionary[TKey, TValue]; bodies are placeholders.
    def ContainsKey(self, key):
        """ ContainsKey(self: IReadOnlyDictionary[TKey, TValue], key: TKey) -> bool """
        pass
    def TryGetValue(self, key, value):
        """ TryGetValue(self: IReadOnlyDictionary[TKey, TValue], key: TKey) -> (bool, TValue) """
        pass
    def __contains__(self, *args): #cannot find CLR method
        """ __contains__[KeyValuePair`2](enumerable: IEnumerable[KeyValuePair[TKey, TValue]], value: KeyValuePair[TKey, TValue]) -> bool """
        pass
    def __getitem__(self, *args): #cannot find CLR method
        """ x.__getitem__(y) <==> x[y] """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self, *args): #cannot find CLR method
        """ __iter__(self: IEnumerable) -> object """
        pass
    Keys = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Keys(self: IReadOnlyDictionary[TKey, TValue]) -> IEnumerable[TKey]
    """
    Values = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Values(self: IReadOnlyDictionary[TKey, TValue]) -> IEnumerable[TValue]
    """
class IReadOnlyList(IReadOnlyCollection[T], IEnumerable[T], IEnumerable):
    # Auto-generated stub for CLR IReadOnlyList[T]; bodies are placeholders.
    def __contains__(self, *args): #cannot find CLR method
        """ __contains__[T](enumerable: IEnumerable[T], value: T) -> bool """
        pass
    def __getitem__(self, *args): #cannot find CLR method
        """ x.__getitem__(y) <==> x[y] """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self, *args): #cannot find CLR method
        """ __iter__(self: IEnumerable) -> object """
        pass
class ISet(ICollection[T], IEnumerable[T], IEnumerable):
    # Auto-generated stub for CLR ISet[T]; bodies are placeholders.
    def Add(self, item):
        """ Add(self: ISet[T], item: T) -> bool """
        pass
    def ExceptWith(self, other):
        """ ExceptWith(self: ISet[T], other: IEnumerable[T]) """
        pass
    def IntersectWith(self, other):
        """ IntersectWith(self: ISet[T], other: IEnumerable[T]) """
        pass
    def IsProperSubsetOf(self, other):
        """ IsProperSubsetOf(self: ISet[T], other: IEnumerable[T]) -> bool """
        pass
    def IsProperSupersetOf(self, other):
        """ IsProperSupersetOf(self: ISet[T], other: IEnumerable[T]) -> bool """
        pass
    def IsSubsetOf(self, other):
        """ IsSubsetOf(self: ISet[T], other: IEnumerable[T]) -> bool """
        pass
    def IsSupersetOf(self, other):
        """ IsSupersetOf(self: ISet[T], other: IEnumerable[T]) -> bool """
        pass
    def Overlaps(self, other):
        """ Overlaps(self: ISet[T], other: IEnumerable[T]) -> bool """
        pass
    def SetEquals(self, other):
        """ SetEquals(self: ISet[T], other: IEnumerable[T]) -> bool """
        pass
    def SymmetricExceptWith(self, other):
        """ SymmetricExceptWith(self: ISet[T], other: IEnumerable[T]) """
        pass
    def UnionWith(self, other):
        """ UnionWith(self: ISet[T], other: IEnumerable[T]) """
        pass
    def __add__(self, *args): #cannot find CLR method
        """ x.__add__(y) <==> x+y """
        pass
    def __contains__(self, *args): #cannot find CLR method
        """ __contains__(self: ICollection[T], item: T) -> bool """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self, *args): #cannot find CLR method
        """ __iter__(self: IEnumerable) -> object """
        pass
    def __len__(self, *args): #cannot find CLR method
        """ x.__len__() <==> len(x) """
        pass
class KeyNotFoundException(SystemException, ISerializable, _Exception):
    """
    KeyNotFoundException()
    KeyNotFoundException(message: str)
    KeyNotFoundException(message: str, innerException: Exception)
    """
    # Auto-generated stub for the CLR exception raised on a missing dictionary key.
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    @staticmethod # known case of __new__
    def __new__(self, message=None, innerException=None):
        """
        __new__(cls: type)
        __new__(cls: type, message: str)
        __new__(cls: type, message: str, innerException: Exception)
        __new__(cls: type, info: SerializationInfo, context: StreamingContext)
        """
        pass
    def __reduce_ex__(self, *args): #cannot find CLR method
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    # Runtime-populated member; None in the static stub.
    SerializeObjectState = None
class KeyValuePair(object):
    """ KeyValuePair[TKey, TValue](key: TKey, value: TValue) """
    # Auto-generated stub for the CLR value type KeyValuePair[TKey, TValue].
    def ToString(self):
        """ ToString(self: KeyValuePair[TKey, TValue]) -> str """
        pass
    @staticmethod # known case of __new__
    def __new__(self, key, value):
        """
        __new__[KeyValuePair`2]() -> KeyValuePair[TKey, TValue]
        __new__(cls: type, key: TKey, value: TValue)
        """
        pass
    Key = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Key(self: KeyValuePair[TKey, TValue]) -> TKey
    """
    Value = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Value(self: KeyValuePair[TKey, TValue]) -> TValue
    """
class LinkedList(object, ICollection[T], IEnumerable[T], IEnumerable, ICollection, IReadOnlyCollection[T], ISerializable, IDeserializationCallback):
"""
LinkedList[T]()
LinkedList[T](collection: IEnumerable[T])
"""
def AddAfter(self, node, *__args):
""" AddAfter(self: LinkedList[T], node: LinkedListNode[T], newNode: LinkedListNode[T])AddAfter(self: LinkedList[T], node: LinkedListNode[T], value: T) -> LinkedListNode[T] """
pass
def AddBefore(self, node, *__args):
"""
AddBefore(self: LinkedList[T], node: LinkedListNode[T], value: T) -> LinkedListNode[T]
AddBefore(self: LinkedList[T], node: LinkedListNode[T], newNode: LinkedListNode[T])
"""
pass
def AddFirst(self, *__args):
"""
AddFirst(self: LinkedList[T], value: T) -> LinkedListNode[T]
AddFirst(self: LinkedList[T], node: LinkedListNode[T])
"""
pass
def AddLast(self, *__args):
"""
AddLast(self: LinkedList[T], value: T) -> LinkedListNode[T]
AddLast(self: LinkedList[T], node: LinkedListNode[T])
"""
pass
def Clear(self):
""" Clear(self: LinkedList[T]) """
pass
def Contains(self, value):
""" Contains(self: LinkedList[T], value: T) -> bool """
pass
def CopyTo(self, array, index):
""" CopyTo(self: LinkedList[T], array: Array[T], index: int) """
pass
def Find(self, value):
""" Find(self: LinkedList[T], value: T) -> LinkedListNode[T] """
pass
def FindLast(self, value):
""" FindLast(self: LinkedList[T], value: T) -> LinkedListNode[T] """
pass
def GetEnumerator(self):
""" GetEnumerator(self: LinkedList[T]) -> Enumerator """
pass
def GetObjectData(self, info, context):
""" GetObjectData(self: LinkedList[T], info: SerializationInfo, context: StreamingContext) """
pass
def OnDeserialization(self, sender):
""" OnDeserialization(self: LinkedList[T], sender: object) """
pass
def Remove(self, *__args):
"""
Remove(self: LinkedList[T], value: T) -> bool
Remove(self: LinkedList[T], node: LinkedListNode[T])
"""
pass
def RemoveFirst(self):
""" RemoveFirst(self: LinkedList[T]) """
pass
def RemoveLast(self):
""" RemoveLast(self: LinkedList[T]) """
pass
def __contains__(self, *args): #cannot find CLR method
""" __contains__(self: ICollection[T], item: T) -> bool """
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self, *args): #cannot find CLR method
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self, *args): #cannot find CLR method
""" x.__len__() <==> len(x) """
pass
@staticmethod # known case of __new__
def __new__(self, collection=None):
"""
__new__(cls: type)
__new__(cls: type, collection: IEnumerable[T])
__new__(cls: type, info: SerializationInfo, context: StreamingContext)
"""
pass
def __reduce_ex__(self, *args): #cannot find CLR method
pass
def __repr__(self, *args): #cannot find CLR method
""" __repr__(self: object) -> str """
pass
Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Count(self: LinkedList[T]) -> int
"""
First = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: First(self: LinkedList[T]) -> LinkedListNode[T]
"""
Last = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Last(self: LinkedList[T]) -> LinkedListNode[T]
"""
Enumerator = None
class LinkedListNode(object):
""" LinkedListNode[T](value: T) """
@staticmethod # known case of __new__
def __new__(self, value):
""" __new__(cls: type, value: T) """
pass
List = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: List(self: LinkedListNode[T]) -> LinkedList[T]
"""
Next = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Next(self: LinkedListNode[T]) -> LinkedListNode[T]
"""
Previous = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Previous(self: LinkedListNode[T]) -> LinkedListNode[T]
"""
Value = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Value(self: LinkedListNode[T]) -> T
Set: Value(self: LinkedListNode[T]) = value
"""
class List(object, IList[T], ICollection[T], IEnumerable[T], IEnumerable, IList, ICollection, IReadOnlyList[T], IReadOnlyCollection[T]):
"""
List[T]()
List[T](capacity: int)
List[T](collection: IEnumerable[T])
"""
def Add(self, item):
""" Add(self: List[T], item: T) """
pass
def AddRange(self, collection):
""" AddRange(self: List[T], collection: IEnumerable[T]) """
pass
def AsReadOnly(self):
""" AsReadOnly(self: List[T]) -> ReadOnlyCollection[T] """
pass
def BinarySearch(self, *__args):
"""
BinarySearch(self: List[T], index: int, count: int, item: T, comparer: IComparer[T]) -> int
BinarySearch(self: List[T], item: T) -> int
BinarySearch(self: List[T], item: T, comparer: IComparer[T]) -> int
"""
pass
def Clear(self):
""" Clear(self: List[T]) """
pass
def Contains(self, item):
""" Contains(self: List[T], item: T) -> bool """
pass
def ConvertAll(self, converter):
""" ConvertAll[TOutput](self: List[T], converter: Converter[T, TOutput]) -> List[TOutput] """
pass
def CopyTo(self, *__args):
""" CopyTo(self: List[T], index: int, array: Array[T], arrayIndex: int, count: int)CopyTo(self: List[T], array: Array[T])CopyTo(self: List[T], array: Array[T], arrayIndex: int) """
pass
def Exists(self, match):
""" Exists(self: List[T], match: Predicate[T]) -> bool """
pass
def Find(self, match):
""" Find(self: List[T], match: Predicate[T]) -> T """
pass
def FindAll(self, match):
""" FindAll(self: List[T], match: Predicate[T]) -> List[T] """
pass
def FindIndex(self, *__args):
"""
FindIndex(self: List[T], match: Predicate[T]) -> int
FindIndex(self: List[T], startIndex: int, match: Predicate[T]) -> int
FindIndex(self: List[T], startIndex: int, count: int, match: Predicate[T]) -> int
"""
pass
def FindLast(self, match):
""" FindLast(self: List[T], match: Predicate[T]) -> T """
pass
def FindLastIndex(self, *__args):
"""
FindLastIndex(self: List[T], match: Predicate[T]) -> int
FindLastIndex(self: List[T], startIndex: int, match: Predicate[T]) -> int
FindLastIndex(self: List[T], startIndex: int, count: int, match: Predicate[T]) -> int
"""
pass
def ForEach(self, action):
""" ForEach(self: List[T], action: Action[T]) """
pass
def GetEnumerator(self):
""" GetEnumerator(self: List[T]) -> Enumerator """
pass
def GetRange(self, index, count):
""" GetRange(self: List[T], index: int, count: int) -> List[T] """
pass
def IndexOf(self, item, index=None, count=None):
"""
IndexOf(self: List[T], item: T) -> int
IndexOf(self: List[T], item: T, index: int) -> int
IndexOf(self: List[T], item: T, index: int, count: int) -> int
"""
pass
def Insert(self, index, item):
""" Insert(self: List[T], index: int, item: T) """
pass
def InsertRange(self, index, collection):
""" InsertRange(self: List[T], index: int, collection: IEnumerable[T]) """
pass
def LastIndexOf(self, item, index=None, count=None):
"""
LastIndexOf(self: List[T], item: T) -> int
LastIndexOf(self: List[T], item: T, index: int) -> int
LastIndexOf(self: List[T], item: T, index: int, count: int) -> int
"""
pass
def Remove(self, item):
""" Remove(self: List[T], item: T) -> bool """
pass
def RemoveAll(self, match):
""" RemoveAll(self: List[T], match: Predicate[T]) -> int """
pass
def RemoveAt(self, index):
""" RemoveAt(self: List[T], index: int) """
pass
def RemoveRange(self, index, count):
""" RemoveRange(self: List[T], index: int, count: int) """
pass
def Reverse(self, index=None, count=None):
""" Reverse(self: List[T], index: int, count: int)Reverse(self: List[T]) """
pass
def Sort(self, *__args):
""" Sort(self: List[T])Sort(self: List[T], comparer: IComparer[T])Sort(self: List[T], index: int, count: int, comparer: IComparer[T])Sort(self: List[T], comparison: Comparison[T]) """
pass
def ToArray(self):
""" ToArray(self: List[T]) -> Array[T] """
pass
def TrimExcess(self):
""" TrimExcess(self: List[T]) """
pass
def TrueForAll(self, match):
""" TrueForAll(self: List[T], match: Predicate[T]) -> bool """
pass
def __add__(self, *args): #cannot find CLR method
""" x.__add__(y) <==> x+y """
pass
def __contains__(self, *args): #cannot find CLR method
"""
__contains__(self: ICollection[T], item: T) -> bool
__contains__(self: IList, value: object) -> bool
"""
pass
def __delitem__(self, *args): #cannot find CLR method
""" x.__delitem__(y) <==> del x[y]x.__delitem__(y) <==> del x[y]x.__delitem__(y) <==> del x[y] """
pass
def __getitem__(self, *args): #cannot find CLR method
""" x.__getitem__(y) <==> x[y] """
pass
def __getslice__(self, *args): #cannot find CLR method
"""
__getslice__(self: List[T], x: int, y: int) -> List[T]
__getslice__(self: List[T], x: int, y: int) -> List[T]
"""
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self, *args): #cannot find CLR method
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self, *args): #cannot find CLR method
""" x.__len__() <==> len(x) """
pass
@staticmethod # known case of __new__
def __new__(self, *__args):
"""
__new__(cls: type)
__new__(cls: type, capacity: int)
__new__(cls: type, collection: IEnumerable[T])
"""
pass
def __reduce_ex__(self, *args): #cannot find CLR method
pass
def __repr__(self, *args): #cannot find CLR method
"""
__repr__(self: List[T]) -> str
__repr__(self: List[T]) -> str
"""
pass
def __setitem__(self, *args): #cannot find CLR method
""" x.__setitem__(i, y) <==> x[i]= """
pass
Capacity = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Capacity(self: List[T]) -> int
Set: Capacity(self: List[T]) = value
"""
Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Count(self: List[T]) -> int
"""
Enumerator = None
class Queue(object, IEnumerable[T], IEnumerable, ICollection, IReadOnlyCollection[T]):
"""
Queue[T]()
Queue[T](capacity: int)
Queue[T](collection: IEnumerable[T])
"""
def Clear(self):
""" Clear(self: Queue[T]) """
pass
def Contains(self, item):
""" Contains(self: Queue[T], item: T) -> bool """
pass
def CopyTo(self, array, arrayIndex):
""" CopyTo(self: Queue[T], array: Array[T], arrayIndex: int) """
pass
def Dequeue(self):
""" Dequeue(self: Queue[T]) -> T """
pass
def Enqueue(self, item):
""" Enqueue(self: Queue[T], item: T) """
pass
def GetEnumerator(self):
""" GetEnumerator(self: Queue[T]) -> Enumerator """
pass
def Peek(self):
""" Peek(self: Queue[T]) -> T """
pass
def ToArray(self):
""" ToArray(self: Queue[T]) -> Array[T] """
pass
def TrimExcess(self):
""" TrimExcess(self: Queue[T]) """
pass
def __contains__(self, *args): #cannot find CLR method
""" __contains__[T](enumerable: IEnumerable[T], value: T) -> bool """
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self, *args): #cannot find CLR method
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self, *args): #cannot find CLR method
""" x.__len__() <==> len(x) """
pass
@staticmethod # known case of __new__
def __new__(self, *__args):
"""
__new__(cls: type)
__new__(cls: type, capacity: int)
__new__(cls: type, collection: IEnumerable[T])
"""
pass
def __reduce_ex__(self, *args): #cannot find CLR method
pass
def __repr__(self, *args): #cannot find CLR method
""" __repr__(self: object) -> str """
pass
Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Count(self: Queue[T]) -> int
"""
Enumerator = None
class SortedDictionary(object, IDictionary[TKey, TValue], ICollection[KeyValuePair[TKey, TValue]], IEnumerable[KeyValuePair[TKey, TValue]], IEnumerable, IDictionary, ICollection, IReadOnlyDictionary[TKey, TValue], IReadOnlyCollection[KeyValuePair[TKey, TValue]]):
"""
SortedDictionary[TKey, TValue]()
SortedDictionary[TKey, TValue](dictionary: IDictionary[TKey, TValue])
SortedDictionary[TKey, TValue](dictionary: IDictionary[TKey, TValue], comparer: IComparer[TKey])
SortedDictionary[TKey, TValue](comparer: IComparer[TKey])
"""
def Add(self, key, value):
""" Add(self: SortedDictionary[TKey, TValue], key: TKey, value: TValue) """
pass
def Clear(self):
""" Clear(self: SortedDictionary[TKey, TValue]) """
pass
def ContainsKey(self, key):
""" ContainsKey(self: SortedDictionary[TKey, TValue], key: TKey) -> bool """
pass
def ContainsValue(self, value):
""" ContainsValue(self: SortedDictionary[TKey, TValue], value: TValue) -> bool """
pass
def CopyTo(self, array, index):
""" CopyTo(self: SortedDictionary[TKey, TValue], array: Array[KeyValuePair[TKey, TValue]], index: int) """
pass
def GetEnumerator(self):
""" GetEnumerator(self: SortedDictionary[TKey, TValue]) -> Enumerator """
pass
def Remove(self, key):
""" Remove(self: SortedDictionary[TKey, TValue], key: TKey) -> bool """
pass
def TryGetValue(self, key, value):
""" TryGetValue(self: SortedDictionary[TKey, TValue], key: TKey) -> (bool, TValue) """
pass
def __add__(self, *args): #cannot find CLR method
""" x.__add__(y) <==> x+y """
pass
def __contains__(self, *args): #cannot find CLR method
"""
__contains__(self: IDictionary[TKey, TValue], key: TKey) -> bool
__contains__(self: IDictionary, key: object) -> bool
"""
pass
def __getitem__(self, *args): #cannot find CLR method
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self, *args): #cannot find CLR method
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self, *args): #cannot find CLR method
""" x.__len__() <==> len(x) """
pass
@staticmethod # known case of __new__
def __new__(self, *__args):
"""
__new__(cls: type)
__new__(cls: type, dictionary: IDictionary[TKey, TValue])
__new__(cls: type, dictionary: IDictionary[TKey, TValue], comparer: IComparer[TKey])
__new__(cls: type, comparer: IComparer[TKey])
"""
pass
def __reduce_ex__(self, *args): #cannot find CLR method
pass
def __repr__(self, *args): #cannot find CLR method
""" __repr__(self: object) -> str """
pass
def __setitem__(self, *args): #cannot find CLR method
""" x.__setitem__(i, y) <==> x[i]= """
pass
Comparer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Comparer(self: SortedDictionary[TKey, TValue]) -> IComparer[TKey]
"""
Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Count(self: SortedDictionary[TKey, TValue]) -> int
"""
Keys = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Keys(self: SortedDictionary[TKey, TValue]) -> KeyCollection
"""
Values = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Values(self: SortedDictionary[TKey, TValue]) -> ValueCollection
"""
Enumerator = None
KeyCollection = None
ValueCollection = None
class SortedList(object, IDictionary[TKey, TValue], ICollection[KeyValuePair[TKey, TValue]], IEnumerable[KeyValuePair[TKey, TValue]], IEnumerable, IDictionary, ICollection, IReadOnlyDictionary[TKey, TValue], IReadOnlyCollection[KeyValuePair[TKey, TValue]]):
"""
SortedList[TKey, TValue]()
SortedList[TKey, TValue](capacity: int)
SortedList[TKey, TValue](comparer: IComparer[TKey])
SortedList[TKey, TValue](capacity: int, comparer: IComparer[TKey])
SortedList[TKey, TValue](dictionary: IDictionary[TKey, TValue])
SortedList[TKey, TValue](dictionary: IDictionary[TKey, TValue], comparer: IComparer[TKey])
"""
def Add(self, key, value):
""" Add(self: SortedList[TKey, TValue], key: TKey, value: TValue) """
pass
def Clear(self):
""" Clear(self: SortedList[TKey, TValue]) """
pass
def ContainsKey(self, key):
""" ContainsKey(self: SortedList[TKey, TValue], key: TKey) -> bool """
pass
def ContainsValue(self, value):
""" ContainsValue(self: SortedList[TKey, TValue], value: TValue) -> bool """
pass
def GetEnumerator(self):
""" GetEnumerator(self: SortedList[TKey, TValue]) -> IEnumerator[KeyValuePair[TKey, TValue]] """
pass
def IndexOfKey(self, key):
""" IndexOfKey(self: SortedList[TKey, TValue], key: TKey) -> int """
pass
def IndexOfValue(self, value):
""" IndexOfValue(self: SortedList[TKey, TValue], value: TValue) -> int """
pass
def Remove(self, key):
""" Remove(self: SortedList[TKey, TValue], key: TKey) -> bool """
pass
def RemoveAt(self, index):
""" RemoveAt(self: SortedList[TKey, TValue], index: int) """
pass
def TrimExcess(self):
""" TrimExcess(self: SortedList[TKey, TValue]) """
pass
def TryGetValue(self, key, value):
""" TryGetValue(self: SortedList[TKey, TValue], key: TKey) -> (bool, TValue) """
pass
def __add__(self, *args): #cannot find CLR method
""" x.__add__(y) <==> x+y """
pass
def __contains__(self, *args): #cannot find CLR method
"""
__contains__(self: IDictionary[TKey, TValue], key: TKey) -> bool
__contains__(self: IDictionary, key: object) -> bool
"""
pass
def __getitem__(self, *args): #cannot find CLR method
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self, *args): #cannot find CLR method
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self, *args): #cannot find CLR method
""" x.__len__() <==> len(x) """
pass
@staticmethod # known case of __new__
def __new__(self, *__args):
"""
__new__(cls: type)
__new__(cls: type, capacity: int)
__new__(cls: type, comparer: IComparer[TKey])
__new__(cls: type, capacity: int, comparer: IComparer[TKey])
__new__(cls: type, dictionary: IDictionary[TKey, TValue])
__new__(cls: type, dictionary: IDictionary[TKey, TValue], comparer: IComparer[TKey])
"""
pass
def __reduce_ex__(self, *args): #cannot find CLR method
pass
def __repr__(self, *args): #cannot find CLR method
""" __repr__(self: object) -> str """
pass
def __setitem__(self, *args): #cannot find CLR method
""" x.__setitem__(i, y) <==> x[i]= """
pass
Capacity = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Capacity(self: SortedList[TKey, TValue]) -> int
Set: Capacity(self: SortedList[TKey, TValue]) = value
"""
Comparer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Comparer(self: SortedList[TKey, TValue]) -> IComparer[TKey]
"""
Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Count(self: SortedList[TKey, TValue]) -> int
"""
Keys = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Keys(self: SortedList[TKey, TValue]) -> IList[TKey]
"""
Values = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Values(self: SortedList[TKey, TValue]) -> IList[TValue]
"""
class SortedSet(object, ISet[T], ICollection[T], IEnumerable[T], IEnumerable, ICollection, ISerializable, IDeserializationCallback, IReadOnlyCollection[T]):
"""
SortedSet[T]()
SortedSet[T](comparer: IComparer[T])
SortedSet[T](collection: IEnumerable[T])
SortedSet[T](collection: IEnumerable[T], comparer: IComparer[T])
"""
def Add(self, item):
""" Add(self: SortedSet[T], item: T) -> bool """
pass
def Clear(self):
""" Clear(self: SortedSet[T]) """
pass
def Contains(self, item):
""" Contains(self: SortedSet[T], item: T) -> bool """
pass
def CopyTo(self, array, index=None, count=None):
""" CopyTo(self: SortedSet[T], array: Array[T])CopyTo(self: SortedSet[T], array: Array[T], index: int)CopyTo(self: SortedSet[T], array: Array[T], index: int, count: int) """
pass
@staticmethod
def CreateSetComparer(memberEqualityComparer=None):
"""
CreateSetComparer() -> IEqualityComparer[SortedSet[T]]
CreateSetComparer(memberEqualityComparer: IEqualityComparer[T]) -> IEqualityComparer[SortedSet[T]]
"""
pass
def ExceptWith(self, other):
""" ExceptWith(self: SortedSet[T], other: IEnumerable[T]) """
pass
def GetEnumerator(self):
""" GetEnumerator(self: SortedSet[T]) -> Enumerator """
pass
def GetObjectData(self, *args): #cannot find CLR method
""" GetObjectData(self: SortedSet[T], info: SerializationInfo, context: StreamingContext) """
pass
def GetViewBetween(self, lowerValue, upperValue):
""" GetViewBetween(self: SortedSet[T], lowerValue: T, upperValue: T) -> SortedSet[T] """
pass
def IntersectWith(self, other):
""" IntersectWith(self: SortedSet[T], other: IEnumerable[T]) """
pass
def IsProperSubsetOf(self, other):
""" IsProperSubsetOf(self: SortedSet[T], other: IEnumerable[T]) -> bool """
pass
def IsProperSupersetOf(self, other):
""" IsProperSupersetOf(self: SortedSet[T], other: IEnumerable[T]) -> bool """
pass
def IsSubsetOf(self, other):
""" IsSubsetOf(self: SortedSet[T], other: IEnumerable[T]) -> bool """
pass
def IsSupersetOf(self, other):
""" IsSupersetOf(self: SortedSet[T], other: IEnumerable[T]) -> bool """
pass
def OnDeserialization(self, *args): #cannot find CLR method
""" OnDeserialization(self: SortedSet[T], sender: object) """
pass
def Overlaps(self, other):
""" Overlaps(self: SortedSet[T], other: IEnumerable[T]) -> bool """
pass
def Remove(self, item):
""" Remove(self: SortedSet[T], item: T) -> bool """
pass
def RemoveWhere(self, match):
""" RemoveWhere(self: SortedSet[T], match: Predicate[T]) -> int """
pass
def Reverse(self):
""" Reverse(self: SortedSet[T]) -> IEnumerable[T] """
pass
def SetEquals(self, other):
""" SetEquals(self: SortedSet[T], other: IEnumerable[T]) -> bool """
pass
def SymmetricExceptWith(self, other):
""" SymmetricExceptWith(self: SortedSet[T], other: IEnumerable[T]) """
pass
def TryGetValue(self, equalValue, actualValue):
""" TryGetValue(self: SortedSet[T], equalValue: T) -> (bool, T) """
pass
def UnionWith(self, other):
""" UnionWith(self: SortedSet[T], other: IEnumerable[T]) """
pass
def __add__(self, *args): #cannot find CLR method
""" x.__add__(y) <==> x+y """
pass
def __contains__(self, *args): #cannot find CLR method
""" __contains__(self: ICollection[T], item: T) -> bool """
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self, *args): #cannot find CLR method
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self, *args): #cannot find CLR method
""" x.__len__() <==> len(x) """
pass
@staticmethod # known case of __new__
def __new__(self, *__args):
"""
__new__(cls: type)
__new__(cls: type, comparer: IComparer[T])
__new__(cls: type, collection: IEnumerable[T])
__new__(cls: type, collection: IEnumerable[T], comparer: IComparer[T])
__new__(cls: type, info: SerializationInfo, context: StreamingContext)
"""
pass
def __reduce_ex__(self, *args): #cannot find CLR method
pass
def __repr__(self, *args): #cannot find CLR method
""" __repr__(self: object) -> str """
pass
Comparer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Comparer(self: SortedSet[T]) -> IComparer[T]
"""
Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Count(self: SortedSet[T]) -> int
"""
Max = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Max(self: SortedSet[T]) -> T
"""
Min = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Min(self: SortedSet[T]) -> T
"""
Enumerator = None
class Stack(object, IEnumerable[T], IEnumerable, ICollection, IReadOnlyCollection[T]):
"""
Stack[T]()
Stack[T](capacity: int)
Stack[T](collection: IEnumerable[T])
"""
def Clear(self):
""" Clear(self: Stack[T]) """
pass
def Contains(self, item):
""" Contains(self: Stack[T], item: T) -> bool """
pass
def CopyTo(self, array, arrayIndex):
""" CopyTo(self: Stack[T], array: Array[T], arrayIndex: int) """
pass
def GetEnumerator(self):
""" GetEnumerator(self: Stack[T]) -> Enumerator """
pass
def Peek(self):
""" Peek(self: Stack[T]) -> T """
pass
def Pop(self):
""" Pop(self: Stack[T]) -> T """
pass
def Push(self, item):
""" Push(self: Stack[T], item: T) """
pass
def ToArray(self):
""" ToArray(self: Stack[T]) -> Array[T] """
pass
def TrimExcess(self):
""" TrimExcess(self: Stack[T]) """
pass
def __contains__(self, *args): #cannot find CLR method
""" __contains__[T](enumerable: IEnumerable[T], value: T) -> bool """
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __iter__(self, *args): #cannot find CLR method
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self, *args): #cannot find CLR method
""" x.__len__() <==> len(x) """
pass
@staticmethod # known case of __new__
def __new__(self, *__args):
"""
__new__(cls: type)
__new__(cls: type, capacity: int)
__new__(cls: type, collection: IEnumerable[T])
"""
pass
def __reduce_ex__(self, *args): #cannot find CLR method
pass
def __repr__(self, *args): #cannot find CLR method
""" __repr__(self: object) -> str """
pass
Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Count(self: Stack[T]) -> int
"""
Enumerator = None
| 34.024209
| 299
| 0.582161
| 5,981
| 54,813
| 5.026417
| 0.046146
| 0.056348
| 0.056348
| 0.072448
| 0.818115
| 0.778199
| 0.732761
| 0.664105
| 0.620896
| 0.551941
| 0
| 0.001227
| 0.271323
| 54,813
| 1,610
| 300
| 34.045342
| 0.751458
| 0.460092
| 0
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.430769
| false
| 0.430769
| 0
| 0
| 0.549231
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
3a8b7ad68456f13d0ff20e658c208b7772b631cb
| 2,927
|
py
|
Python
|
test/test_algs.py
|
emccarthy23/example
|
3b30c22fb5ee36046ae238340380a8802afe879d
|
[
"Apache-2.0"
] | null | null | null |
test/test_algs.py
|
emccarthy23/example
|
3b30c22fb5ee36046ae238340380a8802afe879d
|
[
"Apache-2.0"
] | null | null | null |
test/test_algs.py
|
emccarthy23/example
|
3b30c22fb5ee36046ae238340380a8802afe879d
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from example import algs
def test_bubblesort():
#Empty vector
x = []
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.bubblesort(x), sorted(x))
else:
print(algs.only_integers(x))
#One entry
x = [1]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.bubblesort(x), sorted(x))
else:
print(algs.only_integers(x))
#Character entry
x = ["a",1,2]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.bubblesort(x), sorted(x))
else:
print(algs.only_integers(x))
#Non-integer entry
x = [0.5,1,2]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.bubblesort(x), sorted(x))
else:
print(algs.only_integers(x))
#Repeating entry (case 1)
x = [6,1,3,6]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.bubblesort(x), sorted(x))
else:
print(algs.only_integers(x))
#Repeating entry (case 2)
x = [8,9,8,9]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.bubblesort(x), sorted(x))
else:
print(algs.only_integers(x))
#Odd length entry
x = [7,4,5]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.bubblesort(x), sorted(x))
else:
print(algs.only_integers(x))
#Even length entry
x = [7,4,5,1]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.bubblesort(x), sorted(x))
else:
print(algs.only_integers(x))
def test_quicksort():
#Empty vector
x = []
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.quicksort(x), sorted(x))
else:
print(algs.only_integers(x))
#One entry
x = [1]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.quicksort(x), sorted(x))
else:
print(algs.only_integers(x))
#Character entry
x = ["a",1,2]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.quicksort(x), sorted(x))
else:
print(algs.only_integers(x))
#Non-integer entry
x = [0.5,1,2]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.quicksort(x), sorted(x))
else:
print(algs.only_integers(x))
#Repeating entry (case 1)
x = [6,1,3,6]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.quicksort(x), sorted(x))
else:
print(algs.only_integers(x))
#Repeating entry (case 2)
x = [8,9,8,9]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.quicksort(x), sorted(x))
else:
print(algs.only_integers(x))
#Odd length entry
x = [7,4,5]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.quicksort(x), sorted(x))
else:
print(algs.only_integers(x))
#Even length entry
x = [7,4,5,1]
#check
if algs.only_integers(x) == x:
assert np.array_equal(algs.quicksort(x), sorted(x))
else:
print(algs.only_integers(x))
| 22.007519
| 56
| 0.635121
| 474
| 2,927
| 3.816456
| 0.094937
| 0.141515
| 0.283029
| 0.300719
| 0.961857
| 0.961857
| 0.961857
| 0.961857
| 0.961857
| 0.961857
| 0
| 0.020627
| 0.204988
| 2,927
| 132
| 57
| 22.174242
| 0.756768
| 0.118893
| 0
| 0.952381
| 0
| 0
| 0.000785
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 1
| 0.02381
| false
| 0
| 0.02381
| 0
| 0.047619
| 0.190476
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
aaeff0b982333821b78ea9046c55b10b6d635527
| 586
|
py
|
Python
|
ss5/4-6.py
|
DuongVu39/C4E10_Duong
|
60ec59bddbb3397b5a1804930d5bdfd81107dcae
|
[
"MIT"
] | null | null | null |
ss5/4-6.py
|
DuongVu39/C4E10_Duong
|
60ec59bddbb3397b5a1804930d5bdfd81107dcae
|
[
"MIT"
] | null | null | null |
ss5/4-6.py
|
DuongVu39/C4E10_Duong
|
60ec59bddbb3397b5a1804930d5bdfd81107dcae
|
[
"MIT"
] | null | null | null |
<<<<<<< HEAD
from turtle import *
bgcolor('lightgreen')
color('blue')
t= Turtle()
def draw_poly (t, n, sz):
t = Turtle()
for i in range (n):
t.fd(sz)
t.left(360/n)
def draw_equitriangle (t,sz):
draw_poly(t,3,sz)
draw_equitriangle (t, 50)
=======
from turtle import *
bgcolor('lightgreen')
color('blue')
t= Turtle()
def draw_poly (t, n, sz):
t = Turtle()
for i in range (n):
t.fd(sz)
t.left(360/n)
def draw_equitriangle (t,sz):
draw_poly(t,3,sz)
draw_equitriangle (t, 50)
>>>>>>> 687005e51286e9522a42a2d33dcef452fb0a05b2
| 15.421053
| 48
| 0.600683
| 88
| 586
| 3.909091
| 0.284091
| 0.081395
| 0.104651
| 0.133721
| 0.872093
| 0.872093
| 0.872093
| 0.872093
| 0.872093
| 0.872093
| 0
| 0.085714
| 0.223549
| 586
| 37
| 49
| 15.837838
| 0.67033
| 0
| 0
| 0.888889
| 0
| 0
| 0.047782
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.074074
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a34db0ed0a9d3ac5f110e112bc5a14d3e505e4fb
| 158
|
py
|
Python
|
attachments/ros_ws/local_planner_py/scripts/crowd_nav/policy/__init__.py
|
sustech-isus/AEMCARL
|
241b3c94112efb2944a27e4cc3eb1d65775edc10
|
[
"MIT"
] | 6
|
2021-05-18T15:21:12.000Z
|
2021-12-12T08:28:46.000Z
|
attachments/ros_ws/local_planner_py/scripts/crowd_nav/policy/__init__.py
|
SJWang2015/ACTENVCARL
|
241b3c94112efb2944a27e4cc3eb1d65775edc10
|
[
"MIT"
] | null | null | null |
attachments/ros_ws/local_planner_py/scripts/crowd_nav/policy/__init__.py
|
SJWang2015/ACTENVCARL
|
241b3c94112efb2944a27e4cc3eb1d65775edc10
|
[
"MIT"
] | 1
|
2021-11-30T13:08:45.000Z
|
2021-11-30T13:08:45.000Z
|
import sys
sys.path.append('/home/kevinlad/icra_ws/src/local_planner_py/scripts')
sys.path.append('/home/kevinlad/icra_ws/src/local_planner_py/scripts/envs')
| 39.5
| 75
| 0.816456
| 27
| 158
| 4.555556
| 0.518519
| 0.113821
| 0.211382
| 0.276423
| 0.894309
| 0.894309
| 0.894309
| 0.894309
| 0.894309
| 0.894309
| 0
| 0
| 0.025316
| 158
| 3
| 76
| 52.666667
| 0.798701
| 0
| 0
| 0
| 0
| 0
| 0.677215
| 0.677215
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 14
|
6e66c95c28c162c16e91297410a4d4dc186f6c42
| 130
|
py
|
Python
|
tests/services/blockchain_follower/handlers/test_mention.py
|
dpays/dsocial-notifications
|
32b1cdcd58d622407fd50206053c5b9735a56ba9
|
[
"MIT"
] | 10
|
2017-10-22T20:07:40.000Z
|
2018-08-01T21:48:49.000Z
|
tests/services/blockchain_follower/handlers/test_mention.py
|
dpays/dsocial-notifications
|
32b1cdcd58d622407fd50206053c5b9735a56ba9
|
[
"MIT"
] | 81
|
2017-08-19T15:38:32.000Z
|
2020-05-12T09:56:14.000Z
|
tests/services/blockchain_follower/handlers/test_mention.py
|
dpays/dsocial-notifications
|
32b1cdcd58d622407fd50206053c5b9735a56ba9
|
[
"MIT"
] | 9
|
2017-09-19T07:12:20.000Z
|
2021-05-25T17:09:27.000Z
|
# -*- coding: utf-8 -*-
from yo.services.blockchain_follower.handlers import handle_mention
def test_handle_mention():
pass
| 18.571429
| 67
| 0.746154
| 17
| 130
| 5.470588
| 0.882353
| 0.27957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008929
| 0.138462
| 130
| 6
| 68
| 21.666667
| 0.821429
| 0.161538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
6e986bda12041dda95290cbde987c718a46c2ef2
| 137
|
py
|
Python
|
openff/toolkit/utils/__init__.py
|
ijpulidos/openff-toolkit
|
24953a407c853411bee9584d29fa0fb953e59151
|
[
"MIT"
] | 120
|
2017-11-21T16:47:37.000Z
|
2021-01-11T09:59:24.000Z
|
openff/toolkit/utils/__init__.py
|
ijpulidos/openff-toolkit
|
24953a407c853411bee9584d29fa0fb953e59151
|
[
"MIT"
] | 725
|
2017-11-14T22:21:48.000Z
|
2021-01-11T16:51:47.000Z
|
openff/toolkit/utils/__init__.py
|
justinGilmer/openforcefield
|
1bb07cfa4ceffee4a1df760e44fdbdb1d281d1c7
|
[
"MIT"
] | 37
|
2017-12-02T01:14:58.000Z
|
2020-11-05T00:10:18.000Z
|
# General utilities for force fields
from openff.toolkit.utils.utils import * # isort:skip
from openff.toolkit.utils.toolkits import *
| 27.4
| 54
| 0.788321
| 19
| 137
| 5.684211
| 0.684211
| 0.185185
| 0.314815
| 0.407407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131387
| 137
| 4
| 55
| 34.25
| 0.907563
| 0.328467
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6ebaeb8471ae318a03e25e5dce70dd01b217df8a
| 176
|
py
|
Python
|
treebankanalytics/writers/__init__.py
|
Cocophotos/TreebankAnalytics
|
cf45e24cecb0b187a9b6ec5a55a836c7ab5ffb01
|
[
"MIT"
] | 2
|
2015-10-28T21:12:36.000Z
|
2016-09-08T14:00:41.000Z
|
treebankanalytics/writers/__init__.py
|
Cocophotos/TreebankAnalytics
|
cf45e24cecb0b187a9b6ec5a55a836c7ab5ffb01
|
[
"MIT"
] | null | null | null |
treebankanalytics/writers/__init__.py
|
Cocophotos/TreebankAnalytics
|
cf45e24cecb0b187a9b6ec5a55a836c7ab5ffb01
|
[
"MIT"
] | null | null | null |
from treebankanalytics.writers import sagae
from treebankanalytics.writers import sdp
from treebankanalytics.writers import sequoia
from treebankanalytics.writers import tikz
| 29.333333
| 45
| 0.880682
| 20
| 176
| 7.75
| 0.4
| 0.541935
| 0.722581
| 0.877419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096591
| 176
| 5
| 46
| 35.2
| 0.974843
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
6e3e8ddc7b1a9547109aec62d494145d2ed336e3
| 49,939
|
py
|
Python
|
btk20_src/lib/cepstralFrontend.py
|
musiclvme/distant_speech_recognition
|
60f867383488ac45c2fa3a5433736fdf00dd4f1d
|
[
"MIT"
] | 136
|
2018-12-06T06:35:44.000Z
|
2022-03-27T15:07:42.000Z
|
btk20_src/lib/cepstralFrontend.py
|
musiclvme/distant_speech_recognition
|
60f867383488ac45c2fa3a5433736fdf00dd4f1d
|
[
"MIT"
] | 25
|
2018-12-03T04:33:24.000Z
|
2021-07-28T22:01:37.000Z
|
btk20_src/lib/cepstralFrontend.py
|
musiclvme/distant_speech_recognition
|
60f867383488ac45c2fa3a5433736fdf00dd4f1d
|
[
"MIT"
] | 68
|
2019-01-08T06:33:30.000Z
|
2021-11-17T09:33:10.000Z
|
#
# Beamforming Toolkit
# (btk)
#
# Module: btk.cepstralFrontend
# Purpose: Cepstral feature extraction.
# Author: Dominik Raub and Matthias Woelfel
from numpy import *
from MLab import *
from FFT import *
from LinearAlgebra import *
from btk.sound import *
from btk.portnoff import *
from btk.subbandBeamforming import *
from __future__ import generators
# filterbanks as found in Janus
melMatrix = array((
[0,0.0085315,0.015373,0.007508,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0.00074718,0.0079985,0.014617,0.0079314,0.0012462,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0.00028003,0.0064433,0.012606,0.0091326,0.0034506,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0.0042619,0.0095003,0.010785,0.0059554,0.001126,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0.0017485,0.0062009,0.010653,0.0085436,0.0044389,0.00033412,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0.002902,0.0066863,0.010471,0.0075737,0.0040849,0.00059607,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0029546,0.0061711,0.0093875,0.0074784,0.004513,0.0015477,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0022667,0.0050005,0.0077344,0.0079563,0.005436,0.0029156,0.00039522,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011108,0.0034344,0.005758,0.0080816,0.0066397,0.0044975,0.0023553,0.00021316,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0016652,0.0036402,0.0056151,0.0075901,0.006147,0.0043263,0.0025055,0.00068476,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0015116,0.0031902,0.0048687,0.0065473,0.0062131,0.0046656,0.003118,0.0015705,2.2965e-005,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00089014,0.0023169,0.0037436,0.0051703,0.006597,0.0053224,0.004007,0.0026917,0.0013764,6.107e-005,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0011946,0.0024073,0.0036199,0.0048325,0.0060452,0.0050354,0.0039175,0.0027995,0.0016815,0.00056358,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00098282,0.0020135,0.0030442,0.0040748,0.0051055,0.0051539,0.0042037,0.0032535,0.0023033,0.0013531,0.00040288,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0004344,0.0013104,0.0021864,0.0030625,0.0039385,0.0048145,0.0047207,0.0039131,0.0031055,0.0022979,0.0014902,0.00068261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00042888,0.0011734,0.001918,0.0026626,0.0034072,0.0041517,0.0046748,0.0039883,0.0033019,0.0026155,0.001929,0.0012426,0.00055615,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.7963e-005,0.00073081,0.0013637,0.0019965,0.0026293,0.0032622,0.003895,0.004297,0.0037136,0.0031302,0.0025467,0.0019633,0.0013798,0.00079641,0.00021297,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00010209,0.00063998,0.0011779,0.0017158,0.0022536,0.0027915,0.0033294,0.0038673,0.0037488,0.0032529,0.002757,0.0022611,0.0017652,0.0012693,0.00077341,0.00027752,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0002903,0.00074747,0.0012046,0.0016618,0.002119,0.0025762,0.0030333,0.0034905,0.0035608,0.0031393,0.0027178,0.0022963,0.0018748,0.0014534,0.0010319,0.0006104,0.00018892,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00017112,0.00055969,0.00094826,0.0013368,0.0017254,0.002114,0.0025026,0.0028911,0.0032797,0.0032562,0.002898,0.0025397,0.0021815,0.0018233,0.001465,0.0011068,0.00074855,0.00039031,3.2073e-005,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00018224,0.0005125,0.00084277,0.001173,0.0015033,0.0018336,0.0021638,0.0024941,0.0028244,0.0031547,0.002907,0.0026025,0.002298,0.0019936,0.0016891,0.0013846,0.0010801,0.00077562,0.00047114,0.00016665,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00025558,0.00053629,0.000817,0.0010977,0.0013784,0.0016591,0.0019398,0.0022206,0.0025013,0.002782,0.0028185,0.0025597,0.0023009,0.0020421,0.0017833,0.0015245,0.0012657,0.0010069,0.00074811,0.00048931,0.00023052,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.000108,0.00034659,0.00058518,0.00082377,0.0010624,0.001301,0.0015395,0.0017781,0.0020167,0.0022553,0.0024939,0.0026824,0.0024624,0.0022425,0.0020225,0.0018025,0.0015826,0.0013626,0.0011427,0.00092269,0.00070273,0.00048277,0.00026281,4.2848e-005,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.2158e-005,0.00022495,0.00042773,0.00063052,0.00083331,0.0010361,0.0012389,0.0014417,0.0016445,0.0018472,0.00205,0.0022528,0.0024556,0.0023446,0.0021576,0.0019707,0.0017837,0.0015968,0.0014098,0.0012228,0.0010359,0.00084894,0.00066198,0.00047502,0.00028807,0.00010111,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00013878,0.00031114,0.0004835,0.00065586,0.00082822,0.0010006,0.0011729,0.0013453,0.0015177,0.00169,0.0018624,0.0020347,0.0022071,0.0022274,0.0020685,0.0019095,0.0017506,0.0015917,0.0014328,0.0012739,0.001115,0.00095613,0.00079723,0.00063832,0.00047942,0.00032052,0.00016161,2.7113e-006,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.7265e-005,0.00021376,0.00036026,0.00050676,0.00065325,0.00079975,0.00094625,0.0010927,0.0012392,0.0013857,0.0015322,0.0016787,0.0018252,0.0019717,0.0021182,0.001988,0.0018529,0.0017178,0.0015828,0.0014477,0.0013127,0.0011776,0.0010426,0.00090749,0.00077243,0.00063737,0.00050231,0.00036725,0.00023219,9.7135e-005,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00012239,0.00024691,0.00037142,0.00049593,0.00062045,0.00074496,0.00086948,0.00099399,0.0011185,0.001243,0.0013675,0.0014921,0.0016166,0.0017411,0.0018656,0.0019229,0.0018081,0.0016933,0.0015785,0.0014637,0.001349,0.0012342,0.0011194,0.0010046,0.00088978,0.00077499,0.00066019,0.0005454,0.00043061,0.00031581,0.00020102,8.6227e-005,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.9717e-005,0.00013555,0.00024138,0.00034721,0.00045304,0.00055887,0.0006647,0.00077053,0.00087636,0.00098219,0.001088,0.0011939,0.0012997,0.0014055,0.0015113,0.0016172,0.001723,0.0017782,0.0016807,0.0015831,0.0014855,0.0013879,0.0012904,0.0011928,0.0010952,0.00099768,0.00090011,0.00080254,0.00070497,0.0006074,0.00050984,0.00041227,0.0003147,0.00021713,0.00011956,2.1997e-005,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.2384e-005,0.00011233,0.00020228,0.00029224,0.00038219,0.00047214,0.00056209,0.00065204,0.00074199,0.00083194,0.00092189,0.0010118,0.0011018,0.0011917,0.0012817,0.0013716,0.0014616,0.0015515,0.0016415,0.0015975,0.0015146,0.0014317,0.0013488,0.0012658,0.0011829,0.0011,0.001017,0.00093412,0.00085119,0.00076826,0.00068533,0.00060241,0.00051948,0.00043655,0.00035362,0.0002707,0.00018777,0.00010484,2.1911e-005,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.9217e-005,0.00013567,0.00021212,0.00028858,0.00036503,0.00044148,0.00051794,0.00059439,0.00067084,0.0007473,0.00082375,0.00090021,0.00097666,0.0010531,0.0011296,0.001206,0.0012825,0.0013589,0.0014354,0.0015118,0.0014802,0.0014097,0.0013392,0.0012687,0.0011982,0.0011278,0.0010573,0.00098678,0.0009163,0.00084581,0.00077533,0.00070484,0.00063436,0.00056387,0.00049339,0.00042291,0.00035242,0.00028194,0.00021145,0.00014097,7.0482e-005,0]
))
# filterbanks as calculated by Matthias Woelfel
# melMatrix = array((
# [0.11489,0.23388,0.33197,0.21707,0.10218,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0.0076742,0.11511,0.22255,0.31427,0.21067,0.11474,0.014977,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0.0036402,0.10193,0.19293,0.28758,0.22534,0.13798,0.050611,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0.071767,0.15379,0.23581,0.25256,0.17396,0.095359,0.016757,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0.028882,0.10269,0.1765,0.25031,0.21149,0.1441,0.076708,0.0093168,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.051576,0.11529,0.179,0.24271,0.19387,0.1332,0.072519,0.011842,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.054191,0.11123,0.16828,0.22532,0.19082,0.13663,0.082436,0.031098,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.042836,0.093704,0.14457,0.19276,0.20054,0.15234,0.10415,0.058641,0.01045,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.020074,0.065242,0.11041,0.15307,0.19824,0.17541,0.13275,0.090092,0.047434,0.0072851,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.030623,0.070669,0.11071,0.15076,0.18845,0.16467,0.12698,0.089286,0.051596,0.016261,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.028926,0.064528,0.10013,0.13573,0.16911,0.16667,0.13329,0.099914,0.066537,0.033161,0.0020098,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.016687,0.047975,0.079263,0.11055,0.14184,0.17104,0.14581,0.11452,0.085318,0.058202,0.029,-0.00020186,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.025469,0.054857,0.082285,0.10775,0.13518,0.16261,0.13695,0.10952,0.084055,0.058586,0.033116,0.0096063,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024104,0.050063,0.074167,0.098271,0.12238,0.14463,0.13888,0.11478,0.092529,0.068425,0.046175,0.023925,0.0016747,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013833,0.036312,0.057062,0.07954,0.10029,0.12104,0.14179,0.12433,0.10358,0.08456,0.063811,0.04479,0.024041,0.0050201,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.018142,0.037934,0.056076,0.075867,0.094009,0.1138,0.13194,0.12189,0.10375,0.085603,0.067461,0.049319,0.031177,0.013035,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013863,0.030806,0.047749,0.064692,0.081636,0.098579,0.11552,0.12461,0.10767,0.092269,0.075325,0.059922,0.04452,0.029117,0.013714,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0028813,0.018729,0.033135,0.048982,0.063389,0.077795,0.092202,0.10661,0.11799,0.10359,0.089181,0.074775,0.061809,0.047402,0.034436,0.02003,0.0070639,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0013603,0.014963,0.028566,0.042169,0.054412,0.068015,0.080258,0.093861,0.1061,0.10597,0.093729,0.081487,0.069244,0.057001,0.043398,0.031155,0.020273,0.0080302,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0063936,0.017902,0.029411,0.040919,0.052428,0.065215,0.076723,0.086953,0.098461,0.10217,0.090665,0.079157,0.068927,0.057419,0.047189,0.03568,0.025451,0.015221,0.0037124,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036056,0.014423,0.025239,0.034854,0.045671,0.055286,0.066103,0.075718,0.085333,0.09615,0.09363,0.084015,0.0744,0.063583,0.053968,0.044353,0.034738,0.025123,0.01671,0.0070949,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0056374,0.014657,0.023677,0.033824,0.042844,0.051864,0.060884,0.069904,0.077796,0.086816,0.091217,0.082197,0.074304,0.065285,0.056265,0.048372,0.039353,0.03146,0.02244,0.014548,0.0066558,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.002128,0.01064,0.018088,0.0266,0.035112,0.04256,0.051073,0.058521,0.067033,0.074481,0.081929,0.086082,0.078634,0.071186,0.062674,0.055226,0.047778,0.04033,0.032881,0.025433,0.017985,0.010537,0.0030891,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0019983,0.0089925,0.015987,0.02398,0.030974,0.037968,0.044962,0.051956,0.058951,0.065945,0.072939,0.079933,0.078837,0.071843,0.065848,0.058854,0.05186,0.044866,0.038871,0.031876,0.025882,0.018887,0.011893,0.0058983,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0037501,0.010313,0.015938,0.0225,0.029063,0.035626,0.041251,0.047813,0.053439,0.060001,0.066564,0.072189,0.076786,0.071161,0.065536,0.058973,0.053348,0.047723,0.04116,0.035535,0.02991,0.024285,0.017722,0.012097,0.0064719,0.00084679,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.00089474,0.0062632,0.011632,0.017895,0.023263,0.028632,0.034895,0.040263,0.045632,0.051,0.057263,0.062632,0.068,0.073369,0.068808,0.06344,0.058071,0.052703,0.047335,0.041966,0.036598,0.031229,0.025861,0.020492,0.016019,0.01065,0.0052818,-8.6588e-005,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0049853,0.0099705,0.014956,0.019941,0.024926,0.029912,0.034897,0.039882,0.044867,0.049853,0.054007,0.058992,0.063977,0.068963,0.064728,0.059743,0.054757,0.050603,0.045618,0.041463,0.036478,0.031493,0.027339,0.022353,0.018199,0.013214,0.0090592,0.0049049,-8.0407e-005,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0039079,0.0085974,0.013287,0.017195,0.021884,0.025792,0.030482,0.035171,0.039079,0.043769,0.047677,0.052366,0.056274,0.060182,0.064872,0.060888,0.056198,0.052291,0.048383,0.044475,0.039785,0.035877,0.031969,0.028061,0.023372,0.019464,0.015556,0.011648,0.0077402,0.0038323,-7.5637e-005,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0036672,0.0080678,0.011735,0.015402,0.019069,0.02347,0.027137,0.030804,0.034471,0.038872,0.042539,0.046206,0.049873,0.053541,0.057208,0.060875,0.057137,0.05347,0.049802,0.046135,0.042468,0.038801,0.035134,0.031467,0.027799,0.024132,0.020465,0.016798,0.013864,0.010197,0.0065299,0.0028628,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
# [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.0034598,0.0069196,0.010379,0.013839,0.017299,0.020759,0.024219,0.027679,0.031138,0.034598,0.038058,0.041518,0.044286,0.047746,0.051205,0.054665,0.056674,0.053906,0.050446,0.046987,0.044219,0.040759,0.037299,0.034531,0.031071,0.027612,0.024844,0.021384,0.018616,0.015156,0.011696,0.0089286,0.0054687,0.0027009,-6.6964e-005]
# ))
#
# 13x30 DCT basis matrix: entry [k][n] = cos(pi*k*(2*n+1)/60), i.e. the
# type-II DCT cosines for N = 30 inputs and 13 output coefficients.
# Used by CepstralFeatures to project 30 log-Mel filter energies onto
# 13 cepstral coefficients.  (Values verified against the closed form
# for the first rows: row 1 starts at cos(pi/60) = 0.9986295, row 2 at
# cos(2*pi/60) = 0.9945219.)
dctMatrix = array((
[1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000,1.000000e+000],
[9.986295e-001,9.876884e-001,9.659258e-001,9.335804e-001,8.910065e-001,8.386706e-001,7.771460e-001,7.071068e-001,6.293204e-001,5.446391e-001,4.539905e-001,3.583679e-001,2.588190e-001,1.564345e-001,5.233596e-002,-5.233596e-002,-1.564345e-001,-2.588190e-001,-3.583679e-001,-4.539905e-001,-5.446391e-001,-6.293204e-001,-7.071068e-001,-7.771460e-001,-8.386706e-001,-8.910065e-001,-9.335804e-001,-9.659258e-001,-9.876884e-001,-9.986295e-001],
[9.945219e-001,9.510565e-001,8.660254e-001,7.431448e-001,5.877852e-001,4.067366e-001,2.079117e-001,6.123032e-017,-2.079117e-001,-4.067366e-001,-5.877852e-001,-7.431448e-001,-8.660254e-001,-9.510565e-001,-9.945219e-001,-9.945219e-001,-9.510565e-001,-8.660254e-001,-7.431448e-001,-5.877852e-001,-4.067366e-001,-2.079117e-001,-1.836910e-016,2.079117e-001,4.067366e-001,5.877852e-001,7.431448e-001,8.660254e-001,9.510565e-001,9.945219e-001],
[9.876884e-001,8.910065e-001,7.071068e-001,4.539905e-001,1.564345e-001,-1.564345e-001,-4.539905e-001,-7.071068e-001,-8.910065e-001,-9.876884e-001,-9.876884e-001,-8.910065e-001,-7.071068e-001,-4.539905e-001,-1.564345e-001,1.564345e-001,4.539905e-001,7.071068e-001,8.910065e-001,9.876884e-001,9.876884e-001,8.910065e-001,7.071068e-001,4.539905e-001,1.564345e-001,-1.564345e-001,-4.539905e-001,-7.071068e-001,-8.910065e-001,-9.876884e-001],
[9.781476e-001,8.090170e-001,5.000000e-001,1.045285e-001,-3.090170e-001,-6.691306e-001,-9.135454e-001,-1.000000e+000,-9.135454e-001,-6.691306e-001,-3.090170e-001,1.045285e-001,5.000000e-001,8.090170e-001,9.781476e-001,9.781476e-001,8.090170e-001,5.000000e-001,1.045285e-001,-3.090170e-001,-6.691306e-001,-9.135454e-001,-1.000000e+000,-9.135454e-001,-6.691306e-001,-3.090170e-001,1.045285e-001,5.000000e-001,8.090170e-001,9.781476e-001],
[9.659258e-001,7.071068e-001,2.588190e-001,-2.588190e-001,-7.071068e-001,-9.659258e-001,-9.659258e-001,-7.071068e-001,-2.588190e-001,2.588190e-001,7.071068e-001,9.659258e-001,9.659258e-001,7.071068e-001,2.588190e-001,-2.588190e-001,-7.071068e-001,-9.659258e-001,-9.659258e-001,-7.071068e-001,-2.588190e-001,2.588190e-001,7.071068e-001,9.659258e-001,9.659258e-001,7.071068e-001,2.588190e-001,-2.588190e-001,-7.071068e-001,-9.659258e-001],
[9.510565e-001,5.877852e-001,6.123032e-017,-5.877852e-001,-9.510565e-001,-9.510565e-001,-5.877852e-001,-1.836910e-016,5.877852e-001,9.510565e-001,9.510565e-001,5.877852e-001,3.061516e-016,-5.877852e-001,-9.510565e-001,-9.510565e-001,-5.877852e-001,-4.286122e-016,5.877852e-001,9.510565e-001,9.510565e-001,5.877852e-001,5.510729e-016,-5.877852e-001,-9.510565e-001,-9.510565e-001,-5.877852e-001,-2.449890e-015,5.877852e-001,9.510565e-001],
[9.335804e-001,4.539905e-001,-2.588190e-001,-8.386706e-001,-9.876884e-001,-6.293204e-001,5.233596e-002,7.071068e-001,9.986295e-001,7.771460e-001,1.564345e-001,-5.446391e-001,-9.659258e-001,-8.910065e-001,-3.583679e-001,3.583679e-001,8.910065e-001,9.659258e-001,5.446391e-001,-1.564345e-001,-7.771460e-001,-9.986295e-001,-7.071068e-001,-5.233596e-002,6.293204e-001,9.876884e-001,8.386706e-001,2.588190e-001,-4.539905e-001,-9.335804e-001],
[9.135454e-001,3.090170e-001,-5.000000e-001,-9.781476e-001,-8.090170e-001,-1.045285e-001,6.691306e-001,1.000000e+000,6.691306e-001,-1.045285e-001,-8.090170e-001,-9.781476e-001,-5.000000e-001,3.090170e-001,9.135454e-001,9.135454e-001,3.090170e-001,-5.000000e-001,-9.781476e-001,-8.090170e-001,-1.045285e-001,6.691306e-001,1.000000e+000,6.691306e-001,-1.045285e-001,-8.090170e-001,-9.781476e-001,-5.000000e-001,3.090170e-001,9.135454e-001],
[8.910065e-001,1.564345e-001,-7.071068e-001,-9.876884e-001,-4.539905e-001,4.539905e-001,9.876884e-001,7.071068e-001,-1.564345e-001,-8.910065e-001,-8.910065e-001,-1.564345e-001,7.071068e-001,9.876884e-001,4.539905e-001,-4.539905e-001,-9.876884e-001,-7.071068e-001,1.564345e-001,8.910065e-001,8.910065e-001,1.564345e-001,-7.071068e-001,-9.876884e-001,-4.539905e-001,4.539905e-001,9.876884e-001,7.071068e-001,-1.564345e-001,-8.910065e-001],
[8.660254e-001,6.123032e-017,-8.660254e-001,-8.660254e-001,-1.836910e-016,8.660254e-001,8.660254e-001,1.194330e-015,-8.660254e-001,-8.660254e-001,-4.286122e-016,8.660254e-001,8.660254e-001,2.327430e-015,-8.660254e-001,-8.660254e-001,-2.449890e-015,8.660254e-001,8.660254e-001,2.572351e-015,-8.660254e-001,-8.660254e-001,-2.694812e-015,8.660254e-001,8.660254e-001,2.817272e-015,-8.660254e-001,-8.660254e-001,-2.939733e-015,8.660254e-001],
[8.386706e-001,-1.564345e-001,-9.659258e-001,-6.293204e-001,4.539905e-001,9.986295e-001,3.583679e-001,-7.071068e-001,-9.335804e-001,-5.233596e-002,8.910065e-001,7.771460e-001,-2.588190e-001,-9.876884e-001,-5.446391e-001,5.446391e-001,9.876884e-001,2.588190e-001,-7.771460e-001,-8.910065e-001,5.233596e-002,9.335804e-001,7.071068e-001,-3.583679e-001,-9.986295e-001,-4.539905e-001,6.293204e-001,9.659258e-001,1.564345e-001,-8.386706e-001],
[8.090170e-001,-3.090170e-001,-1.000000e+000,-3.090170e-001,8.090170e-001,8.090170e-001,-3.090170e-001,-1.000000e+000,-3.090170e-001,8.090170e-001,8.090170e-001,-3.090170e-001,-1.000000e+000,-3.090170e-001,8.090170e-001,8.090170e-001,-3.090170e-001,-1.000000e+000,-3.090170e-001,8.090170e-001,8.090170e-001,-3.090170e-001,-1.000000e+000,-3.090170e-001,8.090170e-001,8.090170e-001,-3.090170e-001,-1.000000e+000,-3.090170e-001,8.090170e-001]
))
class FftFB(SpectralSource):
    """
    Class to apply Hamming window and return the FFT spectrum.

    Consumes raw sample blocks from a sound source (attached via nextUtt)
    and yields one complex FFT frame per window shift.
    """
    def __init__(self, fftLn = 512, nBlks = 8, sbSmpRt = 2, windowLen = 320, windowShift = 160):
        """Initialize the fft filter bank.

        fftLn       -- FFT length in samples
        nBlks       -- number of blocks (passed through to SpectralSource)
        sbSmpRt     -- sub-sampling rate (passed through to SpectralSource)
        windowLen   -- analysis window length in samples
        windowShift -- hop size between consecutive windows in samples
        """
        SpectralSource.__init__(self, fftLn, nBlks, sbSmpRt)
        self._windowLen = windowLen
        self._windowShift = windowShift
    def __iter__(self):
        """Yield successive Hamming-windowed FFT frames until the source is exhausted."""
        # next() builtin instead of the Python-2-only .next() method call:
        # identical behavior on Python 2.6+, and Python-3 compatible.
        sampleBlock = next(self._sampleIter)
        while True:
            # Accumulate raw samples until one full analysis window is
            # available; StopIteration from the source ends this generator.
            while len(sampleBlock) < self._windowLen:
                sampleBlock = concatenate((sampleBlock, next(self._sampleIter)))
            spectralSample = fft(sampleBlock[:self._windowLen] * hamming(self._windowLen), self._fftLen)
            # yield spectralSample[0:(self._fftLen2+1)]
            yield spectralSample
            # Advance by the hop size, keeping the overlapping tail.
            sampleBlock = sampleBlock[self._windowShift:]
    def nextUtt(self, soundSource):
        """Set up to process the next utterance."""
        self.__soundSource = soundSource
        self._sampleIter = self.__soundSource.__iter__()
class MelFB(SpectralSource):
    """
    Constructs power and applies Mel filters to spectrum.
    """
    def __init__(self, spectralSource):
        """Wrap *spectralSource* (e.g. an FftFB), inheriting its FFT geometry."""
        SpectralSource.__init__(self,
                                spectralSource._fftLen,
                                spectralSource._nBlocks,
                                spectralSource._subSampRate)
        self.__spectralSource = spectralSource
    def __iter__(self):
        """Return the next block of Mel output."""
        for nextSpecSample in self.__spectralSource:
            # Power spectrum of the non-redundant half (fftLen//2 + 1 bins)
            # projected onto the Mel filter bank.  Floor division keeps the
            # slice index an int: plain / would produce a float (and a
            # TypeError in the slice) under Python 3, while being identical
            # for ints under Python 2.
            yield matrixmultiply(melMatrix, abs(nextSpecSample[:(self._fftLen // 2 + 1)])**2)
class CepstralFeatures(SpectralSource):
    """
    Applies log to Mel filters output and calculates cepstral features
    (log10 of the Mel energies followed by a DCT projection).
    """
    def __init__(self, spectralSource):
        """Chain onto *spectralSource*, copying its FFT configuration."""
        SpectralSource.__init__(self,
                                spectralSource._fftLen,
                                spectralSource._nBlocks,
                                spectralSource._subSampRate)
        self.__source = spectralSource
    def __iter__(self):
        """Yield one vector of cepstral coefficients per upstream Mel frame."""
        for melFrame in self.__source:
            # The 1E-300 floor guards log10 against exact-zero energies.
            logMel = log10(melFrame + 1E-300)
            yield matrixmultiply(dctMatrix, logMel)
class PowerFeatures(SpectralSource):
    """
    Delivers the averaged power over segments of speech of designated
    length for use in Janus.
    """
    def __init__(self, fftLn = 512, windowLen = 320, windowShift = 160):
        """Initialize the power front end.

        fftLn       -- FFT length (passed through to SpectralSource)
        windowLen   -- analysis window length in samples
        windowShift -- hop size between consecutive windows in samples
        """
        SpectralSource.__init__(self, fftLn)
        self._windowLen = windowLen
        self._windowShift = windowShift
        # Scale factor: squared half-range of a 16-bit sample,
        # (2**16-1)**2 / 4, times the window length, so the yielded value
        # is a normalized average power per window.
        self._normalizer = (2.0**16-1)**2 / 4 * self._windowLen
    def __iter__(self):
        """Yield one normalized power value per window shift."""
        # next() builtin instead of the Python-2-only .next() method call:
        # identical behavior on Python 2.6+, and Python-3 compatible.
        sampleBlock = next(self._sampleIter)
        while True:
            # Gather enough raw samples for one full window; StopIteration
            # from the source terminates this generator.
            while len(sampleBlock) < self._windowLen:
                sampleBlock = concatenate((sampleBlock, next(self._sampleIter)))
            yield array(sum(sampleBlock[:self._windowLen].astype(float)**2)/self._normalizer)
            sampleBlock = sampleBlock[self._windowShift:]
    def nextUtt(self, soundSource):
        """Set up to process the next utterance."""
        self.__soundSource = soundSource
        self._sampleIter = self.__soundSource.__iter__()
class CepstralFrontend(SpectralSource):
    """
    first shot at a speech recognizer frontend in python:
    FFT -> Mel filter bank -> log/DCT cepstra, chained as iterators.
    """
    def __init__(self, fftLn = 512, windowLen = 320, windowShift = 160):
        """Initialize the cascaded FFT/Mel/cepstral pipeline.

        fftLn       -- FFT length in samples
        windowLen   -- analysis window length in samples
        windowShift -- hop size between consecutive windows in samples
        """
        SpectralSource.__init__(self, fftLn)
        self._windowLen = windowLen
        self._windowShift = windowShift
        # Forward the constructor arguments to the FFT stage.  Previously
        # FftFB() was built with its own defaults, silently ignoring any
        # non-default fftLn/windowLen/windowShift passed to this frontend.
        # Defaults are identical, so existing default-argument callers see
        # no change.
        self.__fftFB = FftFB(fftLn, windowLen = windowLen, windowShift = windowShift)
        self.__melFB = MelFB(self.__fftFB)
        self.__cepstralFeatures = CepstralFeatures(self.__melFB)
    def __iter__(self):
        """Yield cepstral feature vectors for the currently attached file."""
        for nextSpecSample in self.__cepstralFeatures:
            yield nextSpecSample
    def attachFile(self, filename):
        """Set up to process the next utterance."""
        self.__soundSource = OffsetCorrectedFileSoundSource(filename, blkLen = self._windowLen, lastBlk = "unmodified")
        self.__fftFB.nextUtt(self.__soundSource)
class PowerFrontend(SpectralSource):
    """
    first shot at a speech recognizer frontend in python:
    wraps PowerFeatures behind a file-attachment interface.
    """
    def __init__(self, fftLn = 512, windowLen = 320, windowShift = 160):
        """Initialize the power pipeline.

        fftLn       -- FFT length in samples
        windowLen   -- analysis window length in samples
        windowShift -- hop size between consecutive windows in samples
        """
        SpectralSource.__init__(self, fftLn)
        self._windowLen = windowLen
        self._windowShift = windowShift
        # Forward the constructor arguments.  Previously PowerFeatures()
        # was built with its own defaults, silently ignoring any
        # non-default values passed to this frontend.  Defaults are
        # identical, so existing default-argument callers see no change.
        self.__powerFeatures = PowerFeatures(fftLn, windowLen, windowShift)
    def __iter__(self):
        """Yield averaged power values for the currently attached file."""
        for powerSample in self.__powerFeatures:
            yield powerSample
    def attachFile(self, filename):
        """Set up to process the next utterance."""
        self.__soundSource = OffsetCorrectedFileSoundSource(filename, blkLen = self._windowLen, lastBlk = "unmodified")
        self.__powerFeatures.nextUtt(self.__soundSource)
| 206.359504
| 869
| 0.597229
| 18,147
| 49,939
| 1.63454
| 0.064308
| 0.973097
| 1.453678
| 1.930416
| 0.685254
| 0.671533
| 0.669914
| 0.66506
| 0.660677
| 0.660677
| 0
| 0.533475
| 0.03244
| 49,939
| 241
| 870
| 207.215768
| 0.080403
| 0.402571
| 0
| 0.43609
| 0
| 0
| 0.000675
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.120301
| false
| 0
| 0.06015
| 0
| 0.225564
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9544ce53f69bfc734157aef711adb10ed37aee37
| 104,219
|
py
|
Python
|
Uncertainty/data/case-de/case_de_94.py
|
thanever/SOC
|
9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4
|
[
"MIT"
] | null | null | null |
Uncertainty/data/case-de/case_de_94.py
|
thanever/SOC
|
9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4
|
[
"MIT"
] | null | null | null |
Uncertainty/data/case-de/case_de_94.py
|
thanever/SOC
|
9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4
|
[
"MIT"
] | null | null | null |
from numpy import array
def case_de_94():
ppc = {"version": '2'}
ppc["baseMVA"] = 100.0
ppc["bus"] = array([
[75, 2, 106.89, 21.38, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[502, 2, 236.46, 47.29, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[44, 2, 145.73, 29.15, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[492, 2, 80.45, 16.09, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[180, 2, 46.67, 9.33, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[46, 1, 0, 0, 0, 0, 5, -9459348257962544.0,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[21, 2, 936.79, 187.36, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[33, 2, 192.86, 38.57, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[559, 2, 71.36, 14.27, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[18, 1, 0, 0, 0, 0, 5, -298671374221461.94,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[73, 2, 85.77, 17.15, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[503, 2, 72.42, 14.48, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[511, 2, 106.03, 21.21, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[72, 2, 267.9, 53.58, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[288, 2, 62.6, 12.52, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[339, 2, 156.36, 31.27, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[321, 2, 201.64, 40.33, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[8, 2, 154.9, 30.98, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[292, 2, 127.74, 25.55, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[343, 2, 113.74, 22.75, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[12, 1, 0, 0, 0, 0, 5, -1.1751688476898112e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[340, 2, 132.2, 26.44, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[177, 2, 27.21, 5.44, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[497, 2, 988.05, 197.61, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[102, 2, 143.33, 28.67, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[311, 2, 197.02, 39.4, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[429, 2, 337.24, 67.45, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[32, 1, 0, 0, 0, 0, 5, -5.1342979932743795e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[22, 1, 0, 0, 0, 0, 5, -5671809470184.078,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[101, 2, 74.05, 14.81, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[71, 2, 163.57, 32.71, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[558, 2, 133.34, 26.67, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[217, 2, 40.35, 8.07, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[322, 2, 25.67, 5.13, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[278, 2, 149.16, 29.83, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[498, 2, 46.34, 9.27, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[47, 2, 336.36, 67.27, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[346, 2, 309.56, 61.91, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[74, 1, 0, 0, 0, 0, 5, -2.976617981417387e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[557, 2, 226.13, 45.23, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[42, 1, 0, 0, 0, 0, 5, -4.5328966152673344e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[39, 3, 66.16, 13.23, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[45, 2, 77.36, 15.47, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[493, 2, 103.68, 20.74, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[98, 2, 104.58, 20.92, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[435, 2, 149.4, 29.88, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[338, 2, 252.82, 50.56, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[79, 2, 103.19, 20.64, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[78, 1, 0, 0, 0, 0, 5, -1.341969681764088e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[512, 2, 70.04, 14.01, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[276, 2, 191.07, 38.21, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[569, 2, 185.04, 37.01, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[37, 1, 0, 0, 0, 0, 5, -1.2587669044400394e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[179, 2, 53.09, 10.62, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[325, 2, 153.79, 30.76, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[20, 1, 0, 0, 0, 0, 5, -3.94166170764817e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[496, 2, 7.9, 1.58, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[279, 1, 0, 0, 0, 0, 5, -4.5750202643227136e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[436, 2, 79.76, 15.95, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[345, 2, 311.82, 62.36, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[505, 2, 336.36, 67.27, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[290, 1, 0, 0, 0, 0, 5, -2.6647078849768803e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[363, 2, 315.54, 63.11, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[11, 2, 91.79, 18.36, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[277, 1, 0, 0, 0, 0, 5, -3.3503478391818394e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[441, 2, 58.81, 11.76, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[280, 1, 0, 0, 0, 0, 5, -4449647309032538.0,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[504, 2, 47.42, 9.48, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[181, 2, 35.23, 7.05, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[291, 2, 64.79, 12.96, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[344, 2, 285.17, 57.03, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[40, 2, 69.11, 13.82, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[80, 2, 109.6, 21.92, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[183, 2, 477.66, 95.53, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[440, 2, 76.71, 15.34, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[43, 2, 113.91, 22.78, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[10, 1, 0, 0, 0, 0, 5, -1.5982281593127258e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[81, 2, 123.73, 24.75, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[560, 2, 111.49, 22.3, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[341, 2, 119.51, 23.9, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[17, 2, 88.18, 17.64, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[41, 2, 74.28, 14.86, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[192, 2, 55.97, 11.19, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[342, 2, 207.32, 41.46, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[218, 2, 122.92, 24.58, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[65, 2, 5.47, 1.09, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[289, 2, 98.46, 19.69, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[324, 2, 472.12, 94.42, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[48, 2, 231.2, 46.24, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[332, 1, 0, 0, 0, 0, 0, -4.878789524793977e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[422, 2, 76.97, 15.39, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[23, 2, 122.66, 24.53, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[570, 2, 288.89, 57.78, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[38, 2, 202.06, 40.41, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[31, 2, 153.82, 30.76, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[49, 2, 58.48, 11.7, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[182, 2, 1.6, 0.32, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[9, 2, 104.76, 20.95, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[323, 2, 2.67, 0.53, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[400, 2, 55.99, 11.2, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[30, 1, 0, 0, 0, 0, 0, -2.30483664454792e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[25, 2, 58.67, 11.73, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ]
])
ppc["gen"] = array([
[102, 0, 0, 33.95, -8.49, 1.0, 100, 1, 67.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 101.85, 13.58, 20.37, 20.37, 27.16 ],
[493, 0, 0, 75.0, -18.75, 1.0, 100, 1, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 225.0, 30.0, 45.0, 45.0, 60.0 ],
[493, 0, 0, 15.0, -3.75, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 45.0, 6.0, 9.0, 9.0, 12.0 ],
[177, 0, 0, 16.35, -4.09, 1.0, 100, 1, 32.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 49.05, 6.54, 9.81, 9.81, 13.08 ],
[180, 0, 0, 12.7, -3.18, 1.0, 100, 1, 25.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 38.1, 5.08, 7.62, 7.62, 10.16 ],
[180, 0, 0, 166.75, -41.69, 1.0, 100, 1, 333.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 500.25, 66.7, 100.05, 100.05, 133.4 ],
[180, 0, 0, 14.45, -3.61, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 43.35, 5.78, 8.67, 8.67, 11.56 ],
[183, 0, 0, 11.25, -2.81, 1.0, 100, 1, 22.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 33.75, 4.5, 6.75, 6.75, 9.0 ],
[183, 0, 0, 383.0, -95.75, 1.0, 100, 1, 766.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1149.0, 153.2, 229.8, 229.8, 306.4 ],
[183, 0, 0, 19.0, -4.75, 1.0, 100, 1, 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 57.0, 7.6, 11.4, 11.4, 15.2 ],
[183, 0, 0, 12.0, -3.0, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 36.0, 4.8, 7.2, 7.2, 9.6 ],
[496, 0, 0, 26.4, -6.6, 1.0, 100, 1, 52.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 79.2, 10.56, 15.84, 15.84, 21.12 ],
[21, 0, 0, 63.5, -15.88, 1.0, 100, 1, 127.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 190.5, 25.4, 38.1, 38.1, 50.8 ],
[21, 0, 0, 97.0, -24.25, 1.0, 100, 1, 194.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 291.0, 38.8, 58.2, 58.2, 77.6 ],
[21, 0, 0, 8.2, -2.05, 1.0, 100, 1, 16.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 24.6, 3.28, 4.92, 4.92, 6.56 ],
[217, 0, 0, 54.0, -13.5, 1.0, 100, 1, 108.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 162.0, 21.6, 32.4, 32.4, 43.2 ],
[217, 0, 0, 254.0, -63.5, 1.0, 100, 1, 508.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 762.0, 101.6, 152.4, 152.4, 203.2 ],
[217, 0, 0, 8.5, -2.12, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 25.5, 3.4, 5.1, 5.1, 6.8 ],
[498, 0, 0, 149.25, -37.31, 1.0, 100, 1, 298.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 447.75, 59.7, 89.55, 89.55, 119.4 ],
[557, 0, 0, 45.4, -11.35, 1.0, 100, 1, 90.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 136.2, 18.16, 27.24, 27.24, 36.32 ],
[558, 0, 0, 37.0, -9.25, 1.0, 100, 1, 74.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 111.0, 14.8, 22.2, 22.2, 29.6 ],
[559, 0, 0, 8.5, -2.12, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 25.5, 3.4, 5.1, 5.1, 6.8 ],
[288, 0, 0, 7.85, -1.96, 1.0, 100, 1, 15.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 23.55, 3.14, 4.71, 4.71, 6.28 ],
[289, 0, 0, 552.5, -138.12, 1.0, 100, 1, 1105.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1657.5, 221.0, 331.5, 331.5, 442.0 ],
[560, 0, 0, 10.15, -2.54, 1.0, 100, 1, 20.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 30.45, 4.06, 6.09, 6.09, 8.12 ],
[560, 0, 0, 108.0, -27.0, 1.0, 100, 1, 216.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 324.0, 43.2, 64.8, 64.8, 86.4 ],
[560, 0, 0, 29.5, -7.38, 1.0, 100, 1, 59.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 88.5, 11.8, 17.7, 17.7, 23.6 ],
[292, 0, 0, 6.75, -1.69, 1.0, 100, 1, 13.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 20.25, 2.7, 4.05, 4.05, 5.4 ],
[292, 0, 0, 5.6, -1.4, 1.0, 100, 1, 11.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 16.8, 2.24, 3.36, 3.36, 4.48 ],
[31, 0, 0, 97.7, -24.42, 1.0, 100, 1, 195.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 293.1, 39.08, 58.62, 58.62, 78.16 ],
[311, 0, 0, 437.5, -109.38, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1312.5, 175.0, 262.5, 262.5, 350.0 ],
[321, 0, 0, 6.45, -1.61, 1.0, 100, 1, 12.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 19.35, 2.58, 3.87, 3.87, 5.16 ],
[324, 0, 0, 159.9, -39.98, 1.0, 100, 1, 319.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 479.7, 63.96, 95.94, 95.94, 127.92 ],
[325, 0, 0, 13.45, -3.36, 1.0, 100, 1, 26.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 40.35, 5.38, 8.07, 8.07, 10.76 ],
[502, 0, 0, 54.55, -13.64, 1.0, 100, 1, 109.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 163.65, 21.82, 32.73, 32.73, 43.64 ],
[33, 0, 0, 15.9, -3.98, 1.0, 100, 1, 31.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 47.7, 6.36, 9.54, 9.54, 12.72 ],
[570, 0, 0, 26.4, -6.6, 1.0, 100, 1, 52.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 79.2, 10.56, 15.84, 15.84, 21.12 ],
[570, 0, 0, 44.5, -11.12, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 133.5, 17.8, 26.7, 26.7, 35.6 ],
[338, 0, 0, 149.8, -37.45, 1.0, 100, 1, 299.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 449.4, 59.92, 89.88, 89.88, 119.84 ],
[338, 0, 0, 41.25, -10.31, 1.0, 100, 1, 82.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 123.75, 16.5, 24.75, 24.75, 33.0 ],
[339, 0, 0, 67.0, -16.75, 1.0, 100, 1, 134.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 201.0, 26.8, 40.2, 40.2, 53.6 ],
[339, 0, 0, 79.5, -19.88, 1.0, 100, 1, 159.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 238.5, 31.8, 47.7, 47.7, 63.6 ],
[339, 0, 0, 55.5, -13.88, 1.0, 100, 1, 111.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 166.5, 22.2, 33.3, 33.3, 44.4 ],
[339, 0, 0, 21.35, -5.34, 1.0, 100, 1, 42.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 64.05, 8.54, 12.81, 12.81, 17.08 ],
[339, 0, 0, 29.0, -7.25, 1.0, 100, 1, 58.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 87.0, 11.6, 17.4, 17.4, 23.2 ],
[340, 0, 0, 9.75, -2.44, 1.0, 100, 1, 19.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 29.25, 3.9, 5.85, 5.85, 7.8 ],
[342, 0, 0, 34.98, -8.74, 1.0, 100, 1, 69.95, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 104.93, 13.99, 20.98, 20.98, 27.98 ],
[345, 0, 0, 105.5, -26.38, 1.0, 100, 1, 211.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 316.5, 42.2, 63.3, 63.3, 84.4 ],
[345, 0, 0, 44.5, -11.12, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 133.5, 17.8, 26.7, 26.7, 35.6 ],
[345, 0, 0, 163.5, -40.88, 1.0, 100, 1, 327.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 490.5, 65.4, 98.1, 98.1, 130.8 ],
[346, 0, 0, 229.45, -57.36, 1.0, 100, 1, 458.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 688.35, 91.78, 137.67, 137.67, 183.56 ],
[363, 0, 0, 40.9, -10.22, 1.0, 100, 1, 81.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 122.7, 16.36, 24.54, 24.54, 32.72 ],
[363, 0, 0, 344.0, -86.0, 1.0, 100, 1, 688.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1032.0, 137.6, 206.4, 206.4, 275.2 ],
[363, 0, 0, 18.0, -4.5, 1.0, 100, 1, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 54.0, 7.2, 10.8, 10.8, 14.4 ],
[503, 0, 0, 26.0, -6.5, 1.0, 100, 1, 52.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 78.0, 10.4, 15.6, 15.6, 20.8 ],
[503, 0, 0, 680.0, -170.0, 1.0, 100, 1, 1360.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 2040.0, 272.0, 408.0, 408.0, 544.0 ],
[503, 0, 0, 29.2, -7.3, 1.0, 100, 1, 58.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 87.6, 11.68, 17.52, 17.52, 23.36 ],
[39, 0, 0, 1149.65, -287.41, 1.0, 100, 1, 2299.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 3448.95, 459.86, 689.79, 689.79, 919.72 ],
[40, 0, 0, 24.0, -6.0, 1.0, 100, 1, 48.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 72.0, 9.6, 14.4, 14.4, 19.2 ],
[400, 0, 0, 44.0, -11.0, 1.0, 100, 1, 88.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 132.0, 17.6, 26.4, 26.4, 35.2 ],
[400, 0, 0, 30.0, -7.5, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 90.0, 12.0, 18.0, 18.0, 24.0 ],
[400, 0, 0, 79.0, -19.75, 1.0, 100, 1, 158.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 237.0, 31.6, 47.4, 47.4, 63.2 ],
[422, 0, 0, 37.0, -9.25, 1.0, 100, 1, 74.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 111.0, 14.8, 22.2, 22.2, 29.6 ],
[43, 0, 0, 98.0, -24.5, 1.0, 100, 1, 196.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 294.0, 39.2, 58.8, 58.8, 78.4 ],
[43, 0, 0, 8.5, -2.12, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 25.5, 3.4, 5.1, 5.1, 6.8 ],
[429, 0, 0, 82.0, -20.5, 1.0, 100, 1, 164.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 246.0, 32.8, 49.2, 49.2, 65.6 ],
[44, 0, 0, 13.0, -3.25, 1.0, 100, 1, 26.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 39.0, 5.2, 7.8, 7.8, 10.4 ],
[435, 0, 0, 91.0, -22.75, 1.0, 100, 1, 182.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 273.0, 36.4, 54.6, 54.6, 72.8 ],
[435, 0, 0, 30.75, -7.69, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 92.25, 12.3, 18.45, 18.45, 24.6 ],
[436, 0, 0, 13.25, -3.31, 1.0, 100, 1, 26.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 39.75, 5.3, 7.95, 7.95, 10.6 ],
[440, 0, 0, 7.35, -1.84, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 22.05, 2.94, 4.41, 4.41, 5.88 ],
[441, 0, 0, 37.5, -9.38, 1.0, 100, 1, 75.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 112.5, 15.0, 22.5, 22.5, 30.0 ],
[45, 0, 0, 148.0, -37.0, 1.0, 100, 1, 296.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 444.0, 59.2, 88.8, 88.8, 118.4 ],
[45, 0, 0, 11.55, -2.89, 1.0, 100, 1, 23.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 34.65, 4.62, 6.93, 6.93, 9.24 ],
[47, 0, 0, 222.0, -55.5, 1.0, 100, 1, 444.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 666.0, 88.8, 133.2, 133.2, 177.6 ],
[47, 0, 0, 15.85, -3.96, 1.0, 100, 1, 31.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 47.55, 6.34, 9.51, 9.51, 12.68 ],
[49, 0, 0, 176.0, -44.0, 1.0, 100, 1, 352.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 528.0, 70.4, 105.6, 105.6, 140.8 ],
[49, 0, 0, 33.0, -8.25, 1.0, 100, 1, 66.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 99.0, 13.2, 19.8, 19.8, 26.4 ],
[49, 0, 0, 18.75, -4.69, 1.0, 100, 1, 37.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 56.25, 7.5, 11.25, 11.25, 15.0 ],
[65, 0, 0, 82.25, -20.56, 1.0, 100, 1, 164.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 246.75, 32.9, 49.35, 49.35, 65.8 ],
[71, 0, 0, 24.5, -6.12, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 73.5, 9.8, 14.7, 14.7, 19.6 ],
[71, 0, 0, 80.55, -20.14, 1.0, 100, 1, 161.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 241.65, 32.22, 48.33, 48.33, 64.44 ],
[71, 0, 0, 4.95, -1.24, 1.0, 100, 1, 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 14.85, 1.98, 2.97, 2.97, 3.96 ],
[72, 0, 0, 450.0, -112.5, 1.0, 100, 1, 900.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1350.0, 180.0, 270.0, 270.0, 360.0 ],
[72, 0, 0, 75.5, -18.88, 1.0, 100, 1, 151.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 226.5, 30.2, 45.3, 45.3, 60.4 ],
[72, 0, 0, 60.0, -15.0, 1.0, 100, 1, 120.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 180.0, 24.0, 36.0, 36.0, 48.0 ],
[511, 0, 0, 61.0, -15.25, 1.0, 100, 1, 122.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 183.0, 24.4, 36.6, 36.6, 48.8 ],
[511, 0, 0, 11.65, -2.91, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 34.95, 4.66, 6.99, 6.99, 9.32 ],
[75, 0, 0, 24.5, -6.12, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 73.5, 9.8, 14.7, 14.7, 19.6 ],
[79, 0, 0, 375.0, -93.75, 1.0, 100, 1, 750.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1125.0, 150.0, 225.0, 225.0, 300.0 ],
[79, 0, 0, 9.35, -2.34, 1.0, 100, 1, 18.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 28.05, 3.74, 5.61, 5.61, 7.48 ],
[81, 0, 0, 1417.5, -354.38, 1.0, 100, 1, 2835.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 4252.5, 567.0, 850.5, 850.5, 1134.0 ],
[81, 0, 0, 62.25, -15.56, 1.0, 100, 1, 124.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 186.75, 24.9, 37.35, 37.35, 49.8 ],
[218, 0, 0, 21.63, -5.41, 1.0, 100, 1, 43.26, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 64.89, 8.65, 12.98, 12.98, 17.3 ],
[498, 0, 0, 723.6, -180.9, 1.0, 100, 1, 1447.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2170.8, 289.44, 434.16, 434.16, 578.88 ],
[8, 0, 0, 15.89, -3.97, 1.0, 100, 1, 31.79, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 47.68, 6.36, 9.54, 9.54, 12.72 ],
[9, 0, 0, 13.82, -3.46, 1.0, 100, 1, 27.64, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 41.46, 5.53, 8.29, 8.29, 11.06 ],
[11, 0, 0, 8.87, -2.22, 1.0, 100, 1, 17.74, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 26.6, 3.55, 5.32, 5.32, 7.09 ],
[17, 0, 0, 2.56, -0.64, 1.0, 100, 1, 5.11, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 7.67, 1.02, 1.53, 1.53, 2.04 ],
[21, 0, 0, 7.7, -1.93, 1.0, 100, 1, 15.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 23.1, 3.08, 4.62, 4.62, 6.16 ],
[23, 0, 0, 17.28, -4.32, 1.0, 100, 1, 34.56, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 51.84, 6.91, 10.37, 10.37, 13.82 ],
[25, 0, 0, 38.33, -9.58, 1.0, 100, 1, 76.65, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 114.98, 15.33, 23.0, 23.0, 30.66 ],
[31, 0, 0, 1.17, -0.29, 1.0, 100, 1, 2.34, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 3.51, 0.47, 0.7, 0.7, 0.94 ],
[33, 0, 0, 0.93, -0.23, 1.0, 100, 1, 1.86, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.8, 0.37, 0.56, 0.56, 0.75 ],
[38, 0, 0, 0.12, -0.03, 1.0, 100, 1, 0.24, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.37, 0.05, 0.07, 0.07, 0.1 ],
[39, 0, 0, 37.88, -9.47, 1.0, 100, 1, 75.76, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 113.63, 15.15, 22.73, 22.73, 30.3 ],
[40, 0, 0, 59.67, -14.92, 1.0, 100, 1, 119.33, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 179.0, 23.87, 35.8, 35.8, 47.73 ],
[41, 0, 0, 104.33, -26.08, 1.0, 100, 1, 208.65, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 312.98, 41.73, 62.6, 62.6, 83.46 ],
[43, 0, 0, 23.9, -5.98, 1.0, 100, 1, 47.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 71.7, 9.56, 14.34, 14.34, 19.12 ],
[44, 0, 0, 0.19, -0.05, 1.0, 100, 1, 0.39, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.58, 0.08, 0.12, 0.12, 0.15 ],
[45, 0, 0, 4.51, -1.13, 1.0, 100, 1, 9.02, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 13.53, 1.8, 2.71, 2.71, 3.61 ],
[47, 0, 0, 0.1, -0.02, 1.0, 100, 1, 0.19, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.29, 0.04, 0.06, 0.06, 0.08 ],
[48, 0, 0, 0.27, -0.07, 1.0, 100, 1, 0.55, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.82, 0.11, 0.16, 0.16, 0.22 ],
[49, 0, 0, 10.89, -2.72, 1.0, 100, 1, 21.77, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 32.66, 4.35, 6.53, 6.53, 8.71 ],
[65, 0, 0, 0.39, -0.1, 1.0, 100, 1, 0.77, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1.16, 0.15, 0.23, 0.23, 0.31 ],
[71, 0, 0, 15.23, -3.81, 1.0, 100, 1, 30.46, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 45.69, 6.09, 9.14, 9.14, 12.19 ],
[72, 0, 0, 11.54, -2.88, 1.0, 100, 1, 23.08, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 34.62, 4.62, 6.92, 6.92, 9.23 ],
[73, 0, 0, 18.69, -4.67, 1.0, 100, 1, 37.37, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 56.06, 7.47, 11.21, 11.21, 14.95 ],
[75, 0, 0, 12.85, -3.21, 1.0, 100, 1, 25.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 38.56, 5.14, 7.71, 7.71, 10.28 ],
[79, 0, 0, 3.95, -0.99, 1.0, 100, 1, 7.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 11.85, 1.58, 2.37, 2.37, 3.16 ],
[80, 0, 0, 1.47, -0.37, 1.0, 100, 1, 2.94, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 4.41, 0.59, 0.88, 0.88, 1.18 ],
[81, 0, 0, 10.73, -2.68, 1.0, 100, 1, 21.45, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 32.18, 4.29, 6.44, 6.44, 8.58 ],
[98, 0, 0, 18.13, -4.53, 1.0, 100, 1, 36.26, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 54.38, 7.25, 10.88, 10.88, 14.5 ],
[101, 0, 0, 52.67, -13.17, 1.0, 100, 1, 105.33, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 158.0, 21.07, 31.6, 31.6, 42.13 ],
[102, 0, 0, 46.68, -11.67, 1.0, 100, 1, 93.36, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 140.05, 18.67, 28.01, 28.01, 37.35 ],
[177, 0, 0, 49.06, -12.27, 1.0, 100, 1, 98.13, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 147.19, 19.63, 29.44, 29.44, 39.25 ],
[179, 0, 0, 46.28, -11.57, 1.0, 100, 1, 92.56, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 138.84, 18.51, 27.77, 27.77, 37.02 ],
[180, 0, 0, 19.62, -4.9, 1.0, 100, 1, 39.24, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 58.86, 7.85, 11.77, 11.77, 15.69 ],
[181, 0, 0, 34.61, -8.65, 1.0, 100, 1, 69.22, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 103.84, 13.84, 20.77, 20.77, 27.69 ],
[182, 0, 0, 4.69, -1.17, 1.0, 100, 1, 9.38, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 14.08, 1.88, 2.82, 2.82, 3.75 ],
[183, 0, 0, 15.52, -3.88, 1.0, 100, 1, 31.05, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 46.57, 6.21, 9.31, 9.31, 12.42 ],
[192, 0, 0, 34.14, -8.53, 1.0, 100, 1, 68.28, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 102.42, 13.66, 20.48, 20.48, 27.31 ],
[217, 0, 0, 46.52, -11.63, 1.0, 100, 1, 93.04, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 139.56, 18.61, 27.91, 27.91, 37.22 ],
[218, 0, 0, 22.91, -5.73, 1.0, 100, 1, 45.83, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 68.74, 9.17, 13.75, 13.75, 18.33 ],
[276, 0, 0, 16.51, -4.13, 1.0, 100, 1, 33.02, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 49.53, 6.6, 9.91, 9.91, 13.21 ],
[278, 0, 0, 10.85, -2.71, 1.0, 100, 1, 21.71, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 32.56, 4.34, 6.51, 6.51, 8.68 ],
[288, 0, 0, 3.16, -0.79, 1.0, 100, 1, 6.31, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 9.47, 1.26, 1.89, 1.89, 2.53 ],
[289, 0, 0, 0.91, -0.23, 1.0, 100, 1, 1.83, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.74, 0.37, 0.55, 0.55, 0.73 ],
[291, 0, 0, 0.33, -0.08, 1.0, 100, 1, 0.67, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1.0, 0.13, 0.2, 0.2, 0.27 ],
[292, 0, 0, 0.34, -0.09, 1.0, 100, 1, 0.68, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1.03, 0.14, 0.21, 0.21, 0.27 ],
[311, 0, 0, 0.85, -0.21, 1.0, 100, 1, 1.69, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.54, 0.34, 0.51, 0.51, 0.68 ],
[321, 0, 0, 1.52, -0.38, 1.0, 100, 1, 3.04, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 4.57, 0.61, 0.91, 0.91, 1.22 ],
[322, 0, 0, 3.51, -0.88, 1.0, 100, 1, 7.02, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 10.54, 1.4, 2.11, 2.11, 2.81 ],
[323, 0, 0, 0.69, -0.17, 1.0, 100, 1, 1.38, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.07, 0.28, 0.41, 0.41, 0.55 ],
[324, 0, 0, 2.59, -0.65, 1.0, 100, 1, 5.18, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 7.77, 1.04, 1.55, 1.55, 2.07 ],
[325, 0, 0, 7.33, -1.83, 1.0, 100, 1, 14.66, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 21.98, 2.93, 4.4, 4.4, 5.86 ],
[338, 0, 0, 2.24, -0.56, 1.0, 100, 1, 4.47, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 6.71, 0.89, 1.34, 1.34, 1.79 ],
[339, 0, 0, 3.04, -0.76, 1.0, 100, 1, 6.07, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 9.11, 1.21, 1.82, 1.82, 2.43 ],
[340, 0, 0, 0.52, -0.13, 1.0, 100, 1, 1.03, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1.55, 0.21, 0.31, 0.31, 0.41 ],
[341, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[342, 0, 0, 4.51, -1.13, 1.0, 100, 1, 9.02, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 13.53, 1.8, 2.71, 2.71, 3.61 ],
[343, 0, 0, 0.31, -0.08, 1.0, 100, 1, 0.63, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.94, 0.13, 0.19, 0.19, 0.25 ],
[344, 0, 0, 0.08, -0.02, 1.0, 100, 1, 0.15, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.23, 0.03, 0.05, 0.05, 0.06 ],
[345, 0, 0, 0.17, -0.04, 1.0, 100, 1, 0.34, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.51, 0.07, 0.1, 0.1, 0.14 ],
[346, 0, 0, 0.15, -0.04, 1.0, 100, 1, 0.31, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.46, 0.06, 0.09, 0.09, 0.12 ],
[363, 0, 0, 0.11, -0.03, 1.0, 100, 1, 0.21, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.32, 0.04, 0.06, 0.06, 0.08 ],
[400, 0, 0, 71.61, -17.9, 1.0, 100, 1, 143.23, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 214.84, 28.65, 42.97, 42.97, 57.29 ],
[422, 0, 0, 2.17, -0.54, 1.0, 100, 1, 4.35, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 6.52, 0.87, 1.3, 1.3, 1.74 ],
[429, 0, 0, 0.04, -0.01, 1.0, 100, 1, 0.07, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.11, 0.01, 0.02, 0.02, 0.03 ],
[435, 0, 0, 3.03, -0.76, 1.0, 100, 1, 6.06, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 9.1, 1.21, 1.82, 1.82, 2.43 ],
[436, 0, 0, 0.47, -0.12, 1.0, 100, 1, 0.93, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1.4, 0.19, 0.28, 0.28, 0.37 ],
[440, 0, 0, 36.64, -9.16, 1.0, 100, 1, 73.28, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 109.92, 14.66, 21.98, 21.98, 29.31 ],
[441, 0, 0, 30.72, -7.68, 1.0, 100, 1, 61.44, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 92.16, 12.29, 18.43, 18.43, 24.58 ],
[492, 0, 0, 25.24, -6.31, 1.0, 100, 1, 50.48, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 75.72, 10.1, 15.14, 15.14, 20.19 ],
[493, 0, 0, 11.41, -2.85, 1.0, 100, 1, 22.82, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 34.22, 4.56, 6.84, 6.84, 9.13 ],
[496, 0, 0, 46.79, -11.7, 1.0, 100, 1, 93.57, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 140.36, 18.71, 28.07, 28.07, 37.43 ],
[497, 0, 0, 2.79, -0.7, 1.0, 100, 1, 5.58, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 8.37, 1.12, 1.67, 1.67, 2.23 ],
[498, 0, 0, 563.02, -140.75, 1.0, 100, 1, 1126.04, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1689.05, 225.21, 337.81, 337.81, 450.41 ],
[502, 0, 0, 0.81, -0.2, 1.0, 100, 1, 1.62, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.43, 0.32, 0.49, 0.49, 0.65 ],
[503, 0, 0, 18.82, -4.71, 1.0, 100, 1, 37.64, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 56.46, 7.53, 11.29, 11.29, 15.06 ],
[504, 0, 0, 16.81, -4.2, 1.0, 100, 1, 33.61, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 50.42, 6.72, 10.08, 10.08, 13.44 ],
[505, 0, 0, 0.03, -0.01, 1.0, 100, 1, 0.06, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.08, 0.01, 0.02, 0.02, 0.02 ],
[511, 0, 0, 22.45, -5.61, 1.0, 100, 1, 44.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 67.36, 8.98, 13.47, 13.47, 17.96 ],
[512, 0, 0, 0.24, -0.06, 1.0, 100, 1, 0.48, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.72, 0.1, 0.14, 0.14, 0.19 ],
[557, 0, 0, 3.6, -0.9, 1.0, 100, 1, 7.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 10.8, 1.44, 2.16, 2.16, 2.88 ],
[558, 0, 0, 4.5, -1.12, 1.0, 100, 1, 9.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 13.5, 1.8, 2.7, 2.7, 3.6 ],
[559, 0, 0, 2.42, -0.6, 1.0, 100, 1, 4.83, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 7.25, 0.97, 1.45, 1.45, 1.93 ],
[560, 0, 0, 17.13, -4.28, 1.0, 100, 1, 34.26, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 51.39, 6.85, 10.28, 10.28, 13.7 ],
[569, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[570, 0, 0, 0.06, -0.01, 1.0, 100, 1, 0.12, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.17, 0.02, 0.03, 0.03, 0.05 ],
[8, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[9, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[11, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[17, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[21, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[23, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[25, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[31, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[33, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[38, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[39, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[40, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[41, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[43, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[44, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[45, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[47, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[48, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[49, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[65, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[71, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[72, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[73, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[75, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[79, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[80, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[81, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[98, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[101, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[102, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[177, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[179, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[180, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[181, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[182, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[183, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[192, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[217, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[218, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[276, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[278, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[288, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[289, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[291, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[292, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[311, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[321, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[322, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[323, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[324, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[325, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[338, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[339, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[340, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[341, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[342, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[343, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[344, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[345, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[346, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[363, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[400, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[422, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[429, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[435, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[436, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[440, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[441, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[492, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[493, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[496, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[497, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[498, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[502, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[503, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[504, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[505, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[511, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[512, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[557, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[558, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[559, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[560, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[569, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
[570, 0, 0, 0.0, -0.0, 1.0, 100, 1, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.0, 0.0, 0.0, 0.0, 0.0 ],
])
ppc["branch"] = array([
[8, 9, 0.00024379, 0.00243793, 0.35006327, 2395, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 11, 0.0045562, 0.01822479, 0.04820045, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[11, 493, 0.00757174, 0.03028694, 0.0801021, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 493, 0.01130413, 0.04521653, 0.11958747, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[17, 18, 0.00462352, 0.04623523, 0.9335989, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[17, 12, 0.0005602, 0.00560203, 0.1131183, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[20, 21, 0.00108334, 0.01083345, 0.09722357, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[20, 22, 0.00099339, 0.00993386, 0.3566014, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[497, 23, 0.0005476, 0.00219041, 0.00579315, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[25, 22, 0.00035578, 0.00355783, 0.03192931, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[8, 21, 0.00098947, 0.00989474, 0.0887992, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[31, 32, 0.00299776, 0.02997761, 0.60531903, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[32, 33, 0.00167622, 0.01676223, 0.33846928, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 10, 0.00240464, 0.0240464, 0.48555384, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[10, 38, 0.00068488, 0.0068488, 0.13829351, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 38, 0.00143783, 0.01437835, 1.16133176, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[39, 40, 0.00452163, 0.0452163, 0.91302431, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[39, 41, 0.0017467, 0.01746699, 0.35269996, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[42, 41, 0.00311454, 0.03114543, 0.6289001, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[18, 42, 0.00343975, 0.03439751, 0.69456727, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 43, 0.00910612, 0.03642446, 0.09633445, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 43, 0.00909587, 0.03638347, 0.09622603, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[44, 45, 0.00640579, 0.02562314, 0.0677674, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[44, 505, 0.00151537, 0.00606149, 0.01603126, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[46, 12, 0.00029449, 0.00294494, 0.1057163, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[46, 12, 0.00029482, 0.00294823, 0.10583438, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[47, 48, 0.00053442, 0.00534418, 0.01199019, 299, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[31, 33, 0.0013476, 0.01347599, 0.27211226, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[71, 72, 0.00088786, 0.00887864, 0.31872128, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[73, 74, 0.00125295, 0.01252955, 0.25300129, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 75, 0.00274591, 0.02745914, 0.5544652, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[72, 75, 0.00066887, 0.00668871, 0.24010838, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 72, 0.00362221, 0.03622207, 0.73140949, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[73, 72, 0.00254751, 0.02547507, 0.51440208, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[18, 40, 0.00130277, 0.0130277, 0.26306019, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 45, 0.00771758, 0.0308703, 0.18370115, 520, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[10, 74, 0.00301674, 0.03016736, 0.60915055, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[45, 511, 0.02050843, 0.08203372, 0.05424015, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 32, 0.00134588, 0.0134588, 0.48313778, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[79, 80, 0.00076233, 0.00762327, 0.06841417, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[79, 80, 0.00076174, 0.00761738, 0.06836134, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 79, 0.00215305, 0.02153047, 0.19322279, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 79, 0.00215357, 0.02153566, 0.1932694, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[42, 98, 0.00061861, 0.00618611, 0.22206638, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[42, 98, 0.00061835, 0.00618352, 0.22197315, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[42, 101, 0.00081653, 0.00816534, 0.29311568, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[102, 42, 0.0012403, 0.01240305, 0.44523901, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 39, 0.00065102, 0.00651021, 0.23370076, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[177, 496, 0.00932496, 0.03729983, 0.09864961, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[177, 496, 0.00931603, 0.03726413, 0.09855518, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[179, 493, 0.01426992, 0.05707967, 0.15096279, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[180, 181, 0.01025686, 0.04102744, 0.10850827, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[182, 180, 0.00433818, 0.01735273, 0.04589403, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[179, 181, 0.00489306, 0.01957223, 0.05176412, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[180, 493, 0.0166914, 0.06676562, 0.17657993, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[183, 30, 0.00049645, 0.00496451, 0.17821369, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[183, 21, 0.00025687, 0.00256873, 0.36884485, 2395, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[183, 21, 0.00051295, 0.0051295, 0.18413654, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[183, 30, 0.00049609, 0.00496087, 0.17808317, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[504, 192, 0.00015355, 0.00061421, 0.00162446, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[504, 192, 0.00015421, 0.00061686, 0.00163145, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[217, 98, 0.00012787, 0.00127874, 0.04590362, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[504, 218, 0.00687025, 0.02748099, 0.07268099, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[177, 504, 0.01763702, 0.0705481, 0.18658373, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 39, 0.00086777, 0.00867775, 0.1752243, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[20, 22, 0.00099413, 0.00994131, 0.35686864, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[20, 21, 0.00108314, 0.01083137, 0.09720492, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 276, 0.00335322, 0.01341289, 0.03547406, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 276, 0.00335372, 0.01341488, 0.03547931, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 504, 0.03215471, 0.12861884, 0.34016769, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 504, 0.03216364, 0.12865455, 0.34026211, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[177, 218, 0.01082595, 0.0433038, 0.11452874, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[182, 180, 0.00433157, 0.01732628, 0.04582409, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 278, 0.00143837, 0.01438366, 0.51633804, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 278, 0.00143823, 0.01438227, 0.51628832, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 558, 0.01085322, 0.04341289, 0.25833884, 520, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 559, 0.00853967, 0.03415868, 0.09034196, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[559, 558, 0.01118579, 0.04474314, 0.11833547, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 78, 0.00358577, 0.03585769, 0.32180078, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 279, 0.00213909, 0.02139093, 0.19197048, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 279, 0.0015812, 0.01581198, 0.14190284, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[8, 102, 0.00151001, 0.01510007, 0.5420555, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[8, 101, 0.00192469, 0.01924688, 0.69091598, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 288, 0.00159713, 0.01597126, 0.14333228, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 288, 0.00159681, 0.01596814, 0.14330432, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 289, 0.00013825, 0.0013825, 0.027916, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 289, 0.00014241, 0.00142405, 0.02875502, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 289, 0.00015471, 0.00154709, 0.03123945, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 289, 0.00015129, 0.00151293, 0.03054959, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[276, 560, 0.00889322, 0.03557289, 0.02352056, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[276, 560, 0.00889157, 0.03556628, 0.02351619, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 290, 0.00112768, 0.01127678, 0.22770489, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[290, 74, 0.00414344, 0.04143444, 0.83665966, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[290, 74, 0.00414319, 0.0414319, 0.83660839, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 290, 0.0011259, 0.011259, 0.22734598, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[512, 291, 0.00265967, 0.01063868, 0.02813689, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[512, 291, 0.00266496, 0.01065983, 0.02819285, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 292, 0.0011638, 0.01163804, 0.23499969, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 292, 0.001163, 0.01162996, 0.23483654, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[71, 74, 0.00390452, 0.03904524, 0.78841612, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[74, 278, 0.00154224, 0.01542244, 0.55362774, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[74, 278, 0.00154245, 0.01542452, 0.55370232, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[32, 292, 0.00096794, 0.00967936, 0.34746542, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 560, 0.00378512, 0.0151405, 0.16017272, 693, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[311, 280, 0.00034337, 0.00343369, 0.1232611, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[280, 278, 0.00097498, 0.00974977, 0.78748387, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[311, 32, 0.00241133, 0.02411334, 0.48690559, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 321, 0.00500298, 0.0200119, 0.05292694, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 65, 0.00188585, 0.01885849, 0.38079775, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[322, 288, 0.001309, 0.01309003, 0.26431871, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[322, 323, 0.00035076, 0.00350762, 0.07082712, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[322, 323, 0.00037006, 0.0037006, 0.0747239, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 324, 0.00197195, 0.01971953, 0.39818407, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[324, 325, 0.00110351, 0.01103509, 0.2228246, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 325, 0.00086657, 0.00866574, 0.17498191, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[332, 78, 0.00129444, 0.01294437, 0.26137749, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[322, 288, 0.001309, 0.01309003, 0.26431871, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[324, 288, 0.00126274, 0.01262742, 0.1133234, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[338, 559, 0.00230702, 0.0092281, 0.09762492, 693, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[339, 559, 0.00890149, 0.03560595, 0.02354242, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[339, 340, 0.02177884, 0.08711537, 0.23040041, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[559, 340, 0.05245818, 0.20983273, 0.13874, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[341, 292, 9.329e-05, 0.00093294, 0.07535316, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[338, 559, 0.00461405, 0.0184562, 0.04881246, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 342, 0.00302595, 0.0121038, 0.03201181, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[558, 343, 0.00266256, 0.01065025, 0.11266997, 693, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[502, 340, 0.01086926, 0.04347702, 0.11498688, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[502, 340, 0.01086876, 0.04347504, 0.11498163, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[72, 32, 0.00135107, 0.01351073, 0.48500226, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[72, 32, 0.001351, 0.01351004, 0.4849774, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[344, 345, 5.763e-05, 0.00057629, 0.04654687, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[346, 47, 0.0001134, 0.001134, 0.04070792, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[46, 47, 8.975e-05, 0.00089751, 0.0322183, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[346, 345, 7.218e-05, 0.00072178, 0.02591013, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[363, 344, 2.663e-05, 0.00026627, 0.00955859, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[332, 78, 0.00129421, 0.01294206, 0.26133088, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 49, 0.0016876, 0.01687604, 0.15145211, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 49, 0.0016883, 0.01688296, 0.15151426, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 74, 0.00150357, 0.01503566, 0.13493589, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 74, 0.00150416, 0.01504155, 0.13498871, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[278, 80, 0.00325679, 0.03256787, 0.29227666, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[278, 80, 0.0032572, 0.03257202, 0.29231395, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 278, 0.00421184, 0.04211842, 0.37798704, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 278, 0.0042108, 0.04210803, 0.37789381, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[569, 570, 0.00813488, 0.0325395, 0.08605961, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[498, 400, 0.00303355, 0.01213421, 0.03209225, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 342, 0.00300992, 0.01203967, 0.0318422, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 321, 0.00500231, 0.02000926, 0.05291995, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 65, 0.00188603, 0.01886034, 0.38083504, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[8, 21, 0.00098975, 0.00989751, 0.08882406, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[311, 32, 0.00241182, 0.02411819, 0.48700348, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.0002536, 0.00253601, 0.02275915, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.00023449, 0.00234488, 0.02104382, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.00023272, 0.00232722, 0.02088534, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.00023518, 0.0023518, 0.02110597, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.00023269, 0.00232687, 0.02088223, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.0002536, 0.00253601, 0.02275915, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[45, 429, 0.00640579, 0.02562314, 0.0677674, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[44, 429, 1.322e-05, 5.289e-05, 0.00013989, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[505, 429, 0.00150314, 0.00601256, 0.01590186, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[32, 436, 0.00044813, 0.0044813, 0.16086776, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[435, 436, 6.634e-05, 0.00066343, 0.02381569, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 436, 0.00089768, 0.0089768, 0.32224515, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[181, 441, 0.01020132, 0.04080529, 0.10792074, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[440, 441, 3.306e-05, 0.00013223, 0.00034972, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[504, 441, 0.01479025, 0.05916099, 0.15646741, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[10, 492, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[12, 493, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[18, 496, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[20, 497, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[22, 498, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[32, 502, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[37, 503, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[42, 504, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[46, 505, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[74, 511, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[78, 512, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[277, 557, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[279, 558, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[280, 559, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[290, 560, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[332, 569, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ]
])
ppc["gencost"] = array([
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 28.0, 0, 42.0, 21.0, 33.6, 16.8 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 32.0, 0, 48.0, 24.0, 38.4, 19.2 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 8.0, 0, 12.0, 6.0, 9.6, 4.8 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 26.0, 0, 39.0, 19.5, 31.2, 15.6 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 32.0, 0, 48.0, 24.0, 38.4, 19.2 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
])
return ppc
| 126.020556
| 192
| 0.294265
| 20,128
| 104,219
| 1.523549
| 0.043372
| 0.449618
| 0.511152
| 0.570534
| 0.654242
| 0.615926
| 0.614426
| 0.604057
| 0.600111
| 0.600111
| 0
| 0.577112
| 0.492319
| 104,219
| 827
| 193
| 126.020556
| 0.002476
| 0
| 0
| 0.326481
| 0
| 0
| 0.000326
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001209
| false
| 0
| 0.001209
| 0
| 0.003628
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
954eaddc391f9dd7e3a586bd506c172d1b3be129
| 1,500
|
py
|
Python
|
ietf/person/migrations/0014_auto_20160613_0751.py
|
ekr/ietfdb
|
8d936836b0b9ff31cda415b0a423e3f5b33ab695
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2
|
2021-11-20T03:40:40.000Z
|
2021-11-20T03:40:42.000Z
|
ietf/person/migrations/0014_auto_20160613_0751.py
|
ekr/ietfdb
|
8d936836b0b9ff31cda415b0a423e3f5b33ab695
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
ietf/person/migrations/0014_auto_20160613_0751.py
|
ekr/ietfdb
|
8d936836b0b9ff31cda415b0a423e3f5b33ab695
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import ietf.utils.storage
class Migration(migrations.Migration):
    """Re-declare the photo fields on Person/PersonHistory.

    Each AlterField swaps the fields' storage backend to
    ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None);
    judging by the class name, this is a FileSystemStorage variant meant to
    keep the storage location out of migration state — confirm against
    ietf/utils/storage.py. No data is changed.
    """

    dependencies = [
        ('person', '0013_add_plain_name_aliases'),
    ]

    operations = [
        migrations.AlterField(
            model_name='person',
            name='photo',
            field=models.ImageField(default=None, storage=ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None), upload_to=b'photo', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='person',
            name='photo_thumb',
            # NOTE(review): photo_thumb also uploads to b'photo' (same dir as
            # photo) — looks intentional but worth confirming.
            field=models.ImageField(default=None, storage=ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None), upload_to=b'photo', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='personhistory',
            name='photo',
            field=models.ImageField(default=None, storage=ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None), upload_to=b'photo', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='personhistory',
            name='photo_thumb',
            field=models.ImageField(default=None, storage=ietf.utils.storage.NoLocationMigrationFileSystemStorage(location=None), upload_to=b'photo', blank=True),
            preserve_default=True,
        ),
    ]
| 37.5
| 162
| 0.656
| 144
| 1,500
| 6.673611
| 0.291667
| 0.046826
| 0.083247
| 0.120708
| 0.811655
| 0.811655
| 0.811655
| 0.768991
| 0.768991
| 0.768991
| 0
| 0.004344
| 0.232667
| 1,500
| 39
| 163
| 38.461538
| 0.830582
| 0.014
| 0
| 0.727273
| 0
| 0
| 0.083277
| 0.01828
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95e45429dffe87500e3a89c1d565879d89da52f5
| 4,905
|
py
|
Python
|
LaylaCoin/main.py
|
Bhaney44/AlgorandDevelopment
|
309e68337227af879f5c4e92c72156928a39fe32
|
[
"MIT"
] | null | null | null |
LaylaCoin/main.py
|
Bhaney44/AlgorandDevelopment
|
309e68337227af879f5c4e92c72156928a39fe32
|
[
"MIT"
] | 1
|
2021-04-24T19:24:05.000Z
|
2021-04-28T05:32:40.000Z
|
LaylaCoin/main.py
|
Bhaney44/AlgorandDevelopment
|
309e68337227af879f5c4e92c72156928a39fe32
|
[
"MIT"
] | 1
|
2022-01-17T18:00:56.000Z
|
2022-01-17T18:00:56.000Z
|
#Imports
from config import *
from algosdk.v2client import algod
from algosdk import account, mnemonic
from algosdk.future.transaction import write_to_file
from algosdk.future.transaction import AssetConfigTxn, AssetTransferTxn
from util import sign_and_send, balance_formatter
from algosdk.future.transaction import AssetTransferTxn
# Client
# client = algod.AlgodClient(algod_token, algod_address)
client = algod.AlgodClient("", algod_address, headers={'User-Agent': 'DoYouLoveMe?'})
# Create function.
# Returns an unsigned txn object and writes the unsigned transaction object to a file for offline signing.
# Uses current network params.
def create(passphrase=None):
    """Build the asset-creation transaction from the config module's
    `creator_address` / `asset_details`.

    With a passphrase the transaction is signed and submitted and the new
    asset ID printed; without one, the unsigned transaction is written to
    ``create_coin.txn`` for offline signing. Uses current network params.
    """
    suggested = client.suggested_params()
    creation_txn = AssetConfigTxn(creator_address, suggested, **asset_details)
    if not passphrase:
        # Offline path: persist the unsigned transaction to disk.
        write_to_file([creation_txn], "create_coin.txn")
        return
    confirmation = sign_and_send(creation_txn, passphrase, client)
    new_asset_id = confirmation.get('asset-index')
    print("Asset ID: {}".format(new_asset_id))
# Creates an unsigned opt-in transaction for the specified asset id and address.
# Uses current network params.
def optin(passphrase=None):
    """Opt `receiver_address` in to `asset_id` (both from config).

    An ASA opt-in is a zero-amount asset transfer from the account to
    itself. With a passphrase the transaction is signed and submitted;
    without one, the unsigned transaction is written to ``optin.txn`` for
    offline signing. Uses current network params.
    """
    params = client.suggested_params()
    txn = AssetTransferTxn(sender=receiver_address, sp=params, receiver=receiver_address, amt=0, index=asset_id)
    if passphrase:
        txinfo = sign_and_send(txn, passphrase, client)
        print("Opted in to asset ID: {}".format(asset_id))
    else:
        # BUG FIX: original wrote undefined name `txns` (NameError on the
        # offline path); the transaction built above is `txn`.
        write_to_file([txn], "optin.txn")
# Creates an unsigned transfer transaction for the specified asset id, to the specified address, for the specified amount.
#def transfer(passphrase=None):
#amount = 6000
#params = client.suggested_params()
#txn = AssetTransferTxn(sender=creator_address, sp=params, receiver=receiver_address, amt=amount, index=asset_id)
#if passphrase:
#txinfo = sign_and_send(txn, passphrase, client)
#formatted_amount = balance_formatter(amount, asset_id, client)
#print("Transferred {} from {} to {}".format(formatted_amount,
#creator_address, receiver_address))
#print("Transaction ID Confirmation: {}".format(txinfo.get("tx")))
#else:
#write_to_file([txns], "transfer.txn")
def transfer(passphrase=None):
    """Transfer 6000 base units of `asset_id` from `creator_address` to
    `receiver_address` (all from config).

    With a passphrase the transaction is signed and submitted and a
    confirmation printed; without one, the unsigned transaction is written
    to ``transfer.txn`` for offline signing.
    """
    amount = 6000
    params = client.suggested_params()
    txn = AssetTransferTxn(sender=creator_address, sp=params, receiver=receiver_address, amt=amount, index=asset_id)
    if passphrase:
        txinfo = sign_and_send(txn, passphrase, client)
        formatted_amount = balance_formatter(amount, asset_id, client)
        print("Transferred {} from {} to {}".format(formatted_amount,
        creator_address, receiver_address))
        print("Transaction ID Confirmation: {}".format(txinfo.get("tx")))
    else:
        # BUG FIX: original wrote undefined name `txns` (NameError on the
        # offline path); the transaction built above is `txn`.
        write_to_file([txn], "transfer.txn")
# Checks the asset balance for the specific address and asset id.
def check_holdings(asset_id, address):
    """Print the balance of `asset_id` held by `address`, or a message that
    the account must opt in first if it holds no such asset."""
    holdings = client.account_info(address).get("assets")
    matching = next((entry for entry in holdings if entry['asset-id'] == asset_id), None)
    if matching is not None:
        qty = matching.get("amount")
        print("Account {} has {}.".format(address, balance_formatter(qty, asset_id, client)))
        return
    print("Account {} must opt-in to Asset ID {}.".format(address, asset_id))
# Creates an unsigned opt-in transaction for the specified asset id and address.
# Uses current network params.
def optin(passphrase=None):
    """Opt `receiver_address` in to `asset_id` via a zero-amount transfer
    to itself (the standard ASA opt-in).

    With a passphrase the transaction is signed and submitted; without one,
    the unsigned transaction is written to ``optin.txn``.
    NOTE(review): duplicate of the earlier `optin` definition — this later
    one wins at import time; consider deleting one copy.
    """
    params = client.suggested_params()
    txn = AssetTransferTxn(sender=receiver_address, sp=params, receiver=receiver_address, amt=0, index=asset_id)
    if passphrase:
        txinfo = sign_and_send(txn, passphrase, client)
        print("Opted in to asset ID: {}".format(asset_id))
    else:
        # BUG FIX: `txns` was undefined (NameError); write the built `txn`.
        write_to_file([txn], "optin.txn")
from util import balance_formatter
# Checks the asset balance for the specific address and asset id.
def check_holdings(asset_id, address):
    """Print how much of `asset_id` the account `address` holds.

    Prints the formatted balance for the first matching holding, or an
    opt-in reminder when the account does not hold the asset.
    """
    info = client.account_info(address)
    for holding in info.get("assets"):
        if holding['asset-id'] != asset_id:
            continue
        qty = holding.get("amount")
        print("Account {} has {}.".format(address, balance_formatter(qty, asset_id, client)))
        return
    print("Account {} must opt-in to Asset ID {}.".format(address, asset_id))
# Creates an unsigned transfer transaction for the specified asset id, to the specified address, for the specified amount.
def transfer(passphrase=None):
    """Transfer 6000 base units of `asset_id` from `creator_address` to
    `receiver_address`, signing and sending when a passphrase is given,
    otherwise writing the unsigned transaction to ``transfer.txn``.

    NOTE(review): duplicate of the earlier `transfer` definition — this
    later one wins at import time; consider deleting one copy.
    """
    amount = 6000
    params = client.suggested_params()
    txn = AssetTransferTxn(sender=creator_address, sp=params, receiver=receiver_address, amt=amount, index=asset_id)
    if passphrase:
        txinfo = sign_and_send(txn, passphrase, client)
        formatted_amount = balance_formatter(amount, asset_id, client)
        print("Transferred {} from {} to {}".format(formatted_amount,
        creator_address, receiver_address))
        print("Transaction ID Confirmation: {}".format(txinfo.get("tx")))
    else:
        # BUG FIX: `txns` was undefined (NameError); write the built `txn`.
        write_to_file([txn], "transfer.txn")
| 41.923077
| 122
| 0.730479
| 634
| 4,905
| 5.498423
| 0.143533
| 0.066265
| 0.022088
| 0.046472
| 0.849684
| 0.812679
| 0.812679
| 0.800057
| 0.800057
| 0.776248
| 0
| 0.003632
| 0.158002
| 4,905
| 117
| 123
| 41.923077
| 0.840436
| 0.270133
| 0
| 0.815789
| 0
| 0
| 0.119269
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.092105
| false
| 0.197368
| 0.105263
| 0
| 0.223684
| 0.144737
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
25105c62df30d4774bfe05457d440c425ff6c41b
| 35,727
|
py
|
Python
|
data collection and processing/r.landslide/ANN_module.py
|
Landslide-Analytics-System/GLAS
|
1a51a7365dcd6a29ded6798d29f3a1f90b93a0c3
|
[
"MIT"
] | 1
|
2020-11-15T04:18:18.000Z
|
2020-11-15T04:18:18.000Z
|
data collection and processing/r.landslide/ANN_module.py
|
ShreyJ1729/Landslide-Prediction
|
1a51a7365dcd6a29ded6798d29f3a1f90b93a0c3
|
[
"MIT"
] | 1
|
2021-03-24T17:40:23.000Z
|
2021-05-29T20:36:24.000Z
|
data collection and processing/r.landslide/ANN_module.py
|
ShreyJ1729/Landslide-Prediction
|
1a51a7365dcd6a29ded6798d29f3a1f90b93a0c3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 19 16:52:59 2018
@author: Lucimara Bragagnolo
"""
def ann_module(input_data,output_data,reckon,num_hidden,coef,nr_epochs,val_samples,test_samples,directory,columnst,col,row,flag_train):
    """Train a single-hidden-layer sigmoid MLP and optionally apply it.

    Pipeline: min-max scale inputs (in place!), randomly split rows into
    train/validation/test sets, train `nr_epochs` epochs of per-sample
    backpropagation with learning rate `coef`, keep the weights of the
    epoch with minimum validation error, save plots/weights/datasets under
    `directory`, run a sensitivity analysis, and — when `flag_train` is
    falsy — return the network output over `reckon` reshaped to (row, col);
    otherwise return 0.

    Side effects: creates `directory` (must not exist), chdir()s in and out
    of it repeatedly, writes .txt/.png files, prints via grass.script.
    NOTE(review): `input_data` and `reckon` are normalized IN PLACE — the
    caller's arrays are modified.
    """
    import numpy as np
    import math
    import matplotlib
    matplotlib.use('agg')  # headless backend: figures are only saved, never shown
    import matplotlib.pyplot as plt
    import os
    import grass.script as gscript
    os.mkdir(directory) #Create directory
    #Scaling the data: per-column min/max computed over training + reckon rows
    max_in = np.zeros((1,input_data.shape[1]))
    min_in = np.zeros((1,input_data.shape[1]))
    max_out = 1
    min_out = 0
    alldata = np.concatenate((input_data,reckon))
    for i in range(0,input_data.shape[1]):
        max_in[0,i] = np.nanmax(alldata[:,i])
        min_in[0,i] = np.nanmin(alldata[:,i])
    # Persist the scaling factors so the trained net can be reused later.
    os.chdir(directory)
    np.savetxt('max_in.txt', (max_in), delimiter=',')
    np.savetxt('min_in.txt', (min_in), delimiter=',')
    os.chdir('../')
    # Min-max normalize both arrays in place (columns whose max is 0 are skipped).
    for j in range(0,input_data.shape[1]):
        if max_in[0,j] != 0:
            input_data[:,j] = (input_data[:,j] - min_in[0,j])/(max_in[0,j]-min_in[0,j])
    for j in range(0,reckon.shape[1]):
        if max_in[0,j] != 0:
            reckon[:,j] = (reckon[:,j] - min_in[0,j])/(max_in[0,j]-min_in[0,j])
    #Resample data: random, non-overlapping test/validation/train index sets
    m = 1
    n = input_data.shape[0]
    n_test = int(math.ceil(test_samples*n)) # TODO: make these percentages generic (original note)
    n_val = int(math.ceil(val_samples*n))
    n_train = int(n - n_test - n_val)
    reg_set = np.arange(0,n)
    T = np.zeros((n_train,1))
    V = np.zeros((n_val,1))
    flag_test = 0
    # Oversample 2*n_test random indices, then drop from the front until
    # exactly n_test unique indices remain.
    B_ind = ((n-m)*np.random.rand(2*n_test,1)+m)
    B_ind = [math.floor(x) for x in B_ind]
    while flag_test == 0:
        if len(np.unique(B_ind)) == n_test:
            flag_test = 1
        else:
            del B_ind[0]
    B_ind = np.unique(B_ind)
    B_ind = B_ind.astype(int)
    B = reg_set[B_ind]-1 #Test
    reg_rest = np.setdiff1d(reg_set,B) #Dataset for TRAINING and VALIDATIONS
    flag_val = 0
    # Same oversample-and-trim trick for the validation indices, drawn from
    # the rows not already taken by the test set.
    V_buff = ((n-n_test-m)*np.random.rand(2*n_val,1)+m)
    V_buff = [math.floor(x) for x in V_buff]
    while flag_val == 0:
        if len(np.unique(V_buff)) == n_val:
            flag_val = 1
        elif len(np.unique(V_buff)) > n_val:
            del V_buff[0]
        else:
            # Too few unique indices left: redraw the whole buffer.
            V_buff = ((n-n_test-m)*np.random.rand(2*n_val,1)+m)
            V_buff = [math.floor(x) for x in V_buff]
    V_buff = [int(x) for x in V_buff] #Transform to integer
    V[:,0] = (reg_rest[np.unique(V_buff)])
    T[:,0] = np.setdiff1d(reg_rest,V[:,0])
    T = T.astype(np.int64) #Training
    V = V.astype(np.int64) #Validations
    B = B.astype(np.int64) #Test
    p = input_data.shape[1]
    input_test = np.zeros((B.shape[0],p))
    output_test = np.zeros((B.shape[0],1))
    input_train = np.zeros((T.shape[0],p))
    output_train = np.zeros((T.shape[0],1))
    # NOTE(review): input_val is sized by B (test count) but filled with V
    # (validation) rows below; also output_val is sized by T. Looks like a
    # copy/paste slip — works only when the counts happen to line up.
    input_val = np.zeros((B.shape[0],p))
    output_val = np.zeros((T.shape[0],1))
    n1 = T.shape[0]
    n2 = B.shape[0]
    n3 = V.shape[0]
    # Gather the actual rows for each split.
    # NOTE(review): range(0, n1-1) etc. skip the LAST index of each split —
    # probable off-by-one, leaving one all-zero row per set; confirm intent.
    for i in range(0,n1-1):
        input_train[i,:] = input_data[T[i],:]
        output_train[i,:] = output_data[T[i]]
    for i in range(0,n2-1):
        input_test[i,:] = input_data[B[i],:]
        output_test[i,:] = output_data[B[i]]
    for i in range(0,n3-1):
        input_val[i,:] = input_data[V[i],:]
        output_val[i,:] = output_data[V[i]]
    output_data = output_data.reshape((output_data.size,1))
    #Validations
    def ann_validate(input_data,output_data,weights,peights,biasH,biasO,max_in,max_out,min_in,min_out):
        """Forward-propagate `input_data` and return (output, signed error
        per record vs `output_data`). The scaling args are unused here."""
        #Collect dimensions
        num_input = input_data.shape[1] #Number of parameters
        reg_size = input_data.shape[0] #Number of records
        num_output = output_data.shape[1]
        num_hidden = peights.shape[0] - 1 #Number of lines of weights matrix of the output layer - 1
        S = np.zeros((num_hidden,1))
        H = np.zeros((num_hidden,1))
        R = np.zeros((num_output,1))
        output = np.zeros((reg_size,num_output))
        erro = np.zeros((1,reg_size))
        def activation(x): #Sigmoid as activation function
            fx = 1/(1+math.exp(-x))
            return fx
        for k in range(0,reg_size):
            # Hidden layer: S accumulates the weighted sum; last weights row
            # is the bias weight.
            for i in range(0,num_hidden):
                for j in range(0,num_input):
                    S[i,0] = S[i,0] + input_data[k,j]*weights[j,i]
                S[i,0] = S[i,0] + biasH[i,0]*weights[num_input,i]
                H[i,0] = activation(S[i,0])
                S[i,0] = 0
            # Output layer: same pattern with peights / biasO.
            for i in range(0,num_output):
                for j in range(0,num_hidden):
                    R[i,0] = R[i,0] + H[j,0]*peights[j,i]
                R[i,0] = R[i,0] + biasO[i,0]*peights[num_hidden,i]
                output[k,i] = activation(R[i,0])
                R[i,0] = 0
            erro[0,k] = output[k,:] - output_data[k,:]
        return output,erro
    #Test
    def ann_test(input_data,output_data,weights,peights,biasH,biasO):
        """Forward-propagate the test set; return (output, signed error,
        rounded error) — the rounded error counts misclassifications."""
        #Collect dimensions
        num_input = input_data.shape[1]
        reg_size = input_data.shape[0]
        num_output = output_data.shape[1]
        num_hidden = peights.shape[0] - 1
        def activation(x): #activation function
            fx = 1/(1+math.exp(-x))
            return fx
        S = np.zeros((num_hidden,1))
        H = np.zeros((num_hidden,1))
        R = np.zeros((num_output,1))
        output = np.zeros((reg_size,num_output))
        erro_dec = np.zeros((1,reg_size))
        erro_round = np.zeros((1,reg_size))
        for k in range(0,reg_size):
            for i in range(0,num_hidden):
                for j in range(0,num_input):
                    S[i,0] = S[i,0] + input_data[k,j]*weights[j,i]
                S[i,0] = S[i,0] + biasH[i,0]*weights[num_input,i]
                H[i,0] = activation(S[i,0])
                S[i,0] = 0
            for i in range(0,num_output):
                for j in range(0,num_hidden):
                    R[i,0] = R[i,0] + H[j,0]*peights[j,i]
                R[i,0] = R[i,0] + biasO[i,0]*peights[num_hidden,i]
                output[k,i] = activation(R[i,0])
                R[i,0] = 0
            erro_dec[0,k] = output[k,:] - output_data[k,:]
            erro_round[0,k] = np.around(erro_dec[0,k])
        return output,erro_dec,erro_round
    #Reckon
    def ann_reckon(input_data,weights,peights,biasH,biasO):
        """Forward-propagate rows without targets (prediction only);
        output width is hard-coded to 1."""
        #Collect dimensions
        num_input = input_data.shape[1]
        reg_size = input_data.shape[0]
        num_output = 1
        num_hidden = peights.shape[0] - 1
        def activation(x):
            fx = 1/(1+math.exp(-x))
            return fx
        S = np.zeros((num_hidden,1))
        H = np.zeros((num_hidden,1))
        R = np.zeros((num_output,1))
        output = np.zeros((reg_size,num_output))
        for k in range(0,reg_size):
            for i in range(0,num_hidden):
                for j in range(0,num_input):
                    S[i,0] = S[i,0] + input_data[k,j]*weights[j,i]
                S[i,0] = S[i,0] + biasH[i,0]*weights[num_input,i]
                H[i,0] = activation(S[i,0])
                S[i,0] = 0
            for i in range(0,num_output):
                for j in range(0,num_hidden):
                    R[i,0] = R[i,0] + H[j,0]*peights[j,i]
                R[i,0] = R[i,0] + biasO[i,0]*peights[num_hidden,i]
                output[k,i] = activation(R[i,0])
                R[i,0] = 0
        output_reckon = output
        return output_reckon
    #Training
    num_input = input_train.shape[1] #Number of parameters
    reg_size = input_train.shape[0] #Number of examples
    num_output = output_train.shape[1] #Number of outputs (in this case, just 1 - susceptibility)
    # Random initial weights; the extra row in each matrix is the bias weight.
    weights = np.random.rand(num_input+1,num_hidden)
    peights = np.random.rand(num_hidden+1,num_output)
    biasH = np.ones((num_hidden,1))
    biasO = np.ones((num_output,1))
    S = np.zeros((num_hidden,1))
    H = np.zeros((num_hidden,1))
    R = np.zeros((num_output,1))
    output = np.zeros((reg_size,num_output))
    erro = np.zeros((1,nr_epochs))
    erro_train = np.zeros((1,nr_epochs))
    erro_validate = np.zeros((1,nr_epochs))
    def activation(x):
        # Sigmoid activation, shared by the training loop below.
        fx = 1/(1+math.exp(-x))
        return fx
    for epoch in range(0,nr_epochs):
        for k in range(0,reg_size):
            # --- Forward pass for sample k ---
            for i in range(0,num_hidden):
                for j in range(0,num_input):
                    S[i,0] = S[i,0] + input_train[k,j]*weights[j,i]
                S[i,0] = S[i,0] + biasH[i,0]*weights[num_input,i]
                H[i,0] = activation(S[i,0])
                S[i,0] = 0
            for i in range(0,num_output):
                for j in range(0,num_hidden):
                    R[i,0] = R[i,0] + H[j,0]*peights[j,i]
                R[i,0] = R[i,0] + biasO[i,0]*peights[num_hidden,i]
                output[k,i] = activation(R[i,0])
                R[i,0] = 0
            #Backpropagation
            #Update the weights in the output layer
            # NOTE(review): i ranges over [0, num_hidden), so the first branch
            # always fires and the bias row (i == num_hidden) is never updated;
            # the elif i == num_hidden+1 is unreachable. Probable off-by-one.
            for i in range(0,num_hidden):
                for j in range(0,num_output):
                    if i < (num_hidden+1):
                        peights[i,j] = peights[i,j] + coef*(output_train[k,j] - output[k,j])*output[k,j]*(1-output[k,j])*H[i,0]
                    elif i == (num_hidden+1):
                        peights[i,j] = peights[i,j] + coef*(output_train[k,j] - output[k,j])*output[k,j]*(1-output[k,j])*biasO[j,0]
            #Update the weights in the hidden layer
            buff = 0
            for j in range(0,num_hidden):
                for i in range(0,num_input):
                    # buff accumulates the back-propagated output-layer delta.
                    for k1 in range(0,num_output):
                        buff = buff + (output_train[k,k1] - output[k,k1])*output[k,k1]*(1-output[k,k1])*peights[j,k1]
                    # NOTE(review): same off-by-one as above — the bias row of
                    # `weights` is never updated, and H[j,i] in the dead elif
                    # looks like it was meant to be H[j,0].
                    if i < (num_input+1):
                        weights[i,j] = weights[i,j] + coef*buff*H[j,0]*(1-H[j,0])*input_train[k,i]
                    elif i == num_input+1:
                        weights[i,j] = weights[i,j] + coef*buff*H[j,i]*(1-H[j,0])*biasH[j,0]
                    buff = 0 #Zeroes the buffer variable
        erro_train[0,epoch] = np.linalg.norm((output - output_train)/reg_size)
        # NOTE(review): output_val is overwritten with the network's validation
        # predictions here, so later epochs validate against predictions, not
        # targets — confirm whether that is intended.
        output_val,erro_val = ann_validate(input_val,output_val,weights,peights,biasH,biasO,max_in,max_out,min_in,min_out)
        erro_validate[0,epoch] = np.linalg.norm(erro_val)
        #Collects the weights in the minimum validation error (early stopping)
        if epoch == 0:
            W = weights
            P = peights
            epoch_min = epoch
            erro_min = 1000
        elif erro_min > np.linalg.norm(erro_validate[0,epoch]):
            W = weights
            P = peights
            epoch_min = epoch
            erro_min = np.linalg.norm(erro_validate[0,epoch])
    # Evaluate on the held-out test set with the final weights.
    [output,erro_test,erro_round] = ann_test(input_test,output_test,weights,peights,biasH,biasO)
    # --- Diagnostics plot: training/validation error + per-instance test error ---
    x = np.arange(1,nr_epochs+1).reshape((nr_epochs,1))
    plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.5, hspace=None)
    plt.subplot(1,2,1)
    plt.plot(x,erro_train.T,x,erro_validate.T,'g-', epoch_min, erro_min,'g*')
    plt.xlabel('Epochs')
    plt.ylabel('Root mean square output error')
    plt.legend(('Training','Validation','Early stop'))
    plt.subplot(1,2,2)
    plt.bar(np.arange(1,(erro_test.shape[1])+1),erro_test.reshape(erro_test.shape[1]))
    plt.xlabel('Instances')
    plt.ylabel('Error (ANN output - real output)')
    os.chdir(directory)
    plt.savefig('ANN_train_val', dpi=300, facecolor='w', edgecolor='w',
    orientation='portrait', papertype=None, format=None,
    transparent=False, bbox_inches='tight', pad_inches=0.2,
    frameon=None)
    os.chdir('../')
    rounded = np.asarray(erro_round).reshape((B.shape[0],1))
    error = sum(abs(erro_round.T))
    gscript.message(_("Test set error: "))
    gscript.message(error)
    #Assign the minimum weights to their original names
    weights = W
    peights = P
    # --- Persist weights, biases and all intermediate datasets ---
    os.chdir(directory)
    np.savetxt('weights.txt', (weights), delimiter=',')
    np.savetxt('peights.txt', (peights), delimiter=',')
    np.savetxt('biasH.txt', (biasH), delimiter=',')
    np.savetxt('biasO.txt', (biasO), delimiter=',')
    os.mkdir('Inputs and outputs')
    os.chdir('Inputs and outputs')
    np.savetxt('Input_test.txt', (input_test), delimiter=',')
    np.savetxt('Output_test.txt', (output_test), delimiter=',')
    np.savetxt('Input_val.txt', (input_val), delimiter=',')
    np.savetxt('Output_val.txt', (output_val), delimiter=',')
    np.savetxt('Input_train.txt', (input_train), delimiter=',')
    np.savetxt('Output_train.txt', (output_train), delimiter=',')
    np.savetxt('Error_train.txt', (erro_train), delimiter=',')
    np.savetxt('Error_val.txt', (erro_validate), delimiter=',')
    np.savetxt('Epoch_and_error_min.txt', (epoch_min,erro_min), delimiter=',')
    np.savetxt('Test_set_TOTAL_error.txt', (error), delimiter=',')
    np.savetxt('Error_test.txt', (erro_test), delimiter=',')
    param = open('ANN_Parameters.txt','w')
    param.write('Hidden neurons: '+str(num_hidden))
    param.write('\n Learning rate: '+str(coef))
    param.write('\n Epochs: '+str(nr_epochs))
    param.close()
    os.chdir('../')
    os.chdir('../')
    #Sensitivity analysis
    def sensitivity(input_data,output_size,weights,peights,biasH,biasO):
        """One-at-a-time sensitivity: sweep each input over 200 random values
        while holding the others at their (normalized) mean; return the
        sweep set, the fixed means, and the input/output cubes."""
        input_size = input_data.shape[1] #Number of columns (parameters)
        npts = 200 #Number of samples in the sensitivity evaluation set
        sens_set = np.random.random_sample((npts,1)) #Return random floats in the half-open interval [0.0, 1.0)
        fixed_par_value = np.empty([input_size,1])
        ones_sens = np.ones((200,1))
        #Calculation of the normalized mean value of each entry
        for k in range(0,input_size):
            fixed_par_value[k] = (np.mean(input_data[:,k]))
        #Pre-allocation
        input_sens = [[([1] * npts) for j in range(input_size)] for i in range(input_size)]
        output_sens = [[([0] * npts) for j in range(input_size)] for i in range(input_size)]
        input_sens = np.asarray(input_sens,dtype=float)
        for k1 in range(0,input_size):
            for k2 in range(0,input_size):
                if k1 == k2:
                    # Varied parameter: use the random sweep values.
                    input_sens[k1,k2,:] = sens_set.reshape(sens_set.shape[0]).T
                else:
                    # Held parameter: constant at its mean value.
                    input_sens[k1,k2,:] = (ones_sens*fixed_par_value[k2]).reshape(ones_sens.shape[0])
        for k1 in range(0,input_size):
            input_sens2 = np.asarray(input_sens[k1]).T
            output = ann_reckon(input_sens2,weights,peights,biasH,biasO)
            output_sens[k1] = output
        return sens_set,fixed_par_value,input_sens,output_sens
    [sens_set,fixed_par_value,input_sens,output_sens] = sensitivity(input_data,output_data.shape[1],weights,peights,biasH,biasO)
    # One scatter plot per input parameter.
    for k in range(0,input_data.shape[1]):
        plt.figure()
        plt.plot(sens_set,output_sens[k],'.')
        plt.title('Sensitivity analysis. Parameter: '+columnst[k])
        plt.ylabel('Output response')
        plt.xlabel('Parameter: '+columnst[k])
        os.chdir(directory)
        plt.savefig('SensitivityAnalysisVar_'+columnst[k], dpi=300, facecolor='w', edgecolor='w',
        orientation='portrait', papertype=None, format=None,
        transparent=False, bbox_inches='tight', pad_inches=0.2,
        frameon=None)
        os.chdir('../')
    #Reckon: apply the trained net to the prediction grid (column-major
    #reshape to (col,row), then transpose to (row,col)).
    if not flag_train:
        output_reckon = ann_reckon(reckon,weights,peights,biasH,biasO)
        a = np.reshape(output_reckon,(col,row),order='F')
        a = np.transpose(a)
    else:
        a = 0
    return a
#Reckon
def ann_reckon(input_data,weights,peights,biasH,biasO):
    """Forward-propagate `input_data` through a trained one-hidden-layer
    sigmoid network and return its predictions.

    Parameters
    ----------
    input_data : ndarray, shape (n_records, n_inputs)
    weights : ndarray, shape (n_inputs+1, n_hidden)
        Hidden-layer weights; the last row is the bias weight.
    peights : ndarray, shape (n_hidden+1, >=1)
        Output-layer weights; the last row is the bias weight. Only the
        first column is used (output width is fixed at 1, as before).
    biasH, biasO : ndarray, shapes (n_hidden, 1) and (>=1, 1)
        Bias input values (typically ones).

    Returns
    -------
    ndarray, shape (n_records, 1) of sigmoid activations.

    The original triple Python loop is replaced by the equivalent
    vectorized NumPy expressions (same arithmetic, per-row sums done by
    matrix products), which is both faster and easier to audit.
    """
    import numpy as np
    num_input = input_data.shape[1]
    num_output = 1  # prediction path is single-output, matching the original
    num_hidden = peights.shape[0] - 1

    def activation(x):
        # Elementwise sigmoid.
        return 1.0/(1.0 + np.exp(-x))

    # Hidden layer: S_k = x_k . W[:-1] + biasH * W[-1]  (bias term per neuron)
    hidden_bias = biasH[:,0] * weights[num_input,:]
    H = activation(input_data.dot(weights[:num_input,:]) + hidden_bias)
    # Output layer: R_k = H_k . P[:-1, :1] + biasO * P[-1, :1]
    output_bias = biasO[:num_output,0] * peights[num_hidden,:num_output]
    output_reckon = activation(H.dot(peights[:num_hidden,:num_output]) + output_bias)
    return output_reckon
def ANN_batch(input_data,output_data,reckon,hidden,trials,coef,nr_epochs,val_samples,test_samples,directory,columnst,col,row,flag_train):
#hidden is a vector now
#trials is the number of initial conditions
#Train a set of neural networks and select the best one
#Different number of hidden neurons
#Different number of initial conditions
import numpy as np
import math
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import os
import grass.script as gscript
os.mkdir(directory) #Create directory -> REVER HERE
#Scaling the data
max_in = np.zeros((1,input_data.shape[1]))
min_in = np.zeros((1,input_data.shape[1]))
max_out = 1
min_out = 0
alldata = np.concatenate((input_data,reckon))
for i in range(0,input_data.shape[1]):
max_in[0,i] = np.nanmax(alldata[:,i])
min_in[0,i] = np.nanmin(alldata[:,i])
os.chdir(directory)
np.savetxt('max_in.txt', (max_in), delimiter=',')
np.savetxt('min_in.txt', (min_in), delimiter=',')
os.chdir('../')
for j in range(0,input_data.shape[1]):
if max_in[0,j] != 0:
input_data[:,j] = (input_data[:,j] - min_in[0,j])/(max_in[0,j]-min_in[0,j])
for j in range(0,reckon.shape[1]):
if max_in[0,j] != 0:
reckon[:,j] = (reckon[:,j] - min_in[0,j])/(max_in[0,j]-min_in[0,j])
#Resample data
m = 1
n = input_data.shape[0]
n_test = int(math.ceil(test_samples*n)) #TORNAR GENERICA ESSAS %%
n_val = int(math.ceil(val_samples*n))
n_train = int(n - n_test - n_val)
reg_set = np.arange(0,n)
T = np.zeros((n_train,1))
V = np.zeros((n_val,1))
flag_test = 0
B_ind = ((n-m)*np.random.rand(2*n_test,1)+m)
B_ind = [math.floor(x) for x in B_ind]
while flag_test == 0:
if len(np.unique(B_ind)) == n_test:
flag_test = 1
else:
del B_ind[0]
B_ind = np.unique(B_ind)
B_ind = B_ind.astype(int)
B = reg_set[B_ind]-1 #Test
reg_rest = np.setdiff1d(reg_set,B) #Dataset for TRAINING and VALIDATIONS
flag_val = 0
V_buff = ((n-n_test-m)*np.random.rand(2*n_val,1)+m)
V_buff = [math.floor(x) for x in V_buff]
while flag_val == 0:
if len(np.unique(V_buff)) == n_val:
flag_val = 1
elif len(np.unique(V_buff)) > n_val:
del V_buff[0]
else:
V_buff = ((n-n_test-m)*np.random.rand(2*n_val,1)+m)
V_buff = [math.floor(x) for x in V_buff]
V_buff = [int(x) for x in V_buff] #Transform to integer
V[:,0] = (reg_rest[np.unique(V_buff)])
T[:,0] = np.setdiff1d(reg_rest,V[:,0])
T = T.astype(np.int64) #Training
V = V.astype(np.int64) #Validations
B = B.astype(np.int64) #Test
p = input_data.shape[1]
input_test = np.zeros((B.shape[0],p))
output_test = np.zeros((B.shape[0],1))
input_train = np.zeros((T.shape[0],p))
output_train = np.zeros((T.shape[0],1))
input_val = np.zeros((B.shape[0],p))
output_val = np.zeros((T.shape[0],1))
n1 = T.shape[0]
n2 = B.shape[0]
n3 = V.shape[0]
for i in range(0,n1-1):
input_train[i,:] = input_data[T[i],:]
output_train[i,:] = output_data[T[i]]
for i in range(0,n2-1):
input_test[i,:] = input_data[B[i],:]
output_test[i,:] = output_data[B[i]]
for i in range(0,n3-1):
input_val[i,:] = input_data[V[i],:]
output_val[i,:] = output_data[V[i]]
output_data = output_data.reshape((output_data.size,1))
def ann_train(input_train,output_train,input_val,output_val,input_test,output_test,reckon,hidden,trials,coef,nr_epochs,val_samples,test_samples,directory,columnst,col,row,flag_train):
#Training
num_input = input_train.shape[1] #Number of parameters
reg_size = input_train.shape[0] #Number of examples
num_output = output_train.shape[1] #Number of outputs (in this case, just 1 - susceptibility)
weights = np.random.rand(num_input+1,num_hidden)
peights = np.random.rand(num_hidden+1,num_output)
biasH = np.ones((num_hidden,1))
biasO = np.ones((num_output,1))
S = np.zeros((num_hidden,1))
H = np.zeros((num_hidden,1))
R = np.zeros((num_output,1))
output = np.zeros((reg_size,num_output))
erro = np.zeros((1,nr_epochs))
erro_train = np.zeros((1,nr_epochs))
erro_validate = np.zeros((1,nr_epochs))
def activation(x):
fx = 1/(1+math.exp(-x))
return fx
for epoch in range(0,nr_epochs):
for k in range(0,reg_size):
for i in range(0,num_hidden):
for j in range(0,num_input):
S[i,0] = S[i,0] + input_train[k,j]*weights[j,i]
S[i,0] = S[i,0] + biasH[i,0]*weights[num_input,i]
H[i,0] = activation(S[i,0])
S[i,0] = 0
for i in range(0,num_output):
for j in range(0,num_hidden):
R[i,0] = R[i,0] + H[j,0]*peights[j,i]
R[i,0] = R[i,0] + biasO[i,0]*peights[num_hidden,i]
output[k,i] = activation(R[i,0])
R[i,0] = 0
#Backpropagation
#Uptade the weights in the output layer
for i in range(0,num_hidden):
for j in range(0,num_output):
if i < (num_hidden+1):
peights[i,j] = peights[i,j] + coef*(output_train[k,j] - output[k,j])*output[k,j]*(1-output[k,j])*H[i,0]
elif i == (num_hidden+1):
peights[i,j] = peights[i,j] + coef*(output_train[k,j] - output[k,j])*output[k,j]*(1-output[k,j])*biasO[j,0]
#Uptade the weights in the hidden layer
buff = 0
for j in range(0,num_hidden):
for i in range(0,num_input):
for k1 in range(0,num_output):
buff = buff + (output_train[k,k1] - output[k,k1])*output[k,k1]*(1-output[k,k1])*peights[j,k1]
if i < (num_input+1):
weights[i,j] = weights[i,j] + coef*buff*H[j,0]*(1-H[j,0])*input_train[k,i]
elif i == num_input+1:
weights[i,j] = weights[i,j] + coef*buff*H[j,i]*(1-H[j,0])*biasH[j,0]
buff = 0 #Zeroes the buffer variable
erro_train[0,epoch] = np.linalg.norm((output - output_train)/reg_size)
output_val,erro_val = ann_validate(input_val,output_val,weights,peights,biasH,biasO,max_in,max_out,min_in,min_out)
erro_validate[0,epoch] = np.linalg.norm(erro_val)
#Collects the weights in the minimum validation error
if epoch == 0:
W = weights
P = peights
epoch_min = epoch
erro_min = 1000
elif erro_min > np.linalg.norm(erro_validate[0,epoch]):
W = weights
P = peights
epoch_min = epoch
erro_min = np.linalg.norm(erro_validate[0,epoch])
[output,erro_test,erro_round] = ann_test(input_test,output_test,weights,peights,biasH,biasO)
error = np.linalg.norm(erro_test)
rounded = np.asarray(erro_round).reshape((B.shape[0],1))
error_total = sum(abs(erro_round.T))
#Assign the minimum weights to their original names
weights = W
peights = P
return output,weights,peights,biasH,biasO,erro_train,erro_validate,epoch_min,erro_min,error,erro_test,error_total
#Validations
def ann_validate(input_data,output_data,weights,peights,biasH,biasO,max_in,max_out,min_in,min_out):
    """Run a forward pass of the two-layer sigmoid ANN over a validation set.

    `weights` is the (num_input+1, num_hidden) input->hidden matrix whose
    last row holds the bias weights; `peights` is the (num_hidden+1,
    num_output) hidden->output matrix laid out the same way.  The
    max_*/min_* normalisation bounds are accepted for interface
    compatibility but are not used in this function.

    Returns
    -------
    output : (reg_size, num_output) array of network responses.
    erro   : (1, reg_size) array of output - target per record
             (the scalar store assumes a single output neuron).
    """
    num_input = input_data.shape[1]    # parameters per record
    reg_size = input_data.shape[0]     # number of records
    num_output = output_data.shape[1]
    # Hidden size = rows of the output-layer weight matrix minus the bias row.
    num_hidden = peights.shape[0] - 1

    def sigmoid(x):
        # Logistic activation, identical to the trainer's.
        return 1 / (1 + math.exp(-x))

    output = np.zeros((reg_size, num_output))
    erro = np.zeros((1, reg_size))
    hidden = np.zeros((num_hidden, 1))
    for k in range(reg_size):
        # Input -> hidden layer (accumulation order kept as in training).
        for i in range(num_hidden):
            net = 0.0
            for j in range(num_input):
                net += input_data[k, j] * weights[j, i]
            net += biasH[i, 0] * weights[num_input, i]
            hidden[i, 0] = sigmoid(net)
        # Hidden -> output layer.
        for i in range(num_output):
            net = 0.0
            for j in range(num_hidden):
                net += hidden[j, 0] * peights[j, i]
            net += biasO[i, 0] * peights[num_hidden, i]
            output[k, i] = sigmoid(net)
        # Per-record signed error.
        erro[0, k] = output[k, :] - output_data[k, :]
    return output, erro
#Test
def ann_test(input_data,output_data,weights,peights,biasH,biasO):
    """Forward-propagate a test set and report raw and rounded errors.

    Network layout matches ann_validate: `weights` holds the input->hidden
    matrix with its bias weights in the last row, `peights` the
    hidden->output matrix likewise.

    Returns
    -------
    output     : (reg_size, num_output) network responses.
    erro_dec   : (1, reg_size) output - target per record (the scalar
                 store assumes a single output neuron).
    erro_round : (1, reg_size) same error rounded to the nearest integer.
    """
    num_input = input_data.shape[1]
    reg_size = input_data.shape[0]
    num_output = output_data.shape[1]
    num_hidden = peights.shape[0] - 1  # minus the bias row

    def sigmoid(x):
        # Logistic activation function.
        return 1 / (1 + math.exp(-x))

    output = np.zeros((reg_size, num_output))
    erro_dec = np.zeros((1, reg_size))
    erro_round = np.zeros((1, reg_size))
    hidden = np.zeros((num_hidden, 1))
    for k in range(reg_size):
        # Input -> hidden layer.
        for i in range(num_hidden):
            net = 0.0
            for j in range(num_input):
                net += input_data[k, j] * weights[j, i]
            net += biasH[i, 0] * weights[num_input, i]
            hidden[i, 0] = sigmoid(net)
        # Hidden -> output layer.
        for i in range(num_output):
            net = 0.0
            for j in range(num_hidden):
                net += hidden[j, 0] * peights[j, i]
            net += biasO[i, 0] * peights[num_hidden, i]
            output[k, i] = sigmoid(net)
        erro_dec[0, k] = output[k, :] - output_data[k, :]
        erro_round[0, k] = np.around(erro_dec[0, k])
    return output, erro_dec, erro_round
#Reckon
def ann_reckon(input_data,weights,peights,biasH,biasO):
    """Forward-propagate arbitrary input records through the trained ANN.

    No targets are involved; the network is evaluated as a pure function.
    The output layer is fixed at a single neuron, matching the trained
    susceptibility model.

    Returns
    -------
    output_reckon : (reg_size, 1) array of network responses.
    """
    num_input = input_data.shape[1]
    reg_size = input_data.shape[0]
    num_output = 1  # the reckon network always produces a single output
    num_hidden = peights.shape[0] - 1  # minus the bias row

    def sigmoid(x):
        # Logistic activation function.
        return 1 / (1 + math.exp(-x))

    output = np.zeros((reg_size, num_output))
    hidden = np.zeros((num_hidden, 1))
    for k in range(reg_size):
        # Input -> hidden layer (accumulation order kept as in training).
        for i in range(num_hidden):
            net = 0.0
            for j in range(num_input):
                net += input_data[k, j] * weights[j, i]
            net += biasH[i, 0] * weights[num_input, i]
            hidden[i, 0] = sigmoid(net)
        # Hidden -> output layer.
        for i in range(num_output):
            net = 0.0
            for j in range(num_hidden):
                net += hidden[j, 0] * peights[j, i]
            net += biasO[i, 0] * peights[num_hidden, i]
            output[k, i] = sigmoid(net)
    return output
num_hidden = len(hidden)
erro_buff = 9999999
for k1 in range(0,num_hidden): #hidden neurons tested
for k2 in range(0,trials): #initial conditions
[output,Weights,Peights,BiasH,BiasO,erro_train,erro_validate,epoch_min,erro_min,error,erro_test,error_total] = ann_train(input_train,output_train,input_val,output_val,input_test,output_test,reckon,hidden,trials,coef,nr_epochs,val_samples,test_samples,directory,columnst,col,row,flag_train)
gscript.message(_("Hidden neuron: "))
neuron_tested = hidden[k1]
gscript.message(neuron_tested)
gscript.message(_("Initial condition: "))
gscript.message(k2+1)
gscript.message(_("---------------------------------"))
if error < erro_buff:
erro_buff = error
#then save the data in variables
weights = Weights
peights = Peights
biasH = BiasH
biasO = BiasO
Erro_train = erro_train
Erro_val = erro_validate
Early_stop = np.array([epoch_min,erro_min])
Erro_test = erro_test
Erro_test_norm = erro_buff
neurons = hidden[k1]
Epoch_min = epoch_min
Erro_min = erro_min
Total_erro = error_total
gscript.message(_("Test set error from the best ANN: "))
gscript.message(Total_erro)
gscript.message(_("Best ANN hidden neurons: "))
gscript.message(neurons)
x = np.arange(1,nr_epochs+1).reshape((nr_epochs,1))
plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.5, hspace=None)
plt.subplot(1,2,1)
plt.plot(x,Erro_train.T,x,Erro_val.T,'g-', Epoch_min, Erro_min,'g*')
plt.xlabel('Epochs')
plt.ylabel('Root mean square output error')
plt.legend(('Training','Validation','Early stop'))
plt.subplot(1,2,2)
plt.bar(np.arange(1,(Erro_test.shape[1])+1),Erro_test.reshape(Erro_test.shape[1]))
plt.xlabel('Instances')
plt.ylabel('Error (ANN output - real output)')
os.chdir(directory)
plt.savefig('ANN_train_val', dpi=300, facecolor='w', edgecolor='w',
orientation='portrait', papertype=None, format=None,
transparent=False, bbox_inches='tight', pad_inches=0.2,
frameon=None)
os.chdir('../')
#Sensitivity analysis
def sensitivity(input_data,output_size,weights,peights,biasH,biasO):
    """One-at-a-time sensitivity analysis of the trained ANN.

    For each input parameter k1, builds an evaluation set where parameter
    k1 sweeps over `npts` random samples in [0, 1) while every other
    parameter is pinned to its mean over `input_data`, then records the
    network response via ann_reckon.

    `output_size` is accepted for interface compatibility but is not used
    here (ann_reckon always produces a single output).

    Returns
    -------
    sens_set        : (npts, 1) random sweep values.
    fixed_par_value : (input_size, 1) per-parameter mean used for the
                      pinned entries.
    input_sens      : (input_size, input_size, npts) evaluation inputs.
    output_sens     : list of ann_reckon outputs, one per swept parameter.
    """
    input_size = input_data.shape[1]  # number of columns (parameters)
    npts = 200  # number of samples in the sensitivity evaluation set
    sens_set = np.random.random_sample((npts, 1))  # floats in [0.0, 1.0)
    fixed_par_value = np.empty([input_size, 1])
    # Fix: was np.ones((200, 1)) — keep the length tied to npts instead of
    # repeating the constant, so changing npts cannot desynchronise shapes.
    ones_sens = np.ones((npts, 1))
    # Calculation of the normalized mean value of each entry.
    for k in range(0, input_size):
        fixed_par_value[k] = (np.mean(input_data[:, k]))
    # Pre-allocation.
    input_sens = [[([1] * npts) for j in range(input_size)] for i in range(input_size)]
    output_sens = [[([0] * npts) for j in range(input_size)] for i in range(input_size)]
    input_sens = np.asarray(input_sens, dtype=float)
    for k1 in range(0, input_size):
        for k2 in range(0, input_size):
            if k1 == k2:
                # The swept parameter receives the random evaluation set.
                input_sens[k1, k2, :] = sens_set.reshape(sens_set.shape[0]).T
            else:
                # All other parameters are held at their mean value.
                input_sens[k1, k2, :] = (ones_sens * fixed_par_value[k2]).reshape(ones_sens.shape[0])
    for k1 in range(0, input_size):
        input_sens2 = np.asarray(input_sens[k1]).T
        output_sens[k1] = ann_reckon(input_sens2, weights, peights, biasH, biasO)
    return sens_set, fixed_par_value, input_sens, output_sens
[sens_set,fixed_par_value,input_sens,output_sens] = sensitivity(input_data,output_data.shape[1],weights,peights,biasH,biasO)
for k in range(0,input_data.shape[1]):
plt.figure()
plt.plot(sens_set,output_sens[k],'.')
plt.title('Sensitivity analysis. Parameter: '+columnst[k])
plt.ylabel('Output response')
plt.xlabel('Parameter: '+columnst[k])
os.chdir(directory)
plt.savefig('SensitivityAnalysisVar_'+columnst[k], dpi=300, facecolor='w', edgecolor='w',
orientation='portrait', papertype=None, format=None,
transparent=False, bbox_inches='tight', pad_inches=0.2,
frameon=None)
os.chdir('../')
os.chdir(directory)
np.savetxt('weights.txt', (weights), delimiter=',')
np.savetxt('peights.txt', (peights), delimiter=',')
np.savetxt('biasH.txt', (biasH), delimiter=',')
np.savetxt('biasO.txt', (biasO), delimiter=',')
os.mkdir('Inputs and outputs')
os.chdir('Inputs and outputs')
np.savetxt('Input_test.txt', (input_test), delimiter=',')
np.savetxt('Output_test.txt', (output_test), delimiter=',')
np.savetxt('Input_val.txt', (input_val), delimiter=',')
np.savetxt('Output_val.txt', (output_val), delimiter=',')
np.savetxt('Input_train.txt', (input_train), delimiter=',')
np.savetxt('Output_train.txt', (output_train), delimiter=',')
np.savetxt('Error_train.txt', (Erro_train), delimiter=',')
np.savetxt('Error_val.txt', (Erro_val), delimiter=',')
np.savetxt('Epoch_and_error_min.txt', (Early_stop), delimiter=',')
np.savetxt('Test_set_TOTAL_error.txt', (Total_erro), delimiter=',')
np.savetxt('Error_test.txt', (Erro_test), delimiter=',')
param = open('ANN_Parameters.txt','w')
param.write('Hidden neurons of best ANN: '+str(neurons))
param.write('\n Learning rate: '+str(coef))
param.write('\n Epochs: '+str(nr_epochs))
param.write('\n Hidden neurons tested: '+str(hidden))
param.write('\n Number of initial conditions tested: '+str(trials))
param.close()
os.chdir('../')
os.chdir('../')
#Reckon
if not flag_train:
output_reckon = ann_reckon(reckon,weights,peights,biasH,biasO)
a = np.reshape(output_reckon,(col,row),order='F')
a = np.transpose(a)
else:
a = 0
return a
| 38.960742
| 301
| 0.552943
| 5,293
| 35,727
| 3.5768
| 0.054223
| 0.014473
| 0.034228
| 0.027308
| 0.945595
| 0.935823
| 0.932126
| 0.932126
| 0.924202
| 0.924202
| 0
| 0.029028
| 0.300921
| 35,727
| 917
| 302
| 38.960742
| 0.72898
| 0.062586
| 0
| 0.918803
| 0
| 0
| 0.042877
| 0.00518
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029915
| false
| 0
| 0.019943
| 0
| 0.079772
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
253de405efcb01d7243cf9555559bb499b3c827e
| 96
|
py
|
Python
|
examples/lp_train/models/__init__.py
|
drcut/QPyTorch
|
63c293178e8ce9e6e5b218dee96536e9c4ad1e5c
|
[
"MIT"
] | 172
|
2019-04-25T20:24:12.000Z
|
2022-03-31T06:19:08.000Z
|
examples/lp_train/models/__init__.py
|
drcut/QPyTorch
|
63c293178e8ce9e6e5b218dee96536e9c4ad1e5c
|
[
"MIT"
] | 36
|
2019-05-09T19:27:03.000Z
|
2022-01-27T10:53:37.000Z
|
examples/lp_train/models/__init__.py
|
drcut/QPyTorch
|
63c293178e8ce9e6e5b218dee96536e9c4ad1e5c
|
[
"MIT"
] | 43
|
2019-05-04T15:14:09.000Z
|
2022-02-21T17:45:30.000Z
|
from .vgg import *
from .vgg_low import *
from .preresnet import *
from .preresnet_low import *
| 19.2
| 28
| 0.75
| 14
| 96
| 5
| 0.357143
| 0.428571
| 0.542857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 96
| 4
| 29
| 24
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c28a0565bad8bd6342df91e52a98f34a942dd157
| 112
|
py
|
Python
|
plugins/dig/komand_dig/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/dig/komand_dig/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/dig/komand_dig/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
from .forward.action import Forward
from .reverse.action import Reverse
| 28
| 39
| 0.794643
| 17
| 112
| 5.235294
| 0.705882
| 0.269663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151786
| 112
| 3
| 40
| 37.333333
| 0.936842
| 0.330357
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c2de7cc36d2e154a38f73e15950c0b4dd818fe57
| 15,770
|
py
|
Python
|
testcases/generated/iam_test.py
|
Tanc009/jdcloud-cli
|
4e11de77c68501f44e7026c0ad1c24e5d043197e
|
[
"Apache-2.0"
] | 95
|
2018-06-05T10:49:32.000Z
|
2019-12-31T11:07:36.000Z
|
testcases/generated/iam_test.py
|
Tanc009/jdcloud-cli
|
4e11de77c68501f44e7026c0ad1c24e5d043197e
|
[
"Apache-2.0"
] | 22
|
2018-06-05T10:58:59.000Z
|
2020-07-31T12:13:19.000Z
|
testcases/generated/iam_test.py
|
Tanc009/jdcloud-cli
|
4e11de77c68501f44e7026c0ad1c24e5d043197e
|
[
"Apache-2.0"
] | 21
|
2018-06-04T12:50:27.000Z
|
2020-11-05T10:55:28.000Z
|
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
import unittest
import os
import json
class IamTest(unittest.TestCase):
def test_enable_sub_user_access_key(self):
cmd = """python ../../main.py iam enable-sub-user-access-key --sub-user 'xxx' --access-key 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_disable_sub_user_access_key(self):
cmd = """python ../../main.py iam disable-sub-user-access-key --sub-user 'xxx' --access-key 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_delete_sub_user_access_key(self):
cmd = """python ../../main.py iam delete-sub-user-access-key --sub-user 'xxx' --access-key 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_create_group(self):
cmd = """python ../../main.py iam create-group --create-group-info '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_group(self):
cmd = """python ../../main.py iam describe-group --group-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_update_group(self):
cmd = """python ../../main.py iam update-group --group-name 'xxx' --update-group-info '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_delete_group(self):
cmd = """python ../../main.py iam delete-group --group-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_group_sub_users(self):
cmd = """python ../../main.py iam describe-group-sub-users --group-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_groups(self):
cmd = """python ../../main.py iam describe-groups """
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_attached_group_policies(self):
cmd = """python ../../main.py iam describe-attached-group-policies --group-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_detach_group_policy(self):
cmd = """python ../../main.py iam detach-group-policy --group-name 'xxx' --policy-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_attach_group_policy(self):
cmd = """python ../../main.py iam attach-group-policy --group-name 'xxx' --policy-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_remove_sub_user_from_group(self):
cmd = """python ../../main.py iam remove-sub-user-from-group --group-name 'xxx' --sub-user 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_add_sub_user_to_group(self):
cmd = """python ../../main.py iam add-sub-user-to-group --group-name 'xxx' --sub-user 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_create_permission(self):
cmd = """python ../../main.py iam create-permission --create-permission-info '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_permission_detail(self):
cmd = """python ../../main.py iam describe-permission-detail --permission-id '5'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_update_permission(self):
cmd = """python ../../main.py iam update-permission --permission-id '5' --update-permission-info '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_permissions(self):
cmd = """python ../../main.py iam describe-permissions --page-number '5' --page-size '5' --query-type '5'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_sub_user_permissions(self):
cmd = """python ../../main.py iam describe-sub-user-permissions --sub-user 'xxx' --page-number '5' --page-size '5'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_add_permissions_to_sub_user(self):
cmd = """python ../../main.py iam add-permissions-to-sub-user --sub-user 'xxx' --add-permissions-info '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_remove_permission_of_sub_user(self):
cmd = """python ../../main.py iam remove-permission-of-sub-user --permission-id '5' --sub-user 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_create_policy(self):
cmd = """python ../../main.py iam create-policy --create-policy-info '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_policy(self):
cmd = """python ../../main.py iam describe-policy --policy-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_update_policy(self):
cmd = """python ../../main.py iam update-policy --policy-name 'xxx' --update-policy-info '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_delete_policy(self):
cmd = """python ../../main.py iam delete-policy --policy-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_update_policy_description(self):
cmd = """python ../../main.py iam update-policy-description --policy-name 'xxx' --update-policy-description-info '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_policies(self):
cmd = """python ../../main.py iam describe-policies """
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_create_role(self):
cmd = """python ../../main.py iam create-role --create-role-info '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_role(self):
cmd = """python ../../main.py iam describe-role --role-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_delete_role(self):
cmd = """python ../../main.py iam delete-role --role-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_update_assume_role_policy(self):
cmd = """python ../../main.py iam update-assume-role-policy --role-name 'xxx' --update-assume-role-policy-info '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_roles(self):
cmd = """python ../../main.py iam describe-roles """
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_attach_role_policy(self):
cmd = """python ../../main.py iam attach-role-policy --role-name 'xxx' --policy-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_detach_role_policy(self):
cmd = """python ../../main.py iam detach-role-policy --role-name 'xxx' --policy-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_role_policies(self):
cmd = """python ../../main.py iam describe-role-policies --role-name 'xxx' --sort '5'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_create_sub_user(self):
cmd = """python ../../main.py iam create-sub-user --create-sub-user-info '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_sub_user(self):
cmd = """python ../../main.py iam describe-sub-user --sub-user 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_update_sub_user(self):
cmd = """python ../../main.py iam update-sub-user --sub-user 'xxx' --update-sub-user-info '{"":""}'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_delete_sub_user(self):
cmd = """python ../../main.py iam delete-sub-user --sub-user 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_sub_users(self):
cmd = """python ../../main.py iam describe-sub-users """
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_sub_user_groups(self):
cmd = """python ../../main.py iam describe-sub-user-groups --sub-user 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_attached_sub_user_policies(self):
cmd = """python ../../main.py iam describe-attached-sub-user-policies --sub-user 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_detach_sub_user_policy(self):
cmd = """python ../../main.py iam detach-sub-user-policy --sub-user 'xxx' --policy-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_attach_sub_user_policy(self):
cmd = """python ../../main.py iam attach-sub-user-policy --sub-user 'xxx' --policy-name 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_describe_user_access_keys(self):
cmd = """python ../../main.py iam describe-user-access-keys """
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_create_user_access_key(self):
cmd = """python ../../main.py iam create-user-access-key """
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_enabled_user_access_key(self):
cmd = """python ../../main.py iam enabled-user-access-key --access-key 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_disabled_user_access_key(self):
cmd = """python ../../main.py iam disabled-user-access-key --access-key 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
def test_delete_user_access_key(self):
cmd = """python ../../main.py iam delete-user-access-key --access-key 'xxx'"""
with os.popen(cmd) as f:
content = f.read()
print(content)
result = json.loads(content)
self.assertIsInstance(result, dict)
| 33.768737
| 134
| 0.588586
| 1,958
| 15,770
| 4.659857
| 0.069459
| 0.037593
| 0.069816
| 0.091298
| 0.88733
| 0.860587
| 0.856313
| 0.798992
| 0.729943
| 0.678211
| 0
| 0.001553
| 0.265124
| 15,770
| 466
| 135
| 33.841202
| 0.785745
| 0.040203
| 0
| 0.706052
| 0
| 0.066282
| 0.242162
| 0.050403
| 0
| 0
| 0
| 0
| 0.14121
| 1
| 0.14121
| false
| 0
| 0.008646
| 0
| 0.152738
| 0.14121
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6c0374d20069132d5113634e39b456f50e696163
| 6,418
|
py
|
Python
|
adform/base.py
|
dutkiewicz/adform-api
|
5b670ea971c261565d1fe4cf7c18b2e109f8449d
|
[
"MIT"
] | null | null | null |
adform/base.py
|
dutkiewicz/adform-api
|
5b670ea971c261565d1fe4cf7c18b2e109f8449d
|
[
"MIT"
] | 6
|
2019-11-29T04:53:15.000Z
|
2020-06-29T04:41:24.000Z
|
adform/base.py
|
dutkiewicz/adform-api
|
5b670ea971c261565d1fe4cf7c18b2e109f8449d
|
[
"MIT"
] | null | null | null |
from urllib.parse import urljoin
import requests
from adform import exceptions
class Base:
    """Base class for requesting the Adform API with a ticket.

    Contains common HTTP methods (GET, POST, PUT, DELETE).  Every request
    sends a Bearer-token Authorization header built from the ticket;
    non-2xx responses are translated into the package's exception
    hierarchy by _raise_for_status.
    """

    base_url = "https://api.adform.com"
    _ticket = None
    body = None
    # Kept for backward compatibility; __init__ replaces it with a
    # per-instance dict so instances no longer share (and mutate) this one.
    _headers = {'Authorization': 'Bearer {}'.format(_ticket)}

    def __init__(self, ticket):
        self._ticket = ticket
        # Fresh per-instance header dict (the original mutated the shared
        # class-level dict, leaking one instance's ticket into all others).
        self._headers = {'Authorization': 'Bearer {}'.format(self._ticket)}

    def _raise_for_status(self, response):
        """Return `response` on 2xx, otherwise raise the matching exception.

        Bug fix: the original tests used `>= 200 or < 300`, which is true
        for every status code, making all error branches unreachable.
        """
        if 200 <= response.status_code < 300:
            return response
        if response.status_code in (400, 401, 403, 429):
            error_msg = response.json()
            message = "Error {}. Reason: {}".format(response.status_code, error_msg['reason'])
            if response.status_code == 400:
                raise exceptions.BadRequestError(message)
            elif response.status_code == 401:
                raise exceptions.UnauthorizedError(message)
            elif response.status_code == 403:
                raise exceptions.ForbiddenError(message)
            else:
                raise exceptions.QuotaLimitExceededError(message)
        raise exceptions.ApiError('There was an ambiguous error while processing your request.')

    def _get(self, endpoint):
        """GET `endpoint` relative to base_url."""
        url = urljoin(self.base_url, endpoint)
        return self._raise_for_status(requests.get(url, headers=self._headers))

    def _post(self, endpoint):
        """POST self.body as JSON to `endpoint` relative to base_url."""
        url = urljoin(self.base_url, endpoint)
        return self._raise_for_status(requests.post(url, headers=self._headers, json=self.body))

    def _put(self, endpoint):
        """PUT self.body as JSON to `endpoint` relative to base_url.

        Bug fix: the original issued requests.post here, so PUT requests
        were silently sent as POST.
        """
        url = urljoin(self.base_url, endpoint)
        return self._raise_for_status(requests.put(url, headers=self._headers, json=self.body))

    def _delete(self, endpoint):
        """DELETE `endpoint` relative to base_url."""
        url = urljoin(self.base_url, endpoint)
        return self._raise_for_status(requests.delete(url, headers=self._headers))
| 54.389831
| 111
| 0.523683
| 568
| 6,418
| 5.75
| 0.121479
| 0.171464
| 0.220453
| 0.107777
| 0.894366
| 0.894366
| 0.894366
| 0.894366
| 0.894366
| 0.894366
| 0
| 0.018059
| 0.378778
| 6,418
| 117
| 112
| 54.854701
| 0.801104
| 0.015737
| 0
| 0.834951
| 0
| 0
| 0.11377
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048544
| false
| 0
| 0.029126
| 0
| 0.165049
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6c07371788661f836f0d619a6dafba1dadcef5ae
| 3,613
|
py
|
Python
|
maddpg/models/mlp_net.py
|
zixianma/PIC
|
bbfe8985121e3ffb693c047ed3fe85d0c8256737
|
[
"MIT"
] | 28
|
2019-10-31T00:38:10.000Z
|
2022-03-21T12:33:03.000Z
|
maddpg/models/mlp_net.py
|
zixianma/PIC
|
bbfe8985121e3ffb693c047ed3fe85d0c8256737
|
[
"MIT"
] | 10
|
2019-11-27T12:37:25.000Z
|
2021-06-07T11:52:34.000Z
|
maddpg/models/mlp_net.py
|
baicenxiao/AREL
|
2168508138fde62150bc5e8b47b1aa3bcef09785
|
[
"MIT"
] | 13
|
2019-10-31T00:38:17.000Z
|
2022-03-06T04:24:09.000Z
|
"""Implements a simple two layer mlp network."""
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
class MlpNet(nn.Module):
    """Implements a simple fully connected mlp network.

    Flattens the per-sample (dim, n_agents)-style input — optionally
    shuffling the agent dimension first — and maps it through two ReLU
    layers to a scalar value V.
    """

    def __init__(self, sa_dim, n_agents, hidden_size,
                 agent_id=0, agent_shuffle='none'):
        super(MlpNet, self).__init__()
        self.linear1 = nn.Linear(sa_dim * n_agents, hidden_size)
        self.linear2 = nn.Linear(hidden_size, hidden_size)
        self.V = nn.Linear(hidden_size, 1)
        # Scale down the value head's initial weights, as in the original.
        self.V.weight.data.mul_(0.1)
        self.V.bias.data.mul_(0.1)
        self.n_agents = n_agents
        self.agent_id = agent_id
        self.agent_shuffle = agent_shuffle

    def forward(self, x):
        # Perform per-sample shuffling of the agent dimension.
        bz = x.shape[0]
        if self.agent_shuffle == 'all':
            x_out = []
            for k in range(bz):
                rand_idx = np.random.permutation(self.n_agents)
                # NOTE(review): advanced indexing moves the indexed (agent)
                # axis to the front here — verify the intended layout; the
                # subsequent flatten makes it order-insensitive in practice.
                x_out.append(x[k, :, rand_idx].unsqueeze(0))
            x = torch.cat(x_out, 0)
        elif self.agent_shuffle == 'others':
            # Shuffle every agent slot except this agent's own.
            x_out = []
            for k in range(bz):
                rand_idx = np.random.permutation(self.n_agents - 1)
                index_except = np.concatenate([np.arange(0, self.agent_id),
                                               np.arange(self.agent_id + 1, self.n_agents)])
                except_shuffle = index_except[rand_idx]
                x_tmp = x[k, :, :]
                x_tmp[:, index_except] = x_tmp[:, except_shuffle]
                x_out.append(x_tmp.unsqueeze(0))
            x = torch.cat(x_out, 0)
        elif self.agent_shuffle == 'none':
            pass
        else:
            # Bug fix: `raise NotImplemented(...)` raises TypeError because
            # NotImplemented is a constant, not an exception class.
            raise NotImplementedError(
                'Unsupported agent_shuffle opt: %s' % self.agent_shuffle)
        # Reshape to fit into mlp.
        x = x.view(bz, -1)
        x = self.linear1(x)
        x = F.relu(x)
        x = self.linear2(x)
        x = F.relu(x)
        V = self.V(x)
        return V
class MlpNetM(nn.Module):
    """Fully connected critic that embeds each agent before mixing.

    Unlike MlpNet, each agent's features are first passed through a shared
    linear layer; the three embeddings are concatenated and fed through two
    more ReLU hidden layers to produce a scalar value V.

    NOTE(review): forward() indexes x[:, :, 0..2] and linear2 takes
    hidden_size * 3 inputs, so this variant effectively assumes exactly
    three agents regardless of n_agents — confirm before reusing.

    Args:
        sa_dim: per-agent state-action feature dimension.
        n_agents: number of agents (used only by the shuffle modes).
        hidden_size: width of the hidden layers.
        agent_id: index of this network's own agent, used by 'others' shuffle.
        agent_shuffle: one of 'none', 'all', 'others'.
    """

    def __init__(self, sa_dim, n_agents, hidden_size,
                 agent_id=0, agent_shuffle='none'):
        super(MlpNetM, self).__init__()
        self.linear1 = nn.Linear(sa_dim, hidden_size)
        self.linear2 = nn.Linear(hidden_size * 3, hidden_size)
        self.linear3 = nn.Linear(hidden_size, hidden_size)
        self.V = nn.Linear(hidden_size, 1)
        # Scale down the output head so initial value estimates stay near zero.
        self.V.weight.data.mul_(0.1)
        self.V.bias.data.mul_(0.1)
        self.n_agents = n_agents
        self.agent_id = agent_id
        self.agent_shuffle = agent_shuffle

    def forward(self, x):
        """Return V(x) for a batch x of shape (batch, sa_dim, n_agents).

        Raises:
            NotImplementedError: if agent_shuffle is not a supported option.
        """
        bz = x.shape[0]
        if self.agent_shuffle == 'all':
            # Randomly permute all agents, independently per sample.
            x_out = []
            for k in range(bz):
                rand_idx = np.random.permutation(self.n_agents)
                x_out.append(x[k, :, rand_idx].unsqueeze(0))
            x = torch.cat(x_out, 0)
        elif self.agent_shuffle == 'others':
            # Randomly permute every agent except agent_id, per sample.
            x_out = []
            for k in range(bz):
                rand_idx = np.random.permutation(self.n_agents - 1)
                index_except = np.concatenate([np.arange(0, self.agent_id),
                                               np.arange(self.agent_id + 1, self.n_agents)])
                except_shuffle = index_except[rand_idx]
                # Bug fix: x[k, :, :] is a view of the input, so the in-place
                # assignment below used to mutate the caller's tensor; clone
                # to keep the shuffle local to this forward pass.
                x_tmp = x[k, :, :].clone()
                x_tmp[:, index_except] = x_tmp[:, except_shuffle]
                x_out.append(x_tmp.unsqueeze(0))
            x = torch.cat(x_out, 0)
        elif self.agent_shuffle == 'none':
            pass
        else:
            # Bug fix: the original raised NotImplemented, which is a constant
            # (not an exception class) and fails with a TypeError when raised.
            raise NotImplementedError(
                'Unsupported agent_shuffle opt: %s' % self.agent_shuffle)
        # Embed each of the three agents with the shared layer, then mix.
        x1, x2, x3 = self.linear1(x[:, :, 0]), self.linear1(x[:, :, 1]), self.linear1(x[:, :, 2])
        x = torch.cat((x1, x2, x3), 1)
        x = F.relu(x)
        x = self.linear2(x)
        x = F.relu(x)
        x = self.linear3(x)
        x = F.relu(x)
        V = self.V(x)
        return V
| 31.417391
| 93
| 0.605868
| 550
| 3,613
| 3.774545
| 0.167273
| 0.092486
| 0.077071
| 0.043353
| 0.879094
| 0.879094
| 0.875723
| 0.872832
| 0.813102
| 0.813102
| 0
| 0.019331
| 0.255466
| 3,613
| 114
| 94
| 31.692982
| 0.752416
| 0.063382
| 0
| 0.806452
| 0
| 0
| 0.029718
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043011
| false
| 0.021505
| 0.043011
| 0
| 0.129032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6c2dbbee5656abf8acf0130cb3149b545bfc5a8b
| 47,719
|
py
|
Python
|
app/tests/test_orion.py
|
RoboticBase/uoa-poc2-controller
|
5fdd7951d26d62ea72b25460936b26a9631b7e0a
|
[
"Apache-2.0"
] | null | null | null |
app/tests/test_orion.py
|
RoboticBase/uoa-poc2-controller
|
5fdd7951d26d62ea72b25460936b26a9631b7e0a
|
[
"Apache-2.0"
] | null | null | null |
app/tests/test_orion.py
|
RoboticBase/uoa-poc2-controller
|
5fdd7951d26d62ea72b25460936b26a9631b7e0a
|
[
"Apache-2.0"
] | null | null | null |
import os
import json
import datetime as dt
import importlib
from unittest.mock import call
import requests
import dateutil.parser
from werkzeug.exceptions import InternalServerError, NotFound, BadRequest
import pytest
import freezegun
import lazy_import
orion = lazy_import.lazy_module('src.orion')
const = lazy_import.lazy_module('src.const')
caller = lazy_import.lazy_module('src.caller')
token = lazy_import.lazy_module('src.token')
@pytest.fixture
def mocked_requests(mocker):
    # Replace the `requests` module inside src.orion with a MagicMock so each
    # test can assert HTTP verb call counts and call arguments.
    orion.requests = mocker.MagicMock()
    yield orion.requests
@pytest.fixture
def mocked_response(mocker):
    # A MagicMock constrained to the requests.Response interface.
    return mocker.MagicMock(spec=requests.Response)
@pytest.fixture
def reload_module():
    # Re-import src.const and src.orion before the test so that constants
    # derived from environment variables are re-evaluated per test.
    importlib.reload(const)
    importlib.reload(orion)
    yield
@pytest.mark.usefixtures('reload_module')
class TestSendCommand:
    """Tests for orion.send_command (PATCH on /v2/entities/{id}/attrs)."""

    @pytest.mark.parametrize('payload', [
        {'msg': 'dummy'},
        {'test': {'nested': [1, 2.0, '3']}},
        {},
        [1, 1.2e-2, 'a', True, {'a': 'b'}, None],
        [],
        'dummy',
        1,
        0.5,
        True,
        None,
        tuple([1, 2]),
    ])
    @pytest.mark.parametrize('env_token, expected_token', [
        ('orion_token', 'bearer orion_token'),
        (None, None),
    ])
    def test_success(self, mocker, mocked_response, payload, env_token, expected_token):
        # Any JSON-serializable payload is sent with exactly one PATCH; the
        # Authorization header appears only when ORION_TOKEN is set.
        if env_token is not None:
            os.environ['ORION_TOKEN'] = env_token
        importlib.reload(const)
        importlib.reload(orion)
        # Re-mock after the reload, which resets orion.requests.
        mocked_requests = mocker.MagicMock()
        orion.requests = mocked_requests
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'
        entity_id = 'dummy_id'
        mocked_response.status_code = 200
        mocked_response.text = 'test'
        mocked_requests.patch.return_value = mocked_response
        result = orion.send_command(fiware_service, fiware_servicepath, entity_type, entity_id, payload)
        assert result.status_code == 200
        assert result.text == 'test'
        # Only PATCH must be used.
        assert mocked_requests.get.call_count == 0
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 1
        assert mocked_requests.delete.call_count == 0
        endpoint = f'{const.ORION_ENDPOINT}/v2/entities/{entity_id}/attrs?type={entity_type}'
        headers = {
            'Content-Type': 'application/json',
            'FIWARE-SERVICE': fiware_service,
            'FIWARE-SERVICEPATH': fiware_servicepath,
        }
        if expected_token is not None:
            headers['Authorization'] = expected_token
        assert mocked_requests.patch.call_args == call(endpoint, headers=headers, json=payload)

    @pytest.mark.parametrize('response_code, expected_exception, expected_value', [
        (300, InternalServerError, '500 Internal Server Error'),
        (400, InternalServerError, '500 Internal Server Error'),
        (404, NotFound, '404 Not Found'),
    ])
    @pytest.mark.parametrize('response_text, expected_text', [
        ('root_cause', 'root_cause'),
        (None, ''),
        ('', ''),
    ])
    def test_response_error(self, mocked_requests,
                            response_code, expected_exception, expected_value, response_text, expected_text):
        # Non-2xx responses are mapped to werkzeug HTTP exceptions whose
        # message carries the response text as 'root_cause'.
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'
        entity_id = 'dummy_id'
        payload = {
            'msg': 'dummy'
        }

        # Bare object instead of a MagicMock so that a missing .text
        # attribute behaves like a response without a body.
        class MockResponse:
            pass
        mocked_response = MockResponse()
        mocked_response.status_code = response_code
        if response_text is not None:
            mocked_response.text = response_text
        mocked_requests.patch.return_value = mocked_response
        with pytest.raises(expected_exception) as e:
            orion.send_command(fiware_service, fiware_servicepath, entity_type, entity_id, payload)
        result = {
            'message': 'can not send command to orion',
            'root_cause': expected_text,
        }
        assert str(e.value) == f'{expected_value}: {result}'
        assert mocked_requests.get.call_count == 0
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 1
        assert mocked_requests.delete.call_count == 0
        endpoint = f'{const.ORION_ENDPOINT}/v2/entities/{entity_id}/attrs?type={entity_type}'
        headers = {
            'Content-Type': 'application/json',
            'FIWARE-SERVICE': fiware_service,
            'FIWARE-SERVICEPATH': fiware_servicepath,
        }
        assert mocked_requests.patch.call_args == call(endpoint, headers=headers, json=payload)

    @pytest.mark.parametrize(
        'fiware_service, fiware_servicepath, entity_type, entity_id', [
            ('dummy', 0, 1e-1, True),
            (0, 1e-1, True, None),
            (1e-1, True, None, []),
            (True, None, [], {}),
            (None, [], {}, tuple(['a', 1])),
            ([], {}, tuple(['a', 1]), set([1, 2, 1])),
            ({}, tuple(['a', 1]), set([1, 2, 1]), dt.datetime.utcnow()),
            (tuple(['a', 1]), set([1, 2, 1]), dt.datetime.utcnow(), 'dummy'),
            (set([1, 2, 1]), dt.datetime.utcnow(), 'dummy', 0),
            (dt.datetime.utcnow(), 'dummy', 0, 1e-1),
        ]
    )
    def test_invalid_args(self, mocked_requests, mocked_response,
                          fiware_service, fiware_servicepath, entity_type, entity_id):
        # Non-str positional arguments must raise TypeError before any HTTP call.
        payload = {
            'msg': 'dummy'
        }
        with pytest.raises(TypeError) as e:
            orion.send_command(fiware_service, fiware_servicepath, entity_type, entity_id, payload)
        assert mocked_requests.get.call_count == 0
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        assert str(e.value) == 'fiware_service, fiware_servicepath, entity_type and entity_id must be "str"'

    @pytest.mark.parametrize('payload', [
        dt.datetime.utcnow(),
        set([1, 2, 1]),
    ])
    def test_invalid_payload(self, mocked_requests, mocked_response, payload):
        # Payloads that json cannot serialize must raise TypeError before any HTTP call.
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'
        entity_id = 'dummy_id'
        with pytest.raises(TypeError) as e:
            orion.send_command(fiware_service, fiware_servicepath, entity_type, entity_id, payload)
        assert mocked_requests.get.call_count == 0
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        assert str(e.value) == 'payload must be json serializable'
@pytest.mark.usefixtures('reload_module')
class TestQueryEntity:
    """Tests for orion.query_entity (GET /v2/entities/ with a 'q' filter)."""

    @pytest.mark.parametrize('env_token, expected_token', [
        ('orion_token', 'bearer orion_token'),
        (None, None),
    ])
    def test_success(self, mocker, mocked_response, env_token, expected_token):
        # The first element of the JSON list is returned; the Authorization
        # header appears only when ORION_TOKEN is set.
        if env_token is not None:
            os.environ['ORION_TOKEN'] = env_token
        importlib.reload(const)
        importlib.reload(orion)
        # Re-mock after the reload, which resets orion.requests.
        mocked_requests = mocker.MagicMock()
        orion.requests = mocked_requests
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'
        query = 'foo==dummy_query'
        mocked_response.status_code = 200
        mocked_response.json.return_value = [{'result': 'test'}]
        mocked_requests.get.return_value = mocked_response
        result = orion.query_entity(fiware_service, fiware_servicepath, entity_type, query)
        assert result == {'result': 'test'}
        # Only GET must be used.
        assert mocked_requests.get.call_count == 1
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        endpoint = f'{const.ORION_ENDPOINT}/v2/entities/'
        headers = {
            'FIWARE-SERVICE': fiware_service,
            'FIWARE-SERVICEPATH': fiware_servicepath,
        }
        if expected_token is not None:
            headers['Authorization'] = expected_token
        params = {
            'type': entity_type,
            'limit': const.ORION_LIST_NUM_LIMIT,
            'q': query,
        }
        assert mocked_requests.get.call_args == call(endpoint, headers=headers, params=params)

    @pytest.mark.parametrize('response_code, expected_exception, expected_value', [
        (300, InternalServerError, '500 Internal Server Error'),
        (400, InternalServerError, '500 Internal Server Error'),
        (404, NotFound, '404 Not Found'),
    ])
    @pytest.mark.parametrize('response_text, expected_text', [
        ('root_cause', 'root_cause'),
        (None, ''),
        ('', ''),
    ])
    def test_response_error(self, mocked_requests,
                            response_code, expected_exception, expected_value, response_text, expected_text):
        # Non-2xx responses map to werkzeug HTTP exceptions carrying the
        # response text as 'root_cause'.
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'
        query = 'foo==dummy_query'

        # Bare object so a missing .text attribute models an empty body.
        class MockResponse:
            pass
        mocked_response = MockResponse()
        mocked_response.status_code = response_code
        if response_text is not None:
            mocked_response.text = response_text
        mocked_requests.get.return_value = mocked_response
        with pytest.raises(expected_exception) as e:
            orion.query_entity(fiware_service, fiware_servicepath, entity_type, query)
        result = {
            'message': 'can not get entities from orion',
            'root_cause': expected_text,
        }
        assert str(e.value) == f'{expected_value}: {result}'
        assert mocked_requests.get.call_count == 1
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        endpoint = f'{const.ORION_ENDPOINT}/v2/entities/'
        headers = {
            'FIWARE-SERVICE': fiware_service,
            'FIWARE-SERVICEPATH': fiware_servicepath,
        }
        params = {
            'type': entity_type,
            'limit': const.ORION_LIST_NUM_LIMIT,
            'q': query,
        }
        assert mocked_requests.get.call_args == call(endpoint, headers=headers, params=params)

    def test_json_decodeerror(self, mocked_requests, mocked_response):
        # An unparsable body surfaces as BadRequest with the decode error
        # as 'root_cause'.
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'
        query = 'foo==dummy_query'
        mocked_response.status_code = 200
        mocked_response.json.side_effect = json.decoder.JSONDecodeError('test error', doc='doc', pos=1)
        mocked_requests.get.return_value = mocked_response
        with pytest.raises(BadRequest) as e:
            orion.query_entity(fiware_service, fiware_servicepath, entity_type, query)
        result = {
            'message': 'can not parse result',
            'root_cause': 'test error: line 1 column 2 (char 1)',
        }
        assert str(e.value) == f'400 Bad Request: {result}'
        assert mocked_requests.get.call_count == 1
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        endpoint = f'{const.ORION_ENDPOINT}/v2/entities/'
        headers = {
            'FIWARE-SERVICE': fiware_service,
            'FIWARE-SERVICEPATH': fiware_servicepath,
        }
        params = {
            'type': entity_type,
            'limit': const.ORION_LIST_NUM_LIMIT,
            'q': query,
        }
        assert mocked_requests.get.call_args == call(endpoint, headers=headers, params=params)

    @pytest.mark.parametrize('response_json', [
        'dummy', 0, 1e-1, True, [], ['a', 1], {}, {'a': 1}, tuple(['a', 1]), set([1, 2]), dt.datetime.utcnow(), None
    ])
    def test_invalid_json(self, mocked_requests, mocked_response, response_json):
        # A body that is not a non-empty list of dicts raises BadRequest.
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'
        query = 'foo==dummy_query'
        expected_msg = {'message': f'can not retrieve an entity, entity_type={entity_type}, query={query}'}
        mocked_response.status_code = 200
        mocked_response.json.return_value = response_json
        mocked_requests.get.return_value = mocked_response
        with pytest.raises(BadRequest) as e:
            orion.query_entity(fiware_service, fiware_servicepath, entity_type, query)
        assert str(e.value) == f'400 Bad Request: {expected_msg}'
        assert mocked_requests.get.call_count == 1
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        endpoint = f'{const.ORION_ENDPOINT}/v2/entities/'
        headers = {
            'FIWARE-SERVICE': fiware_service,
            'FIWARE-SERVICEPATH': fiware_servicepath,
        }
        params = {
            'type': entity_type,
            'limit': const.ORION_LIST_NUM_LIMIT,
            'q': query,
        }
        assert mocked_requests.get.call_args == call(endpoint, headers=headers, params=params)

    @pytest.mark.parametrize(
        'fiware_service, fiware_servicepath, entity_type, query', [
            ('dummy', 0, 1e-1, True),
            (0, 1e-1, True, None),
            (1e-1, True, None, []),
            (True, None, [], {}),
            (None, [], {}, tuple(['a', 1])),
            ([], {}, tuple(['a', 1]), set([1, 2, 1])),
            ({}, tuple(['a', 1]), set([1, 2, 1]), dt.datetime.utcnow()),
            (tuple(['a', 1]), set([1, 2, 1]), dt.datetime.utcnow(), 'dummy'),
            (set([1, 2, 1]), dt.datetime.utcnow(), 'dummy', 0),
            (dt.datetime.utcnow(), 'dummy', 0, 1e-1),
        ]
    )
    def test_invalid_args(self, mocked_requests, mocked_response,
                          fiware_service, fiware_servicepath, entity_type, query):
        # Non-str arguments must raise TypeError before any HTTP call.
        with pytest.raises(TypeError) as e:
            orion.query_entity(fiware_service, fiware_servicepath, entity_type, query)
        assert mocked_requests.get.call_count == 0
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        assert str(e.value) == 'fiware_service, fiware_servicepath, entity_type and query must be "str"'
@pytest.mark.usefixtures('reload_module')
class TestGetEntities:
    """Tests for orion.get_entities (GET /v2/entities/ by type, no query)."""

    @pytest.mark.parametrize('env_token, expected_token', [
        ('orion_token', 'bearer orion_token'),
        (None, None),
    ])
    def test_success(self, mocker, mocked_response, env_token, expected_token):
        # The whole JSON list is returned; the Authorization header appears
        # only when ORION_TOKEN is set.
        if env_token is not None:
            os.environ['ORION_TOKEN'] = env_token
        importlib.reload(const)
        importlib.reload(orion)
        # Re-mock after the reload, which resets orion.requests.
        mocked_requests = mocker.MagicMock()
        orion.requests = mocked_requests
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'
        response_json = [{'result': 'test1'}, {'result': 'test2'}]
        mocked_response.status_code = 200
        mocked_response.json.return_value = response_json
        mocked_requests.get.return_value = mocked_response
        result = orion.get_entities(fiware_service, fiware_servicepath, entity_type)
        assert result == response_json
        # Only GET must be used.
        assert mocked_requests.get.call_count == 1
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        endpoint = f'{const.ORION_ENDPOINT}/v2/entities/'
        headers = {
            'FIWARE-SERVICE': fiware_service,
            'FIWARE-SERVICEPATH': fiware_servicepath,
        }
        if expected_token is not None:
            headers['Authorization'] = expected_token
        params = {
            'type': entity_type,
            'limit': const.ORION_LIST_NUM_LIMIT,
        }
        assert mocked_requests.get.call_args == call(endpoint, headers=headers, params=params)

    @pytest.mark.parametrize('response_code, expected_exception, expected_value', [
        (300, InternalServerError, '500 Internal Server Error'),
        (400, InternalServerError, '500 Internal Server Error'),
        (404, NotFound, '404 Not Found'),
    ])
    @pytest.mark.parametrize('response_text, expected_text', [
        ('root_cause', 'root_cause'),
        (None, ''),
        ('', ''),
    ])
    def test_response_error(self, mocked_requests,
                            response_code, expected_exception, expected_value, response_text, expected_text):
        # Non-2xx responses map to werkzeug HTTP exceptions carrying the
        # response text as 'root_cause'.
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'

        # Bare object so a missing .text attribute models an empty body.
        class MockResponse:
            pass
        mocked_response = MockResponse()
        mocked_response.status_code = response_code
        if response_text is not None:
            mocked_response.text = response_text
        mocked_requests.get.return_value = mocked_response
        with pytest.raises(expected_exception) as e:
            orion.get_entities(fiware_service, fiware_servicepath, entity_type)
        result = {
            'message': 'can not get entities from orion',
            'root_cause': expected_text,
        }
        assert str(e.value) == f'{expected_value}: {result}'
        assert mocked_requests.get.call_count == 1
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        endpoint = f'{const.ORION_ENDPOINT}/v2/entities/'
        headers = {
            'FIWARE-SERVICE': fiware_service,
            'FIWARE-SERVICEPATH': fiware_servicepath,
        }
        params = {
            'type': entity_type,
            'limit': const.ORION_LIST_NUM_LIMIT,
        }
        assert mocked_requests.get.call_args == call(endpoint, headers=headers, params=params)

    def test_json_decodeerror(self, mocked_requests, mocked_response):
        # An unparsable body surfaces as BadRequest with the decode error
        # as 'root_cause'.
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'
        mocked_response.status_code = 200
        mocked_response.json.side_effect = json.decoder.JSONDecodeError('test error', doc='doc', pos=1)
        mocked_requests.get.return_value = mocked_response
        with pytest.raises(BadRequest) as e:
            orion.get_entities(fiware_service, fiware_servicepath, entity_type)
        result = {
            'message': 'can not parse result',
            'root_cause': 'test error: line 1 column 2 (char 1)',
        }
        assert str(e.value) == f'400 Bad Request: {result}'
        assert mocked_requests.get.call_count == 1
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        endpoint = f'{const.ORION_ENDPOINT}/v2/entities/'
        headers = {
            'FIWARE-SERVICE': fiware_service,
            'FIWARE-SERVICEPATH': fiware_servicepath,
        }
        params = {
            'type': entity_type,
            'limit': const.ORION_LIST_NUM_LIMIT,
        }
        assert mocked_requests.get.call_args == call(endpoint, headers=headers, params=params)

    @pytest.mark.parametrize('fiware_service, fiware_servicepath, entity_type', [
        ('dummy', 0, 1e-1),
        (0, 1e-1, True),
        (1e-1, True, None),
        (True, None, []),
        (None, [], {}),
        ([], {}, tuple(['a', 1])),
        ({}, tuple(['a', 1]), set([1, 2, 1])),
        (tuple(['a', 1]), set([1, 2, 1]), dt.datetime.utcnow()),
        (set([1, 2, 1]), dt.datetime.utcnow(), 'dummy'),
        (dt.datetime.utcnow(), 'dummy', 0),
    ])
    def test_invalid_args(self, mocked_requests, mocked_response,
                          fiware_service, fiware_servicepath, entity_type):
        # Non-str arguments must raise TypeError before any HTTP call.
        with pytest.raises(TypeError) as e:
            orion.get_entities(fiware_service, fiware_servicepath, entity_type)
        assert mocked_requests.get.call_count == 0
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        assert str(e.value) == 'fiware_service, fiware_servicepath and entity_type must be "str"'
@pytest.mark.usefixtures('reload_module')
class TestGetEntity:
    """Tests for orion.get_entity (GET /v2/entities/{id} by type)."""

    @pytest.mark.parametrize('env_token, expected_token', [
        ('orion_token', 'bearer orion_token'),
        (None, None),
    ])
    def test_success(self, mocker, mocked_response, env_token, expected_token):
        # The JSON object is returned; the Authorization header appears only
        # when ORION_TOKEN is set.
        if env_token is not None:
            os.environ['ORION_TOKEN'] = env_token
        importlib.reload(const)
        importlib.reload(orion)
        # Re-mock after the reload, which resets orion.requests.
        mocked_requests = mocker.MagicMock()
        orion.requests = mocked_requests
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'
        entity_id = 'dummy_id'
        response_json = {'result': 'test1'}
        mocked_response.status_code = 200
        mocked_response.json.return_value = response_json
        mocked_requests.get.return_value = mocked_response
        result = orion.get_entity(fiware_service, fiware_servicepath, entity_type, entity_id)
        assert result == response_json
        # Only GET must be used.
        assert mocked_requests.get.call_count == 1
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        endpoint = f'{const.ORION_ENDPOINT}/v2/entities/{entity_id}'
        headers = {
            'FIWARE-SERVICE': fiware_service,
            'FIWARE-SERVICEPATH': fiware_servicepath,
        }
        if expected_token is not None:
            headers['Authorization'] = expected_token
        params = {
            'type': entity_type,
        }
        assert mocked_requests.get.call_args == call(endpoint, headers=headers, params=params)

    @pytest.mark.parametrize('response_code, expected_exception, expected_value', [
        (300, InternalServerError, '500 Internal Server Error'),
        (400, InternalServerError, '500 Internal Server Error'),
        (404, NotFound, '404 Not Found'),
    ])
    @pytest.mark.parametrize('response_text, expected_text', [
        ('root_cause', 'root_cause'),
        (None, ''),
        ('', ''),
    ])
    def test_response_error(self, mocked_requests,
                            response_code, expected_exception, expected_value, response_text, expected_text):
        # Non-2xx responses map to werkzeug HTTP exceptions carrying the
        # response text as 'root_cause'.
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'
        entity_id = 'dummy_id'

        # Bare object so a missing .text attribute models an empty body.
        class MockResponse:
            pass
        mocked_response = MockResponse()
        mocked_response.status_code = response_code
        if response_text is not None:
            mocked_response.text = response_text
        mocked_requests.get.return_value = mocked_response
        with pytest.raises(expected_exception) as e:
            orion.get_entity(fiware_service, fiware_servicepath, entity_type, entity_id)
        result = {
            'message': 'can not get an entity from orion',
            'root_cause': expected_text,
        }
        assert str(e.value) == f'{expected_value}: {result}'
        assert mocked_requests.get.call_count == 1
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        endpoint = f'{const.ORION_ENDPOINT}/v2/entities/{entity_id}'
        headers = {
            'FIWARE-SERVICE': fiware_service,
            'FIWARE-SERVICEPATH': fiware_servicepath,
        }
        params = {
            'type': entity_type,
        }
        assert mocked_requests.get.call_args == call(endpoint, headers=headers, params=params)

    def test_json_decodeerror(self, mocked_requests, mocked_response):
        # An unparsable body surfaces as BadRequest with the decode error
        # as 'root_cause'.
        fiware_service = 'dummy_service'
        fiware_servicepath = 'dummy_servicepath'
        entity_type = 'dummy_type'
        entity_id = 'dummy_id'
        mocked_response.status_code = 200
        mocked_response.json.side_effect = json.decoder.JSONDecodeError('test error', doc='doc', pos=1)
        mocked_requests.get.return_value = mocked_response
        with pytest.raises(BadRequest) as e:
            orion.get_entity(fiware_service, fiware_servicepath, entity_type, entity_id)
        result = {
            'message': 'can not parse result',
            'root_cause': 'test error: line 1 column 2 (char 1)',
        }
        assert str(e.value) == f'400 Bad Request: {result}'
        assert mocked_requests.get.call_count == 1
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        endpoint = f'{const.ORION_ENDPOINT}/v2/entities/{entity_id}'
        headers = {
            'FIWARE-SERVICE': fiware_service,
            'FIWARE-SERVICEPATH': fiware_servicepath,
        }
        params = {
            'type': entity_type,
        }
        assert mocked_requests.get.call_args == call(endpoint, headers=headers, params=params)

    @pytest.mark.parametrize('fiware_service, fiware_servicepath, entity_type, entity_id', [
        ('dummy', 0, 1e-1, True),
        (0, 1e-1, True, None),
        (1e-1, True, None, []),
        (True, None, [], {}),
        (None, [], {}, tuple(['a', 1])),
        ([], {}, tuple(['a', 1]), set([1, 2, 1])),
        ({}, tuple(['a', 1]), set([1, 2, 1]), dt.datetime.utcnow()),
        (tuple(['a', 1]), set([1, 2, 1]), dt.datetime.utcnow(), 'dummy'),
        (set([1, 2, 1]), dt.datetime.utcnow(), 'dummy', 0),
        (dt.datetime.utcnow(), 'dummy', 0, 1e-1),
    ])
    def test_invalid_args(self, mocked_requests, mocked_response,
                          fiware_service, fiware_servicepath, entity_type, entity_id):
        # Non-str arguments must raise TypeError before any HTTP call.
        with pytest.raises(TypeError) as e:
            orion.get_entity(fiware_service, fiware_servicepath, entity_type, entity_id)
        assert mocked_requests.get.call_count == 0
        assert mocked_requests.post.call_count == 0
        assert mocked_requests.put.call_count == 0
        assert mocked_requests.patch.call_count == 0
        assert mocked_requests.delete.call_count == 0
        assert str(e.value) == 'fiware_service, fiware_servicepath, entity_type and entity_id must be "str"'
@pytest.mark.usefixtures('reload_module')
class TestMakeDeliveryRobotCommand:
    """Tests for orion.make_delivery_robot_command payload construction."""

    @pytest.mark.parametrize('timezone, expected_datetime', [
        (None, '2020-01-01T18:04:05.000+00:00'),
        ('UTC', '2020-01-01T18:04:05.000+00:00'),
        ('Asia/Tokyo', '2020-01-02T03:04:05.000+09:00'),
    ])
    def test_timezone(self, timezone, expected_datetime):
        # The frozen wall-clock time is rendered in the TIMEZONE env var's
        # zone (UTC when unset).
        time = '2020-01-02T03:04:05+09:00'
        if timezone is not None:
            os.environ['TIMEZONE'] = timezone
        importlib.reload(const)
        importlib.reload(orion)
        cmd = 'test'
        cmd_waypoints = [{'from': 'place_1', 'to': 'place_2'}, {'from': 'place_2', 'to': 'place_3'}]
        navigating_waypoints = [{'x': 1.0, 'y': -1.5}, {'x': 2.0, 'y': -1.5}]
        with freezegun.freeze_time(time):
            payload = orion.make_delivery_robot_command(cmd, cmd_waypoints, navigating_waypoints)
        assert payload == {
            'send_cmd': {
                'value': {
                    'time': expected_datetime,
                    'cmd': cmd,
                    'waypoints': cmd_waypoints
                }
            },
            'navigating_waypoints': {
                'type': 'object',
                'value': navigating_waypoints,
                'metadata': {
                    'TimeInstant': {
                        'type': 'datetime',
                        'value': expected_datetime,
                    }
                }
            }
        }

    @pytest.mark.parametrize('c, cw, nw, rwl, cr, o, ca', [
        ('dummy', 0, 1e-1, True, [], ['a', 1], {}),
        (0, 1e-1, True, [], ['a', 1], {}, {'a': 1}),
        (1e-1, True, [], ['a', 1], {}, {'a': 1}, tuple(['a', 1])),
        (True, [], ['a', 1], {}, {'a': 1}, tuple(['a', 1]), set([1, 2])),
        ([], ['a', 1], {}, {'a': 1}, tuple(['a', 1]), set([1, 2]), dt.datetime.utcnow()),
        (['a', 1], {}, {'a': 1}, tuple(['a', 1]), set([1, 2]), dt.datetime.utcnow(), caller.Caller.WAREHOUSE),
        ({}, {'a': 1}, tuple(['a', 1]), set([1, 2]), dt.datetime.utcnow(), caller.Caller.WAREHOUSE, caller.Caller.ORDERING),
        ({'a': 1}, tuple(['a', 1]), set([1, 2]), dt.datetime.utcnow(), caller.Caller.WAREHOUSE, caller.Caller.ORDERING, None),
        (tuple(['a', 1]), set([1, 2]), dt.datetime.utcnow(), caller.Caller.WAREHOUSE, caller.Caller.ORDERING, None, 'dummy'),
        (set([1, 2]), dt.datetime.utcnow(), caller.Caller.WAREHOUSE, caller.Caller.ORDERING, None, 'dummy', 0),
        (dt.datetime.utcnow(), caller.Caller.WAREHOUSE, caller.Caller.ORDERING, None, 'dummy', 0, 1e-1),
        (caller.Caller.WAREHOUSE, caller.Caller.ORDERING, None, 'dummy', 0, 1e-1, True),
        (caller.Caller.ORDERING, None, 'dummy', 0, 1e-1, True, []),
        (None, 'dummy', 0, 1e-1, True, [], ['a', 1]),
    ])
    def test_args(self, c, cw, nw, rwl, cr, o, ca):
        # Optional attributes whose value is None are dropped from the
        # payload, except 'navigating_waypoints' which is always kept.
        time = '2020-01-02T03:04:05.000+00:00'
        with freezegun.freeze_time(time):
            payload = orion.make_delivery_robot_command(c, cw, nw, rwl, cr, o, ca)
        result = {
            'send_cmd': {
                'value': {
                    'time': time,
                    'cmd': c,
                    'waypoints': cw,
                }
            },
            'navigating_waypoints': {
                'type': 'object',
                'value': nw,
                'metadata': {
                    'TimeInstant': {
                        'type': 'datetime',
                        'value': time,
                    }
                }
            },
            'remaining_waypoints_list': {
                'type': 'array',
                'value': rwl,
                'metadata': {
                    'TimeInstant': {
                        'type': 'datetime',
                        'value': time,
                    }
                }
            },
            'current_routes': {
                'type': 'array',
                'value': cr,
                'metadata': {
                    'TimeInstant': {
                        'type': 'datetime',
                        'value': time,
                    }
                }
            },
            'order': {
                'type': 'object',
                'value': o,
                'metadata': {
                    'TimeInstant': {
                        'type': 'datetime',
                        'value': time,
                    }
                }
            },
            'caller': {
                'type': 'string',
                'value': ca.value if isinstance(ca, caller.Caller) else None,
                'metadata': {
                    'TimeInstant': {
                        'type': 'datetime',
                        'value': time,
                    }
                }
            }
        }
        assert payload == {k: v for k, v in result.items()
                           if v['value'] is not None or k == 'navigating_waypoints'}
@pytest.mark.usefixtures('reload_module')
class TestMakeEmergencyCommand:
    """Tests for orion.make_emergency_command payload construction."""

    @pytest.mark.parametrize('timezone, expected_datetime', [
        (None, '2020-01-01T18:04:05.000+00:00'),
        ('UTC', '2020-01-01T18:04:05.000+00:00'),
        ('Asia/Tokyo', '2020-01-02T03:04:05.000+09:00'),
    ])
    def test_timezone(self, timezone, expected_datetime):
        # The frozen time is rendered in the TIMEZONE env var's zone.
        time = '2020-01-02T03:04:05+09:00'
        if timezone is not None:
            os.environ['TIMEZONE'] = timezone
        importlib.reload(const)
        importlib.reload(orion)
        cmd = 'test'
        with freezegun.freeze_time(time):
            payload = orion.make_emergency_command(cmd)
        result = {
            'send_emg': {
                'value': {
                    'time': expected_datetime,
                    'emergency_cmd': cmd,
                }
            }
        }
        assert payload == result

    @pytest.mark.parametrize('cmd', [
        'dummy', 0, 1e-1, True, [], ['a', 1], {}, {'a': 1}, tuple(['a', 1]), set([1, 2]), dt.datetime.utcnow(), None
    ])
    def test_args(self, cmd):
        # The cmd value is embedded verbatim, regardless of its type.
        time = '2020-01-02T03:04:05.000+00:00'
        with freezegun.freeze_time(time):
            payload = orion.make_emergency_command(cmd)
        assert payload == {
            'send_emg': {
                'value': {
                    'time': time,
                    'emergency_cmd': cmd,
                }
            }
        }
@pytest.mark.usefixtures('reload_module')
class TestMakeUpdateModeCommmand:
    """Tests for orion.make_updatemode_command payload construction."""

    @pytest.mark.parametrize('timezone, expected_datetime', [
        (None, '2020-01-01T18:04:05.000+00:00'),
        ('UTC', '2020-01-01T18:04:05.000+00:00'),
        ('Asia/Tokyo', '2020-01-02T03:04:05.000+09:00'),
    ])
    def test_timezone(self, timezone, expected_datetime):
        # The frozen time is rendered in the TIMEZONE env var's zone.
        time = '2020-01-02T03:04:05+09:00'
        if timezone is not None:
            os.environ['TIMEZONE'] = timezone
        importlib.reload(const)
        importlib.reload(orion)
        next_mode = 'navi'
        with freezegun.freeze_time(time):
            payload = orion.make_updatemode_command(next_mode)
        assert payload == {
            'current_mode': {
                'type': 'string',
                'value': next_mode,
                'metadata': {
                    'TimeInstant': {
                        'type': 'datetime',
                        'value': expected_datetime,
                    }
                }
            },
        }

    @pytest.mark.parametrize('next_mode', [
        'dummy', 0, 1e-1, True, [], ['a', 1], {}, {'a': 1}, tuple(['a', 1]), set([1, 2]), dt.datetime.utcnow(), None
    ])
    def test_args(self, next_mode):
        # The next_mode value is embedded verbatim, regardless of its type.
        time = '2020-01-02T03:04:05.000+00:00'
        with freezegun.freeze_time(time):
            payload = orion.make_updatemode_command(next_mode)
        assert payload == {
            'current_mode': {
                'type': 'string',
                'value': next_mode,
                'metadata': {
                    'TimeInstant': {
                        'type': 'datetime',
                        'value': time,
                    }
                }
            },
        }
@pytest.mark.usefixtures('reload_module')
class TestMakeUpdateStateCommmand:
    """Tests for orion.make_updatestate_command payload construction."""

    @pytest.mark.parametrize('timezone, expected_datetime', [
        (None, '2020-01-01T18:04:05.000+00:00'),
        ('UTC', '2020-01-01T18:04:05.000+00:00'),
        ('Asia/Tokyo', '2020-01-02T03:04:05.000+09:00'),
    ])
    def test_timezone(self, timezone, expected_datetime):
        # The frozen time is rendered in the TIMEZONE env var's zone.
        time = '2020-01-02T03:04:05+09:00'
        if timezone is not None:
            os.environ['TIMEZONE'] = timezone
        importlib.reload(const)
        importlib.reload(orion)
        next_state = 'navi'
        with freezegun.freeze_time(time):
            payload = orion.make_updatestate_command(next_state)
        assert payload == {
            'current_state': {
                'type': 'string',
                'value': next_state,
                'metadata': {
                    'TimeInstant': {
                        'type': 'datetime',
                        'value': expected_datetime,
                    }
                }
            },
        }

    @pytest.mark.parametrize('next_state', [
        'dummy', 0, 1e-1, True, [], ['a', 1], {}, {'a': 1}, tuple(['a', 1]), set([1, 2]), dt.datetime.utcnow(), None
    ])
    def test_args(self, next_state):
        # The next_state value is embedded verbatim, regardless of its type.
        time = '2020-01-02T03:04:05.000+00:00'
        with freezegun.freeze_time(time):
            payload = orion.make_updatestate_command(next_state)
        assert payload == {
            'current_state': {
                'type': 'string',
                'value': next_state,
                'metadata': {
                    'TimeInstant': {
                        'type': 'datetime',
                        'value': time,
                    }
                }
            },
        }
@pytest.mark.usefixtures('reload_module')
class TestMakeRobotuiSendstateCommand:
    """Tests for orion.make_robotui_sendstate_command."""

    @pytest.mark.parametrize('timezone, expected_datetime', [
        (None, '2020-01-01T18:04:05.000+00:00'),
        ('UTC', '2020-01-01T18:04:05.000+00:00'),
        ('Asia/Tokyo', '2020-01-02T03:04:05.000+09:00'),
    ])
    def test_timezone(self, timezone, expected_datetime):
        """The send_state timestamp honors the TIMEZONE env var."""
        frozen = '2020-01-02T03:04:05+09:00'
        if timezone is not None:
            # const/orion read TIMEZONE at import time, so reload both
            os.environ['TIMEZONE'] = timezone
            importlib.reload(const)
            importlib.reload(orion)
        next_state = 'standby'
        destination = 'dest'
        with freezegun.freeze_time(frozen):
            actual = orion.make_robotui_sendstate_command(next_state, destination)
        expected = {
            'send_state': {
                'value': {
                    'time': expected_datetime,
                    'state': next_state,
                    'destination': destination,
                },
            },
        }
        assert actual == expected

    @pytest.mark.parametrize('next_state, destination', [
        ('dummy', 0),
        (0, 1e-1),
        (1e-1, True),
        (True, []),
        ([], ['a', 1]),
        (['a', 1], {}),
        ({}, {'a': 1}),
        ({'a': 1}, ('a', 1)),
        (('a', 1), {1, 2}),
        ({1, 2}, dt.datetime.utcnow()),
        (dt.datetime.utcnow(), None),
        (None, 'dummy'),
    ])
    def test_args(self, next_state, destination):
        """State and destination are passed through into the payload unchanged."""
        frozen = '2020-01-02T03:04:05.000+00:00'
        with freezegun.freeze_time(frozen):
            actual = orion.make_robotui_sendstate_command(next_state, destination)
        expected = {
            'send_state': {
                'value': {
                    'time': frozen,
                    'state': next_state,
                    'destination': destination,
                },
            },
        }
        assert actual == expected
@pytest.mark.usefixtures('reload_module')
class TestMakeRobotuiSendtokeninfoCommand:
    """Tests for orion.make_robotui_sendtokeninfo_command."""

    @pytest.mark.parametrize('timezone, expected_datetime', [
        (None, '2020-01-01T18:04:05.000+00:00'),
        ('UTC', '2020-01-01T18:04:05.000+00:00'),
        ('Asia/Tokyo', '2020-01-02T03:04:05.000+09:00'),
    ])
    def test_timezone(self, timezone, expected_datetime):
        """The send_token_info timestamp honors the TIMEZONE env var."""
        frozen = '2020-01-02T03:04:05+09:00'
        if timezone is not None:
            # const/orion read TIMEZONE at import time, so reload both
            os.environ['TIMEZONE'] = timezone
            importlib.reload(const)
            importlib.reload(orion)
        tkn = token.Token.get('test')
        tkn.lock_owner_id = 'lock_owner_id'
        tkn.prev_owner_id = 'prev_owner_id'
        mode = token.TokenMode.LOCK
        with freezegun.freeze_time(frozen):
            actual = orion.make_robotui_sendtokeninfo_command(tkn, mode)
        expected = {
            'send_token_info': {
                'value': {
                    'time': expected_datetime,
                    'token': str(tkn),
                    'mode': str(mode),
                    'lock_owner_id': 'lock_owner_id',
                    'prev_owner_id': 'prev_owner_id',
                },
            },
        }
        assert actual == expected

    @pytest.mark.parametrize('tkn, mode, loi, poi', [
        (token.Token(''), token.TokenMode.LOCK, 'dummy', 0),
        (token.Token('abc'), token.TokenMode.RELEASE, 0, 1e-1),
        (token.Token(''), token.TokenMode.SUSPEND, 1e-1, True),
        (token.Token('abc'), token.TokenMode.RESUME, True, []),
        (token.Token(''), token.TokenMode.LOCK, [], ['a', 1]),
        (token.Token('abc'), token.TokenMode.RELEASE, ['a', 1], {}),
        (token.Token(''), token.TokenMode.SUSPEND, {}, {'a': 1}),
        (token.Token('abc'), token.TokenMode.RESUME, {'a': 1}, ('a', 1)),
        (token.Token(''), token.TokenMode.LOCK, ('a', 1), {1, 2}),
        (token.Token('abc'), token.TokenMode.RELEASE, {1, 2}, dt.datetime.utcnow()),
        (token.Token(''), token.TokenMode.SUSPEND, dt.datetime.utcnow(), None),
        (token.Token('abc'), token.TokenMode.RESUME, None, 'dummy'),
    ])
    def test_args(self, tkn, mode, loi, poi):
        """Owner ids of any type are passed through into the payload unchanged."""
        frozen = '2020-01-02T03:04:05.000+00:00'
        tkn.lock_owner_id = loi
        tkn.prev_owner_id = poi
        with freezegun.freeze_time(frozen):
            actual = orion.make_robotui_sendtokeninfo_command(tkn, mode)
        expected = {
            'send_token_info': {
                'value': {
                    'time': frozen,
                    'token': str(tkn),
                    'mode': str(mode),
                    'lock_owner_id': loi,
                    'prev_owner_id': poi,
                },
            },
        }
        assert actual == expected

    @pytest.mark.parametrize('tkn, mode', [
        ('dummy', 0),
        (0, 1e-1),
        (1e-1, True),
        (True, []),
        ([], ['a', 1]),
        (['a', 1], {}),
        ({}, {'a': 1}),
        ({'a': 1}, ('a', 1)),
        (('a', 1), {1, 2}),
        ({1, 2}, dt.datetime.utcnow()),
        (dt.datetime.utcnow(), None),
    ])
    def test_invalid_args(self, tkn, mode):
        """Non-Token/non-TokenMode arguments raise TypeError."""
        with pytest.raises(TypeError) as excinfo:
            orion.make_robotui_sendtokeninfo_command(tkn, mode)
        assert str(excinfo.value) == 'invalid token or mode'
@pytest.mark.usefixtures('reload_module')
class TestMakeTokenInfoCommand:
    """Tests for orion.make_token_info_command."""

    @staticmethod
    def _attribute(attr_type, value, timestamp):
        # One NGSI attribute entry with a TimeInstant metadata stamp.
        return {
            'type': attr_type,
            'value': value,
            'metadata': {
                'TimeInstant': {
                    'type': 'datetime',
                    'value': timestamp,
                }
            }
        }

    @pytest.mark.parametrize('timezone, expected_datetime', [
        (None, '2020-01-01T18:04:05.000+00:00'),
        ('UTC', '2020-01-01T18:04:05.000+00:00'),
        ('Asia/Tokyo', '2020-01-02T03:04:05.000+09:00'),
    ])
    def test_timezone(self, timezone, expected_datetime):
        """Every attribute's TimeInstant honors the TIMEZONE env var."""
        frozen = '2020-01-02T03:04:05+09:00'
        if timezone is not None:
            # const/orion read TIMEZONE at import time, so reload both
            os.environ['TIMEZONE'] = timezone
            importlib.reload(const)
            importlib.reload(orion)
        is_locked = True
        robot_id = 'robot_01'
        waitings = ['robot_02']
        with freezegun.freeze_time(frozen):
            actual = orion.make_token_info_command(is_locked, robot_id, waitings)
        assert actual == {
            'is_locked': self._attribute('boolean', is_locked, expected_datetime),
            'lock_owner_id': self._attribute('string', robot_id, expected_datetime),
            'waitings': self._attribute('array', waitings, expected_datetime),
        }

    @pytest.mark.parametrize('is_locked, robot_id, waitings', [
        ('dummy', 0, 1e-1),
        (0, 1e-1, True),
        (1e-1, True, []),
        (True, [], ['a', 1]),
        ([], ['a', 1], {}),
        (['a', 1], {}, {'a': 1}),
        ({}, {'a': 1}, ('a', 1)),
        ({'a': 1}, ('a', 1), {1, 2}),
        (('a', 1), {1, 2}, dt.datetime.utcnow()),
        ({1, 2}, dt.datetime.utcnow(), None),
        (dt.datetime.utcnow(), None, 'dummy'),
        (None, 'dummy', 0),
    ])
    def test_args(self, is_locked, robot_id, waitings):
        """Arguments of any type are passed through into the payload unchanged."""
        frozen = '2020-01-02T03:04:05.000+00:00'
        with freezegun.freeze_time(frozen):
            actual = orion.make_token_info_command(is_locked, robot_id, waitings)
        assert actual == {
            'is_locked': self._attribute('boolean', is_locked, frozen),
            'lock_owner_id': self._attribute('string', robot_id, frozen),
            'waitings': self._attribute('array', waitings, frozen),
        }
@pytest.mark.usefixtures('reload_module')
class TestMakeUpdatelastprocessedtimeCommand:
    """Tests for orion.make_updatelastprocessedtime_command."""

    @pytest.mark.parametrize('timezone, expected_datetime', [
        (None, '2020-01-01T18:04:05.000+00:00'),
        ('UTC', '2020-01-01T18:04:05.000+00:00'),
        ('Asia/Tokyo', '2020-01-02T03:04:05.000+09:00'),
    ])
    def test_timezone(self, timezone, expected_datetime):
        """The TimeInstant metadata honors the TIMEZONE env var."""
        frozen = '2020-01-02T03:04:05+09:00'
        if timezone is not None:
            # const/orion read TIMEZONE at import time, so reload both
            os.environ['TIMEZONE'] = timezone
            importlib.reload(const)
            importlib.reload(orion)
        lpt = dateutil.parser.parse('2020-02-03T04:05:06.789+09:00')
        with freezegun.freeze_time(frozen):
            actual = orion.make_updatelastprocessedtime_command(lpt)
        expected = {
            'last_processed_time': {
                'type': 'ISO8601',
                'value': lpt.isoformat(timespec='milliseconds'),
                'metadata': {
                    'TimeInstant': {
                        'type': 'datetime',
                        'value': expected_datetime,
                    },
                },
            },
        }
        assert actual == expected

    @pytest.mark.parametrize('lpt', [
        'dummy', 0, 1e-1, True, [], ['a', 1], {}, {'a': 1}, ('a', 1), {1, 2}, token.Token(''), None
    ])
    def test_invalid_args(self, lpt):
        """Anything that is not a datetime raises TypeError."""
        with pytest.raises(TypeError) as excinfo:
            orion.make_updatelastprocessedtime_command(lpt)
        assert str(excinfo.value) == 'last_processed_time is must be "datetime"'
| 35.932982
| 126
| 0.555628
| 5,102
| 47,719
| 5.001372
| 0.049784
| 0.071874
| 0.076028
| 0.038249
| 0.905788
| 0.887212
| 0.848963
| 0.832857
| 0.82106
| 0.807501
| 0
| 0.040903
| 0.309374
| 47,719
| 1,327
| 127
| 35.96006
| 0.733372
| 0
| 0
| 0.729778
| 0
| 0
| 0.162744
| 0.035688
| 0
| 0
| 0
| 0
| 0.117333
| 1
| 0.032889
| false
| 0.003556
| 0.036444
| 0.000889
| 0.084444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
66631ae72a6911933d3fb035eb246232c161b9db
| 92,518
|
py
|
Python
|
dlkit/abstract_osid/mapping/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/abstract_osid/mapping/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/abstract_osid/mapping/managers.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Implementations of mapping abstract base class managers."""
# pylint: disable=invalid-name
# Method names comply with OSID specification.
# pylint: disable=no-init
# Abstract classes do not define __init__.
# pylint: disable=too-few-public-methods
# Some interfaces are specified as 'markers' and include no methods.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
# pylint: disable=too-many-arguments
# Argument signature defined in specification.
# pylint: disable=duplicate-code
# All apparent duplicates have been inspected. They aren't.
import abc
class MappingProfile:
    """The mapping profile describes the interoperability among mapping services."""

    # Declared as a metaclass attribute (OSID-spec generated style) rather
    # than subclassing abc.ABC.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def supports_visible_federation(self):
        """Tests if any map federation is exposed.

        Federation is exposed when a specific map may be identified,
        selected and used to create a lookup or admin session.
        Federation is not exposed when a set of maps appears as a single
        map.

        :return: ``true`` if visible federation is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_location_lookup(self):
        """Tests if looking up locations is supported.

        :return: ``true`` if location lookup is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_location_query(self):
        """Tests if querying locations is supported.

        :return: ``true`` if location query is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_location_search(self):
        """Tests if searching locations is supported.

        :return: ``true`` if location search is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_location_admin(self):
        """Tests if a location administrative service is supported.

        :return: ``true`` if location administration is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_location_notification(self):
        """Tests if a location notification service is supported.

        :return: ``true`` if location notification is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_location_hierarchy(self):
        """Tests if a location hierarchy service is supported.

        :return: ``true`` if location hierarchy is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_location_hierarchy_design(self):
        """Tests if a location hierarchy design service is supported.

        :return: ``true`` if location hierarchy design is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_location_map(self):
        """Tests if a location map lookup service is supported.

        :return: ``true`` if a location map lookup service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_location_map_assignment(self):
        """Tests if a location map assignment service is supported.

        :return: ``true`` if a location to map assignment service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_location_smart_map(self):
        """Tests if a location smart map service is supported.

        :return: ``true`` if a location smart map service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_location_adjacency(self):
        """Tests if a location adjacency service is supported.

        :return: ``true`` if a location adjacency service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_location_spatial(self):
        """Tests if a location spatial service is supported.

        :return: ``true`` if a location spatial service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_resource_location(self):
        """Tests if a resource location service is supported.

        :return: ``true`` if a resource location service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_resource_location_update(self):
        """Tests if a resource location update service is supported.

        :return: ``true`` if a resource location update service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_resource_location_notification(self):
        """Tests if a resource location notification service is supported.

        :return: ``true`` if a resource location notification service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_resource_position_notification(self):
        """Tests if a resource position notification service is supported.

        :return: ``true`` if a resource position notification service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_my_location(self):
        """Tests if a location service is supported for the current agent.

        :return: ``true`` if my location is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_map_lookup(self):
        """Tests if looking up maps is supported.

        :return: ``true`` if map lookup is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_map_query(self):
        """Tests if querying maps is supported.

        :return: ``true`` if a map query service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_map_search(self):
        """Tests if searching maps is supported.

        :return: ``true`` if map search is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_map_admin(self):
        """Tests if map administrative service is supported.

        :return: ``true`` if map administration is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_map_notification(self):
        """Tests if a map notification service is supported.

        :return: ``true`` if map notification is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_map_hierarchy(self):
        """Tests for the availability of a map hierarchy traversal service.

        :return: ``true`` if map hierarchy traversal is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented in all
        providers.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_map_hierarchy_design(self):
        """Tests for the availability of a map hierarchy design service.

        :return: ``true`` if map hierarchy design is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_mapping_batch(self):
        """Tests if the mapping batch service is supported.

        :return: ``true`` if mapping batch service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_mapping_path(self):
        """Tests if the mapping path service is supported.

        :return: ``true`` if mapping path service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def supports_mapping_route(self):
        """Tests if the mapping route service is supported.

        :return: ``true`` if mapping route service is supported, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_location_record_types(self):
        """Gets the supported ``Location`` record types.

        :return: a list containing the supported ``Location`` record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    # Property alias per the OSID binding convention (getter exposed as attribute).
    location_record_types = abc.abstractproperty(fget=get_location_record_types)

    @abc.abstractmethod
    def supports_location_record_type(self, location_record_type):
        """Tests if the given ``Location`` record type is supported.

        :param location_record_type: a ``Type`` indicating a ``Location`` record type
        :type location_record_type: ``osid.type.Type``
        :return: ``true`` if the given record type is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``location_record_type`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_location_search_record_types(self):
        """Gets the supported ``Location`` search types.

        :return: a list containing the supported ``Location`` search types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    location_search_record_types = abc.abstractproperty(fget=get_location_search_record_types)

    @abc.abstractmethod
    def supports_location_search_record_type(self, location_search_record_type):
        """Tests if the given ``Location`` search type is supported.

        :param location_search_record_type: a ``Type`` indicating a ``Location`` search type
        :type location_search_record_type: ``osid.type.Type``
        :return: ``true`` if the given ``Type`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``location_search_record_type`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_map_record_types(self):
        """Gets the supported ``Map`` record types.

        :return: a list containing the supported ``Map`` types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    map_record_types = abc.abstractproperty(fget=get_map_record_types)

    @abc.abstractmethod
    def supports_map_record_type(self, map_record_type):
        """Tests if the given ``Map`` record type is supported.

        :param map_record_type: a ``Type`` indicating a ``Map`` record type
        :type map_record_type: ``osid.type.Type``
        :return: ``true`` if the given ``Type`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``map_record_type`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_map_search_record_types(self):
        """Gets the supported ``Map`` search record types.

        :return: a list containing the supported ``Map`` search record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    map_search_record_types = abc.abstractproperty(fget=get_map_search_record_types)

    @abc.abstractmethod
    def supports_map_search_record_type(self, map_search_record_type):
        """Tests if the given ``Map`` search record type is supported.

        :param map_search_record_type: a ``Type`` indicating a ``Map`` search record type
        :type map_search_record_type: ``osid.type.Type``
        :return: ``true`` if the given ``Type`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``map_search_record_type`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_resource_location_record_types(self):
        """Gets the supported ``ResourceLocation`` record types.

        :return: a list containing the supported ``ResourceLocation`` record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    resource_location_record_types = abc.abstractproperty(fget=get_resource_location_record_types)

    @abc.abstractmethod
    def supports_resource_location_record_type(self, resource_location_record_type):
        """Tests if the given ``ResourceLocationRecord`` record type is supported.

        :param resource_location_record_type: a ``Type`` indicating a ``ResourceLocation`` type
        :type resource_location_record_type: ``osid.type.Type``
        :return: ``true`` if the given ``Type`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``resource_location_record_type`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_coordinate_types(self):
        """Gets the supported ``Coordinate`` types.

        :return: a list containing the supported ``Coordinate`` types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    coordinate_types = abc.abstractproperty(fget=get_coordinate_types)

    @abc.abstractmethod
    def supports_coordinate_type(self, coordinate_type):
        """Tests if the given ``Coordinate`` type is supported.

        :param coordinate_type: a ``Type`` indicating a ``Coordinate`` type
        :type coordinate_type: ``osid.type.Type``
        :return: ``true`` if the given ``Type`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``coordinate_type`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_heading_types(self):
        """Gets the supported ``Heading`` types.

        :return: a list containing the supported ``Heading`` types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    heading_types = abc.abstractproperty(fget=get_heading_types)

    @abc.abstractmethod
    def supports_heading_type(self, heading_type):
        """Tests if the given ``Heading`` type is supported.

        :param heading_type: a ``Type`` indicating a ``Heading`` type
        :type heading_type: ``osid.type.Type``
        :return: ``true`` if the given ``Type`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``heading_type`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_spatial_unit_record_types(self):
        """Gets the supported ``SpatialUnit`` record types.

        :return: a list containing the supported ``SpatialUnit`` record types
        :rtype: ``osid.type.TypeList``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.type.TypeList

    spatial_unit_record_types = abc.abstractproperty(fget=get_spatial_unit_record_types)

    @abc.abstractmethod
    def supports_spatial_unit_record_type(self, spatial_unit_record_type):
        """Tests if the given ``SpatialUnit`` record type is supported.

        :param spatial_unit_record_type: a ``Type`` indicating a ``SpatialUnit`` record type
        :type spatial_unit_record_type: ``osid.type.Type``
        :return: ``true`` if the given ``Type`` is supported, ``false`` otherwise
        :rtype: ``boolean``
        :raise: ``NullArgument`` -- ``spatial_unit_record_type`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean
class MappingManager:
"""The mapping manager provides access to mapping sessions and provides interoperability tests for various aspects of this service.
The sessions included in this manager are:
* ``LocationLookupSession:`` a session to retrieve locations
* ``LocationQuerySession:`` a session to query for locations
* ``LocationSearchSession:`` a session to search for locations
* ``LocationAdminSession:`` a session to create and delete
locations
* ``LocationNotificationSession:`` a session to receive
notifications pertaining to location changes
* ``LocationHierarchySession:`` a session to examine locations in
a hierarchy
* ``LocationHierarchyAssignmentSession:`` a session to traverse
the location hierarchy
* ``LocationMapSession:`` a session to look up location to map
mappings
* ``LocationMapAssignmentSession:`` a session to manage location
to map mappings
* ``LocationSmartMapSession:`` a session to manage dynamic maps of
locations
* ``LocationAdjacencySession:`` a session to query neighboring
locations
* ``LocationSpatialSession:`` a session to lookup locations
spatially
* ``ResourceLocationSession:`` a session query resources at
locations
* ``ResourceLocationUpdateSession:`` a session to assign resources
to locations
* ``ResourceLocationNotificationSession:`` a session to subscribe
to notifications when resources move among locations
* ``ResourcePositionNotificationSession:`` a session to subscribe
to notifications when resources change positions
* ``MyLocationSession:`` a session to query locations for the user
agent
* ``MapLookupSession:`` a session to retrieve maps
* ``MapQuerySession:`` a session to search for maps
* ``MapSearchSession:`` a session to search for maps
* ``MapAdminSession:`` a session to create and delete maps
* ``MapNotificationSession:`` a session to receive notifications
pertaining to map changes
* ``MapHierarchySession:`` a session to traverse a hierarchy of
map
* ``MapHierarchyDesignSession:`` a session to manage a map
hierarchy
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_location_lookup_session(self):
"""Gets the ``OsidSession`` associated with the location lookup service.
:return: a ``LocationLookupSession``
:rtype: ``osid.mapping.LocationLookupSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_location_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_location_lookup()`` is ``true``.*
"""
return # osid.mapping.LocationLookupSession
location_lookup_session = abc.abstractproperty(fget=get_location_lookup_session)
@abc.abstractmethod
def get_location_lookup_session_for_map(self, map_id):
"""Gets the ``OsidSession`` associated with the location lookup service for the given map.
:param map_id: the ``Id`` of the map
:type map_id: ``osid.id.Id``
:return: a ``LocationLookupSession``
:rtype: ``osid.mapping.LocationLookupSession``
:raise: ``NotFound`` -- no ``Map`` found by the given ``Id``
:raise: ``NullArgument`` -- ``map_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_location_lookup()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_location_lookup()`` and
``supports_visible_federation()`` are ``true``*
"""
return # osid.mapping.LocationLookupSession
@abc.abstractmethod
def get_location_query_session(self):
"""Gets the ``OsidSession`` associated with the location query service.
:return: a ``LocationQuerySession``
:rtype: ``osid.mapping.LocationQuerySession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_location_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_location_query()`` is ``true``.*
"""
return # osid.mapping.LocationQuerySession
location_query_session = abc.abstractproperty(fget=get_location_query_session)
@abc.abstractmethod
def get_location_query_session_for_map(self, map_id):
"""Gets the ``OsidSession`` associated with the location query service for the given map.
:param map_id: the ``Id`` of the ``Map``
:type map_id: ``osid.id.Id``
:return: a ``LocationQuerySession``
:rtype: ``osid.mapping.LocationQuerySession``
:raise: ``NotFound`` -- no map found by the given ``Id``
:raise: ``NullArgument`` -- ``map_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_location_query()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_location_query()`` and
``supports_visible_federation()`` are ``true``*
"""
return # osid.mapping.LocationQuerySession
@abc.abstractmethod
def get_location_search_session(self):
"""Gets the ``OsidSession`` associated with the location search service.
:return: a ``LocationSearchSession``
:rtype: ``osid.mapping.LocationSearchSession``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_location_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_location_search()`` is ``true``.*
"""
return # osid.mapping.LocationSearchSession
location_search_session = abc.abstractproperty(fget=get_location_search_session)
@abc.abstractmethod
def get_location_search_session_for_map(self, map_id):
"""Gets the ``OsidSession`` associated with the location search service for the given map.
:param map_id: the ``Id`` of the ``Map``
:type map_id: ``osid.id.Id``
:return: a ``LocationSearchSession``
:rtype: ``osid.mapping.LocationSearchSession``
:raise: ``NotFound`` -- no map found by the given ``Id``
:raise: ``NullArgument`` -- ``map_id`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``Unimplemented`` -- ``supports_location_search()`` or ``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_location_search()`` and
``supports_visible_federation()`` are ``true``*
"""
return # osid.mapping.LocationSearchSession
    @abc.abstractmethod
    def get_location_admin_session(self):
        """Gets the ``OsidSession`` associated with the location administration service.

        :return: a ``LocationAdminSession``
        :rtype: ``osid.mapping.LocationAdminSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_admin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_admin()`` is ``true``.*

        """
        return # osid.mapping.LocationAdminSession
    location_admin_session = abc.abstractproperty(fget=get_location_admin_session)
    @abc.abstractmethod
    def get_location_admin_session_for_map(self, map_id):
        """Gets the ``OsidSession`` associated with the location administration service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :return: a ``LocationAdminSession``
        :rtype: ``osid.mapping.LocationAdminSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_admin()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_admin()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.LocationAdminSession
    @abc.abstractmethod
    def get_location_notification_session(self, location_receiver):
        """Gets the ``OsidSession`` associated with the location notification service.

        :param location_receiver: the notification callback
        :type location_receiver: ``osid.mapping.LocationReceiver``
        :return: a ``LocationNotificationSession``
        :rtype: ``osid.mapping.LocationNotificationSession``
        :raise: ``NullArgument`` -- ``location_receiver`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_notification()`` is ``true``.*

        """
        return # osid.mapping.LocationNotificationSession
    @abc.abstractmethod
    def get_location_notification_session_for_map(self, location_receiver, map_id):
        """Gets the ``OsidSession`` associated with the location notification service for the given map.

        :param location_receiver: the notification callback
        :type location_receiver: ``osid.mapping.LocationReceiver``
        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :return: a ``LocationNotificationSession``
        :rtype: ``osid.mapping.LocationNotificationSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``location_receiver`` or ``map_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_notification()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_notification()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.LocationNotificationSession
    @abc.abstractmethod
    def get_location_hierarchy_session(self):
        """Gets the ``OsidSession`` associated with the location hierarchy service.

        :return: a ``LocationHierarchySession``
        :rtype: ``osid.mapping.LocationHierarchySession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_hierarchy()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_hierarchy()`` is ``true``.*

        """
        return # osid.mapping.LocationHierarchySession
    location_hierarchy_session = abc.abstractproperty(fget=get_location_hierarchy_session)
    @abc.abstractmethod
    def get_location_hierarchy_session_for_map(self, map_id):
        """Gets the ``OsidSession`` associated with the location hierarchy service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :return: a ``LocationHierarchySession``
        :rtype: ``osid.mapping.LocationHierarchySession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_hierarchy()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_hierarchy()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.LocationHierarchySession
    @abc.abstractmethod
    def get_location_hierarchy_design_session(self):
        """Gets the ``OsidSession`` associated with the location hierarchy design service.

        :return: a ``LocationHierarchyDesignSession``
        :rtype: ``osid.mapping.LocationHierarchyDesignSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_hierarchy_design()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_hierarchy_design()`` is ``true``.*

        """
        return # osid.mapping.LocationHierarchyDesignSession
    location_hierarchy_design_session = abc.abstractproperty(fget=get_location_hierarchy_design_session)
    @abc.abstractmethod
    def get_location_hierarchy_design_session_for_map(self, map_id):
        """Gets the ``OsidSession`` associated with the location hierarchy design service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :return: a ``LocationHierarchyDesignSession``
        :rtype: ``osid.mapping.LocationHierarchyDesignSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_hierarchy_design()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_hierarchy_design()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.LocationHierarchyDesignSession
    @abc.abstractmethod
    def get_location_map_session(self):
        """Gets the ``OsidSession`` to lookup location/map mappings.

        :return: a ``LocationMapSession``
        :rtype: ``osid.mapping.LocationMapSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_map()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_map()`` is ``true``.*

        """
        return # osid.mapping.LocationMapSession
    location_map_session = abc.abstractproperty(fget=get_location_map_session)
    @abc.abstractmethod
    def get_location_map_assignment_session(self):
        """Gets the ``OsidSession`` associated with assigning locations to maps.

        :return: a ``LocationMapAssignmentSession``
        :rtype: ``osid.mapping.LocationMapAssignmentSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_map_assignment()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_map_assignment()`` is ``true``.*

        """
        return # osid.mapping.LocationMapAssignmentSession
    location_map_assignment_session = abc.abstractproperty(fget=get_location_map_assignment_session)
    @abc.abstractmethod
    def get_location_smart_map_session(self, map_id):
        """Gets the ``OsidSession`` to manage location smart maps.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :return: a ``LocationSmartMapSession``
        :rtype: ``osid.mapping.LocationSmartMapSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_smart_map()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_smart_map()`` is ``true``.*

        """
        return # osid.mapping.LocationSmartMapSession
    @abc.abstractmethod
    def get_location_adjacency_session(self):
        """Gets the ``OsidSession`` associated with the location adjacency service.

        :return: a ``LocationAdjacencySession``
        :rtype: ``osid.mapping.LocationAdjacencySession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_adjacency()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_adjacency()`` is ``true``.*

        """
        return # osid.mapping.LocationAdjacencySession
    location_adjacency_session = abc.abstractproperty(fget=get_location_adjacency_session)
    @abc.abstractmethod
    def get_location_adjacency_session_for_map(self, map_id):
        """Gets the ``OsidSession`` associated with the location adjacency service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :return: a ``LocationAdjacencySession``
        :rtype: ``osid.mapping.LocationAdjacencySession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_adjacency()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_adjacency()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.LocationAdjacencySession
    @abc.abstractmethod
    def get_location_spatial_session(self):
        """Gets the ``OsidSession`` associated with the location spatial service.

        :return: a ``LocationSpatialSession``
        :rtype: ``osid.mapping.LocationSpatialSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_spatial()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_spatial()`` is ``true``.*

        """
        return # osid.mapping.LocationSpatialSession
    location_spatial_session = abc.abstractproperty(fget=get_location_spatial_session)
    @abc.abstractmethod
    def get_location_spatial_session_for_map(self, map_id):
        """Gets the ``OsidSession`` associated with the location spatial service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :return: a ``LocationSpatialSession``
        :rtype: ``osid.mapping.LocationSpatialSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_spatial()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_spatial()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.LocationSpatialSession
    @abc.abstractmethod
    def get_resource_location_session(self):
        """Gets the ``OsidSession`` associated with the resource location service.

        :return: a ``ResourceLocationSession``
        :rtype: ``osid.mapping.ResourceLocationSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_location()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_location()`` is ``true``.*

        """
        return # osid.mapping.ResourceLocationSession
    resource_location_session = abc.abstractproperty(fget=get_resource_location_session)
    @abc.abstractmethod
    def get_resource_location_session_for_map(self, map_id):
        """Gets the ``OsidSession`` associated with the resource location service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :return: a ``ResourceLocationSession``
        :rtype: ``osid.mapping.ResourceLocationSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_location()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_location()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.ResourceLocationSession
    @abc.abstractmethod
    def get_resource_location_update_session(self):
        """Gets the ``OsidSession`` associated with the resource location update service.

        :return: a ``ResourceLocationUpdateSession``
        :rtype: ``osid.mapping.ResourceLocationUpdateSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_location_update()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_location_update()`` is ``true``.*

        """
        return # osid.mapping.ResourceLocationUpdateSession
    resource_location_update_session = abc.abstractproperty(fget=get_resource_location_update_session)
    @abc.abstractmethod
    def get_resource_location_update_session_for_map(self, map_id):
        """Gets the ``OsidSession`` associated with the resource location update service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :return: a ``ResourceLocationUpdateSession``
        :rtype: ``osid.mapping.ResourceLocationUpdateSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_location_update()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_location_update()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.ResourceLocationUpdateSession
    @abc.abstractmethod
    def get_resource_location_notification_session(self, resource_location_receiver):
        """Gets the ``OsidSession`` associated with the resource location notification service.

        :param resource_location_receiver: the notification callback
        :type resource_location_receiver: ``osid.mapping.ResourceLocationReceiver``
        :return: a ``ResourceLocationNotificationSession``
        :rtype: ``osid.mapping.ResourceLocationNotificationSession``
        :raise: ``NullArgument`` -- ``resource_location_receiver`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_location_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_location_notification()`` is ``true``.*

        """
        return # osid.mapping.ResourceLocationNotificationSession
    @abc.abstractmethod
    def get_resource_location_notification_session_for_map(self, resource_location_receiver, map_id):
        """Gets the ``OsidSession`` associated with the resource location notification service for the given map.

        :param resource_location_receiver: the notification callback
        :type resource_location_receiver: ``osid.mapping.ResourceLocationReceiver``
        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :return: a ``ResourceLocationNotificationSession``
        :rtype: ``osid.mapping.ResourceLocationNotificationSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``resource_location_receiver`` or ``map_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_location_notification()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_location_notification()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.ResourceLocationNotificationSession
    @abc.abstractmethod
    def get_resource_position_notification_session(self, resource_position_receiver):
        """Gets the ``OsidSession`` associated with the resource position notification service.

        :param resource_position_receiver: the notification callback
        :type resource_position_receiver: ``osid.mapping.ResourcePositionReceiver``
        :return: a ``ResourcePositionNotificationSession``
        :rtype: ``osid.mapping.ResourcePositionNotificationSession``
        :raise: ``NullArgument`` -- ``resource_position_receiver`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_position_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_position_notification()`` is ``true``.*

        """
        return # osid.mapping.ResourcePositionNotificationSession
    @abc.abstractmethod
    def get_resource_position_notification_session_for_map(self, resource_position_receiver, map_id):
        """Gets the ``OsidSession`` associated with the resource position notification service for the given map.

        :param resource_position_receiver: the notification callback
        :type resource_position_receiver: ``osid.mapping.ResourcePositionReceiver``
        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :return: a ``ResourcePositionNotificationSession``
        :rtype: ``osid.mapping.ResourcePositionNotificationSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``resource_position_receiver`` or ``map_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_position_notification()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_position_notification()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.ResourcePositionNotificationSession
    @abc.abstractmethod
    def get_my_location_session(self):
        """Gets the ``OsidSession`` associated with the my location service.

        :return: a ``MyLocationSession``
        :rtype: ``osid.mapping.MyLocationSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_my_location_lookup()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_my_location_lookup()`` is ``true``.*

        """
        return # osid.mapping.MyLocationSession
    my_location_session = abc.abstractproperty(fget=get_my_location_session)
    @abc.abstractmethod
    def get_my_location_session_for_map(self, map_id):
        """Gets the ``OsidSession`` associated with the my location service for the given map.

        :param map_id: the ``Id`` of the map
        :type map_id: ``osid.id.Id``
        :return: a ``MyLocationSession``
        :rtype: ``osid.mapping.MyLocationSession``
        :raise: ``NotFound`` -- no ``Map`` found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_my_location_lookup()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_my_location_lookup()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.MyLocationSession
    @abc.abstractmethod
    def get_map_lookup_session(self):
        """Gets the ``OsidSession`` associated with the map lookup service.

        :return: a ``MapLookupSession``
        :rtype: ``osid.mapping.MapLookupSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_lookup()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_lookup()`` is ``true``.*

        """
        return # osid.mapping.MapLookupSession
    map_lookup_session = abc.abstractproperty(fget=get_map_lookup_session)
    @abc.abstractmethod
    def get_map_query_session(self):
        """Gets the ``OsidSession`` associated with the map query service.

        :return: a ``MapQuerySession``
        :rtype: ``osid.mapping.MapQuerySession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_query()`` is ``true``.*

        """
        return # osid.mapping.MapQuerySession
    map_query_session = abc.abstractproperty(fget=get_map_query_session)
    @abc.abstractmethod
    def get_map_search_session(self):
        """Gets the ``OsidSession`` associated with the map search service.

        :return: a ``MapSearchSession``
        :rtype: ``osid.mapping.MapSearchSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_search()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_search()`` is ``true``.*

        """
        return # osid.mapping.MapSearchSession
    map_search_session = abc.abstractproperty(fget=get_map_search_session)
    @abc.abstractmethod
    def get_map_admin_session(self):
        """Gets the ``OsidSession`` associated with the map administrative service.

        :return: a ``MapAdminSession``
        :rtype: ``osid.mapping.MapAdminSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_admin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_admin()`` is ``true``.*

        """
        return # osid.mapping.MapAdminSession
    map_admin_session = abc.abstractproperty(fget=get_map_admin_session)
    @abc.abstractmethod
    def get_map_notification_session(self, map_receiver):
        """Gets the ``OsidSession`` associated with the map notification service.

        :param map_receiver: the notification callback
        :type map_receiver: ``osid.mapping.MapReceiver``
        :return: a ``MapNotificationSession``
        :rtype: ``osid.mapping.MapNotificationSession``
        :raise: ``NullArgument`` -- ``map_receiver`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_notification()`` is ``true``.*

        """
        return # osid.mapping.MapNotificationSession
    @abc.abstractmethod
    def get_map_hierarchy_session(self):
        """Gets the ``OsidSession`` associated with the map hierarchy service.

        :return: a ``MapHierarchySession`` for maps
        :rtype: ``osid.mapping.MapHierarchySession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_hierarchy()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_hierarchy()`` is ``true``.*

        """
        return # osid.mapping.MapHierarchySession
    map_hierarchy_session = abc.abstractproperty(fget=get_map_hierarchy_session)
    @abc.abstractmethod
    def get_map_hierarchy_design_session(self):
        """Gets the ``OsidSession`` associated with the map hierarchy design service.

        :return: a ``HierarchyDesignSession`` for maps
        :rtype: ``osid.mapping.MapHierarchyDesignSession``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_hierarchy_design()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_hierarchy_design()`` is ``true``.*

        """
        return # osid.mapping.MapHierarchyDesignSession
    map_hierarchy_design_session = abc.abstractproperty(fget=get_map_hierarchy_design_session)
    @abc.abstractmethod
    def get_mapping_batch_manager(self):
        """Gets the mapping batch manager.

        :return: a ``MappingBatchManager`` for paths
        :rtype: ``osid.mapping.batch.MappingBatchManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_mapping_batch()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_mapping_batch()`` is ``true``.*

        """
        return # osid.mapping.batch.MappingBatchManager
    mapping_batch_manager = abc.abstractproperty(fget=get_mapping_batch_manager)
    @abc.abstractmethod
    def get_mapping_path_manager(self):
        """Gets the mapping path manager.

        :return: a ``MappingPathManager`` for paths
        :rtype: ``osid.mapping.path.MappingPathManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_mapping_path()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_mapping_path()`` is ``true``.*

        """
        return # osid.mapping.path.MappingPathManager
    mapping_path_manager = abc.abstractproperty(fget=get_mapping_path_manager)
    @abc.abstractmethod
    def get_mapping_route_manager(self):
        """Gets the mapping route manager.

        :return: a ``MappingRouteManager`` for routes
        :rtype: ``osid.mapping.route.MappingRouteManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_mapping_route()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_mapping_route()`` is ``true``.*

        """
        return # osid.mapping.route.MappingRouteManager
    mapping_route_manager = abc.abstractproperty(fget=get_mapping_route_manager)
class MappingProxyManager:
"""The mapping proxy manager provides access to mapping sessions and provides interoperability tests for various aspects of this service.
Methods in this manager pass a ``Proxy`` for passing information
from server environments. The sessions included in this manager are:
* ``LocationLookupSession:`` a session to retrieve locations
* ``LocationQuerySession:`` a session to query for locations
* ``LocationSearchSession:`` a session to search for locations
* ``LocationAdminSession:`` a session to create and delete
locations
* ``LocationNotificationSession:`` a session to receive
notifications pertaining to location changes
* ``LocationHierarchySession:`` a session to examine locations in
a hierarchy
* ``LocationHierarchyAssignmentSession:`` a session to traverse
the location hierarchy
* ``LocationMapSession:`` a session to lookup location to map
mappings
* ``LocationMapAssignmentSession:`` a session to manage location
to map mappings
* ``LocationSmartMapSession:`` a session to manage dynamic maps of
locations
* ``LocationAdjacencySession:`` a session to query neighboring
locations
* ``LocationSpatialSession:`` a session to lookup locations
spatially
  * ``ResourceLocationSession:`` a session to query resources at
    locations
* ``ResourceLocationUpdateSession:`` a session to assign resources
to locations
* ``ResourceLocationNotificationSession:`` a session to subscribe
to notifications when resources move among locations
* ``ResourcePositionNotificationSession:`` a session to subscribe
to notifications when resources change positions
* ``MyLocationSession:`` a session to query locations for the user
agent
* ``MapLookupSession:`` a session to retrieve maps
  * ``MapQuerySession:`` a session to query for maps
* ``MapSearchSession:`` a session to search for maps
* ``MapAdminSession:`` a session to create and delete maps
* ``MapNotificationSession:`` a session to receive notifications
pertaining to map changes
* ``MapHierarchySession:`` a session to traverse a hierarchy of
map
* ``MapHierarchyDesignSession:`` a session to manage a map
hierarchy
"""
__metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def get_location_lookup_session(self, proxy):
        """Gets the ``OsidSession`` associated with the location lookup service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationLookupSession``
        :rtype: ``osid.mapping.LocationLookupSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_lookup()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_lookup()`` is ``true``.*

        """
        return # osid.mapping.LocationLookupSession
    @abc.abstractmethod
    def get_location_lookup_session_for_map(self, map_id, proxy):
        """Gets the ``OsidSession`` associated with the location lookup service for the given map.

        :param map_id: the ``Id`` of the map
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationLookupSession``
        :rtype: ``osid.mapping.LocationLookupSession``
        :raise: ``NotFound`` -- no ``Map`` found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_lookup()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_lookup()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.LocationLookupSession
    @abc.abstractmethod
    def get_location_query_session(self, proxy):
        """Gets the ``OsidSession`` associated with the location query service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationQuerySession``
        :rtype: ``osid.mapping.LocationQuerySession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_query()`` is ``true``.*

        """
        return # osid.mapping.LocationQuerySession
    @abc.abstractmethod
    def get_location_query_session_for_map(self, map_id, proxy):
        """Gets the ``OsidSession`` associated with the location query service for the given map.

        :param map_id: the ``Id`` of the map
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationQuerySession``
        :rtype: ``osid.mapping.LocationQuerySession``
        :raise: ``NotFound`` -- no ``Map`` found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_query()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_query()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.LocationQuerySession
    @abc.abstractmethod
    def get_location_search_session(self, proxy):
        """Gets the ``OsidSession`` associated with the location search service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationSearchSession``
        :rtype: ``osid.mapping.LocationSearchSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_search()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_search()`` is ``true``.*

        """
        return # osid.mapping.LocationSearchSession
    @abc.abstractmethod
    def get_location_search_session_for_map(self, map_id, proxy):
        """Gets the ``OsidSession`` associated with the location search service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationSearchSession``
        :rtype: ``osid.mapping.LocationSearchSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_search()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_search()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.LocationSearchSession
    @abc.abstractmethod
    def get_location_admin_session(self, proxy):
        """Gets the ``OsidSession`` associated with the location administration service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationAdminSession``
        :rtype: ``osid.mapping.LocationAdminSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_admin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_admin()`` is ``true``.*

        """
        return # osid.mapping.LocationAdminSession
    @abc.abstractmethod
    def get_location_admin_session_for_map(self, map_id, proxy):
        """Gets the ``OsidSession`` associated with the location administration service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationAdminSession``
        :rtype: ``osid.mapping.LocationAdminSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_admin()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_admin()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return # osid.mapping.LocationAdminSession
    @abc.abstractmethod
    def get_location_notification_session(self, location_receiver, proxy):
        """Gets the ``OsidSession`` associated with the location notification service.

        :param location_receiver: the notification callback
        :type location_receiver: ``osid.mapping.LocationReceiver``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationNotificationSession``
        :rtype: ``osid.mapping.LocationNotificationSession``
        :raise: ``NullArgument`` -- ``location_receiver`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_notification()`` is ``true``.*

        """
        return  # osid.mapping.LocationNotificationSession
    @abc.abstractmethod
    def get_location_notification_session_for_map(self, location_receiver, map_id, proxy):
        """Gets the ``OsidSession`` associated with the location notification service for the given map.

        :param location_receiver: the notification callback
        :type location_receiver: ``osid.mapping.LocationReceiver``
        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationNotificationSession``
        :rtype: ``osid.mapping.LocationNotificationSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``location_receiver, map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_notification()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_notification()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return  # osid.mapping.LocationNotificationSession
    @abc.abstractmethod
    def get_location_hierarchy_session(self, proxy):
        """Gets the ``OsidSession`` associated with the location hierarchy service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationHierarchySession``
        :rtype: ``osid.mapping.LocationHierarchySession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_hierarchy()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_hierarchy()`` is ``true``.*

        """
        return  # osid.mapping.LocationHierarchySession
    @abc.abstractmethod
    def get_location_hierarchy_session_for_map(self, map_id, proxy):
        """Gets the ``OsidSession`` associated with the location hierarchy service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationHierarchySession``
        :rtype: ``osid.mapping.LocationHierarchySession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_hierarchy()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_hierarchy()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return  # osid.mapping.LocationHierarchySession
    @abc.abstractmethod
    def get_location_hierarchy_design_session(self, proxy):
        """Gets the ``OsidSession`` associated with the location hierarchy design service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationHierarchyDesignSession``
        :rtype: ``osid.mapping.LocationHierarchyDesignSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_hierarchy_design()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_hierarchy_design()`` is ``true``.*

        """
        return  # osid.mapping.LocationHierarchyDesignSession
    @abc.abstractmethod
    def get_location_hierarchy_design_session_for_map(self, map_id, proxy):
        """Gets the ``OsidSession`` associated with the location hierarchy design service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationHierarchyDesignSession``
        :rtype: ``osid.mapping.LocationHierarchyDesignSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_hierarchy_design()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_hierarchy_design()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return  # osid.mapping.LocationHierarchyDesignSession
    @abc.abstractmethod
    def get_location_map_session(self, proxy):
        """Gets the ``OsidSession`` to lookup location/map mappings.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationMapSession``
        :rtype: ``osid.mapping.LocationMapSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_map()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_map()`` is ``true``.*

        """
        return  # osid.mapping.LocationMapSession
    @abc.abstractmethod
    def get_location_map_assignment_session(self, proxy):
        """Gets the ``OsidSession`` associated with assigning locations to maps.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationMapAssignmentSession``
        :rtype: ``osid.mapping.LocationMapAssignmentSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_map_assignment()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_map_assignment()`` is ``true``.*

        """
        return  # osid.mapping.LocationMapAssignmentSession
    @abc.abstractmethod
    def get_location_smart_map_session(self, map_id, proxy):
        """Gets the ``OsidSession`` to manage location smart maps.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationSmartMapSession``
        :rtype: ``osid.mapping.LocationSmartMapSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_smart_map()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_smart_map()`` is ``true``.*

        """
        return  # osid.mapping.LocationSmartMapSession
    @abc.abstractmethod
    def get_location_adjacency_session(self, proxy):
        """Gets the ``OsidSession`` associated with the location adjacency service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationAdjacencySession``
        :rtype: ``osid.mapping.LocationAdjacencySession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_adjacency()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_adjacency()`` is ``true``.*

        """
        return  # osid.mapping.LocationAdjacencySession
    @abc.abstractmethod
    def get_location_adjacency_session_for_map(self, map_id, proxy):
        """Gets the ``OsidSession`` associated with the location adjacency service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationAdjacencySession``
        :rtype: ``osid.mapping.LocationAdjacencySession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_adjacency()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_adjacency()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return  # osid.mapping.LocationAdjacencySession
    @abc.abstractmethod
    def get_location_spatial_session(self, proxy):
        """Gets the ``OsidSession`` associated with the location spatial service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationSpatialSession``
        :rtype: ``osid.mapping.LocationSpatialSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_spatial()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_spatial()`` is ``true``.*

        """
        return  # osid.mapping.LocationSpatialSession
    @abc.abstractmethod
    def get_location_spatial_session_for_map(self, map_id, proxy):
        """Gets the ``OsidSession`` associated with the location spatial service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``LocationSpatialSession``
        :rtype: ``osid.mapping.LocationSpatialSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_location_spatial()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_location_spatial()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return  # osid.mapping.LocationSpatialSession
    @abc.abstractmethod
    def get_resource_location_session(self, proxy):
        """Gets the ``OsidSession`` associated with the resource location service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceLocationSession``
        :rtype: ``osid.mapping.ResourceLocationSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_location()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_location()`` is ``true``.*

        """
        return  # osid.mapping.ResourceLocationSession
    @abc.abstractmethod
    def get_resource_location_session_for_map(self, map_id, proxy):
        """Gets the ``OsidSession`` associated with the resource location service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceLocationSession``
        :rtype: ``osid.mapping.ResourceLocationSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_location()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_location()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return  # osid.mapping.ResourceLocationSession
    @abc.abstractmethod
    def get_resource_location_update_session(self, proxy):
        """Gets the ``OsidSession`` associated with the resource location update service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceLocationUpdateSession``
        :rtype: ``osid.mapping.ResourceLocationUpdateSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_location_update()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_location_update()`` is ``true``.*

        """
        return  # osid.mapping.ResourceLocationUpdateSession
    @abc.abstractmethod
    def get_resource_location_update_session_for_map(self, map_id, proxy):
        """Gets the ``OsidSession`` associated with the resource location update service for the given map.

        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceLocationUpdateSession``
        :rtype: ``osid.mapping.ResourceLocationUpdateSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_location_update()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_location_update()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return  # osid.mapping.ResourceLocationUpdateSession
    @abc.abstractmethod
    def get_resource_location_notification_session(self, resource_location_receiver, proxy):
        """Gets the ``OsidSession`` associated with the resource location notification service.

        :param resource_location_receiver: the notification callback
        :type resource_location_receiver: ``osid.mapping.ResourceLocationReceiver``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceLocationNotificationSession``
        :rtype: ``osid.mapping.ResourceLocationNotificationSession``
        :raise: ``NullArgument`` -- ``resource_location_receiver`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_location_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_location_notification()`` is ``true``.*

        """
        return  # osid.mapping.ResourceLocationNotificationSession
    @abc.abstractmethod
    def get_resource_location_notification_session_for_map(self, resource_location_receiver, map_id, proxy):
        """Gets the ``OsidSession`` associated with the resource location notification service for the given map.

        :param resource_location_receiver: the notification callback
        :type resource_location_receiver: ``osid.mapping.ResourceLocationReceiver``
        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourceLocationNotificationSession``
        :rtype: ``osid.mapping.ResourceLocationNotificationSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``resource_location_receiver, map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_location_notification()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_location_notification()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return  # osid.mapping.ResourceLocationNotificationSession
    @abc.abstractmethod
    def get_resource_position_notification_session(self, resource_position_receiver, proxy):
        """Gets the ``OsidSession`` associated with the resource position notification service.

        :param resource_position_receiver: the notification callback
        :type resource_position_receiver: ``osid.mapping.ResourcePositionReceiver``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourcePositionNotificationSession``
        :rtype: ``osid.mapping.ResourcePositionNotificationSession``
        :raise: ``NullArgument`` -- ``resource_position_receiver`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_position_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_position_notification()`` is ``true``.*

        """
        # NOTE(review): the original ``:rtype:`` and trailing comment said
        # ``ResourceLocationNotificationSession``; corrected to the *position*
        # session to match the method name, receiver type, and ``:return:`` line.
        return  # osid.mapping.ResourcePositionNotificationSession
    @abc.abstractmethod
    def get_resource_position_notification_session_for_map(self, resource_position_receiver, map_id, proxy):
        """Gets the ``OsidSession`` associated with the resource position notification service for the given map.

        :param resource_position_receiver: the notification callback
        :type resource_position_receiver: ``osid.mapping.ResourcePositionReceiver``
        :param map_id: the ``Id`` of the ``Map``
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``ResourcePositionNotificationSession``
        :rtype: ``osid.mapping.ResourcePositionNotificationSession``
        :raise: ``NotFound`` -- no map found by the given ``Id``
        :raise: ``NullArgument`` -- ``resource_position_receiver, map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_resource_position_notification()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_resource_position_notification()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return  # osid.mapping.ResourcePositionNotificationSession
    @abc.abstractmethod
    def get_my_location_session(self, proxy):
        """Gets the ``OsidSession`` associated with the my location service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``MyLocationLookupSession``
        :rtype: ``osid.mapping.MyLocationSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_my_location_lookup()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_my_location_lookup()`` is ``true``.*

        """
        # NOTE(review): ``:return:`` says ``MyLocationLookupSession`` while
        # ``:rtype:`` says ``MyLocationSession`` -- this mirrors the upstream
        # OSID spec wording; confirm against the spec before changing.
        return  # osid.mapping.MyLocationSession
    @abc.abstractmethod
    def get_my_location_session_for_map(self, map_id, proxy):
        """Gets the ``OsidSession`` associated with the my location service for the given map.

        :param map_id: the ``Id`` of the map
        :type map_id: ``osid.id.Id``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``MyLocationLookupSession``
        :rtype: ``osid.mapping.MyLocationSession``
        :raise: ``NotFound`` -- no ``Map`` found by the given ``Id``
        :raise: ``NullArgument`` -- ``map_id`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_my_location_lookup()`` or ``supports_visible_federation()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_my_location_lookup()`` and
        ``supports_visible_federation()`` are ``true``*

        """
        return  # osid.mapping.MyLocationSession
    @abc.abstractmethod
    def get_map_lookup_session(self, proxy):
        """Gets the ``OsidSession`` associated with the map lookup service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``MapLookupSession``
        :rtype: ``osid.mapping.MapLookupSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_lookup()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_lookup()`` is ``true``.*

        """
        return  # osid.mapping.MapLookupSession
    @abc.abstractmethod
    def get_map_query_session(self, proxy):
        """Gets the ``OsidSession`` associated with the map query service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``MapQuerySession``
        :rtype: ``osid.mapping.MapQuerySession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_query()`` is ``true``.*

        """
        return  # osid.mapping.MapQuerySession
    @abc.abstractmethod
    def get_map_search_session(self, proxy):
        """Gets the ``OsidSession`` associated with the map search service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``MapSearchSession``
        :rtype: ``osid.mapping.MapSearchSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_search()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_search()`` is ``true``.*

        """
        return  # osid.mapping.MapSearchSession
    @abc.abstractmethod
    def get_map_admin_session(self, proxy):
        """Gets the ``OsidSession`` associated with the map administrative service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``MapAdminSession``
        :rtype: ``osid.mapping.MapAdminSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_admin()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_admin()`` is ``true``.*

        """
        return  # osid.mapping.MapAdminSession
    @abc.abstractmethod
    def get_map_notification_session(self, map_receiver, proxy):
        """Gets the ``OsidSession`` associated with the map notification service.

        :param map_receiver: the notification callback
        :type map_receiver: ``osid.mapping.MapReceiver``
        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``MapNotificationSession``
        :rtype: ``osid.mapping.MapNotificationSession``
        :raise: ``NullArgument`` -- ``map_receiver`` or ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_notification()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_notification()`` is ``true``.*

        """
        return  # osid.mapping.MapNotificationSession
    @abc.abstractmethod
    def get_map_hierarchy_session(self, proxy):
        """Gets the ``OsidSession`` associated with the map hierarchy service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``MapHierarchySession`` for maps
        :rtype: ``osid.mapping.MapHierarchySession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_hierarchy()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_hierarchy()`` is ``true``.*

        """
        return  # osid.mapping.MapHierarchySession
    @abc.abstractmethod
    def get_map_hierarchy_design_session(self, proxy):
        """Gets the ``OsidSession`` associated with the map hierarchy design service.

        :param proxy: a proxy
        :type proxy: ``osid.proxy.Proxy``
        :return: a ``HierarchyDesignSession`` for maps
        :rtype: ``osid.mapping.MapHierarchyDesignSession``
        :raise: ``NullArgument`` -- ``proxy`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_map_hierarchy_design()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_map_hierarchy_design()`` is ``true``.*

        """
        return  # osid.mapping.MapHierarchyDesignSession
    @abc.abstractmethod
    def get_mapping_batch_proxy_manager(self):
        """Gets the mapping batch manager.

        :return: a ``MappingBatchProxyManager`` for paths
        :rtype: ``osid.mapping.batch.MappingBatchProxyManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_mapping_batch()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_mapping_batch()`` is ``true``.*

        """
        return  # osid.mapping.batch.MappingBatchProxyManager

    # Property alias so implementations can expose the sub-manager as an attribute.
    mapping_batch_proxy_manager = abc.abstractproperty(fget=get_mapping_batch_proxy_manager)
    @abc.abstractmethod
    def get_mapping_path_proxy_manager(self):
        """Gets the mapping path manager.

        :return: a ``MappingPathProxyManager`` for paths
        :rtype: ``osid.mapping.path.MappingPathProxyManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_mapping_path()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_mapping_path()`` is ``true``.*

        """
        return  # osid.mapping.path.MappingPathProxyManager

    # Property alias so implementations can expose the sub-manager as an attribute.
    mapping_path_proxy_manager = abc.abstractproperty(fget=get_mapping_path_proxy_manager)
    @abc.abstractmethod
    def get_mapping_route_proxy_manager(self):
        """Gets the mapping route manager.

        :return: a ``MappingRouteProxyManager`` for routes
        :rtype: ``osid.mapping.route.MappingRouteProxyManager``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unimplemented`` -- ``supports_mapping_route()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_mapping_route()`` is ``true``.*

        """
        return  # osid.mapping.route.MappingRouteProxyManager

    # Property alias so implementations can expose the sub-manager as an attribute.
    mapping_route_proxy_manager = abc.abstractproperty(fget=get_mapping_route_proxy_manager)
| 40.225217
| 141
| 0.650846
| 9,471
| 92,518
| 6.208637
| 0.027452
| 0.033298
| 0.042856
| 0.034285
| 0.958114
| 0.942315
| 0.916618
| 0.870753
| 0.827778
| 0.799769
| 0
| 0
| 0.225383
| 92,518
| 2,299
| 142
| 40.242714
| 0.820501
| 0.71705
| 0
| 0.610048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.301435
| false
| 0
| 0.002392
| 0
| 0.698565
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
dd04656fde2921ea52fb6414a18b7b307a09b818
| 174
|
py
|
Python
|
bolinette/core/models/__init__.py
|
bolinette/bolinette
|
b35a7d828c7d9617da6a8d7ac066e3b675a65252
|
[
"MIT"
] | 4
|
2020-11-02T15:16:32.000Z
|
2022-01-11T11:19:24.000Z
|
bolinette/core/models/__init__.py
|
bolinette/bolinette
|
b35a7d828c7d9617da6a8d7ac066e3b675a65252
|
[
"MIT"
] | 14
|
2021-01-04T11:06:59.000Z
|
2022-03-23T17:01:49.000Z
|
bolinette/core/models/__init__.py
|
bolinette/bolinette
|
b35a7d828c7d9617da6a8d7ac066e3b675a65252
|
[
"MIT"
] | null | null | null |
from bolinette.core.models.model import Model, ModelMetadata, ModelProperty
from bolinette.core.models.attributes import Column, ColumnList, Backref, Reference, Relationship
| 58
| 97
| 0.850575
| 20
| 174
| 7.4
| 0.7
| 0.175676
| 0.22973
| 0.310811
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08046
| 174
| 2
| 98
| 87
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dd3463f88dabb317552923808d12d192d8e62b95
| 27,798
|
py
|
Python
|
scripts/achived/standard_regression_within_experiment_n_trials_back.py
|
nmningmei/metacognition
|
734082e247cc7fc9d277563e2676e10692617a3f
|
[
"MIT"
] | 3
|
2019-07-09T15:37:46.000Z
|
2019-07-17T16:28:02.000Z
|
scripts/achived/standard_regression_within_experiment_n_trials_back.py
|
nmningmei/metacognition
|
734082e247cc7fc9d277563e2676e10692617a3f
|
[
"MIT"
] | null | null | null |
scripts/achived/standard_regression_within_experiment_n_trials_back.py
|
nmningmei/metacognition
|
734082e247cc7fc9d277563e2676e10692617a3f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 15 16:02:16 2018

@author: ning

Setup for a within-experiment standard (logistic) regression using
features from n trials back; the fitting loop follows below.
"""
import os
working_dir = ''
import pandas as pd
# Silence SettingWithCopyWarning: the loop below assigns via df_sub.loc on
# groupby slices deliberately.
pd.options.mode.chained_assignment = None
import statsmodels.formula.api as sm
import numpy as np
from sklearn.preprocessing import StandardScaler
result_dir = '../results/'
# Exp 1
experiment = 'pos'
# Load the "proportion of success" (PoS) behavioral data.
df = pd.read_csv(os.path.join(working_dir, '../data/PoSdata.csv'))
# Drop the first (unnamed index) column left over from the CSV export.
df = df[df.columns[1:]]
df.columns = ['participant',
              'blocks',
              'trials',
              'firstgabor',
              'success',
              'tilted',
              'correct',
              'RT_correct',
              'awareness',
              'RT_awareness',
              'confidence',
              'RT_confidence']
np.random.seed(12345)
# Accumulator for per-subject / per-window model results; converted to a
# DataFrame after the fitting loop (not shown here).
results = dict(sub=[],
               model=[],
               score=[],
               window=[],
               correct=[],
               awareness=[],
               confidence=[],
               RT_correct=[],
               RT_awareness=[],
               RT_confidence=[],
               r2=[],
               intercept=[],
               )
# use all 6 possible features
for n_back in range(11): # loop through the number of trials looking back
for participant,df_sub in df.groupby('participant'):# for each subject
# make sure all the attributes are either 0 or 1
df_sub.loc[:,'success' ] = df_sub.loc[:,'success' ].values - 1
df_sub.loc[:,'awareness' ] = df_sub.loc[:,'awareness' ].values - 1
df_sub.loc[:,'confidence'] = df_sub.loc[:,'confidence'].values - 1
df_sub['intercept'] = 1
feature_names = ['intercept',
'correct',
'awareness',
'confidence',
'RT_correct',
'RT_awareness',
'RT_confidence']
target_name = 'success'
features, targets = [],[]
for block, df_block in df_sub.groupby('blocks'):
# preparing the features and target by shifting the feature columns up
# and shifting the target column down
feature = (df_block[feature_names].shift(n_back) # shift downward so that the first n_back rows are gone
.dropna() # since some of rows are gone, so they are nans
.values # I only need the matrix not the data frame
)
target = (df_block[target_name].shift(-n_back) # same thing for the target, but shifting upward, and the last n_back rows are gone
.dropna()
.values
)
features.append(feature)
targets.append(target)
features = np.concatenate(features)
targets = np.concatenate(targets)
df_ = pd.DataFrame(features,columns = feature_names)
df_[target_name] = targets
model = sm.Logit(df_[target_name],df_[feature_names])
temp = model.fit(method='lbfgs')
results['sub'].append(participant)
results['model'].append('logistic')
results['score'].append([temp.bic,temp.aic])
results['window'].append(n_back)
for name in feature_names:
results[name].append([temp.params[name],
temp.bse[name],
temp.tvalues[name],
temp.pvalues[name],
temp.conf_int().loc[name][0],
temp.conf_int().loc[name][1],
])
results['r2'].append(temp.prsquared)
c = pd.DataFrame(results) # tansform a dictionary object to a data frame
for name in feature_names:
temp = c[name].to_frame()
temp[name+'_coef'] = np.vstack(temp[name].values)[:,0]
temp[name+'_se'] = np.vstack(temp[name].values)[:,1]
temp[name+'_tval'] = np.vstack(temp[name].values)[:,2]
temp[name+'_pval'] = np.vstack(temp[name].values)[:,3]
temp[name+'_lower'] = np.vstack(temp[name].values)[:,4]
temp[name+'_upper'] = np.vstack(temp[name].values)[:,5]
for k_name in temp.columns[1:]:
c[k_name] = temp[k_name].values
c = c.drop(name,axis=1)
c.to_csv(os.path.join(result_dir,'pos_logistic_statsmodel_6_features.csv'),index=False)
c = pd.read_csv(os.path.join(result_dir,'pos_logistic_statsmodel_6_features.csv'))
j = c.groupby('window').mean().reset_index()
j.to_csv(os.path.join(result_dir,'pos_logistic_statsmodel_mean_6_features.csv'),index=False)
##############################################################################################################
# 3 judgement features
experiment = 'pos'
df = pd.read_csv(os.path.join(working_dir,'../data/PoSdata.csv'))
df = df[df.columns[1:]]
df.columns = ['participant',
'blocks',
'trials',
'firstgabor',
'success',
'tilted',
'correct',
'RT_correct',
'awareness',
'RT_awareness',
'confidence',
'RT_confidence']
np.random.seed(12345)
results = dict(sub = [],
model = [],
score = [],
window = [],
correct = [],
awareness = [],
confidence = [],
r2 = [],
intercept = [],
)
# use all 6 possible features
for n_back in range(11): # loop through the number of trials looking back
for participant,df_sub in df.groupby('participant'):# for each subject
# make sure all the attributes are either 0 or 1
df_sub.loc[:,'success' ] = df_sub.loc[:,'success' ].values - 1
df_sub.loc[:,'awareness' ] = df_sub.loc[:,'awareness' ].values - 1
df_sub.loc[:,'confidence'] = df_sub.loc[:,'confidence'].values - 1
df_sub['intercept'] = 1
feature_names = ['intercept',
'correct',
'awareness',
'confidence',]
target_name = 'success'
features, targets = [],[]
for block, df_block in df_sub.groupby('blocks'):
# preparing the features and target by shifting the feature columns up
# and shifting the target column down
feature = (df_block[feature_names].shift(n_back) # shift downward so that the first n_back rows are gone
.dropna() # since some of rows are gone, so they are nans
.values # I only need the matrix not the data frame
)
target = (df_block[target_name].shift(-n_back) # same thing for the target, but shifting upward, and the last n_back rows are gone
.dropna()
.values
)
features.append(feature)
targets.append(target)
features = np.concatenate(features)
targets = np.concatenate(targets)
df_ = pd.DataFrame(features,columns = feature_names)
df_[target_name] = targets
model = sm.Logit(df_[target_name],df_[feature_names])
temp = model.fit(method='lbfgs')
results['sub'].append(participant)
results['model'].append('logistic')
results['score'].append([temp.bic,temp.aic])
results['window'].append(n_back)
for name in feature_names:
results[name].append([temp.params[name],
temp.bse[name],
temp.tvalues[name],
temp.pvalues[name],
temp.conf_int().loc[name][0],
temp.conf_int().loc[name][1],
])
results['r2'].append(temp.prsquared)
c = pd.DataFrame(results) # tansform a dictionary object to a data frame
for name in feature_names:
temp = c[name].to_frame()
temp[name+'_coef'] = np.vstack(temp[name].values)[:,0]
temp[name+'_se'] = np.vstack(temp[name].values)[:,1]
temp[name+'_tval'] = np.vstack(temp[name].values)[:,2]
temp[name+'_pval'] = np.vstack(temp[name].values)[:,3]
temp[name+'_lower'] = np.vstack(temp[name].values)[:,4]
temp[name+'_upper'] = np.vstack(temp[name].values)[:,5]
for k_name in temp.columns[1:]:
c[k_name] = temp[k_name].values
c = c.drop(name,axis=1)
c.to_csv(os.path.join(result_dir,'pos_logistic_statsmodel_3_1_features.csv'),index=False)
c = pd.read_csv(os.path.join(result_dir,'pos_logistic_statsmodel_3_1_features.csv'))
j = c.groupby('window').mean().reset_index()
j.to_csv(os.path.join(result_dir,'pos_logistic_statsmodel_mean_3_1_features.csv'),index=False)
#####################################################################################################################################
# RT features
experiment = 'pos'
df = pd.read_csv(os.path.join(working_dir,'../data/PoSdata.csv'))
df = df[df.columns[1:]]
df.columns = ['participant',
'blocks',
'trials',
'firstgabor',
'success',
'tilted',
'correct',
'RT_correct',
'awareness',
'RT_awareness',
'confidence',
'RT_confidence']
np.random.seed(12345)
results = dict(sub = [],
model = [],
score = [],
window = [],
RT_correct = [],
RT_awareness = [],
RT_confidence = [],
r2 = [],
intercept = [],
)
# use all 6 possible features
for n_back in range(11): # loop through the number of trials looking back
for participant,df_sub in df.groupby('participant'):# for each subject
# make sure all the attributes are either 0 or 1
df_sub.loc[:,'success' ] = df_sub.loc[:,'success' ].values - 1
df_sub.loc[:,'awareness' ] = df_sub.loc[:,'awareness' ].values - 1
df_sub.loc[:,'confidence'] = df_sub.loc[:,'confidence'].values - 1
df_sub['intercept'] = 1
feature_names = ['intercept',
'RT_correct',
'RT_awareness',
'RT_confidence']
target_name = 'success'
features, targets = [],[]
for block, df_block in df_sub.groupby('blocks'):
# preparing the features and target by shifting the feature columns up
# and shifting the target column down
feature = (df_block[feature_names].shift(n_back) # shift downward so that the first n_back rows are gone
.dropna() # since some of rows are gone, so they are nans
.values # I only need the matrix not the data frame
)
target = (df_block[target_name].shift(-n_back) # same thing for the target, but shifting upward, and the last n_back rows are gone
.dropna()
.values
)
features.append(feature)
targets.append(target)
features = np.concatenate(features)
targets = np.concatenate(targets)
df_ = pd.DataFrame(features,columns = feature_names)
df_[target_name] = targets
model = sm.Logit(df_[target_name],df_[feature_names])
temp = model.fit(method='lbfgs')
results['sub'].append(participant)
results['model'].append('logistic')
results['score'].append([temp.bic,temp.aic])
results['window'].append(n_back)
for name in feature_names:
results[name].append([temp.params[name],
temp.bse[name],
temp.tvalues[name],
temp.pvalues[name],
temp.conf_int().loc[name][0],
temp.conf_int().loc[name][1],
])
results['r2'].append(temp.prsquared)
c = pd.DataFrame(results) # tansform a dictionary object to a data frame
for name in feature_names:
temp = c[name].to_frame()
temp[name+'_coef'] = np.vstack(temp[name].values)[:,0]
temp[name+'_se'] = np.vstack(temp[name].values)[:,1]
temp[name+'_tval'] = np.vstack(temp[name].values)[:,2]
temp[name+'_pval'] = np.vstack(temp[name].values)[:,3]
temp[name+'_lower'] = np.vstack(temp[name].values)[:,4]
temp[name+'_upper'] = np.vstack(temp[name].values)[:,5]
for k_name in temp.columns[1:]:
c[k_name] = temp[k_name].values
c = c.drop(name,axis=1)
c.to_csv(os.path.join(result_dir,'pos_logistic_statsmodel_RT_features.csv'),index=False)
c = pd.read_csv(os.path.join(result_dir,'pos_logistic_statsmodel_RT_features.csv'))
j = c.groupby('window').mean().reset_index()
j.to_csv(os.path.join(result_dir,'pos_logistic_statsmodel_mean_RT_features.csv'),index=False)
#####################################################################################################
#####################################################################################################
#####################################################################################################
################################ ATT #####################################
#####################################################################################################
experiment = 'att'
df = pd.read_csv(os.path.join(working_dir,'../data/ATTfoc.csv'))
df = df[df.columns[1:]]
df.columns = ['participant',
'blocks',
'trials',
'firstgabor',
'attention',
'tilted',
'correct',
'RT_correct',
'awareness',
'RT_awareness',
'confidence',
'RT_confidence']
np.random.seed(12345)
results = dict(sub = [],
model = [],
score = [],
window = [],
correct = [],
awareness = [],
confidence = [],
RT_correct = [],
RT_awareness = [],
RT_confidence = [],
r2 = [],
intercept = [],
)
# use all 6 features
for n_back in range(11):# loop through the number of trials you want to look back
for participant,df_sub in df.groupby('participant'):# loop through each subject
# make sure all the attributes are either 0 or 1
df_sub.loc[:,'attention' ] = df_sub.loc[:,'attention' ].values - 1
df_sub.loc[:,'awareness' ] = df_sub.loc[:,'awareness' ].values - 1
df_sub.loc[:,'confidence'] = df_sub.loc[:,'confidence'].values - 1
df_sub['intercept'] = 1
feature_names = ['intercept',
'correct',
'awareness',
'confidence',
'RT_correct',
'RT_awareness',
'RT_confidence']
target_name = 'attention'
features, targets = [],[]
for block, df_block in df_sub.groupby('blocks'):
# preparing the features and target by shifting the feature columns up
# and shifting the target column down
feature = (df_block[feature_names].shift(n_back) # shift downward so that the first n_back rows are gone
.dropna() # since some of rows are gone, so they are nans
.values # I only need the matrix not the data frame
)
target = (df_block[target_name].shift(-n_back) # same thing for the target, but shifting upward, and the last n_back rows are gone
.dropna()
.values
)
features.append(feature)
targets.append(target)
features = np.concatenate(features)
targets = np.concatenate(targets)
df_ = pd.DataFrame(features,columns = feature_names)
df_[target_name] = targets
model = sm.Logit(df_[target_name],df_[feature_names])
temp = model.fit(method='lbfgs')
results['sub'].append(participant)
results['model'].append('logistic')
results['score'].append([temp.bic,temp.aic])
results['window'].append(n_back)
for name in feature_names:
results[name].append([temp.params[name],
temp.bse[name],
temp.tvalues[name],
temp.pvalues[name],
temp.conf_int().loc[name][0],
temp.conf_int().loc[name][1],
])
results['r2'].append(temp.prsquared)
c = pd.DataFrame(results) # tansform a dictionary object to a data frame
for name in feature_names:
temp = c[name].to_frame()
temp[name+'_coef'] = np.vstack(temp[name].values)[:,0]
temp[name+'_se'] = np.vstack(temp[name].values)[:,1]
temp[name+'_tval'] = np.vstack(temp[name].values)[:,2]
temp[name+'_pval'] = np.vstack(temp[name].values)[:,3]
temp[name+'_lower'] = np.vstack(temp[name].values)[:,4]
temp[name+'_upper'] = np.vstack(temp[name].values)[:,5]
for k_name in temp.columns[1:]:
c[k_name] = temp[k_name].values
c = c.drop(name,axis=1)
c.to_csv(os.path.join(result_dir,'att_logistic_statsmodel_6_features.csv'),index=False)
c = pd.read_csv(os.path.join(result_dir,'att_logistic_statsmodel_6_features.csv'))
j = c.groupby('window').mean().reset_index()
j.to_csv(os.path.join(result_dir,'att_logistic_statsmodel_mean_6_features.csv'),index=False)
#######################################################################################################################################
# 3 judgement features
experiment = 'att'
df = pd.read_csv(os.path.join(working_dir,'../data/ATTfoc.csv'))
df = df[df.columns[1:]]
df.columns = ['participant',
'blocks',
'trials',
'firstgabor',
'attention',
'tilted',
'correct',
'RT_correct',
'awareness',
'RT_awareness',
'confidence',
'RT_confidence']
np.random.seed(12345)
results = dict(sub = [],
model = [],
score = [],
window = [],
correct = [],
awareness = [],
confidence = [],
r2 = [],
intercept = [],
)
# use all 6 features
for n_back in range(11):# loop through the number of trials you want to look back
for participant,df_sub in df.groupby('participant'):# loop through each subject
# make sure all the attributes are either 0 or 1
df_sub.loc[:,'attention' ] = df_sub.loc[:,'attention' ].values - 1
df_sub.loc[:,'awareness' ] = df_sub.loc[:,'awareness' ].values - 1
df_sub.loc[:,'confidence'] = df_sub.loc[:,'confidence'].values - 1
df_sub['intercept'] = 1
feature_names = ['intercept',
'correct',
'awareness',
'confidence',
]
target_name = 'attention'
features, targets = [],[]
for block, df_block in df_sub.groupby('blocks'):
# preparing the features and target by shifting the feature columns up
# and shifting the target column down
feature = (df_block[feature_names].shift(n_back) # shift downward so that the first n_back rows are gone
.dropna() # since some of rows are gone, so they are nans
.values # I only need the matrix not the data frame
)
target = (df_block[target_name].shift(-n_back) # same thing for the target, but shifting upward, and the last n_back rows are gone
.dropna()
.values
)
features.append(feature)
targets.append(target)
features = np.concatenate(features)
targets = np.concatenate(targets)
df_ = pd.DataFrame(features,columns = feature_names)
df_[target_name] = targets
model = sm.Logit(df_[target_name],df_[feature_names])
temp = model.fit(method='lbfgs')
results['sub'].append(participant)
results['model'].append('logistic')
results['score'].append([temp.bic,temp.aic])
results['window'].append(n_back)
for name in feature_names:
results[name].append([temp.params[name],
temp.bse[name],
temp.tvalues[name],
temp.pvalues[name],
temp.conf_int().loc[name][0],
temp.conf_int().loc[name][1],
])
results['r2'].append(temp.prsquared)
c = pd.DataFrame(results) # tansform a dictionary object to a data frame
for name in feature_names:
temp = c[name].to_frame()
temp[name+'_coef'] = np.vstack(temp[name].values)[:,0]
temp[name+'_se'] = np.vstack(temp[name].values)[:,1]
temp[name+'_tval'] = np.vstack(temp[name].values)[:,2]
temp[name+'_pval'] = np.vstack(temp[name].values)[:,3]
temp[name+'_lower'] = np.vstack(temp[name].values)[:,4]
temp[name+'_upper'] = np.vstack(temp[name].values)[:,5]
for k_name in temp.columns[1:]:
c[k_name] = temp[k_name].values
c = c.drop(name,axis=1)
c.to_csv(os.path.join(result_dir,'att_logistic_statsmodel_3_1_features.csv'),index=False)
c = pd.read_csv(os.path.join(result_dir,'att_logistic_statsmodel_3_1_features.csv'))
j = c.groupby('window').mean().reset_index()
j.to_csv(os.path.join(result_dir,'att_logistic_statsmodel_mean_3_1_features.csv'),index=False)
####################################################################################################################################
# RT features
experiment = 'att'
df = pd.read_csv(os.path.join(working_dir,'../data/ATTfoc.csv'))
df = df[df.columns[1:]]
df.columns = ['participant',
'blocks',
'trials',
'firstgabor',
'attention',
'tilted',
'correct',
'RT_correct',
'awareness',
'RT_awareness',
'confidence',
'RT_confidence']
np.random.seed(12345)
results = dict(sub = [],
model = [],
score = [],
window = [],
RT_correct = [],
RT_awareness = [],
RT_confidence = [],
r2 = [],
intercept = [],
)
# use all 6 features
for n_back in range(11):# loop through the number of trials you want to look back
for participant,df_sub in df.groupby('participant'):# loop through each subject
# make sure all the attributes are either 0 or 1
df_sub.loc[:,'attention' ] = df_sub.loc[:,'attention' ].values - 1
df_sub.loc[:,'awareness' ] = df_sub.loc[:,'awareness' ].values - 1
df_sub.loc[:,'confidence'] = df_sub.loc[:,'confidence'].values - 1
df_sub['intercept'] = 1
feature_names = ['intercept',
'RT_correct',
'RT_awareness',
'RT_confidence']
target_name = 'attention'
features, targets = [],[]
for block, df_block in df_sub.groupby('blocks'):
# preparing the features and target by shifting the feature columns up
# and shifting the target column down
feature = (df_block[feature_names].shift(n_back) # shift downward so that the first n_back rows are gone
.dropna() # since some of rows are gone, so they are nans
.values # I only need the matrix not the data frame
)
target = (df_block[target_name].shift(-n_back) # same thing for the target, but shifting upward, and the last n_back rows are gone
.dropna()
.values
)
features.append(feature)
targets.append(target)
features = np.concatenate(features)
targets = np.concatenate(targets)
df_ = pd.DataFrame(features,columns = feature_names)
df_[target_name] = targets
model = sm.Logit(df_[target_name],df_[feature_names])
temp = model.fit(method='lbfgs')
results['sub'].append(participant)
results['model'].append('logistic')
results['score'].append([temp.bic,temp.aic])
results['window'].append(n_back)
for name in feature_names:
results[name].append([temp.params[name],
temp.bse[name],
temp.tvalues[name],
temp.pvalues[name],
temp.conf_int().loc[name][0],
temp.conf_int().loc[name][1],
])
results['r2'].append(temp.prsquared)
c = pd.DataFrame(results) # tansform a dictionary object to a data frame
for name in feature_names:
temp = c[name].to_frame()
temp[name+'_coef'] = np.vstack(temp[name].values)[:,0]
temp[name+'_se'] = np.vstack(temp[name].values)[:,1]
temp[name+'_tval'] = np.vstack(temp[name].values)[:,2]
temp[name+'_pval'] = np.vstack(temp[name].values)[:,3]
temp[name+'_lower'] = np.vstack(temp[name].values)[:,4]
temp[name+'_upper'] = np.vstack(temp[name].values)[:,5]
for k_name in temp.columns[1:]:
c[k_name] = temp[k_name].values
c = c.drop(name,axis=1)
c.to_csv(os.path.join(result_dir,'att_logistic_statsmodel_RT_features.csv'),index=False)
c = pd.read_csv(os.path.join(result_dir,'att_logistic_statsmodel_RT_features.csv'))
j = c.groupby('window').mean().reset_index()
j.to_csv(os.path.join(result_dir,'att_logistic_statsmodel_mean_RT_features.csv'),index=False)
| 44.053883
| 149
| 0.491618
| 2,960
| 27,798
| 4.465878
| 0.0625
| 0.043574
| 0.021787
| 0.043574
| 0.982752
| 0.982752
| 0.982752
| 0.982752
| 0.979272
| 0.979272
| 0
| 0.010897
| 0.352975
| 27,798
| 630
| 150
| 44.12381
| 0.724063
| 0.118066
| 0
| 0.912381
| 0
| 0
| 0.118195
| 0.031279
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009524
| 0
| 0.009524
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dd4f74091274e569564ed87d0a4f752b4585a3e3
| 155
|
py
|
Python
|
utils_mongodb/__init__.py
|
BrunoASNascimento/api_aluraflix
|
6eaa26f5316732e14a65ce2cd422a1c81331f10e
|
[
"MIT"
] | null | null | null |
utils_mongodb/__init__.py
|
BrunoASNascimento/api_aluraflix
|
6eaa26f5316732e14a65ce2cd422a1c81331f10e
|
[
"MIT"
] | null | null | null |
utils_mongodb/__init__.py
|
BrunoASNascimento/api_aluraflix
|
6eaa26f5316732e14a65ce2cd422a1c81331f10e
|
[
"MIT"
] | null | null | null |
from .mongo_connection import *
from .read_documents import *
from .create_document import *
from .delete_document import *
from .update_document import *
| 25.833333
| 31
| 0.806452
| 20
| 155
| 6
| 0.5
| 0.333333
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 155
| 5
| 32
| 31
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dd7ffad4c90394e9827410f5c63c8e561c194680
| 192
|
py
|
Python
|
venvctrl/cli/__init__.py
|
flowersjg/venvctrl
|
36d4e0e4d5ebced6385a6ade1198f4769ff2df41
|
[
"MIT"
] | 4
|
2015-04-20T15:13:34.000Z
|
2020-11-24T04:57:47.000Z
|
venvctrl/cli/__init__.py
|
flowersjg/venvctrl
|
36d4e0e4d5ebced6385a6ade1198f4769ff2df41
|
[
"MIT"
] | 9
|
2017-10-25T01:56:12.000Z
|
2021-12-30T03:49:54.000Z
|
venvctrl/cli/__init__.py
|
flowersjg/venvctrl
|
36d4e0e4d5ebced6385a6ade1198f4769ff2df41
|
[
"MIT"
] | 6
|
2015-12-14T20:09:39.000Z
|
2019-04-30T22:20:48.000Z
|
"""Command line scripts and utilities."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
| 27.428571
| 41
| 0.848958
| 24
| 192
| 6
| 0.583333
| 0.277778
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114583
| 192
| 6
| 42
| 32
| 0.847059
| 0.182292
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dd8ca5f848259aec6ccd307848b60e668896c616
| 29,595
|
py
|
Python
|
mdot_rest/test/resource.py
|
uw-it-aca/mdot-rest
|
3f5aa88ae2ac9693f283b8843ac8998b10dc7bb8
|
[
"Apache-2.0"
] | null | null | null |
mdot_rest/test/resource.py
|
uw-it-aca/mdot-rest
|
3f5aa88ae2ac9693f283b8843ac8998b10dc7bb8
|
[
"Apache-2.0"
] | 67
|
2015-07-23T23:22:14.000Z
|
2022-02-04T21:39:43.000Z
|
mdot_rest/test/resource.py
|
uw-it-aca/mdot-rest
|
3f5aa88ae2ac9693f283b8843ac8998b10dc7bb8
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from django.test import TestCase, Client
from django.contrib.auth.models import User
from rest_framework import status
import mdot_rest.models as resource_models
import json
import datetime
from mock import patch
class ResourceTest(TestCase):
    def setUp(self):
        """Create the test fixture: three resources (one unpublished), two
        intended audiences, and four resource links.

        ``django.utils.timezone.now`` is patched while the resources are
        created so the created/modified timestamps are deterministic.
        """
        self.default_date = datetime.datetime(1945, 11, 3, 12, 3, 34)
        # freeze "now" so created_date / last_modified are reproducible
        with patch('django.utils.timezone.now') as mock_now:
            mock_now.return_value = self.default_date
            # resource1: published, accessible, Seattle-only
            self.resource1 = resource_models.UWResource.objects.create(
                title='ITConnect',
                feature_desc='This is a test.',
                featured=True,
                accessible=True,
                responsive_web=True,
                campus_seattle=True,
                campus_tacoma=False,
                campus_bothell=False,
                published=True)
            # resource2: published, not accessible, all three campuses
            self.resource2 = resource_models.UWResource.objects.create(
                title='SpaceScout',
                feature_desc='This is another test.',
                featured=True,
                accessible=False,
                responsive_web=True,
                campus_seattle=True,
                campus_tacoma=True,
                campus_bothell=True,
                published=True)
            # resource3: unpublished — must be excluded from list endpoints
            self.resource3 = resource_models.UWResource.objects.create(
                title='Unpublished',
                feature_desc='This is a resource that is not published.',
                featured=True,
                accessible=False,
                responsive_web=True,
                campus_seattle=True,
                campus_tacoma=True,
                campus_bothell=True,
                published=False)
        self.intended_audience1 = \
            resource_models.IntendedAudience.objects.create(
                audience='Students')
        self.intended_audience2 = \
            resource_models.IntendedAudience.objects.create(
                audience='Developers')
        self.intended_audience1.save()
        self.intended_audience2.save()
        # many-to-many wiring: Students -> r1, r2; Developers -> r2, r3
        self.intended_audience1.resource.add(self.resource1)
        self.intended_audience1.resource.add(self.resource2)
        self.intended_audience2.resource.add(self.resource2)
        self.intended_audience2.resource.add(self.resource3)
        self.resource_link1 = resource_models.ResourceLink.objects.create(
            link_type='IOS',
            resource=self.resource1,
            url='uw.edu/itconnect')
        self.resource_link2 = resource_models.ResourceLink.objects.create(
            link_type='WEB',
            resource=self.resource2,
            url='spacescout.uw.edu')
        self.resource_link3 = resource_models.ResourceLink.objects.create(
            link_type='IOS',
            resource=self.resource2,
            url='spacescout.ue.edu/ios')
        # NOTE(review): 'spacescout.ue.edu/ios' looks like a typo for
        # 'uw.edu', but the expected responses below match it as-is.
        self.resource_link4 = resource_models.ResourceLink.objects.create(
            link_type='WEB',
            resource=self.resource3,
            url='washington.edu/notathing')
        self.resource1.save()
        self.resource2.save()
        self.resource3.save()
        self.intended_audience1.save()
        self.intended_audience2.save()
        self.resource_link1.save()
        self.resource_link2.save()
        self.resource_link3.save()
        self.resource_link4.save()
        self.client = Client()
def test_resource_str(self):
"""
Test that the __str__ method returns the resource name.
Python 2 should still use unicode. Python 3 only uses str
"""
self.assertEqual(self.resource1.__str__(), 'ITConnect')
def test_intendedaudience_str(self):
"""
Test that the __str__ method returns the audience name.
"""
self.assertEqual(self.intended_audience1.__str__(), 'Students')
def test_resource_linke_str(self):
"""
Test that the __str__ method returns the resource link name.
"""
self.assertEqual(self.resource_link1.__str__(), 'ITConnect: IOS')
def test_simple_resource(self):
"""
Get the first resource in the database.
"""
path = "/api/v1/uwresources/{0}/".format(self.resource1.pk)
response = self.client.get(path, format='json')
expected_response = {u'accessible': True,
u'campus_bothell': False,
u'campus_seattle': True,
u'campus_tacoma': False,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'feature_desc': u'This is a test.',
u'featured': True,
u'id': self.resource1.pk,
u'image': None,
u'intended_audiences': [{u'audience':
u'Students'}],
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'resource_links': [{u'link_type': u'IOS',
u'url':
u'uw.edu/itconnect'}],
u'responsive_web': True,
u'title': u'ITConnect'
}
self.assertTrue(json.loads(response.content) == expected_response)
self.assertTrue(json.loads(response.content).__len__(),
expected_response.__len__())
def test_get_published_resources(self):
"""
Request all resources, but check that we only get published ones.
"""
response = self.client.get('/api/v1/uwresources/')
expected_response = [{u'accessible': True,
u'feature_desc': u'This is a test.',
u'title': u'ITConnect',
u'image': None,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': False,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience':
u'Students'}],
u'resource_links': [{u'url': u'uw.edu/itconnect',
u'link_type': u'IOS'}],
u'id': 1,
u'campus_tacoma': False
},
{u'accessible': False,
u'feature_desc': u'This is another test.',
u'title': u'SpaceScout',
u'image': None,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': True,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience':
u'Students'},
{u'audience':
u'Developers'}],
u'resource_links': [{u'url':
u'spacescout.uw.edu',
u'link_type': u'WEB'},
{u'url':
u'spacescout.ue.edu/ios',
u'link_type': u'IOS'}],
u'id': 2,
u'campus_tacoma': True
}]
self.assertTrue(sorted(json.loads(response.content),
key=lambda i: i['id']) ==
sorted(expected_response, key=lambda i: i['id']))
self.assertTrue(json.loads(response.content).__len__(),
expected_response.__len__())
def test_get_accessible_resource(self):
"""
Get resources that are accessible.
"""
response = self.client.get('/api/v1/uwresources/?accessible=True',
format='json')
expected_response = [{u'accessible': True,
u'campus_bothell': False,
u'campus_seattle': True,
u'campus_tacoma': False,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'feature_desc': u'This is a test.',
u'featured': True,
u'id': 1,
u'image': None,
u'intended_audiences': [{u'audience':
u'Students'}],
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'resource_links': [{u'link_type': u'IOS',
u'url':
u'uw.edu/itconnect'}],
u'responsive_web': True,
u'title': u'ITConnect'
}]
self.assertTrue(sorted(json.loads(response.content),
key=lambda i: i['id']) ==
sorted(expected_response, key=lambda i: i['id']))
self.assertTrue(json.loads(response.content).__len__(),
expected_response.__len__())
def test_get_responsive_resources(self):
"""
Get resources that are responsive.
"""
response = self.client.get('/api/v1/uwresources/?responsive_web=True')
expected_response = [{u'accessible': True,
u'feature_desc': u'This is a test.',
u'title': u'ITConnect',
u'image': None,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': False,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience':
u'Students'}],
u'resource_links': [{u'url': u'uw.edu/itconnect',
u'link_type': u'IOS'}],
u'id': 1,
u'campus_tacoma': False
},
{u'accessible': False,
u'feature_desc': u'This is another test.',
u'title': u'SpaceScout',
u'image': None,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': True,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience':
u'Students'},
{u'audience':
u'Developers'}],
u'resource_links': [{u'url':
u'spacescout.uw.edu',
u'link_type': u'WEB'},
{u'url':
u'spacescout.ue.edu/ios',
u'link_type': u'IOS'}],
u'id': 2,
u'campus_tacoma': True
}]
self.assertTrue(sorted(json.loads(response.content),
key=lambda i: i['id']) ==
sorted(expected_response, key=lambda i: i['id']))
self.assertTrue(json.loads(response.content).__len__(),
expected_response.__len__())
def test_get_seattle_resource(self):
"""
Get all the resources that are for seattle campus.
"""
response = self.client.get('/api/v1/uwresources/?campus_seattle=True')
expected_response = [{u'accessible': True,
u'feature_desc': u'This is a test.',
u'title': u'ITConnect',
u'image': None,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': False,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience':
u'Students'}],
u'resource_links': [{u'url': u'uw.edu/itconnect',
u'link_type': u'IOS'}],
u'id': 1,
u'campus_tacoma': False
},
{u'accessible': False,
u'feature_desc': u'This is another test.',
u'title': u'SpaceScout',
u'image': None,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': True,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience':
u'Students'},
{u'audience':
u'Developers'}],
u'resource_links': [{u'url':
u'spacescout.uw.edu',
u'link_type': u'WEB'},
{u'url':
u'spacescout.ue.edu/ios',
u'link_type': u'IOS'}],
u'id': 2,
u'campus_tacoma': True
}]
self.assertTrue(sorted(json.loads(response.content),
key=lambda i: i['id']) ==
sorted(expected_response, key=lambda i: i['id']))
self.assertTrue(json.loads(response.content).__len__(),
expected_response.__len__())
def test_get_featured_resource(self):
"""
Get all the resources that are flagged as featured.
"""
response = self.client.get('/api/v1/uwresources/?featured=True')
expected_response = [{u'accessible': True,
u'feature_desc': u'This is a test.',
u'title': u'ITConnect',
u'image': None,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': False,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience':
u'Students'}],
u'resource_links': [{u'url':
u'uw.edu/itconnect',
u'link_type':
u'IOS'}],
u'id': 1,
u'campus_tacoma': False
},
{u'accessible': False,
u'feature_desc': u'This is another test.',
u'title': u'SpaceScout',
u'image': None,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': True,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience':
u'Students'},
{u'audience':
u'Developers'}],
u'resource_links': [{u'url':
u'spacescout.uw.edu',
u'link_type': u'WEB'},
{u'url':
u'spacescout.ue.edu/ios',
u'link_type': u'IOS'}],
u'id': 2,
u'campus_tacoma': True
}]
self.assertTrue(sorted(json.loads(response.content),
key=lambda i: i['id']) ==
sorted(expected_response, key=lambda i: i['id']))
self.assertTrue(json.loads(response.content).__len__(),
expected_response.__len__())
def test_get_resource_by_title(self):
"""
Get a resource by its title.
"""
response = self.client.get('/api/v1/uwresources/?title=SpaceScout')
expected_response = [{u'accessible': False,
u'feature_desc': u'This is another test.',
u'title': u'SpaceScout',
u'image': None,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': True,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience':
u'Students'},
{u'audience':
u'Developers'}],
u'resource_links': [{u'url':
u'spacescout.uw.edu',
u'link_type': u'WEB'},
{u'url':
u'spacescout.ue.edu/ios',
u'link_type': u'IOS'}],
u'id': 2,
u'campus_tacoma': True}]
self.assertTrue(sorted(json.loads(response.content),
key=lambda i: i['id']) ==
sorted(expected_response, key=lambda i: i['id']))
self.assertTrue(json.loads(response.content).__len__(),
expected_response.__len__())
def test_get_resource_by_audience(self):
"""
Get all the resources that are for an audience.
"""
response = self.client.get('/api/v1/uwresources/?audience=Students')
expected_response = [{u'accessible': True,
u'feature_desc': u'This is a test.',
u'title': u'ITConnect',
u'image': None,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': False,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience':
u'Students'}],
u'resource_links': [{u'url': u'uw.edu/itconnect',
u'link_type': u'IOS'}],
u'id': 1,
u'campus_tacoma': False
},
{u'accessible': False,
u'feature_desc': u'This is another test.',
u'title': u'SpaceScout',
u'image': None,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': True,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience':
u'Students'},
{u'audience':
u'Developers'}],
u'resource_links': [{u'url':
u'spacescout.uw.edu',
u'link_type': u'WEB'},
{u'url':
u'spacescout.ue.edu/ios',
u'link_type': u'IOS'}],
u'id': 2,
u'campus_tacoma': True
}]
self.assertTrue(sorted(json.loads(response.content),
key=lambda i: i['id']) ==
sorted(expected_response, key=lambda i: i['id']))
self.assertTrue(json.loads(response.content).__len__(),
expected_response.__len__())
def test_complex_filter(self):
"""
Get all the resources that satisfy a complex filter.
"""
response = self.client.get(
'/api/v1/uwresources/?accessible=True&campus_seattle=True&\
responsive_web=True&featured=True&audience=Students')
expected_response = [{u'accessible': True,
u'feature_desc': u'This is a test.',
u'title': u'ITConnect',
u'image': None,
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': False,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience':
u'Students'}],
u'resource_links': [{u'url': u'uw.edu/itconnect',
u'link_type': u'IOS'}],
u'id': 1,
u'campus_tacoma': False
}]
self.assertTrue(sorted(json.loads(response.content)) ==
sorted(expected_response))
self.assertTrue(json.loads(response.content).__len__(),
expected_response.__len__())
def test_put_to_api(self):
"""
The rest API is readonly, so an attempt at a put should return a
forbidden status code.
"""
new_resource = {u'accessible': True,
u'feature_desc': u'This is a test.',
u'title': u'ITConnect',
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': False,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience': u'Students'}],
u'resource_links': [{u'url': u'uw.edu/itconnect',
u'link_type': u'IOS'}],
u'id': 1,
u'campus_tacoma': False
}
response = self.client.put('/api/v1/uwresources/', new_resource)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_post_to_api(self):
"""
The rest API is readonly, so an attempt at a post should return a
forbidden status code.
"""
new_resource = {u'accessible': True,
u'feature_desc': u'This is a test.',
u'title': u'ITConnect',
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': False,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience': u'Students'}],
u'resource_links': [{u'url': u'uw.edu/itconnect',
u'link_type': u'IOS'}],
u'id': 1,
u'campus_tacoma': False
}
response = self.client.post('/api/v1/uwresources/', new_resource)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_delete_from_api(self):
"""
The rest API is readonly, so an attempt to delete a resource should
return a forbidden status code.
"""
response = self.client.delete('/api/v1/uwresources/?title=ITConnect')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_patch_to_api(self):
"""
The rest API is readonly, so an attempt to patch a resource should
return a forbidden status code.
"""
new_resource = {u'accessible': True,
u'feature_desc': u'This is a test.',
u'title': u'ITConnect',
u'created_date': u'1945-11-03T12:03:34-08:00',
u'campus_seattle': True,
u'campus_bothell': False,
u'responsive_web': True,
u'featured': True,
u'last_modified': u'1945-11-03T12:03:34-08:00',
u'intended_audiences': [{u'audience': u'Students'}],
u'resource_links': [{u'url': u'uw.edu/itconnect',
u'link_type': u'IOS'}],
u'id': 1,
u'campus_tacoma': False
}
response = self.client.patch('/api/v1/uwresources/', new_resource)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def tearDown(self):
"""
Destroys all the objects that were made for each test.
"""
self.resource1.delete()
self.resource2.delete()
self.intended_audience1.delete()
self.intended_audience2.delete()
self.resource_link1.delete()
self.resource_link2.delete()
self.resource_link3.delete()
| 49.65604
| 79
| 0.405474
| 2,624
| 29,595
| 4.410061
| 0.070122
| 0.029381
| 0.020567
| 0.035258
| 0.821466
| 0.810577
| 0.781974
| 0.758987
| 0.741272
| 0.719927
| 0
| 0.047787
| 0.492313
| 29,595
| 595
| 80
| 49.739496
| 0.722396
| 0.038993
| 0
| 0.768293
| 0
| 0
| 0.203656
| 0.047522
| 0
| 0
| 0
| 0
| 0.050813
| 1
| 0.036585
| false
| 0
| 0.014228
| 0
| 0.052846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dd9e5d10f335affa0cf1458640b84e6fe3781971
| 130
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/calculators/calc_synth.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/calculators/calc_synth.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/calculators/calc_synth.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from pyradioconfig.parts.jumbo.calculators.calc_synth import CALC_Synth_jumbo
class CALC_Synth_nixi(CALC_Synth_jumbo):
pass
| 21.666667
| 77
| 0.846154
| 19
| 130
| 5.421053
| 0.578947
| 0.349515
| 0.271845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 130
| 6
| 78
| 21.666667
| 0.880342
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
9ffd3dfe517c2c2940f0a62f5cc5898d630c80cf
| 334
|
py
|
Python
|
spotify_api/error.py
|
Saphyel/spotify-api
|
e1e73e7aa6e5946aa832d3c89fbae80b5de23ffe
|
[
"MIT"
] | 1
|
2019-11-03T18:15:16.000Z
|
2019-11-03T18:15:16.000Z
|
spotify_api/error.py
|
Saphyel/spotify-api
|
e1e73e7aa6e5946aa832d3c89fbae80b5de23ffe
|
[
"MIT"
] | null | null | null |
spotify_api/error.py
|
Saphyel/spotify-api
|
e1e73e7aa6e5946aa832d3c89fbae80b5de23ffe
|
[
"MIT"
] | null | null | null |
__strict__ = True
class SpotifyOauthError(Exception):
pass
class SpotifyRepositoryError(Exception):
def __init__(self, http_status: int, body: str):
self.http_status = http_status
self.body = body
def __str__(self):
return 'http status: {0}, code:{1}'.format(str(self.http_status), self.body)
| 22.266667
| 84
| 0.679641
| 41
| 334
| 5.146341
| 0.487805
| 0.236967
| 0.199052
| 0.161137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007547
| 0.206587
| 334
| 14
| 85
| 23.857143
| 0.788679
| 0
| 0
| 0
| 0
| 0
| 0.077844
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.111111
| 0
| 0.111111
| 0.555556
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
b00c92d899753288e3ccbc11682fc78e480342cd
| 146
|
py
|
Python
|
src/fritz_dect_mail_extract/constants.py
|
CarliJoy/fritz-dect-mail-extract
|
e3636ae9c15aa9740cd49cb7cd0aec42aad0d1f0
|
[
"MIT"
] | null | null | null |
src/fritz_dect_mail_extract/constants.py
|
CarliJoy/fritz-dect-mail-extract
|
e3636ae9c15aa9740cd49cb7cd0aec42aad0d1f0
|
[
"MIT"
] | null | null | null |
src/fritz_dect_mail_extract/constants.py
|
CarliJoy/fritz-dect-mail-extract
|
e3636ae9c15aa9740cd49cb7cd0aec42aad0d1f0
|
[
"MIT"
] | null | null | null |
ENV_NAMES = {
"PASSWORD": "DECT_MAIL_EXTRACT_PASSWORD",
"USERNAME": "DECT_MAIL_EXTRACT_USER",
"SERVER": "DECT_MAIL_EXTRACT_SERVER",
}
| 24.333333
| 45
| 0.705479
| 17
| 146
| 5.470588
| 0.529412
| 0.258065
| 0.483871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150685
| 146
| 5
| 46
| 29.2
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.643836
| 0.493151
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.2
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
b057bd219b8aacca0f9e1a4c355cdab53ae64190
| 100
|
py
|
Python
|
pytorch_agents/a2c/__init__.py
|
AiForAlpha/pytorch_agents
|
0151b16f0a8cf7131cede83c369cbc728435b3c2
|
[
"MIT"
] | null | null | null |
pytorch_agents/a2c/__init__.py
|
AiForAlpha/pytorch_agents
|
0151b16f0a8cf7131cede83c369cbc728435b3c2
|
[
"MIT"
] | null | null | null |
pytorch_agents/a2c/__init__.py
|
AiForAlpha/pytorch_agents
|
0151b16f0a8cf7131cede83c369cbc728435b3c2
|
[
"MIT"
] | null | null | null |
from pytorch_agents.a2c.a2c import A2C
from pytorch_agents.a2c.policies import CnnPolicy, MlpPolicy
| 33.333333
| 60
| 0.86
| 15
| 100
| 5.6
| 0.533333
| 0.261905
| 0.404762
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043956
| 0.09
| 100
| 2
| 61
| 50
| 0.879121
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c6b2bc07a4bf94297331b03585a0e4447ceb2c0a
| 3,137
|
py
|
Python
|
custom_augmentation.py
|
rongtuech/handbodypose
|
6e06164acee8d24096e062c69553acdd8756c0f6
|
[
"Apache-2.0"
] | 5
|
2021-05-06T06:50:40.000Z
|
2021-09-01T10:10:06.000Z
|
custom_augmentation.py
|
rongtuech/handbodypose
|
6e06164acee8d24096e062c69553acdd8756c0f6
|
[
"Apache-2.0"
] | 2
|
2021-05-06T01:43:39.000Z
|
2021-05-11T07:33:13.000Z
|
custom_augmentation.py
|
rongtuech/handbodypose
|
6e06164acee8d24096e062c69553acdd8756c0f6
|
[
"Apache-2.0"
] | null | null | null |
import cv2
from albumentations import Compose, PadIfNeeded, ShiftScaleRotate, ImageCompression, KeypointParams, \
LongestMaxSize
from albumentations.imgaug.transforms import IAAAffine
class COCOTransformation:
def __init__(self, width, height):
self.aug = Compose([
ShiftScaleRotate(p=0.5, rotate_limit=5, scale_limit=0.05, border_mode=cv2.BORDER_CONSTANT),
ImageCompression(quality_lower=95, quality_upper=100, p=1),
IAAAffine(shear=0.2, always_apply=False, p=0.3),
LongestMaxSize(max_size=width if width > height else height),
PadIfNeeded(min_height=height, min_width=width, border_mode=cv2.BORDER_CONSTANT)
], keypoint_params=KeypointParams(format='xy', label_fields=['pose_id', "join_id"],
remove_invisible=True))
def __call__(self, image, keypoints):
cood, pose, join = keypoints
transformed = self.aug(image=image, keypoints=cood, pose_id=pose, join_id=join)
return transformed["image"], (transformed['keypoints'], transformed['pose_id'], \
transformed['join_id'])
class HandTransformation:
def __init__(self, width, height):
self.aug = Compose([
ShiftScaleRotate(p=0.5, rotate_limit=5, scale_limit=0.05, border_mode=cv2.BORDER_CONSTANT),
ImageCompression(quality_lower=95, quality_upper=100, p=1),
IAAAffine(shear=0.2, always_apply=False, p=0.3),
LongestMaxSize(max_size=width if width > height else height),
PadIfNeeded(min_height=height, min_width=width, border_mode=cv2.BORDER_CONSTANT)
], keypoint_params=KeypointParams(format='xy', label_fields=["join_id"],
remove_invisible=True))
def __call__(self, image, keypoints):
cood, join = keypoints
transformed = self.aug(image=image, keypoints=cood, join_id=join)
return transformed["image"], (transformed['keypoints'], transformed['join_id'])
class COCOTransformationTest:
def __init__(self, width, height):
self.aug = Compose([
LongestMaxSize(max_size=width if width > height else height),
PadIfNeeded(min_height=height, min_width=width, border_mode=cv2.BORDER_CONSTANT)
], keypoint_params=KeypointParams(format='xy', label_fields=['pose_id', "join_id"],
remove_invisible=True))
def __call__(self, image, keypoints):
cood, pose, join = keypoints
transformed = self.aug(image=image, keypoints=cood, pose_id=pose, join_id=join)
return transformed["image"], (transformed['keypoints'], transformed['pose_id'], \
transformed['join_id'])
class InferenceTransformation:
def __init__(self, width, height):
self.aug = Compose([
LongestMaxSize(max_size=width if width > height else height),
PadIfNeeded(min_height=height, min_width=width, border_mode=cv2.BORDER_CONSTANT)
])
def __call__(self, image):
transformed = self.aug(image=image)
return transformed["image"]
| 48.261538
| 103
| 0.660822
| 355
| 3,137
| 5.585915
| 0.194366
| 0.027231
| 0.039334
| 0.057489
| 0.851236
| 0.837115
| 0.837115
| 0.837115
| 0.837115
| 0.778114
| 0
| 0.016109
| 0.228244
| 3,137
| 65
| 104
| 48.261538
| 0.802974
| 0
| 0
| 0.703704
| 0
| 0
| 0.039197
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.148148
| false
| 0
| 0.055556
| 0
| 0.351852
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
05a44b36f0db8eecb847582e47d0934d006b14c0
| 192
|
py
|
Python
|
finitewave/cpuwave3D/model/__init__.py
|
ArsOkenov/Finitewave
|
14274d74be824a395b47a5c53ba18188798ab70d
|
[
"MIT"
] | null | null | null |
finitewave/cpuwave3D/model/__init__.py
|
ArsOkenov/Finitewave
|
14274d74be824a395b47a5c53ba18188798ab70d
|
[
"MIT"
] | null | null | null |
finitewave/cpuwave3D/model/__init__.py
|
ArsOkenov/Finitewave
|
14274d74be824a395b47a5c53ba18188798ab70d
|
[
"MIT"
] | null | null | null |
from finitewave.cpuwave3D.model.aliev_panfilov_3d import AlievPanfilov3D
from finitewave.cpuwave3D.model.luo_rudy91_3d import LuoRudy913D
from finitewave.cpuwave3D.model.tp06_3d import TP063D
| 48
| 72
| 0.890625
| 26
| 192
| 6.384615
| 0.538462
| 0.253012
| 0.415663
| 0.506024
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094444
| 0.0625
| 192
| 3
| 73
| 64
| 0.827778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
af30b2dba65301ca560614579a27a497d45b5850
| 97
|
py
|
Python
|
gym_pcgrl/envs/__init__.py
|
JiangZehua/gym-pcgrl
|
80ddbde173803e81060578c2c4167d8d1f5cacba
|
[
"MIT"
] | null | null | null |
gym_pcgrl/envs/__init__.py
|
JiangZehua/gym-pcgrl
|
80ddbde173803e81060578c2c4167d8d1f5cacba
|
[
"MIT"
] | null | null | null |
gym_pcgrl/envs/__init__.py
|
JiangZehua/gym-pcgrl
|
80ddbde173803e81060578c2c4167d8d1f5cacba
|
[
"MIT"
] | null | null | null |
from gym_pcgrl.envs.pcgrl_env import PcgrlEnv
from gym_pcgrl.envs.pcgrl_env_3D import PcgrlEnv3D
| 32.333333
| 50
| 0.876289
| 17
| 97
| 4.705882
| 0.529412
| 0.175
| 0.3
| 0.4
| 0.6
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0.022472
| 0.082474
| 97
| 2
| 51
| 48.5
| 0.876404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bbed305f22e0a61671e306336c80610cb5e83bd8
| 153
|
py
|
Python
|
apcommand/readme.py
|
russellnakamura/apcommand
|
84a8ac522967477e10e51d3583f83c3b7de1ac2b
|
[
"MIT"
] | null | null | null |
apcommand/readme.py
|
russellnakamura/apcommand
|
84a8ac522967477e10e51d3583f83c3b7de1ac2b
|
[
"MIT"
] | null | null | null |
apcommand/readme.py
|
russellnakamura/apcommand
|
84a8ac522967477e10e51d3583f83c3b7de1ac2b
|
[
"MIT"
] | null | null | null |
# python standard library
import subprocess
print(subprocess.check_output('atheros -h'.split()))
print(subprocess.check_output('broadcom -h'.split()))
| 21.857143
| 53
| 0.771242
| 19
| 153
| 6.105263
| 0.631579
| 0.258621
| 0.344828
| 0.448276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 153
| 7
| 53
| 21.857143
| 0.822695
| 0.150327
| 0
| 0
| 0
| 0
| 0.164063
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
a5647d63b8c8146d30515389485be29abd7942ab
| 62
|
py
|
Python
|
src/pkg_trainmote/userHelper.py
|
FelixNievelstein/Trainmote-Server
|
120b8b5a2db4c08789e57788233c38d659628330
|
[
"MIT"
] | null | null | null |
src/pkg_trainmote/userHelper.py
|
FelixNievelstein/Trainmote-Server
|
120b8b5a2db4c08789e57788233c38d659628330
|
[
"MIT"
] | null | null | null |
src/pkg_trainmote/userHelper.py
|
FelixNievelstein/Trainmote-Server
|
120b8b5a2db4c08789e57788233c38d659628330
|
[
"MIT"
] | null | null | null |
import os
def is_root() -> bool:
return os.geteuid() == 0
| 15.5
| 28
| 0.612903
| 10
| 62
| 3.7
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0.225806
| 62
| 4
| 28
| 15.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
a56b8cd7dc853e2d6d9c08fb275977656a6c16fb
| 523
|
py
|
Python
|
prototype_template/template_module/__init__.py
|
IceKhan13/quantum-prototype-template
|
87f6ca0cf27de368a1755aa3975ea8f82b691021
|
[
"Apache-2.0"
] | 2
|
2022-03-01T20:30:20.000Z
|
2022-03-01T21:19:22.000Z
|
prototype_template/template_module/__init__.py
|
IceKhan13/quantum-prototype-template
|
87f6ca0cf27de368a1755aa3975ea8f82b691021
|
[
"Apache-2.0"
] | 9
|
2022-03-09T21:26:42.000Z
|
2022-03-30T21:10:19.000Z
|
prototype_template/template_module/__init__.py
|
IceKhan13/quantum-prototype-template
|
87f6ca0cf27de368a1755aa3975ea8f82b691021
|
[
"Apache-2.0"
] | 2
|
2022-03-28T17:47:26.000Z
|
2022-03-28T19:59:12.000Z
|
"""
===========================================================
Template module (:mod:`prototype_template.template_module`)
===========================================================
.. currentmodule:: prototype_template.template_module
This module contains an :class:`TemplateClass`
which is the main class for this template example.
Template module classes and functions
=====================================
.. autosummary::
:toctree: ../stubs/
TemplateClass
"""
from .template_class import TemplateClass
| 24.904762
| 59
| 0.537285
| 41
| 523
| 6.731707
| 0.585366
| 0.202899
| 0.181159
| 0.224638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101338
| 523
| 20
| 60
| 26.15
| 0.587234
| 0.900574
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a5771d1e49880786440c4bdf0be8e2b5a99a773d
| 51
|
py
|
Python
|
run.py
|
cjalmeida/conda_pyspark
|
878c64d82c4aef61dd649a74981b0b701c21a93c
|
[
"MIT"
] | null | null | null |
run.py
|
cjalmeida/conda_pyspark
|
878c64d82c4aef61dd649a74981b0b701c21a93c
|
[
"MIT"
] | null | null | null |
run.py
|
cjalmeida/conda_pyspark
|
878c64d82c4aef61dd649a74981b0b701c21a93c
|
[
"MIT"
] | null | null | null |
import my_module.do_it
my_module.do_it.run_test()
| 12.75
| 26
| 0.823529
| 11
| 51
| 3.363636
| 0.636364
| 0.432432
| 0.540541
| 0.648649
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 51
| 3
| 27
| 17
| 0.787234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
3c04bb815f11910bea5f75b0aa6c9b03e97d0e5b
| 16,492
|
py
|
Python
|
tests/st/ops/gpu/test_scatter_update_op.py
|
GuoSuiming/mindspore
|
48afc4cfa53d970c0b20eedfb46e039db2a133d5
|
[
"Apache-2.0"
] | 55
|
2020-12-17T10:26:06.000Z
|
2022-03-28T07:18:26.000Z
|
tests/st/ops/gpu/test_scatter_update_op.py
|
forwhat461/mindspore
|
59a277756eb4faad9ac9afcc7fd526e8277d4994
|
[
"Apache-2.0"
] | null | null | null |
tests/st/ops/gpu/test_scatter_update_op.py
|
forwhat461/mindspore
|
59a277756eb4faad9ac9afcc7fd526e8277d4994
|
[
"Apache-2.0"
] | 14
|
2021-01-29T02:39:47.000Z
|
2022-03-23T05:00:26.000Z
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore.ops import operations as P
from mindspore.ops.operations import _inner_ops as inner
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
# all cases tested against dchip
class TestScatterUpdateNet(nn.Cell):
def __init__(self, inputx, indices, updates):
super(TestScatterUpdateNet, self).__init__()
self.scatter_update = P.ScatterUpdate()
self.inputx = Parameter(inputx, name="inputx")
self.indices = Parameter(indices, name="indices")
self.updates = Parameter(updates, name="updates")
def construct(self):
out = self.scatter_update(self.inputx, self.indices, self.updates)
return out
def scatter_update_net(inputx, indices, updates):
net = TestScatterUpdateNet(inputx, indices, updates)
return net()
class TestScatterUpdateDynamicNet(nn.Cell):
def __init__(self, inputx, indices, updates):
super(TestScatterUpdateDynamicNet, self).__init__()
self.scatter_update = P.ScatterUpdate()
self.test_dynamic = inner.GpuConvertToDynamicShape()
self.inputx = Parameter(inputx, name="inputx")
self.indices = Parameter(indices, name="indices")
self.updates = Parameter(updates, name="updates")
def construct(self):
indices = self.test_dynamic(self.indices)
updates = self.test_dynamic(self.updates)
out = self.scatter_update(self.inputx, indices, updates)
return out
def scatter_update_d_net(inputx, indices, updates):
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
net = TestScatterUpdateDynamicNet(inputx, indices, updates)
return net()
class TestScatterUpdateDynamicNet2(nn.Cell):
def __init__(self, inputx):
super(TestScatterUpdateDynamicNet2, self).__init__()
self.scatter_update = P.ScatterUpdate()
self.test_dynamic = inner.GpuConvertToDynamicShape()
self.inputx = Parameter(inputx, name="inputx")
def construct(self, indices, updates):
indices = self.test_dynamic(indices)
updates = self.test_dynamic(updates)
out = self.scatter_update(self.inputx, indices, updates)
return out
def scatter_update_d2_net(inputx, indices_1, updates_1,
indices_2, updates_2):
context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
net = TestScatterUpdateDynamicNet2(inputx)
out1 = net(indices_1, updates_1)
out2 = net(indices_2, updates_2)
return (out1, out2)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_small_float32():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([0, 1]).astype(np.int32))
updates = Tensor(np.arange(6).reshape((2, 3)).astype(np.float32))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[0., 1., 2.],
[3., 4., 5.]])
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_input_updated():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices = Tensor(np.array([0, 1]).astype(np.int32))
updates = Tensor(np.arange(6).reshape((2, 3)).astype(np.float32))
net = TestScatterUpdateNet(inputx, indices, updates)
net()
expected = np.array([[0., 1., 2.],
[3., 4., 5.]])
np.testing.assert_array_almost_equal(net.inputx.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_input_less_than_1_float32():
inputx = Tensor(np.array([[0.214141, 0.415151, 0.51516],
[0.876542, 0.451611, 0.55112],
[0.111244, 0.633333, 0.34444]]).astype(np.float32))
indices = Tensor(np.array([1, 0, 2]).astype(np.int32))
updates = Tensor(np.arange(34, 43).reshape((3, 3)).astype(np.float32))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[37., 38., 39.],
[34., 35., 36.],
[40., 41., 42.]], dtype=np.float32)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_float16():
inputx = Tensor(np.zeros((2, 3)).astype(np.float16))
indices = Tensor(np.array([0, 1]).astype(np.int32))
updates = Tensor(np.arange(6).reshape((2, 3)).astype(np.float16))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[0., 1., 2.],
[3., 4., 5.]]).astype(np.float16)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_int32():
inputx = Tensor(np.zeros((2, 3)).astype(np.int32))
indices = Tensor(np.array([0, 1]).astype(np.int32))
updates = Tensor(np.arange(6).reshape((2, 3)).astype(np.int32))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[0., 1., 2.],
[3., 4., 5.]]).astype(np.int32)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_large_float16():
inputx = Tensor(np.zeros((4, 3)).astype(np.float16))
indices = Tensor(np.array([[2, 1], [0, 3]]).astype(np.int32))
updates = Tensor(np.arange(63, 75).reshape((2, 2, 3)).astype(np.float16))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[69., 70., 71.],
[66., 67., 68.],
[63., 64., 65.],
[72., 73., 74.]]).astype(np.float16)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_disordered_float16():
inputx = Tensor(np.flip(np.arange(34, 46).reshape(3, 4).astype(np.float16)))
indices = Tensor(np.array([1, 2]).astype(np.int32))
updates = Tensor(np.arange(63, 71).reshape((2, 4)).astype(np.float16))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[45., 44., 43., 42.],
[63., 64., 65., 66.],
[67., 68., 69., 70.]]).astype(np.float16)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_disordered_int32():
inputx = Tensor(np.flip(np.arange(34, 46).reshape(3, 4).astype(np.int32)))
indices = Tensor(np.array([1, 2]).astype(np.int32))
updates = Tensor(np.arange(63, 71).reshape((2, 4)).astype(np.int32))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[45., 44., 43., 42.],
[63., 64., 65., 66.],
[67., 68., 69., 70.]]).astype(np.int32)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_large_shape_float16():
inputx = Tensor(np.arange(96).reshape((4, 2, 3, 4)).astype(np.float16))
indices = Tensor(np.array([1, 0]).astype(np.int32))
updates = Tensor(np.flip(np.arange(48).reshape((2, 2, 3, 4)).astype(np.float16)))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[[[23., 22., 21., 20.],
[19., 18., 17., 16.],
[15., 14., 13., 12.]],
[[11., 10., 9., 8.],
[7., 6., 5., 4.],
[3., 2., 1., 0.]]],
[[[47., 46., 45., 44.],
[43., 42., 41., 40.],
[39., 38., 37., 36.]],
[[35., 34., 33., 32.],
[31., 30., 29., 28.],
[27., 26., 25., 24.]]],
[[[48., 49., 50., 51.],
[52., 53., 54., 55.],
[56., 57., 58., 59.]],
[[60., 61., 62., 63.],
[64., 65., 66., 67.],
[68., 69., 70., 71.]]],
[[[72., 73., 74., 75.],
[76., 77., 78., 79.],
[80., 81., 82., 83.]],
[[84., 85., 86., 87.],
[88., 89., 90., 91.],
[92., 93., 94., 95.]]]]).astype(np.float16)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_disordered_int8():
inputx = Tensor(np.flip(np.arange(34, 46).reshape(3, 4).astype(np.int8)))
indices = Tensor(np.array([1, 2]).astype(np.int32))
updates = Tensor(np.arange(63, 71).reshape((2, 4)).astype(np.int8))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[45., 44., 43., 42.],
[63., 64., 65., 66.],
[67., 68., 69., 70.]]).astype(np.int8)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_large_shape_int8():
inputx = Tensor(np.arange(96).reshape((4, 2, 3, 4)).astype(np.int8))
indices = Tensor(np.array([1, 0]).astype(np.int32))
updates = Tensor(np.flip(np.arange(48).reshape((2, 2, 3, 4)).astype(np.int8)))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[[[23., 22., 21., 20.],
[19., 18., 17., 16.],
[15., 14., 13., 12.]],
[[11., 10., 9., 8.],
[7., 6., 5., 4.],
[3., 2., 1., 0.]]],
[[[47., 46., 45., 44.],
[43., 42., 41., 40.],
[39., 38., 37., 36.]],
[[35., 34., 33., 32.],
[31., 30., 29., 28.],
[27., 26., 25., 24.]]],
[[[48., 49., 50., 51.],
[52., 53., 54., 55.],
[56., 57., 58., 59.]],
[[60., 61., 62., 63.],
[64., 65., 66., 67.],
[68., 69., 70., 71.]]],
[[[72., 73., 74., 75.],
[76., 77., 78., 79.],
[80., 81., 82., 83.]],
[[84., 85., 86., 87.],
[88., 89., 90., 91.],
[92., 93., 94., 95.]]]]).astype(np.int8)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_large_uint8():
inputx = Tensor(np.zeros((4, 3)).astype(np.uint8))
indices = Tensor(np.array([[2, 1], [0, 3]]).astype(np.int32))
updates = Tensor(np.arange(63, 75).reshape((2, 2, 3)).astype(np.uint8))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[69., 70., 71.],
[66., 67., 68.],
[63., 64., 65.],
[72., 73., 74.]]).astype(np.uint8)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_disordered_uint8():
inputx = Tensor(np.flip(np.arange(34, 46).reshape(3, 4).astype(np.uint8)))
indices = Tensor(np.array([1, 2]).astype(np.int32))
updates = Tensor(np.arange(63, 71).reshape((2, 4)).astype(np.uint8))
output = scatter_update_net(inputx, indices, updates)
expected = np.array([[45., 44., 43., 42.],
[63., 64., 65., 66.],
[67., 68., 69., 70.]]).astype(np.uint8)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_large_shape_dynamic_int8():
inputx = Tensor(np.arange(96).reshape((4, 2, 3, 4)).astype(np.int8))
indices = Tensor(np.array([1, 0]).astype(np.int32))
updates = Tensor(np.flip(np.arange(48).reshape((2, 2, 3, 4)).astype(np.int8)))
output = scatter_update_d_net(inputx, indices, updates)
expected = np.array([[[[23., 22., 21., 20.],
[19., 18., 17., 16.],
[15., 14., 13., 12.]],
[[11., 10., 9., 8.],
[7., 6., 5., 4.],
[3., 2., 1., 0.]]],
[[[47., 46., 45., 44.],
[43., 42., 41., 40.],
[39., 38., 37., 36.]],
[[35., 34., 33., 32.],
[31., 30., 29., 28.],
[27., 26., 25., 24.]]],
[[[48., 49., 50., 51.],
[52., 53., 54., 55.],
[56., 57., 58., 59.]],
[[60., 61., 62., 63.],
[64., 65., 66., 67.],
[68., 69., 70., 71.]]],
[[[72., 73., 74., 75.],
[76., 77., 78., 79.],
[80., 81., 82., 83.]],
[[84., 85., 86., 87.],
[88., 89., 90., 91.],
[92., 93., 94., 95.]]]]).astype(np.int8)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_disordered_dynamic_int32():
inputx = Tensor(np.flip(np.arange(34, 46).reshape(3, 4).astype(np.int32)))
indices = Tensor(np.array([1, 2]).astype(np.int32))
updates = Tensor(np.arange(63, 71).reshape((2, 4)).astype(np.int32))
output = scatter_update_d_net(inputx, indices, updates)
expected = np.array([[45., 44., 43., 42.],
[63., 64., 65., 66.],
[67., 68., 69., 70.]]).astype(np.int32)
np.testing.assert_array_almost_equal(output.asnumpy(), expected)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_scatter_update_two_inputs():
inputx = Tensor(np.zeros((2, 3)).astype(np.float32))
indices_1 = Tensor(np.array([0, 1]).astype(np.int32))
updates_1 = Tensor(np.arange(6).reshape((2, 3)).astype(np.float32))
indices_2 = Tensor(np.array([1]).astype(np.int32))
updates_2 = Tensor(np.arange(34, 37).reshape((1, 3)).astype(np.float32))
output_1, output_2 = scatter_update_d2_net(inputx, indices_1, updates_1,
indices_2, updates_2)
expected_1 = np.array([[0., 1., 2.],
[3., 4., 5.]], dtype=np.float32)
expected_2 = np.array([[0., 1., 2.],
[34., 35., 36.]], dtype=np.float32)
np.testing.assert_array_almost_equal(output_1.asnumpy(), expected_1)
np.testing.assert_array_almost_equal(output_2.asnumpy(), expected_2)
| 45.811111
| 85
| 0.556997
| 2,071
| 16,492
| 4.292129
| 0.124095
| 0.055799
| 0.038025
| 0.03825
| 0.844077
| 0.82259
| 0.801777
| 0.78929
| 0.764878
| 0.751491
| 0
| 0.103629
| 0.269767
| 16,492
| 359
| 86
| 45.938719
| 0.634476
| 0.040565
| 0
| 0.712934
| 0
| 0
| 0.003479
| 0
| 0
| 0
| 0
| 0
| 0.053628
| 1
| 0.078864
| false
| 0
| 0.022082
| 0
| 0.129338
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c286f7f4ec6f05d2f74ff49ccaea1f093a5a543
| 205
|
py
|
Python
|
splparser/rules/common/regexrules.py
|
lowell80/splparser
|
2b33d74d59565cc53ae47452126621300dba9ed8
|
[
"BSD-3-Clause"
] | 31
|
2015-12-02T15:41:06.000Z
|
2022-03-16T22:27:23.000Z
|
splparser/rules/common/regexrules.py
|
lowell80/splparser
|
2b33d74d59565cc53ae47452126621300dba9ed8
|
[
"BSD-3-Clause"
] | 1
|
2021-06-24T11:23:00.000Z
|
2021-06-24T11:23:00.000Z
|
splparser/rules/common/regexrules.py
|
lowell80/splparser
|
2b33d74d59565cc53ae47452126621300dba9ed8
|
[
"BSD-3-Clause"
] | 15
|
2015-12-10T16:48:20.000Z
|
2022-02-26T18:12:44.000Z
|
from splparser.parsetree import *
def p_regex_regular_expression(p):
"""regex : REGULAR_EXPRESSION"""
p[0] = ParseTreeNode('VALUE', nodetype='REGULAR_EXPRESSION', raw=p[1], is_argument=True)
| 18.636364
| 92
| 0.717073
| 26
| 205
| 5.423077
| 0.692308
| 0.361702
| 0.184397
| 0.326241
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011364
| 0.141463
| 205
| 10
| 93
| 20.5
| 0.789773
| 0.126829
| 0
| 0
| 0
| 0
| 0.136905
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3c56cc774eb1adce349790bdf8a99891256f01fe
| 740
|
py
|
Python
|
prob79.py
|
vf42/projecteuler
|
a99c0f7afe458e12e63a13aa96f456208292e7db
|
[
"MIT"
] | null | null | null |
prob79.py
|
vf42/projecteuler
|
a99c0f7afe458e12e63a13aa96f456208292e7db
|
[
"MIT"
] | null | null | null |
prob79.py
|
vf42/projecteuler
|
a99c0f7afe458e12e63a13aa96f456208292e7db
|
[
"MIT"
] | null | null | null |
'''
Created on Oct 2, 2012
@author: vadim
Todo: need to learn string algorithms.
'''
f = open('data/keylog.txt')
mx = [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]
while True:
line = f.readline()
if not line:
break
line = line[:-1]
mx[int(line[0])][int(line[1])] += 1
mx[int(line[1])][int(line[2])] += 1
for l in mx:
print(l)
f.close()
if __name__ == '__main__':
pass
| 20.555556
| 39
| 0.410811
| 159
| 740
| 1.861635
| 0.232704
| 0.668919
| 0.993243
| 1.310811
| 0.337838
| 0.337838
| 0.337838
| 0.337838
| 0.337838
| 0.337838
| 0
| 0.226721
| 0.332432
| 740
| 35
| 40
| 21.142857
| 0.37247
| 0.105405
| 0
| 0.347826
| 0
| 0
| 0.035168
| 0
| 0
| 0
| 0
| 0.028571
| 0
| 1
| 0
| false
| 0.043478
| 0
| 0
| 0
| 0.043478
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c6659efcdbfdc734ad0ad60d5c7389995044df7
| 755
|
py
|
Python
|
ast_matcher/categories/narrowing.py
|
PrVrSs/ast-matcher
|
cf36cd83b1e0acc051df510178b1787c53dfea57
|
[
"MIT"
] | null | null | null |
ast_matcher/categories/narrowing.py
|
PrVrSs/ast-matcher
|
cf36cd83b1e0acc051df510178b1787c53dfea57
|
[
"MIT"
] | null | null | null |
ast_matcher/categories/narrowing.py
|
PrVrSs/ast-matcher
|
cf36cd83b1e0acc051df510178b1787c53dfea57
|
[
"MIT"
] | null | null | null |
from typing import final
from .base import BaseNarrowing
__all__ = (
'HasName',
)
@final
class HasName(BaseNarrowing):
def _check(self, node) -> bool:
raise NotImplementedError
@final
class AllOf(BaseNarrowing):
def _check(self, node) -> bool:
raise NotImplementedError
@final
class AnyOf(BaseNarrowing):
def _check(self, node) -> bool:
raise NotImplementedError
@final
class HasOperatorName(BaseNarrowing):
def _check(self, node) -> bool:
raise NotImplementedError
@final
class ArgumentCountIs(BaseNarrowing):
def _check(self, node) -> bool:
raise NotImplementedError
@final
class Unless(BaseNarrowing):
def _check(self, node) -> bool:
raise NotImplementedError
| 16.413043
| 37
| 0.692715
| 76
| 755
| 6.75
| 0.276316
| 0.116959
| 0.245614
| 0.292398
| 0.764133
| 0.764133
| 0.764133
| 0.764133
| 0.653021
| 0.653021
| 0
| 0
| 0.218543
| 755
| 45
| 38
| 16.777778
| 0.869492
| 0
| 0
| 0.62069
| 0
| 0
| 0.009272
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.206897
| false
| 0
| 0.068966
| 0
| 0.482759
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c67d2ebc20a912e6fccf5a10eeedbb14e5a6c3e
| 13,057
|
py
|
Python
|
addons/purchase/tests/test_purchase_invoice.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
addons/purchase/tests/test_purchase_invoice.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
addons/purchase/tests/test_purchase_invoice.py
|
SHIVJITH/Odoo_Machine_Test
|
310497a9872db7844b521e6dab5f7a9f61d365a4
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.account.tests.common import AccountTestInvoicingCommon
from odoo.tests import tagged
from odoo.tests.common import Form
@tagged('post_install', '-at_install')
class TestPurchaseToInvoice(AccountTestInvoicingCommon):
@classmethod
def setUpClass(cls):
super(TestPurchaseToInvoice, cls).setUpClass()
uom_unit = cls.env.ref('uom.product_uom_unit')
uom_hour = cls.env.ref('uom.product_uom_hour')
cls.product_order = cls.env['product.product'].create({
'name': "Zed+ Antivirus",
'standard_price': 235.0,
'list_price': 280.0,
'type': 'consu',
'uom_id': uom_unit.id,
'uom_po_id': uom_unit.id,
'purchase_method': 'purchase',
'default_code': 'PROD_ORDER',
'taxes_id': False,
})
cls.service_deliver = cls.env['product.product'].create({
'name': "Cost-plus Contract",
'standard_price': 200.0,
'list_price': 180.0,
'type': 'service',
'uom_id': uom_unit.id,
'uom_po_id': uom_unit.id,
'purchase_method': 'receive',
'default_code': 'SERV_DEL',
'taxes_id': False,
})
cls.service_order = cls.env['product.product'].create({
'name': "Prepaid Consulting",
'standard_price': 40.0,
'list_price': 90.0,
'type': 'service',
'uom_id': uom_hour.id,
'uom_po_id': uom_hour.id,
'purchase_method': 'purchase',
'default_code': 'PRE-PAID',
'taxes_id': False,
})
cls.product_deliver = cls.env['product.product'].create({
'name': "Switch, 24 ports",
'standard_price': 55.0,
'list_price': 70.0,
'type': 'consu',
'uom_id': uom_unit.id,
'uom_po_id': uom_unit.id,
'purchase_method': 'receive',
'default_code': 'PROD_DEL',
'taxes_id': False,
})
def test_vendor_bill_delivered(self):
"""Test if a order of product invoiced by delivered quantity can be
correctly invoiced."""
purchase_order = self.env['purchase.order'].with_context(tracking_disable=True).create({
'partner_id': self.partner_a.id,
})
PurchaseOrderLine = self.env['purchase.order.line'].with_context(tracking_disable=True)
pol_prod_deliver = PurchaseOrderLine.create({
'name': self.product_deliver.name,
'product_id': self.product_deliver.id,
'product_qty': 10.0,
'product_uom': self.product_deliver.uom_id.id,
'price_unit': self.product_deliver.list_price,
'order_id': purchase_order.id,
'taxes_id': False,
})
pol_serv_deliver = PurchaseOrderLine.create({
'name': self.service_deliver.name,
'product_id': self.service_deliver.id,
'product_qty': 10.0,
'product_uom': self.service_deliver.uom_id.id,
'price_unit': self.service_deliver.list_price,
'order_id': purchase_order.id,
'taxes_id': False,
})
purchase_order.button_confirm()
self.assertEqual(purchase_order.invoice_status, "no")
for line in purchase_order.order_line:
self.assertEqual(line.qty_to_invoice, 0.0)
self.assertEqual(line.qty_invoiced, 0.0)
purchase_order.order_line.qty_received = 5
self.assertEqual(purchase_order.invoice_status, "to invoice")
for line in purchase_order.order_line:
self.assertEqual(line.qty_to_invoice, 5)
self.assertEqual(line.qty_invoiced, 0.0)
purchase_order.action_create_invoice()
self.assertEqual(purchase_order.invoice_status, "invoiced")
for line in purchase_order.order_line:
self.assertEqual(line.qty_to_invoice, 0.0)
self.assertEqual(line.qty_invoiced, 5)
def test_vendor_bill_ordered(self):
"""Test if a order of product invoiced by ordered quantity can be
correctly invoiced."""
purchase_order = self.env['purchase.order'].with_context(tracking_disable=True).create({
'partner_id': self.partner_a.id,
})
PurchaseOrderLine = self.env['purchase.order.line'].with_context(tracking_disable=True)
pol_prod_order = PurchaseOrderLine.create({
'name': self.product_order.name,
'product_id': self.product_order.id,
'product_qty': 10.0,
'product_uom': self.product_order.uom_id.id,
'price_unit': self.product_order.list_price,
'order_id': purchase_order.id,
'taxes_id': False,
})
pol_serv_order = PurchaseOrderLine.create({
'name': self.service_order.name,
'product_id': self.service_order.id,
'product_qty': 10.0,
'product_uom': self.service_order.uom_id.id,
'price_unit': self.service_order.list_price,
'order_id': purchase_order.id,
'taxes_id': False,
})
purchase_order.button_confirm()
self.assertEqual(purchase_order.invoice_status, "to invoice")
for line in purchase_order.order_line:
self.assertEqual(line.qty_to_invoice, 10)
self.assertEqual(line.qty_invoiced, 0.0)
purchase_order.order_line.qty_received = 5
self.assertEqual(purchase_order.invoice_status, "to invoice")
for line in purchase_order.order_line:
self.assertEqual(line.qty_to_invoice, 10)
self.assertEqual(line.qty_invoiced, 0.0)
purchase_order.action_create_invoice()
self.assertEqual(purchase_order.invoice_status, "invoiced")
for line in purchase_order.order_line:
self.assertEqual(line.qty_to_invoice, 0.0)
self.assertEqual(line.qty_invoiced, 10)
def test_vendor_bill_delivered_return(self):
"""Test when return product, a order of product invoiced by delivered
quantity can be correctly invoiced."""
purchase_order = self.env['purchase.order'].with_context(tracking_disable=True).create({
'partner_id': self.partner_a.id,
})
PurchaseOrderLine = self.env['purchase.order.line'].with_context(tracking_disable=True)
pol_prod_deliver = PurchaseOrderLine.create({
'name': self.product_deliver.name,
'product_id': self.product_deliver.id,
'product_qty': 10.0,
'product_uom': self.product_deliver.uom_id.id,
'price_unit': self.product_deliver.list_price,
'order_id': purchase_order.id,
'taxes_id': False,
})
pol_serv_deliver = PurchaseOrderLine.create({
'name': self.service_deliver.name,
'product_id': self.service_deliver.id,
'product_qty': 10.0,
'product_uom': self.service_deliver.uom_id.id,
'price_unit': self.service_deliver.list_price,
'order_id': purchase_order.id,
'taxes_id': False,
})
purchase_order.button_confirm()
purchase_order.order_line.qty_received = 10
purchase_order.action_create_invoice()
self.assertEqual(purchase_order.invoice_status, "invoiced")
for line in purchase_order.order_line:
self.assertEqual(line.qty_to_invoice, 0.0)
self.assertEqual(line.qty_invoiced, 10)
purchase_order.order_line.qty_received = 5
self.assertEqual(purchase_order.invoice_status, "to invoice")
for line in purchase_order.order_line:
self.assertEqual(line.qty_to_invoice, -5)
self.assertEqual(line.qty_invoiced, 10)
purchase_order.action_create_invoice()
self.assertEqual(purchase_order.invoice_status, "invoiced")
for line in purchase_order.order_line:
self.assertEqual(line.qty_to_invoice, 0.0)
self.assertEqual(line.qty_invoiced, 5)
def test_vendor_bill_ordered_return(self):
"""Test when return product, a order of product invoiced by ordered
quantity can be correctly invoiced."""
purchase_order = self.env['purchase.order'].with_context(tracking_disable=True).create({
'partner_id': self.partner_a.id,
})
PurchaseOrderLine = self.env['purchase.order.line'].with_context(tracking_disable=True)
pol_prod_order = PurchaseOrderLine.create({
'name': self.product_order.name,
'product_id': self.product_order.id,
'product_qty': 10.0,
'product_uom': self.product_order.uom_id.id,
'price_unit': self.product_order.list_price,
'order_id': purchase_order.id,
'taxes_id': False,
})
pol_serv_order = PurchaseOrderLine.create({
'name': self.service_order.name,
'product_id': self.service_order.id,
'product_qty': 10.0,
'product_uom': self.service_order.uom_id.id,
'price_unit': self.service_order.list_price,
'order_id': purchase_order.id,
'taxes_id': False,
})
purchase_order.button_confirm()
purchase_order.order_line.qty_received = 10
purchase_order.action_create_invoice()
self.assertEqual(purchase_order.invoice_status, "invoiced")
for line in purchase_order.order_line:
self.assertEqual(line.qty_to_invoice, 0.0)
self.assertEqual(line.qty_invoiced, 10)
purchase_order.order_line.qty_received = 5
self.assertEqual(purchase_order.invoice_status, "invoiced")
for line in purchase_order.order_line:
self.assertEqual(line.qty_to_invoice, 0.0)
self.assertEqual(line.qty_invoiced, 10)
def test_vendor_severals_bills_and_multicurrency(self):
"""
This test ensures that, when adding several PO to a bill, if they are expressed with different
currency, the amount of each AML is converted to the bill's currency
"""
PurchaseOrderLine = self.env['purchase.order.line']
PurchaseBillUnion = self.env['purchase.bill.union']
ResCurrencyRate = self.env['res.currency.rate']
usd = self.env.ref('base.USD')
eur = self.env.ref('base.EUR')
purchase_orders = []
ResCurrencyRate.create({'currency_id': usd.id, 'rate': 1})
ResCurrencyRate.create({'currency_id': eur.id, 'rate': 2})
for currency in [usd, eur]:
po = self.env['purchase.order'].with_context(tracking_disable=True).create({
'partner_id': self.partner_a.id,
'currency_id': currency.id,
})
pol_prod_order = PurchaseOrderLine.create({
'name': self.product_order.name,
'product_id': self.product_order.id,
'product_qty': 1,
'product_uom': self.product_order.uom_id.id,
'price_unit': 1000,
'order_id': po.id,
'taxes_id': False,
})
po.button_confirm()
pol_prod_order.write({'qty_received': 1})
purchase_orders.append(po)
move_form = Form(self.env['account.move'].with_context(default_move_type='in_invoice'))
move_form.purchase_vendor_bill_id = PurchaseBillUnion.browse(-purchase_orders[0].id)
move_form.purchase_vendor_bill_id = PurchaseBillUnion.browse(-purchase_orders[1].id)
move = move_form.save()
amls = move.line_ids.filtered(lambda l: l.account_internal_group == 'expense')
self.assertEqual(move.amount_total, 1500)
self.assertEqual(move.currency_id, usd)
self.assertEqual(len(amls), 2)
self.assertEqual(amls[0].amount_currency, 1000)
self.assertEqual(amls[1].amount_currency, 500)
def test_product_price_decimal_accuracy(self):
self.env['ir.model.data'].xmlid_to_object('product.decimal_price').digits = 3
self.env.company.currency_id.rounding = 0.01
po = self.env['purchase.order'].with_context(tracking_disable=True).create({
'partner_id': self.partner_a.id,
'order_line': [(0, 0, {
'name': self.product_a.name,
'product_id': self.product_a.id,
'product_qty': 12,
'product_uom': self.product_a.uom_id.id,
'price_unit': 0.001,
'taxes_id': False,
})]
})
po.button_confirm()
po.order_line.qty_received = 12
move_form = Form(self.env['account.move'].with_context(default_move_type='in_invoice'))
move_form.purchase_vendor_bill_id = self.env['purchase.bill.union'].browse(-po.id)
move = move_form.save()
self.assertEqual(move.amount_total, 0.01)
| 43.235099
| 102
| 0.620433
| 1,553
| 13,057
| 4.943335
| 0.11848
| 0.101602
| 0.054448
| 0.063045
| 0.805132
| 0.76879
| 0.739482
| 0.720724
| 0.720724
| 0.718119
| 0
| 0.015014
| 0.265451
| 13,057
| 301
| 103
| 43.378738
| 0.785424
| 0.048327
| 0
| 0.708812
| 0
| 0
| 0.143019
| 0.001702
| 0
| 0
| 0
| 0
| 0.149425
| 1
| 0.02682
| false
| 0
| 0.011494
| 0
| 0.042146
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c9ebf2c66fb9fd27d68de77fa1d54d3cccb41c7
| 48,164
|
py
|
Python
|
tests/test_df_keycollision.py
|
Pcosmin/Optimus
|
ef3306d1b752bbfb1959ddb9103786acb8e9b9ba
|
[
"Apache-2.0"
] | 1
|
2020-09-22T13:04:37.000Z
|
2020-09-22T13:04:37.000Z
|
tests/test_df_keycollision.py
|
rafaelang/Optimus
|
809088f41588c968b2e30210f98a494a497b07ff
|
[
"Apache-2.0"
] | null | null | null |
tests/test_df_keycollision.py
|
rafaelang/Optimus
|
809088f41588c968b2e30210f98a494a497b07ff
|
[
"Apache-2.0"
] | null | null | null |
from pyspark.sql.types import *
from optimus import Optimus
from optimus.helpers.json import json_enconding
from optimus.helpers.functions import deep_sort
import unittest
from pyspark.ml.linalg import Vectors, VectorUDT, DenseVector
import numpy as np
nan = np.nan
from optimus.engines.spark.ml import keycollision as keyCol
op = Optimus(master='local')
source_df=op.create.df([('LOCNCODE', StringType(), True),('LOCNDSCR', StringType(), True),('ADDRESS1', StringType(), True),('ADDRESS2', StringType(), True),('ADDRESS3', StringType(), True),('CITY', StringType(), True),('STATE', StringType(), True),('ZIPCODE', StringType(), True),('COUNTRY', StringType(), True),('Location_Segment', StringType(), True),('PAQ', StringType(), True),('TIPUNI', StringType(), True),('Tipo_unidad', StringType(), True),('ITEMNMBR', StringType(), True),('ITMSHNAM', StringType(), True),('MZ', StringType(), True),('LT', StringType(), True),('EDIF', StringType(), True),('NIVEL', StringType(), True),('NOUNI', StringType(), True),('CONDO', StringType(), True),('REGIMEN', StringType(), True),('ETAPA', StringType(), True),('PROTO', StringType(), True),('ITEMDESC', StringType(), True),('NIVELES', StringType(), True),('COCHERA', StringType(), True),('RECAM', StringType(), True),('ALCOB', StringType(), True),('BANOS', StringType(), True),('Num_Balcon', StringType(), True),('SALA', StringType(), True),('COMEDOR', StringType(), True),('COCINA', StringType(), True),('Cuarto_Lavado', StringType(), True),('Cuarto_Servicio', StringType(), True),('OTROX', StringType(), True),('OTROX1', StringType(), True),('SupCons', StringType(), True),('PATIOSERV', StringType(), True),('TERRAZA', StringType(), True),('BALCON', StringType(), True),('AZOTEA', StringType(), True),('Otros', StringType(), True),('AREATOT', StringType(), True),('FRENTE', StringType(), True),('Sup_Terreno', StringType(), True),('EXCEDENTE', StringType(), True),('OTRO1', StringType(), True),('OTRO2', StringType(), True),('TAMANO', StringType(), True),('UBICAVER', StringType(), True),('UBICAHORI', StringType(), True),('QTYONHND_', StringType(), True),('QTYSOLD', StringType(), True),('INACTIVE', StringType(), True),('UOMPRICE', StringType(), True),('MONTOAPA', StringType(), True),('PAGINI', StringType(), True),('ENGANCHE', StringType(), True),('FECHESCRIPRO', StringType(), True),('FECHAENTREGA', 
StringType(), True),('FECHASALIDAVENTAS', StringType(), True),('LIBERADO_NOLIBERADO', StringType(), True),('ACTIVO_INACTIVO', StringType(), True),('Estatus1Vivienda', StringType(), True),('Estatus2Vivienda', StringType(), True),('CUSTNMBR', StringType(), True),('Nombre_Completo', StringType(), True),('cNombre', StringType(), True),('cApellidoPaterno', StringType(), True),('cApellidoMaterno', StringType(), True),('cRfc', StringType(), True),('cCurp', StringType(), True),('fkIdGradoInteres', StringType(), True),('cSexo', StringType(), True),('cEmail', StringType(), True),('cTelefonoCasa', StringType(), True),('cTelefonoCelular', StringType(), True),('cTelefonoTrabajo', StringType(), True),('cNumeroSeguroSocial', StringType(), True),('dFechaNacimiento', StringType(), True),('cEstadoCivil', StringType(), True),('cRegimenConyugal', StringType(), True),('cNacionalidad', StringType(), True),('cLugarNacimiento', StringType(), True),('cRecomendadoPor', StringType(), True),('fkIdMedio', StringType(), True),('cMedioContacto', StringType(), True),('cCalle', StringType(), True),('cNumeroExterior', StringType(), True),('cNumeroInterior', StringType(), True),('cColonia', StringType(), True),('cMunicipio', StringType(), True),('cEstado', StringType(), True),('cPais', StringType(), True),('cCodigoPostal', StringType(), True),('nTiempoResidencia', StringType(), True),('cComentario', StringType(), True),('cNumeroIdentificacion', StringType(), True),('cTipoIdentificación', StringType(), True),('REFERENCIA', StringType(), True),('FACTURA', StringType(), True),('NOTACR', StringType(), True),('Precio_cierre', StringType(), True),('Precio_cierre_Tot', StringType(), True),('Aumento_al_Contrato', StringType(), True),('Condonacón', StringType(), True),('Precio_Escritura_Total', StringType(), True),('Precio_Dev', StringType(), True),('Precio_Dev_Total', StringType(), True),('Notarios_Proyectados', StringType(), True),('Gatos_A_terceros', StringType(), True),('Depositos', StringType(), 
True),('Saldo', StringType(), True),('dFechaCreacion', StringType(), True),('dFechaModificacion', StringType(), True),('FECHA_Cotizado', StringType(), True),('FECHA_SolApartado', StringType(), True),('FECHA_AutApartado', StringType(), True),('Vigencia_Apartado', StringType(), True),('FechaVencimientoApartado', StringType(), True),('FECHA_SolDictamen', StringType(), True),('FECHA_ProcDictamen', StringType(), True),('FECHA_DictaminadoLlamada', StringType(), True),('FECHA_DictaminadoFirma', StringType(), True),('FECHA_Dictaminado', StringType(), True),('FECHA_Rechazado', StringType(), True),('FECHA_EscrituraAvaluo', StringType(), True),('FECHA_EscrituraFolio', StringType(), True),('FolioEscsritura', StringType(), True),('FECHA_EscrituraReal', StringType(), True),('FECHA_Cancelado', StringType(), True),('FECHA_Liberado', StringType(), True),('FECHA_Entregado', StringType(), True),('MotivoCancelacion', StringType(), True)], [('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV008', 'ALVCDEY0080', None, None, None, None, '008', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV021', 'ALVCDEY0690', None, None, None, None, '069', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV022', 'ALVCDEY0710', None, None, None, None, '071', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV027', 'ALVCDEY0810', None, None, None, None, '081', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV032', 'ALVCEEY0090', None, None, None, None, '009', None, '0', '0', 'EST CEEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV035', 'ALVCEEY0150', None, None, None, None, '015', None, '0', '0', 'EST CEEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV009', 'ALVCDEY0100', None, None, None, None, '010', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV012', 'ALVCDEY0160', None, None, None, None, '016', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV019', 'ALVCDEY0650', None, None, None, None, '065', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV044', 'ALVCUEY0340', None, None, None, None, '034', None, '0', '0', 'EST CUEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None)])
class Test_df_keycollision(unittest.TestCase):
maxDiff = None
@staticmethod
def test_fingerprint():
    """Verify keyCol.fingerprint on the 'STATE' column of source_df.

    fingerprint() is expected to return a DataFrame with the original
    columns plus an appended 'STATE***FINGERPRINT' column; for every row
    here (all with STATE == 'Distrito Federal') the generated key is the
    lowercased, accent/space-stripped value 'distritofederal'.
    """
    # Run the key-collision fingerprint method on the shared test DataFrame.
    actual_df =keyCol.fingerprint(source_df,'STATE')
    # Expected result: the same 10 'ALV' rows with the extra
    # 'STATE***FINGERPRINT' column set to 'distritofederal'.
    # NOTE: this literal is machine-generated; do not hand-edit or reflow it.
    expected_df = op.create.df([('LOCNCODE', StringType(), True),('LOCNDSCR', StringType(), True),('ADDRESS1', StringType(), True),('ADDRESS2', StringType(), True),('ADDRESS3', StringType(), True),('CITY', StringType(), True),('STATE', StringType(), True),('ZIPCODE', StringType(), True),('COUNTRY', StringType(), True),('Location_Segment', StringType(), True),('PAQ', StringType(), True),('TIPUNI', StringType(), True),('Tipo_unidad', StringType(), True),('ITEMNMBR', StringType(), True),('ITMSHNAM', StringType(), True),('MZ', StringType(), True),('LT', StringType(), True),('EDIF', StringType(), True),('NIVEL', StringType(), True),('NOUNI', StringType(), True),('CONDO', StringType(), True),('REGIMEN', StringType(), True),('ETAPA', StringType(), True),('PROTO', StringType(), True),('ITEMDESC', StringType(), True),('NIVELES', StringType(), True),('COCHERA', StringType(), True),('RECAM', StringType(), True),('ALCOB', StringType(), True),('BANOS', StringType(), True),('Num_Balcon', StringType(), True),('SALA', StringType(), True),('COMEDOR', StringType(), True),('COCINA', StringType(), True),('Cuarto_Lavado', StringType(), True),('Cuarto_Servicio', StringType(), True),('OTROX', StringType(), True),('OTROX1', StringType(), True),('SupCons', StringType(), True),('PATIOSERV', StringType(), True),('TERRAZA', StringType(), True),('BALCON', StringType(), True),('AZOTEA', StringType(), True),('Otros', StringType(), True),('AREATOT', StringType(), True),('FRENTE', StringType(), True),('Sup_Terreno', StringType(), True),('EXCEDENTE', StringType(), True),('OTRO1', StringType(), True),('OTRO2', StringType(), True),('TAMANO', StringType(), True),('UBICAVER', StringType(), True),('UBICAHORI', StringType(), True),('QTYONHND_', StringType(), True),('QTYSOLD', StringType(), True),('INACTIVE', StringType(), True),('UOMPRICE', StringType(), True),('MONTOAPA', StringType(), True),('PAGINI', StringType(), True),('ENGANCHE', StringType(), True),('FECHESCRIPRO', StringType(), True),('FECHAENTREGA', 
StringType(), True),('FECHASALIDAVENTAS', StringType(), True),('LIBERADO_NOLIBERADO', StringType(), True),('ACTIVO_INACTIVO', StringType(), True),('Estatus1Vivienda', StringType(), True),('Estatus2Vivienda', StringType(), True),('CUSTNMBR', StringType(), True),('Nombre_Completo', StringType(), True),('cNombre', StringType(), True),('cApellidoPaterno', StringType(), True),('cApellidoMaterno', StringType(), True),('cRfc', StringType(), True),('cCurp', StringType(), True),('fkIdGradoInteres', StringType(), True),('cSexo', StringType(), True),('cEmail', StringType(), True),('cTelefonoCasa', StringType(), True),('cTelefonoCelular', StringType(), True),('cTelefonoTrabajo', StringType(), True),('cNumeroSeguroSocial', StringType(), True),('dFechaNacimiento', StringType(), True),('cEstadoCivil', StringType(), True),('cRegimenConyugal', StringType(), True),('cNacionalidad', StringType(), True),('cLugarNacimiento', StringType(), True),('cRecomendadoPor', StringType(), True),('fkIdMedio', StringType(), True),('cMedioContacto', StringType(), True),('cCalle', StringType(), True),('cNumeroExterior', StringType(), True),('cNumeroInterior', StringType(), True),('cColonia', StringType(), True),('cMunicipio', StringType(), True),('cEstado', StringType(), True),('cPais', StringType(), True),('cCodigoPostal', StringType(), True),('nTiempoResidencia', StringType(), True),('cComentario', StringType(), True),('cNumeroIdentificacion', StringType(), True),('cTipoIdentificación', StringType(), True),('REFERENCIA', StringType(), True),('FACTURA', StringType(), True),('NOTACR', StringType(), True),('Precio_cierre', StringType(), True),('Precio_cierre_Tot', StringType(), True),('Aumento_al_Contrato', StringType(), True),('Condonacón', StringType(), True),('Precio_Escritura_Total', StringType(), True),('Precio_Dev', StringType(), True),('Precio_Dev_Total', StringType(), True),('Notarios_Proyectados', StringType(), True),('Gatos_A_terceros', StringType(), True),('Depositos', StringType(), 
True),('Saldo', StringType(), True),('dFechaCreacion', StringType(), True),('dFechaModificacion', StringType(), True),('FECHA_Cotizado', StringType(), True),('FECHA_SolApartado', StringType(), True),('FECHA_AutApartado', StringType(), True),('Vigencia_Apartado', StringType(), True),('FechaVencimientoApartado', StringType(), True),('FECHA_SolDictamen', StringType(), True),('FECHA_ProcDictamen', StringType(), True),('FECHA_DictaminadoLlamada', StringType(), True),('FECHA_DictaminadoFirma', StringType(), True),('FECHA_Dictaminado', StringType(), True),('FECHA_Rechazado', StringType(), True),('FECHA_EscrituraAvaluo', StringType(), True),('FECHA_EscrituraFolio', StringType(), True),('FolioEscsritura', StringType(), True),('FECHA_EscrituraReal', StringType(), True),('FECHA_Cancelado', StringType(), True),('FECHA_Liberado', StringType(), True),('FECHA_Entregado', StringType(), True),('MotivoCancelacion', StringType(), True),('STATE***FINGERPRINT', StringType(), True)], [('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV008', 'ALVCDEY0080', None, None, None, None, '008', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'distritofederal'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV021', 'ALVCDEY0690', None, None, None, None, '069', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'distritofederal'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV022', 'ALVCDEY0710', None, None, None, None, '071', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'distritofederal'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV027', 'ALVCDEY0810', None, None, None, None, '081', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'distritofederal'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV032', 'ALVCEEY0090', None, None, None, None, '009', None, '0', '0', 'EST CEEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'distritofederal'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV035', 'ALVCEEY0150', None, None, None, None, '015', None, '0', '0', 'EST CEEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'distritofederal'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV009', 'ALVCDEY0100', None, None, None, None, '010', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'distritofederal'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV012', 'ALVCDEY0160', None, None, None, None, '016', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'distritofederal'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV019', 'ALVCDEY0650', None, None, None, None, '065', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'distritofederal'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV044', 'ALVCUEY0340', None, None, None, None, '034', None, '0', '0', 'EST CUEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'distritofederal')])
    # NOTE(review): collect() comparison assumes a stable row order from
    # fingerprint(); presumably deterministic for this fixture — confirm.
    assert (expected_df.collect() == actual_df.collect())
@staticmethod
def test_fingerprint_cluster():
    """Cluster the 'STATE' column by fingerprint key and check the result.

    All rows share STATE == 'Distrito Federal', so a single cluster is
    expected: one distinct value occurring 10 times. Both sides are passed
    through json_enconding so the comparison is on canonical JSON text.
    """
    clusters = keyCol.fingerprint_cluster(source_df, 'STATE')
    actual_json = json_enconding(clusters)
    expected_clusters = {'Distrito Federal': {'similar': {'Distrito Federal': 10}, 'count': 1, 'sum': 10}}
    expected_json = json_enconding(expected_clusters)
    assert expected_json == actual_json
@staticmethod
def test_n_gram_fingerprint():
actual_df =keyCol.n_gram_fingerprint(source_df,'STATE')
expected_df = op.create.df([('LOCNCODE', StringType(), True),('LOCNDSCR', StringType(), True),('ADDRESS1', StringType(), True),('ADDRESS2', StringType(), True),('ADDRESS3', StringType(), True),('CITY', StringType(), True),('STATE', StringType(), True),('ZIPCODE', StringType(), True),('COUNTRY', StringType(), True),('Location_Segment', StringType(), True),('PAQ', StringType(), True),('TIPUNI', StringType(), True),('Tipo_unidad', StringType(), True),('ITEMNMBR', StringType(), True),('ITMSHNAM', StringType(), True),('MZ', StringType(), True),('LT', StringType(), True),('EDIF', StringType(), True),('NIVEL', StringType(), True),('NOUNI', StringType(), True),('CONDO', StringType(), True),('REGIMEN', StringType(), True),('ETAPA', StringType(), True),('PROTO', StringType(), True),('ITEMDESC', StringType(), True),('NIVELES', StringType(), True),('COCHERA', StringType(), True),('RECAM', StringType(), True),('ALCOB', StringType(), True),('BANOS', StringType(), True),('Num_Balcon', StringType(), True),('SALA', StringType(), True),('COMEDOR', StringType(), True),('COCINA', StringType(), True),('Cuarto_Lavado', StringType(), True),('Cuarto_Servicio', StringType(), True),('OTROX', StringType(), True),('OTROX1', StringType(), True),('SupCons', StringType(), True),('PATIOSERV', StringType(), True),('TERRAZA', StringType(), True),('BALCON', StringType(), True),('AZOTEA', StringType(), True),('Otros', StringType(), True),('AREATOT', StringType(), True),('FRENTE', StringType(), True),('Sup_Terreno', StringType(), True),('EXCEDENTE', StringType(), True),('OTRO1', StringType(), True),('OTRO2', StringType(), True),('TAMANO', StringType(), True),('UBICAVER', StringType(), True),('UBICAHORI', StringType(), True),('QTYONHND_', StringType(), True),('QTYSOLD', StringType(), True),('INACTIVE', StringType(), True),('UOMPRICE', StringType(), True),('MONTOAPA', StringType(), True),('PAGINI', StringType(), True),('ENGANCHE', StringType(), True),('FECHESCRIPRO', StringType(), True),('FECHAENTREGA', 
StringType(), True),('FECHASALIDAVENTAS', StringType(), True),('LIBERADO_NOLIBERADO', StringType(), True),('ACTIVO_INACTIVO', StringType(), True),('Estatus1Vivienda', StringType(), True),('Estatus2Vivienda', StringType(), True),('CUSTNMBR', StringType(), True),('Nombre_Completo', StringType(), True),('cNombre', StringType(), True),('cApellidoPaterno', StringType(), True),('cApellidoMaterno', StringType(), True),('cRfc', StringType(), True),('cCurp', StringType(), True),('fkIdGradoInteres', StringType(), True),('cSexo', StringType(), True),('cEmail', StringType(), True),('cTelefonoCasa', StringType(), True),('cTelefonoCelular', StringType(), True),('cTelefonoTrabajo', StringType(), True),('cNumeroSeguroSocial', StringType(), True),('dFechaNacimiento', StringType(), True),('cEstadoCivil', StringType(), True),('cRegimenConyugal', StringType(), True),('cNacionalidad', StringType(), True),('cLugarNacimiento', StringType(), True),('cRecomendadoPor', StringType(), True),('fkIdMedio', StringType(), True),('cMedioContacto', StringType(), True),('cCalle', StringType(), True),('cNumeroExterior', StringType(), True),('cNumeroInterior', StringType(), True),('cColonia', StringType(), True),('cMunicipio', StringType(), True),('cEstado', StringType(), True),('cPais', StringType(), True),('cCodigoPostal', StringType(), True),('nTiempoResidencia', StringType(), True),('cComentario', StringType(), True),('cNumeroIdentificacion', StringType(), True),('cTipoIdentificación', StringType(), True),('REFERENCIA', StringType(), True),('FACTURA', StringType(), True),('NOTACR', StringType(), True),('Precio_cierre', StringType(), True),('Precio_cierre_Tot', StringType(), True),('Aumento_al_Contrato', StringType(), True),('Condonacón', StringType(), True),('Precio_Escritura_Total', StringType(), True),('Precio_Dev', StringType(), True),('Precio_Dev_Total', StringType(), True),('Notarios_Proyectados', StringType(), True),('Gatos_A_terceros', StringType(), True),('Depositos', StringType(), 
True),('Saldo', StringType(), True),('dFechaCreacion', StringType(), True),('dFechaModificacion', StringType(), True),('FECHA_Cotizado', StringType(), True),('FECHA_SolApartado', StringType(), True),('FECHA_AutApartado', StringType(), True),('Vigencia_Apartado', StringType(), True),('FechaVencimientoApartado', StringType(), True),('FECHA_SolDictamen', StringType(), True),('FECHA_ProcDictamen', StringType(), True),('FECHA_DictaminadoLlamada', StringType(), True),('FECHA_DictaminadoFirma', StringType(), True),('FECHA_Dictaminado', StringType(), True),('FECHA_Rechazado', StringType(), True),('FECHA_EscrituraAvaluo', StringType(), True),('FECHA_EscrituraFolio', StringType(), True),('FolioEscsritura', StringType(), True),('FECHA_EscrituraReal', StringType(), True),('FECHA_Cancelado', StringType(), True),('FECHA_Liberado', StringType(), True),('FECHA_Entregado', StringType(), True),('MotivoCancelacion', StringType(), True),('STATE***FINGERPRINT', StringType(), True)], [('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV008', 'ALVCDEY0080', None, None, None, None, '008', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'aldediederfeisitofraristtotr'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV021', 'ALVCDEY0690', None, None, None, None, '069', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'aldediederfeisitofraristtotr'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV022', 'ALVCDEY0710', None, None, None, None, '071', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'aldediederfeisitofraristtotr'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV027', 'ALVCDEY0810', None, None, None, None, '081', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'aldediederfeisitofraristtotr'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV032', 'ALVCEEY0090', None, None, None, None, '009', None, '0', '0', 'EST CEEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'aldediederfeisitofraristtotr'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV035', 'ALVCEEY0150', None, None, None, None, '015', None, '0', '0', 'EST CEEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'aldediederfeisitofraristtotr'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV009', 'ALVCDEY0100', None, None, None, None, '010', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'aldediederfeisitofraristtotr'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV012', 'ALVCDEY0160', None, None, None, None, '016', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'aldediederfeisitofraristtotr'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV019', 'ALVCDEY0650', None, None, None, None, '065', None, '0', '0', 'EST CDEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'aldediederfeisitofraristtotr'), ('ALV', 'Altos Lindavista', 'Guanajuato # 85', None, 'San Bartolo Atepehuacan', 'Gustavo A. 
Madero', 'Distrito Federal', '07730', 'Mexico', '0531', None, '2', 'ESTACIONAMIENTO', 'ALVV044', 'ALVCUEY0340', None, None, None, None, '034', None, '0', '0', 'EST CUEY', 'Cajon virtual', None, None, None, None, None, None, None, None, None, None, None, None, None, '2.2', None, None, None, None, None, None, '2.4', None, '0', None, None, 'Chico', 'Cajon virtual', 'Cajon virtual', '0', '0', '1', '0', None, None, None, None, None, None, 'NO LIBERADO', 'INACTIVO', 'DISPONIBLE', '000-DISPONIBLE', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', '.00000', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'aldediederfeisitofraristtotr')])
assert (expected_df.collect() == actual_df.collect())
@staticmethod
def test_n_gram_fingerprint_cluster():
    """Cluster the STATE column by 2-gram fingerprint and compare JSON encodings."""
    clustered = keyCol.n_gram_fingerprint_cluster(source_df, 'STATE', 2)
    actual = json_enconding(clustered)
    # Single cluster expected: every row carries the same state value.
    expected = json_enconding(
        {'Distrito Federal': {'similar': {'Distrito Federal': 10}, 'count': 1, 'sum': 10}}
    )
    assert expected == actual
| 1,301.72973
| 15,772
| 0.655386
| 6,093
| 48,164
| 5.153455
| 0.043821
| 0.626752
| 0.859873
| 1.054777
| 0.987516
| 0.98465
| 0.981465
| 0.981465
| 0.981465
| 0.981465
| 0
| 0.064748
| 0.113363
| 48,164
| 36
| 15,773
| 1,337.888889
| 0.670546
| 0
| 0
| 0.342857
| 0
| 0
| 0.302134
| 0.01416
| 0
| 0
| 0
| 0
| 0.114286
| 1
| 0.114286
| false
| 0
| 0.228571
| 0
| 0.4
| 0.228571
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
5909800a70aacc90ac8870e927d6a571fa65eef8
| 88
|
py
|
Python
|
scylla/proxy/__init__.py
|
peng4217/scylla
|
aa5133d7c6d565c95651fc75b26ad605da0982cd
|
[
"Apache-2.0"
] | 3,556
|
2018-04-28T22:59:40.000Z
|
2022-03-28T22:20:07.000Z
|
scylla/proxy/__init__.py
|
peng4217/scylla
|
aa5133d7c6d565c95651fc75b26ad605da0982cd
|
[
"Apache-2.0"
] | 120
|
2018-05-20T11:49:00.000Z
|
2022-03-07T00:08:55.000Z
|
scylla/proxy/__init__.py
|
peng4217/scylla
|
aa5133d7c6d565c95651fc75b26ad605da0982cd
|
[
"Apache-2.0"
] | 518
|
2018-05-27T01:42:25.000Z
|
2022-03-25T12:38:32.000Z
|
from .server import start_forward_proxy_server_non_blocking, start_forward_proxy_server
| 44
| 87
| 0.920455
| 13
| 88
| 5.615385
| 0.615385
| 0.328767
| 0.465753
| 0.630137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056818
| 88
| 1
| 88
| 88
| 0.879518
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
59269e3f3af184609ce95d0b83e631749af0d841
| 5,771
|
py
|
Python
|
modelos/test_edna.py
|
Chrpob/heroku-telegram-bot
|
7a30c4dcc4928b5aa16dfdce2df6251e0ea33ec4
|
[
"MIT"
] | null | null | null |
modelos/test_edna.py
|
Chrpob/heroku-telegram-bot
|
7a30c4dcc4928b5aa16dfdce2df6251e0ea33ec4
|
[
"MIT"
] | null | null | null |
modelos/test_edna.py
|
Chrpob/heroku-telegram-bot
|
7a30c4dcc4928b5aa16dfdce2df6251e0ea33ec4
|
[
"MIT"
] | null | null | null |
import unittest
from modelos.edna import Edna, DESEMPENO_NARRATIVO, COMPRENSION_DISCURSO_NARRATIVO
class TestEdna(unittest.TestCase):
    """Exercise Edna scoring across age bands.

    Each case builds an ``Edna(edad, dn_score, cdn_score)`` and checks the
    ``resultado``/``percentil`` pair reported for the two scales:
    DESEMPENO_NARRATIVO (DN) and COMPRENSION_DISCURSO_NARRATIVO (CDN).
    """

    def _check(self, edna, dn_resultado, dn_percentil, cdn_resultado, cdn_percentil):
        # Shared assertion helper: verify both scales of one Edna evaluation.
        self.assertEqual(edna.resultados[DESEMPENO_NARRATIVO]['resultado'], dn_resultado)
        self.assertEqual(edna.resultados[DESEMPENO_NARRATIVO]['percentil'], dn_percentil)
        self.assertEqual(edna.resultados[COMPRENSION_DISCURSO_NARRATIVO]['resultado'], cdn_resultado)
        self.assertEqual(edna.resultados[COMPRENSION_DISCURSO_NARRATIVO]['percentil'], cdn_percentil)

    def test_edad_4(self):
        # Age-4 cut-offs — dn: 2, 4, 9, 12, 14 / cdn: 12, 19, 26, 30, 34
        self._check(Edna('4', '1', '7'),
                    'Deficit', 'Menor a p10', 'Deficit', 'Menor a p10')
        self._check(Edna('4', '2', '15'),
                    'Deficit', 'Igual a p10', 'Normal bajo', 'Menor a p25')

    def test_edad_5(self):
        # Age-5 cut-offs — dn: 2, 6, 11, 16, 18 / cdn: 12, 19, 26, 30, 34
        self._check(Edna('5', '5', '19'),
                    'Normal bajo', 'Menor a p25', 'Normal bajo', 'Igual a p25')
        self._check(Edna('5', '6', '23'),
                    'Normal bajo', 'Igual a p25', 'Normal', 'Menor a p50')

    def test_edad_6(self):
        # Age-6 cut-offs — dn: 9, 11, 13, 17, 18 / cdn: 19, 25, 30, 33, 35
        self._check(Edna('6', '12', '31'),
                    'Normal', 'Menor a p50', 'Normal', 'Menor a p75')
        self._check(Edna('6', '13', '33'),
                    'Normal', 'Igual a p50', 'Normal', 'Igual a p75')

    def test_edad_7(self):
        # At age 7 the DN scale no longer applies — dn: NO APLICA / cdn: 19, 25, 30, 33, 35
        self._check(Edna('7', None, '31'),
                    'NO APLICA', 'NO APLICA', 'Normal', 'Menor a p75')

    def test_edad_10(self):
        # Age-10 cut-offs — dn: 15, 18, 20, 22, 23 / cdn: 30, 34, 35, 37, 38
        self._check(Edna('10', '23', '38'),
                    'Normal', 'Igual a p90', 'Normal', 'Igual a p90')
        self._check(Edna('10', '24', '39'),
                    'Normal', 'Superior a p90', 'Normal', 'Superior a p90')
# Entry point: allows running this test module directly (e.g. `python test_edna.py`).
if __name__ == '__main__':
    print('Unit test EDNA')
    unittest.main()
| 64.842697
| 129
| 0.712875
| 747
| 5,771
| 5.085676
| 0.099063
| 0.047381
| 0.105291
| 0.169518
| 0.91682
| 0.902869
| 0.884707
| 0.845222
| 0.840484
| 0.836009
| 0
| 0.040033
| 0.168948
| 5,771
| 89
| 130
| 64.842697
| 0.752085
| 0.062381
| 0
| 0.25
| 0
| 0
| 0.133753
| 0
| 0
| 0
| 0
| 0
| 0.5625
| 1
| 0.078125
| false
| 0
| 0.03125
| 0
| 0.125
| 0.015625
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
596c137c11681dcf130695e0f5c98ccdf75b3a37
| 35
|
py
|
Python
|
csr_utils/__init__.py
|
narges-rzv/csr_utils
|
9a643b4a7ab2ccc889664ea63e9bff764f8cbc8b
|
[
"MIT"
] | null | null | null |
csr_utils/__init__.py
|
narges-rzv/csr_utils
|
9a643b4a7ab2ccc889664ea63e9bff764f8cbc8b
|
[
"MIT"
] | null | null | null |
csr_utils/__init__.py
|
narges-rzv/csr_utils
|
9a643b4a7ab2ccc889664ea63e9bff764f8cbc8b
|
[
"MIT"
] | null | null | null |
from csr_utils.csr_utils import *
| 11.666667
| 33
| 0.8
| 6
| 35
| 4.333333
| 0.666667
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 35
| 2
| 34
| 17.5
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
59d68873e93594462507c55ad4c70362eab1092b
| 174,272
|
py
|
Python
|
py/client/pydeephaven/proto/console_pb2.py
|
mattrunyon/deephaven-core
|
80e3567e4647ab76a81e483d0a8ab542f9aadace
|
[
"MIT"
] | null | null | null |
py/client/pydeephaven/proto/console_pb2.py
|
mattrunyon/deephaven-core
|
80e3567e4647ab76a81e483d0a8ab542f9aadace
|
[
"MIT"
] | null | null | null |
py/client/pydeephaven/proto/console_pb2.py
|
mattrunyon/deephaven-core
|
80e3567e4647ab76a81e483d0a8ab542f9aadace
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: deephaven/proto/console.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from pydeephaven.proto import table_pb2 as deephaven_dot_proto_dot_table__pb2
from pydeephaven.proto import ticket_pb2 as deephaven_dot_proto_dot_ticket__pb2
from pydeephaven.proto import application_pb2 as deephaven_dot_proto_dot_application__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='deephaven/proto/console.proto',
package='io.deephaven.proto.backplane.script.grpc',
syntax='proto3',
serialized_options=b'H\001P\001',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x1d\x64\x65\x65phaven/proto/console.proto\x12(io.deephaven.proto.backplane.script.grpc\x1a\x1b\x64\x65\x65phaven/proto/table.proto\x1a\x1c\x64\x65\x65phaven/proto/ticket.proto\x1a!deephaven/proto/application.proto\"\x18\n\x16GetConsoleTypesRequest\"0\n\x17GetConsoleTypesResponse\x12\x15\n\rconsole_types\x18\x01 \x03(\t\"i\n\x13StartConsoleRequest\x12<\n\tresult_id\x18\x01 \x01(\x0b\x32).io.deephaven.proto.backplane.grpc.Ticket\x12\x14\n\x0csession_type\x18\x02 \x01(\t\"T\n\x14StartConsoleResponse\x12<\n\tresult_id\x18\x01 \x01(\x0b\x32).io.deephaven.proto.backplane.grpc.Ticket\"I\n\x16LogSubscriptionRequest\x12\x1f\n\x17last_seen_log_timestamp\x18\x01 \x01(\x03\x12\x0e\n\x06levels\x18\x02 \x03(\t\"O\n\x13LogSubscriptionData\x12\x0e\n\x06micros\x18\x01 \x01(\x03\x12\x11\n\tlog_level\x18\x02 \x01(\t\x12\x0f\n\x07message\x18\x03 \x01(\tJ\x04\x08\x04\x10\x05\"j\n\x15\x45xecuteCommandRequest\x12=\n\nconsole_id\x18\x01 \x01(\x0b\x32).io.deephaven.proto.backplane.grpc.Ticket\x12\x0c\n\x04\x63ode\x18\x03 \x01(\tJ\x04\x08\x02\x10\x03\"w\n\x16\x45xecuteCommandResponse\x12\x15\n\rerror_message\x18\x01 \x01(\t\x12\x46\n\x07\x63hanges\x18\x02 \x01(\x0b\x32\x35.io.deephaven.proto.backplane.grpc.FieldsChangeUpdate\"\xb5\x01\n\x1a\x42indTableToVariableRequest\x12=\n\nconsole_id\x18\x01 \x01(\x0b\x32).io.deephaven.proto.backplane.grpc.Ticket\x12\x15\n\rvariable_name\x18\x03 \x01(\t\x12;\n\x08table_id\x18\x04 \x01(\x0b\x32).io.deephaven.proto.backplane.grpc.TicketJ\x04\x08\x02\x10\x03\"\x1d\n\x1b\x42indTableToVariableResponse\"\x94\x01\n\x14\x43\x61ncelCommandRequest\x12=\n\nconsole_id\x18\x01 \x01(\x0b\x32).io.deephaven.proto.backplane.grpc.Ticket\x12=\n\ncommand_id\x18\x02 \x01(\x0b\x32).io.deephaven.proto.backplane.grpc.Ticket\"\x17\n\x15\x43\x61ncelCommandResponse\"\x93\x03\n\x13\x41utoCompleteRequest\x12V\n\ropen_document\x18\x01 \x01(\x0b\x32=.io.deephaven.proto.backplane.script.grpc.OpenDocumentRequestH\x00\x12Z\n\x0f\x63hange_document\x18\x02 
\x01(\x0b\x32?.io.deephaven.proto.backplane.script.grpc.ChangeDocumentRequestH\x00\x12\x63\n\x14get_completion_items\x18\x03 \x01(\x0b\x32\x43.io.deephaven.proto.backplane.script.grpc.GetCompletionItemsRequestH\x00\x12X\n\x0e\x63lose_document\x18\x04 \x01(\x0b\x32>.io.deephaven.proto.backplane.script.grpc.CloseDocumentRequestH\x00\x42\t\n\x07request\"\x84\x01\n\x14\x41utoCompleteResponse\x12`\n\x10\x63ompletion_items\x18\x01 \x01(\x0b\x32\x44.io.deephaven.proto.backplane.script.grpc.GetCompletionItemsResponseH\x00\x42\n\n\x08response\"\x15\n\x13\x42rowserNextResponse\"\xa7\x01\n\x13OpenDocumentRequest\x12=\n\nconsole_id\x18\x01 \x01(\x0b\x32).io.deephaven.proto.backplane.grpc.Ticket\x12Q\n\rtext_document\x18\x02 \x01(\x0b\x32:.io.deephaven.proto.backplane.script.grpc.TextDocumentItem\"S\n\x10TextDocumentItem\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12\x13\n\x0blanguage_id\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x0c\n\x04text\x18\x04 \x01(\t\"\xb7\x01\n\x14\x43loseDocumentRequest\x12=\n\nconsole_id\x18\x01 \x01(\x0b\x32).io.deephaven.proto.backplane.grpc.Ticket\x12`\n\rtext_document\x18\x02 \x01(\x0b\x32I.io.deephaven.proto.backplane.script.grpc.VersionedTextDocumentIdentifier\"\xc0\x03\n\x15\x43hangeDocumentRequest\x12=\n\nconsole_id\x18\x01 \x01(\x0b\x32).io.deephaven.proto.backplane.grpc.Ticket\x12`\n\rtext_document\x18\x02 \x01(\x0b\x32I.io.deephaven.proto.backplane.script.grpc.VersionedTextDocumentIdentifier\x12w\n\x0f\x63ontent_changes\x18\x03 \x03(\x0b\x32^.io.deephaven.proto.backplane.script.grpc.ChangeDocumentRequest.TextDocumentContentChangeEvent\x1a\x8c\x01\n\x1eTextDocumentContentChangeEvent\x12\x46\n\x05range\x18\x01 \x01(\x0b\x32\x37.io.deephaven.proto.backplane.script.grpc.DocumentRange\x12\x14\n\x0crange_length\x18\x02 \x01(\x05\x12\x0c\n\x04text\x18\x03 \x01(\t\"\x93\x01\n\rDocumentRange\x12\x41\n\x05start\x18\x01 \x01(\x0b\x32\x32.io.deephaven.proto.backplane.script.grpc.Position\x12?\n\x03\x65nd\x18\x02 
\x01(\x0b\x32\x32.io.deephaven.proto.backplane.script.grpc.Position\"?\n\x1fVersionedTextDocumentIdentifier\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\"+\n\x08Position\x12\x0c\n\x04line\x18\x01 \x01(\x05\x12\x11\n\tcharacter\x18\x02 \x01(\x05\"\xe4\x02\n\x19GetCompletionItemsRequest\x12=\n\nconsole_id\x18\x01 \x01(\x0b\x32).io.deephaven.proto.backplane.grpc.Ticket\x12L\n\x07\x63ontext\x18\x02 \x01(\x0b\x32;.io.deephaven.proto.backplane.script.grpc.CompletionContext\x12`\n\rtext_document\x18\x03 \x01(\x0b\x32I.io.deephaven.proto.backplane.script.grpc.VersionedTextDocumentIdentifier\x12\x44\n\x08position\x18\x04 \x01(\x0b\x32\x32.io.deephaven.proto.backplane.script.grpc.Position\x12\x12\n\nrequest_id\x18\x05 \x01(\x05\"D\n\x11\x43ompletionContext\x12\x14\n\x0ctrigger_kind\x18\x01 \x01(\x05\x12\x19\n\x11trigger_character\x18\x02 \x01(\t\"\x8a\x01\n\x1aGetCompletionItemsResponse\x12G\n\x05items\x18\x01 \x03(\x0b\x32\x38.io.deephaven.proto.backplane.script.grpc.CompletionItem\x12\x12\n\nrequest_id\x18\x02 \x01(\x05\x12\x0f\n\x07success\x18\x03 \x01(\x08\"\x93\x03\n\x0e\x43ompletionItem\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0e\n\x06length\x18\x02 \x01(\x05\x12\r\n\x05label\x18\x03 \x01(\t\x12\x0c\n\x04kind\x18\x04 \x01(\x05\x12\x0e\n\x06\x64\x65tail\x18\x05 \x01(\t\x12\x15\n\rdocumentation\x18\x06 \x01(\t\x12\x12\n\ndeprecated\x18\x07 \x01(\x08\x12\x11\n\tpreselect\x18\x08 \x01(\x08\x12\x45\n\ttext_edit\x18\t \x01(\x0b\x32\x32.io.deephaven.proto.backplane.script.grpc.TextEdit\x12\x11\n\tsort_text\x18\n \x01(\t\x12\x13\n\x0b\x66ilter_text\x18\x0b \x01(\t\x12\x1a\n\x12insert_text_format\x18\x0c \x01(\x05\x12Q\n\x15\x61\x64\x64itional_text_edits\x18\r \x03(\x0b\x32\x32.io.deephaven.proto.backplane.script.grpc.TextEdit\x12\x19\n\x11\x63ommit_characters\x18\x0e \x03(\t\"`\n\x08TextEdit\x12\x46\n\x05range\x18\x01 \x01(\x0b\x32\x37.io.deephaven.proto.backplane.script.grpc.DocumentRange\x12\x0c\n\x04text\x18\x02 
\x01(\t\"\xab/\n\x10\x46igureDescriptor\x12\x12\n\x05title\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x12\n\ntitle_font\x18\x02 \x01(\t\x12\x13\n\x0btitle_color\x18\x03 \x01(\t\x12\x17\n\x0fupdate_interval\x18\x07 \x01(\x03\x12\x0c\n\x04\x63ols\x18\x08 \x01(\x05\x12\x0c\n\x04rows\x18\t \x01(\x05\x12Z\n\x06\x63harts\x18\n \x03(\x0b\x32J.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor\x12\x0e\n\x06\x65rrors\x18\r \x03(\t\x1a\xa0\x05\n\x0f\x43hartDescriptor\x12\x0f\n\x07\x63olspan\x18\x01 \x01(\x05\x12\x0f\n\x07rowspan\x18\x02 \x01(\x05\x12[\n\x06series\x18\x03 \x03(\x0b\x32K.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor\x12\x66\n\x0cmulti_series\x18\x04 \x03(\x0b\x32P.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor\x12W\n\x04\x61xes\x18\x05 \x03(\x0b\x32I.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor\x12h\n\nchart_type\x18\x06 \x01(\x0e\x32T.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.ChartType\x12\x12\n\x05title\x18\x07 \x01(\tH\x00\x88\x01\x01\x12\x12\n\ntitle_font\x18\x08 \x01(\t\x12\x13\n\x0btitle_color\x18\t \x01(\t\x12\x13\n\x0bshow_legend\x18\n \x01(\x08\x12\x13\n\x0blegend_font\x18\x0b \x01(\t\x12\x14\n\x0clegend_color\x18\x0c \x01(\t\x12\x0c\n\x04is3d\x18\r \x01(\x08\"N\n\tChartType\x12\x06\n\x02XY\x10\x00\x12\x07\n\x03PIE\x10\x01\x12\x08\n\x04OHLC\x10\x02\x12\x0c\n\x08\x43\x41TEGORY\x10\x03\x12\x07\n\x03XYZ\x10\x04\x12\x0f\n\x0b\x43\x41TEGORY_3D\x10\x05\x42\x08\n\x06_title\x1a\xfe\x04\n\x10SeriesDescriptor\x12^\n\nplot_style\x18\x01 \x01(\x0e\x32J.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x1a\n\rlines_visible\x18\x03 \x01(\x08H\x00\x88\x01\x01\x12\x1b\n\x0eshapes_visible\x18\x04 \x01(\x08H\x01\x88\x01\x01\x12\x18\n\x10gradient_visible\x18\x05 \x01(\x08\x12\x12\n\nline_color\x18\x06 \x01(\t\x12\x1f\n\x12point_label_format\x18\x08 
\x01(\tH\x02\x88\x01\x01\x12\x1f\n\x12x_tool_tip_pattern\x18\t \x01(\tH\x03\x88\x01\x01\x12\x1f\n\x12y_tool_tip_pattern\x18\n \x01(\tH\x04\x88\x01\x01\x12\x13\n\x0bshape_label\x18\x0b \x01(\t\x12\x17\n\nshape_size\x18\x0c \x01(\x01H\x05\x88\x01\x01\x12\x13\n\x0bshape_color\x18\r \x01(\t\x12\r\n\x05shape\x18\x0e \x01(\t\x12\x61\n\x0c\x64\x61ta_sources\x18\x0f \x03(\x0b\x32K.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptorB\x10\n\x0e_lines_visibleB\x11\n\x0f_shapes_visibleB\x15\n\x13_point_label_formatB\x15\n\x13_x_tool_tip_patternB\x15\n\x13_y_tool_tip_patternB\r\n\x0b_shape_sizeJ\x04\x08\x07\x10\x08\x1a\xec\n\n\x15MultiSeriesDescriptor\x12^\n\nplot_style\x18\x01 \x01(\x0e\x32J.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x63\n\nline_color\x18\x03 \x01(\x0b\x32O.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.StringMapWithDefault\x12\x64\n\x0bpoint_color\x18\x04 \x01(\x0b\x32O.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.StringMapWithDefault\x12\x64\n\rlines_visible\x18\x05 \x01(\x0b\x32M.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BoolMapWithDefault\x12\x65\n\x0epoints_visible\x18\x06 \x01(\x0b\x32M.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BoolMapWithDefault\x12g\n\x10gradient_visible\x18\x07 \x01(\x0b\x32M.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BoolMapWithDefault\x12k\n\x12point_label_format\x18\x08 \x01(\x0b\x32O.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.StringMapWithDefault\x12k\n\x12x_tool_tip_pattern\x18\t \x01(\x0b\x32O.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.StringMapWithDefault\x12k\n\x12y_tool_tip_pattern\x18\n \x01(\x0b\x32O.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.StringMapWithDefault\x12\x64\n\x0bpoint_label\x18\x0b \x01(\x0b\x32O.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.StringMapWithDefault\x12\x63\n\npoint_size\x18\x0c 
\x01(\x0b\x32O.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.DoubleMapWithDefault\x12\x64\n\x0bpoint_shape\x18\r \x01(\x0b\x32O.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.StringMapWithDefault\x12l\n\x0c\x64\x61ta_sources\x18\x0e \x03(\x0b\x32V.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesSourceDescriptor\x1aL\n\x14StringMapWithDefault\x12\x16\n\x0e\x64\x65\x66\x61ult_string\x18\x01 \x01(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12\x0e\n\x06values\x18\x03 \x03(\t\x1aL\n\x14\x44oubleMapWithDefault\x12\x16\n\x0e\x64\x65\x66\x61ult_double\x18\x01 \x01(\x01\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12\x0e\n\x06values\x18\x03 \x03(\x01\x1aH\n\x12\x42oolMapWithDefault\x12\x14\n\x0c\x64\x65\x66\x61ult_bool\x18\x01 \x01(\x08\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12\x0e\n\x06values\x18\x03 \x03(\x08\x1a\xa6\x08\n\x0e\x41xisDescriptor\x12\n\n\x02id\x18\x01 \x01(\t\x12m\n\x0b\x66ormat_type\x18\x02 \x01(\x0e\x32X.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType\x12`\n\x04type\x18\x03 \x01(\x0e\x32R.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisType\x12h\n\x08position\x18\x04 \x01(\x0e\x32V.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisPosition\x12\x0b\n\x03log\x18\x05 \x01(\x08\x12\r\n\x05label\x18\x06 \x01(\t\x12\x12\n\nlabel_font\x18\x07 \x01(\t\x12\x12\n\nticks_font\x18\x08 \x01(\t\x12\x1b\n\x0e\x66ormat_pattern\x18\t \x01(\tH\x00\x88\x01\x01\x12\r\n\x05\x63olor\x18\n \x01(\t\x12\x11\n\tmin_range\x18\x0b \x01(\x01\x12\x11\n\tmax_range\x18\x0c \x01(\x01\x12\x1b\n\x13minor_ticks_visible\x18\r \x01(\x08\x12\x1b\n\x13major_ticks_visible\x18\x0e \x01(\x08\x12\x18\n\x10minor_tick_count\x18\x0f \x01(\x05\x12$\n\x17gap_between_major_ticks\x18\x10 \x01(\x01H\x01\x88\x01\x01\x12\x1c\n\x14major_tick_locations\x18\x11 \x03(\x01\x12\x18\n\x10tick_label_angle\x18\x12 \x01(\x01\x12\x0e\n\x06invert\x18\x13 \x01(\x08\x12\x14\n\x0cis_time_axis\x18\x14 
\x01(\x08\x12{\n\x1c\x62usiness_calendar_descriptor\x18\x15 \x01(\x0b\x32U.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor\"*\n\x0e\x41xisFormatType\x12\x0c\n\x08\x43\x41TEGORY\x10\x00\x12\n\n\x06NUMBER\x10\x01\"C\n\x08\x41xisType\x12\x05\n\x01X\x10\x00\x12\x05\n\x01Y\x10\x01\x12\t\n\x05SHAPE\x10\x02\x12\x08\n\x04SIZE\x10\x03\x12\t\n\x05LABEL\x10\x04\x12\t\n\x05\x43OLOR\x10\x05\"B\n\x0c\x41xisPosition\x12\x07\n\x03TOP\x10\x00\x12\n\n\x06\x42OTTOM\x10\x01\x12\x08\n\x04LEFT\x10\x02\x12\t\n\x05RIGHT\x10\x03\x12\x08\n\x04NONE\x10\x04\x42\x11\n\x0f_format_patternB\x1a\n\x18_gap_between_major_ticks\x1a\xf0\x06\n\x1a\x42usinessCalendarDescriptor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\ttime_zone\x18\x02 \x01(\t\x12v\n\rbusiness_days\x18\x03 \x03(\x0e\x32_.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.DayOfWeek\x12~\n\x10\x62usiness_periods\x18\x04 \x03(\x0b\x32\x64.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.BusinessPeriod\x12o\n\x08holidays\x18\x05 \x03(\x0b\x32].io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.Holiday\x1a-\n\x0e\x42usinessPeriod\x12\x0c\n\x04open\x18\x01 \x01(\t\x12\r\n\x05\x63lose\x18\x02 \x01(\t\x1a\xf8\x01\n\x07Holiday\x12m\n\x04\x64\x61te\x18\x01 \x01(\x0b\x32_.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.LocalDate\x12~\n\x10\x62usiness_periods\x18\x02 \x03(\x0b\x32\x64.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.BusinessPeriod\x1a\x35\n\tLocalDate\x12\x0c\n\x04year\x18\x01 \x01(\x05\x12\r\n\x05month\x18\x02 \x01(\x05\x12\x0b\n\x03\x64\x61y\x18\x03 
\x01(\x05\"g\n\tDayOfWeek\x12\n\n\x06SUNDAY\x10\x00\x12\n\n\x06MONDAY\x10\x01\x12\x0b\n\x07TUESDAY\x10\x02\x12\r\n\tWEDNESDAY\x10\x03\x12\x0c\n\x08THURSDAY\x10\x04\x12\n\n\x06\x46RIDAY\x10\x05\x12\x0c\n\x08SATURDAY\x10\x06\x1a\xae\x01\n\x1bMultiSeriesSourceDescriptor\x12\x0f\n\x07\x61xis_id\x18\x01 \x01(\t\x12S\n\x04type\x18\x02 \x01(\x0e\x32\x45.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceType\x12\x14\n\x0ctable_map_id\x18\x03 \x01(\x05\x12\x13\n\x0b\x63olumn_name\x18\x04 \x01(\t\x1a\xac\x02\n\x10SourceDescriptor\x12\x0f\n\x07\x61xis_id\x18\x01 \x01(\t\x12S\n\x04type\x18\x02 \x01(\x0e\x32\x45.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceType\x12\x10\n\x08table_id\x18\x03 \x01(\x05\x12\x14\n\x0ctable_map_id\x18\x04 \x01(\x05\x12\x13\n\x0b\x63olumn_name\x18\x05 \x01(\t\x12\x13\n\x0b\x63olumn_type\x18\x06 \x01(\t\x12`\n\tone_click\x18\x07 \x01(\x0b\x32M.io.deephaven.proto.backplane.script.grpc.FigureDescriptor.OneClickDescriptor\x1a\x63\n\x12OneClickDescriptor\x12\x0f\n\x07\x63olumns\x18\x01 \x03(\t\x12\x14\n\x0c\x63olumn_types\x18\x02 \x03(\t\x12&\n\x1erequire_all_filters_to_display\x18\x03 
\x01(\x08\"\x99\x01\n\x0fSeriesPlotStyle\x12\x07\n\x03\x42\x41R\x10\x00\x12\x0f\n\x0bSTACKED_BAR\x10\x01\x12\x08\n\x04LINE\x10\x02\x12\x08\n\x04\x41REA\x10\x03\x12\x10\n\x0cSTACKED_AREA\x10\x04\x12\x07\n\x03PIE\x10\x05\x12\r\n\tHISTOGRAM\x10\x06\x12\x08\n\x04OHLC\x10\x07\x12\x0b\n\x07SCATTER\x10\x08\x12\x08\n\x04STEP\x10\t\x12\r\n\tERROR_BAR\x10\n\"\xac\x01\n\nSourceType\x12\x05\n\x01X\x10\x00\x12\x05\n\x01Y\x10\x01\x12\x05\n\x01Z\x10\x02\x12\t\n\x05X_LOW\x10\x03\x12\n\n\x06X_HIGH\x10\x04\x12\t\n\x05Y_LOW\x10\x05\x12\n\n\x06Y_HIGH\x10\x06\x12\x08\n\x04TIME\x10\x07\x12\x08\n\x04OPEN\x10\x08\x12\x08\n\x04HIGH\x10\t\x12\x07\n\x03LOW\x10\n\x12\t\n\x05\x43LOSE\x10\x0b\x12\t\n\x05SHAPE\x10\x0c\x12\x08\n\x04SIZE\x10\r\x12\t\n\x05LABEL\x10\x0e\x12\t\n\x05\x43OLOR\x10\x0f\x42\x08\n\x06_titleJ\x04\x08\x0b\x10\x0cJ\x04\x08\x0c\x10\r2\xff\n\n\x0e\x43onsoleService\x12\x98\x01\n\x0fGetConsoleTypes\x12@.io.deephaven.proto.backplane.script.grpc.GetConsoleTypesRequest\x1a\x41.io.deephaven.proto.backplane.script.grpc.GetConsoleTypesResponse\"\x00\x12\x8f\x01\n\x0cStartConsole\x12=.io.deephaven.proto.backplane.script.grpc.StartConsoleRequest\x1a>.io.deephaven.proto.backplane.script.grpc.StartConsoleResponse\"\x00\x12\x96\x01\n\x0fSubscribeToLogs\x12@.io.deephaven.proto.backplane.script.grpc.LogSubscriptionRequest\x1a=.io.deephaven.proto.backplane.script.grpc.LogSubscriptionData\"\x00\x30\x01\x12\x95\x01\n\x0e\x45xecuteCommand\x12?.io.deephaven.proto.backplane.script.grpc.ExecuteCommandRequest\x1a@.io.deephaven.proto.backplane.script.grpc.ExecuteCommandResponse\"\x00\x12\x92\x01\n\rCancelCommand\x12>.io.deephaven.proto.backplane.script.grpc.CancelCommandRequest\x1a?.io.deephaven.proto.backplane.script.grpc.CancelCommandResponse\"\x00\x12\xa4\x01\n\x13\x42indTableToVariable\x12\x44.io.deephaven.proto.backplane.script.grpc.BindTableToVariableRequest\x1a\x45.io.deephaven.proto.backplane.script.grpc.BindTableToVariableResponse\"\x00\x12\x99\x01\n\x12\x41utoCompleteStream\x12=.io.deephaven.
proto.backplane.script.grpc.AutoCompleteRequest\x1a>.io.deephaven.proto.backplane.script.grpc.AutoCompleteResponse\"\x00(\x01\x30\x01\x12\x9b\x01\n\x16OpenAutoCompleteStream\x12=.io.deephaven.proto.backplane.script.grpc.AutoCompleteRequest\x1a>.io.deephaven.proto.backplane.script.grpc.AutoCompleteResponse\"\x00\x30\x01\x12\x98\x01\n\x16NextAutoCompleteStream\x12=.io.deephaven.proto.backplane.script.grpc.AutoCompleteRequest\x1a=.io.deephaven.proto.backplane.script.grpc.BrowserNextResponse\"\x00\x42\x04H\x01P\x01\x62\x06proto3'
,
dependencies=[deephaven_dot_proto_dot_table__pb2.DESCRIPTOR,deephaven_dot_proto_dot_ticket__pb2.DESCRIPTOR,deephaven_dot_proto_dot_application__pb2.DESCRIPTOR,])
_FIGUREDESCRIPTOR_CHARTDESCRIPTOR_CHARTTYPE = _descriptor.EnumDescriptor(
name='ChartType',
full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.ChartType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='XY', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PIE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='OHLC', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CATEGORY', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='XYZ', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CATEGORY_3D', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4832,
serialized_end=4910,
)
_sym_db.RegisterEnumDescriptor(_FIGUREDESCRIPTOR_CHARTDESCRIPTOR_CHARTTYPE)
_FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISFORMATTYPE = _descriptor.EnumDescriptor(
name='AxisFormatType',
full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisFormatType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='CATEGORY', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NUMBER', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=8021,
serialized_end=8063,
)
_sym_db.RegisterEnumDescriptor(_FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISFORMATTYPE)
_FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISTYPE = _descriptor.EnumDescriptor(
name='AxisType',
full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='X', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Y', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SHAPE', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SIZE', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LABEL', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='COLOR', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=8065,
serialized_end=8132,
)
_sym_db.RegisterEnumDescriptor(_FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISTYPE)
_FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISPOSITION = _descriptor.EnumDescriptor(
name='AxisPosition',
full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.AxisPosition',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='TOP', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BOTTOM', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LEFT', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RIGHT', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NONE', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=8134,
serialized_end=8200,
)
_sym_db.RegisterEnumDescriptor(_FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISPOSITION)
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_DAYOFWEEK = _descriptor.EnumDescriptor(
name='DayOfWeek',
full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.DayOfWeek',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='SUNDAY', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MONDAY', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TUESDAY', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WEDNESDAY', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THURSDAY', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FRIDAY', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SATURDAY', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=9027,
serialized_end=9130,
)
_sym_db.RegisterEnumDescriptor(_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_DAYOFWEEK)
_FIGUREDESCRIPTOR_SERIESPLOTSTYLE = _descriptor.EnumDescriptor(
name='SeriesPlotStyle',
full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesPlotStyle',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='BAR', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='STACKED_BAR', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LINE', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AREA', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='STACKED_AREA', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PIE', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='HISTOGRAM', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='OHLC', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SCATTER', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='STEP', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ERROR_BAR', index=10, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=9714,
serialized_end=9867,
)
_sym_db.RegisterEnumDescriptor(_FIGUREDESCRIPTOR_SERIESPLOTSTYLE)
_FIGUREDESCRIPTOR_SOURCETYPE = _descriptor.EnumDescriptor(
name='SourceType',
full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='X', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Y', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Z', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='X_LOW', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='X_HIGH', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Y_LOW', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='Y_HIGH', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TIME', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='OPEN', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='HIGH', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LOW', index=10, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CLOSE', index=11, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SHAPE', index=12, number=12,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SIZE', index=13, number=13,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LABEL', index=14, number=14,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='COLOR', index=15, number=15,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=9870,
serialized_end=10042,
)
_sym_db.RegisterEnumDescriptor(_FIGUREDESCRIPTOR_SOURCETYPE)
_GETCONSOLETYPESREQUEST = _descriptor.Descriptor(
name='GetConsoleTypesRequest',
full_name='io.deephaven.proto.backplane.script.grpc.GetConsoleTypesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=169,
serialized_end=193,
)
_GETCONSOLETYPESRESPONSE = _descriptor.Descriptor(
name='GetConsoleTypesResponse',
full_name='io.deephaven.proto.backplane.script.grpc.GetConsoleTypesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='console_types', full_name='io.deephaven.proto.backplane.script.grpc.GetConsoleTypesResponse.console_types', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=195,
serialized_end=243,
)
# --- Generated protobuf descriptors (protoc output; do not edit by hand — regenerate from the .proto instead). ---
# StartConsoleRequest: fields result_id (field 1, message) and session_type (field 2, string).
_STARTCONSOLEREQUEST = _descriptor.Descriptor(
name='StartConsoleRequest',
full_name='io.deephaven.proto.backplane.script.grpc.StartConsoleRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='result_id', full_name='io.deephaven.proto.backplane.script.grpc.StartConsoleRequest.result_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='session_type', full_name='io.deephaven.proto.backplane.script.grpc.StartConsoleRequest.session_type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=245,
serialized_end=350,
)
# StartConsoleResponse: single field result_id (field 1, message).
_STARTCONSOLERESPONSE = _descriptor.Descriptor(
name='StartConsoleResponse',
full_name='io.deephaven.proto.backplane.script.grpc.StartConsoleResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='result_id', full_name='io.deephaven.proto.backplane.script.grpc.StartConsoleResponse.result_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=352,
serialized_end=436,
)
# LogSubscriptionRequest: last_seen_log_timestamp (field 1, int64) and levels (field 2, repeated string).
_LOGSUBSCRIPTIONREQUEST = _descriptor.Descriptor(
name='LogSubscriptionRequest',
full_name='io.deephaven.proto.backplane.script.grpc.LogSubscriptionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='last_seen_log_timestamp', full_name='io.deephaven.proto.backplane.script.grpc.LogSubscriptionRequest.last_seen_log_timestamp', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='levels', full_name='io.deephaven.proto.backplane.script.grpc.LogSubscriptionRequest.levels', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=438,
serialized_end=511,
)
# LogSubscriptionData: micros (field 1, int64), log_level (field 2, string), message (field 3, string).
_LOGSUBSCRIPTIONDATA = _descriptor.Descriptor(
name='LogSubscriptionData',
full_name='io.deephaven.proto.backplane.script.grpc.LogSubscriptionData',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='micros', full_name='io.deephaven.proto.backplane.script.grpc.LogSubscriptionData.micros', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_level', full_name='io.deephaven.proto.backplane.script.grpc.LogSubscriptionData.log_level', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='message', full_name='io.deephaven.proto.backplane.script.grpc.LogSubscriptionData.message', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=513,
serialized_end=592,
)
# Generated protobuf descriptors (protoc output; do not edit by hand).
# ExecuteCommandRequest: console_id (field 1, message) and code (field 3, string — field number 2 is skipped in the .proto).
_EXECUTECOMMANDREQUEST = _descriptor.Descriptor(
name='ExecuteCommandRequest',
full_name='io.deephaven.proto.backplane.script.grpc.ExecuteCommandRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='console_id', full_name='io.deephaven.proto.backplane.script.grpc.ExecuteCommandRequest.console_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='code', full_name='io.deephaven.proto.backplane.script.grpc.ExecuteCommandRequest.code', index=1,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=594,
serialized_end=700,
)
# ExecuteCommandResponse: error_message (field 1, string) and changes (field 2, message).
_EXECUTECOMMANDRESPONSE = _descriptor.Descriptor(
name='ExecuteCommandResponse',
full_name='io.deephaven.proto.backplane.script.grpc.ExecuteCommandResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='error_message', full_name='io.deephaven.proto.backplane.script.grpc.ExecuteCommandResponse.error_message', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='changes', full_name='io.deephaven.proto.backplane.script.grpc.ExecuteCommandResponse.changes', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=702,
serialized_end=821,
)
# BindTableToVariableRequest: console_id (1, message), variable_name (3, string), table_id (4, message);
# field number 2 is skipped in the .proto.
_BINDTABLETOVARIABLEREQUEST = _descriptor.Descriptor(
name='BindTableToVariableRequest',
full_name='io.deephaven.proto.backplane.script.grpc.BindTableToVariableRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='console_id', full_name='io.deephaven.proto.backplane.script.grpc.BindTableToVariableRequest.console_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='variable_name', full_name='io.deephaven.proto.backplane.script.grpc.BindTableToVariableRequest.variable_name', index=1,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='table_id', full_name='io.deephaven.proto.backplane.script.grpc.BindTableToVariableRequest.table_id', index=2,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=824,
serialized_end=1005,
)
# BindTableToVariableResponse: empty message (no fields).
_BINDTABLETOVARIABLERESPONSE = _descriptor.Descriptor(
name='BindTableToVariableResponse',
full_name='io.deephaven.proto.backplane.script.grpc.BindTableToVariableResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1007,
serialized_end=1036,
)
# Generated protobuf descriptors (protoc output; do not edit by hand).
# CancelCommandRequest: console_id (field 1, message) and command_id (field 2, message).
_CANCELCOMMANDREQUEST = _descriptor.Descriptor(
name='CancelCommandRequest',
full_name='io.deephaven.proto.backplane.script.grpc.CancelCommandRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='console_id', full_name='io.deephaven.proto.backplane.script.grpc.CancelCommandRequest.console_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='command_id', full_name='io.deephaven.proto.backplane.script.grpc.CancelCommandRequest.command_id', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1039,
serialized_end=1187,
)
# CancelCommandResponse: empty message (no fields).
_CANCELCOMMANDRESPONSE = _descriptor.Descriptor(
name='CancelCommandResponse',
full_name='io.deephaven.proto.backplane.script.grpc.CancelCommandResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1189,
serialized_end=1212,
)
# AutoCompleteRequest: four message fields (open_document=1, change_document=2,
# get_completion_items=3, close_document=4) grouped under the 'request' oneof below.
_AUTOCOMPLETEREQUEST = _descriptor.Descriptor(
name='AutoCompleteRequest',
full_name='io.deephaven.proto.backplane.script.grpc.AutoCompleteRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='open_document', full_name='io.deephaven.proto.backplane.script.grpc.AutoCompleteRequest.open_document', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='change_document', full_name='io.deephaven.proto.backplane.script.grpc.AutoCompleteRequest.change_document', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='get_completion_items', full_name='io.deephaven.proto.backplane.script.grpc.AutoCompleteRequest.get_completion_items', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='close_document', full_name='io.deephaven.proto.backplane.script.grpc.AutoCompleteRequest.close_document', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
# The oneof's member fields are linked up later by generated wiring code outside this chunk.
_descriptor.OneofDescriptor(
name='request', full_name='io.deephaven.proto.backplane.script.grpc.AutoCompleteRequest.request',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=1215,
serialized_end=1618,
)
# Generated protobuf descriptors (protoc output; do not edit by hand).
# AutoCompleteResponse: completion_items (field 1, message) under a 'response' oneof.
_AUTOCOMPLETERESPONSE = _descriptor.Descriptor(
name='AutoCompleteResponse',
full_name='io.deephaven.proto.backplane.script.grpc.AutoCompleteResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='completion_items', full_name='io.deephaven.proto.backplane.script.grpc.AutoCompleteResponse.completion_items', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='response', full_name='io.deephaven.proto.backplane.script.grpc.AutoCompleteResponse.response',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=1621,
serialized_end=1753,
)
# BrowserNextResponse: empty message (no fields).
_BROWSERNEXTRESPONSE = _descriptor.Descriptor(
name='BrowserNextResponse',
full_name='io.deephaven.proto.backplane.script.grpc.BrowserNextResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1755,
serialized_end=1776,
)
# OpenDocumentRequest: console_id (field 1, message) and text_document (field 2, message).
_OPENDOCUMENTREQUEST = _descriptor.Descriptor(
name='OpenDocumentRequest',
full_name='io.deephaven.proto.backplane.script.grpc.OpenDocumentRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='console_id', full_name='io.deephaven.proto.backplane.script.grpc.OpenDocumentRequest.console_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='text_document', full_name='io.deephaven.proto.backplane.script.grpc.OpenDocumentRequest.text_document', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1779,
serialized_end=1946,
)
# Generated protobuf descriptors (protoc output; do not edit by hand).
# TextDocumentItem (LSP-style document payload): uri (1, string), language_id (2, string),
# version (3, int32), text (4, string).
_TEXTDOCUMENTITEM = _descriptor.Descriptor(
name='TextDocumentItem',
full_name='io.deephaven.proto.backplane.script.grpc.TextDocumentItem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='uri', full_name='io.deephaven.proto.backplane.script.grpc.TextDocumentItem.uri', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language_id', full_name='io.deephaven.proto.backplane.script.grpc.TextDocumentItem.language_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='io.deephaven.proto.backplane.script.grpc.TextDocumentItem.version', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='text', full_name='io.deephaven.proto.backplane.script.grpc.TextDocumentItem.text', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1948,
serialized_end=2031,
)
# CloseDocumentRequest: console_id (field 1, message) and text_document (field 2, message).
_CLOSEDOCUMENTREQUEST = _descriptor.Descriptor(
name='CloseDocumentRequest',
full_name='io.deephaven.proto.backplane.script.grpc.CloseDocumentRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='console_id', full_name='io.deephaven.proto.backplane.script.grpc.CloseDocumentRequest.console_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='text_document', full_name='io.deephaven.proto.backplane.script.grpc.CloseDocumentRequest.text_document', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2034,
serialized_end=2217,
)
# ChangeDocumentRequest.TextDocumentContentChangeEvent (nested message; see
# _CHANGEDOCUMENTREQUEST below, which lists this descriptor in its nested_types):
# range (1, message), range_length (2, int32), text (3, string).
_CHANGEDOCUMENTREQUEST_TEXTDOCUMENTCONTENTCHANGEEVENT = _descriptor.Descriptor(
name='TextDocumentContentChangeEvent',
full_name='io.deephaven.proto.backplane.script.grpc.ChangeDocumentRequest.TextDocumentContentChangeEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='range', full_name='io.deephaven.proto.backplane.script.grpc.ChangeDocumentRequest.TextDocumentContentChangeEvent.range', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='range_length', full_name='io.deephaven.proto.backplane.script.grpc.ChangeDocumentRequest.TextDocumentContentChangeEvent.range_length', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='text', full_name='io.deephaven.proto.backplane.script.grpc.ChangeDocumentRequest.TextDocumentContentChangeEvent.text', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2528,
serialized_end=2668,
)
# Generated protobuf descriptors (protoc output; do not edit by hand).
# ChangeDocumentRequest: console_id (1, message), text_document (2, message),
# content_changes (3, repeated message); nests TextDocumentContentChangeEvent.
_CHANGEDOCUMENTREQUEST = _descriptor.Descriptor(
name='ChangeDocumentRequest',
full_name='io.deephaven.proto.backplane.script.grpc.ChangeDocumentRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='console_id', full_name='io.deephaven.proto.backplane.script.grpc.ChangeDocumentRequest.console_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='text_document', full_name='io.deephaven.proto.backplane.script.grpc.ChangeDocumentRequest.text_document', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='content_changes', full_name='io.deephaven.proto.backplane.script.grpc.ChangeDocumentRequest.content_changes', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_CHANGEDOCUMENTREQUEST_TEXTDOCUMENTCONTENTCHANGEEVENT, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2220,
serialized_end=2668,
)
# DocumentRange: start (field 1, message) and end (field 2, message).
_DOCUMENTRANGE = _descriptor.Descriptor(
name='DocumentRange',
full_name='io.deephaven.proto.backplane.script.grpc.DocumentRange',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='start', full_name='io.deephaven.proto.backplane.script.grpc.DocumentRange.start', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='end', full_name='io.deephaven.proto.backplane.script.grpc.DocumentRange.end', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2671,
serialized_end=2818,
)
# VersionedTextDocumentIdentifier: uri (field 1, string) and version (field 2, int32).
_VERSIONEDTEXTDOCUMENTIDENTIFIER = _descriptor.Descriptor(
name='VersionedTextDocumentIdentifier',
full_name='io.deephaven.proto.backplane.script.grpc.VersionedTextDocumentIdentifier',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='uri', full_name='io.deephaven.proto.backplane.script.grpc.VersionedTextDocumentIdentifier.uri', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='io.deephaven.proto.backplane.script.grpc.VersionedTextDocumentIdentifier.version', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2820,
serialized_end=2883,
)
# Generated protobuf descriptors (protoc output; do not edit by hand).
# Position: line (field 1, int32) and character (field 2, int32).
_POSITION = _descriptor.Descriptor(
name='Position',
full_name='io.deephaven.proto.backplane.script.grpc.Position',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='line', full_name='io.deephaven.proto.backplane.script.grpc.Position.line', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='character', full_name='io.deephaven.proto.backplane.script.grpc.Position.character', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2885,
serialized_end=2928,
)
# GetCompletionItemsRequest: console_id (1, message), context (2, message),
# text_document (3, message), position (4, message), request_id (5, int32).
_GETCOMPLETIONITEMSREQUEST = _descriptor.Descriptor(
name='GetCompletionItemsRequest',
full_name='io.deephaven.proto.backplane.script.grpc.GetCompletionItemsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='console_id', full_name='io.deephaven.proto.backplane.script.grpc.GetCompletionItemsRequest.console_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='context', full_name='io.deephaven.proto.backplane.script.grpc.GetCompletionItemsRequest.context', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='text_document', full_name='io.deephaven.proto.backplane.script.grpc.GetCompletionItemsRequest.text_document', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='position', full_name='io.deephaven.proto.backplane.script.grpc.GetCompletionItemsRequest.position', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='request_id', full_name='io.deephaven.proto.backplane.script.grpc.GetCompletionItemsRequest.request_id', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2931,
serialized_end=3287,
)
# CompletionContext: trigger_kind (field 1, int32) and trigger_character (field 2, string).
_COMPLETIONCONTEXT = _descriptor.Descriptor(
name='CompletionContext',
full_name='io.deephaven.proto.backplane.script.grpc.CompletionContext',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='trigger_kind', full_name='io.deephaven.proto.backplane.script.grpc.CompletionContext.trigger_kind', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='trigger_character', full_name='io.deephaven.proto.backplane.script.grpc.CompletionContext.trigger_character', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3289,
serialized_end=3357,
)
# GetCompletionItemsResponse: items (1, repeated message), request_id (2, int32), success (3, bool).
_GETCOMPLETIONITEMSRESPONSE = _descriptor.Descriptor(
name='GetCompletionItemsResponse',
full_name='io.deephaven.proto.backplane.script.grpc.GetCompletionItemsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='items', full_name='io.deephaven.proto.backplane.script.grpc.GetCompletionItemsResponse.items', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='request_id', full_name='io.deephaven.proto.backplane.script.grpc.GetCompletionItemsResponse.request_id', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='success', full_name='io.deephaven.proto.backplane.script.grpc.GetCompletionItemsResponse.success', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3360,
serialized_end=3498,
)
# Message descriptor for CompletionItem (LSP-style code-completion entry).
# NOTE: protoc-generated code -- do not edit by hand; regenerate from the
# .proto file instead. Each FieldDescriptor records the wire field number,
# protobuf type/cpp_type codes, and label (label=1 optional/singular,
# label=3 repeated).
_COMPLETIONITEM = _descriptor.Descriptor(
  name='CompletionItem',
  full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='start', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.start', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='length', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.length', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='label', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.label', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='kind', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.kind', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='detail', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.detail', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='documentation', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.documentation', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='deprecated', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.deprecated', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='preselect', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.preselect', index=7,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # type=11 (message) fields carry message_type=None here; the real
    # cross-links are patched in later by the generated setup code.
    _descriptor.FieldDescriptor(
      name='text_edit', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.text_edit', index=8,
      number=9, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='sort_text', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.sort_text', index=9,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='filter_text', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.filter_text', index=10,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='insert_text_format', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.insert_text_format', index=11,
      number=12, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='additional_text_edits', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.additional_text_edits', index=12,
      number=13, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='commit_characters', full_name='io.deephaven.proto.backplane.script.grpc.CompletionItem.commit_characters', index=13,
      number=14, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message inside DESCRIPTOR's serialized file proto;
  # these must match the serialized_pb exactly -- never adjust by hand.
  serialized_start=3501,
  serialized_end=3904,
)
# Message descriptor for TextEdit: a replacement text plus the range it
# applies to (field 1 'range' is a message cross-linked after construction).
# NOTE: protoc-generated code -- do not edit by hand.
_TEXTEDIT = _descriptor.Descriptor(
  name='TextEdit',
  full_name='io.deephaven.proto.backplane.script.grpc.TextEdit',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='range', full_name='io.deephaven.proto.backplane.script.grpc.TextEdit.range', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='text', full_name='io.deephaven.proto.backplane.script.grpc.TextEdit.text', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into DESCRIPTOR's serialized file proto; must stay in sync.
  serialized_start=3906,
  serialized_end=4002,
)
# Descriptor for the nested message FigureDescriptor.ChartDescriptor
# (layout span, series/axes lists, titles, legend, 3D flag).
# NOTE: protoc-generated code -- do not edit by hand.
_FIGUREDESCRIPTOR_CHARTDESCRIPTOR = _descriptor.Descriptor(
  name='ChartDescriptor',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='colspan', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.colspan', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='rowspan', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.rowspan', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='series', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.series', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='multi_series', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.multi_series', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='axes', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.axes', index=4,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # type=14 is an enum field (ChartType); the enum_type link is patched in
    # later by the generated setup code.
    _descriptor.FieldDescriptor(
      name='chart_type', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.chart_type', index=5,
      number=6, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='title', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.title', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='title_font', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.title_font', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='title_color', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.title_color', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='show_legend', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.show_legend', index=9,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='legend_font', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.legend_font', index=10,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='legend_color', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.legend_color', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='is3d', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor.is3d', index=12,
      number=13, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _FIGUREDESCRIPTOR_CHARTDESCRIPTOR_CHARTTYPE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    # '_title' is presumably the synthetic oneof protoc emits for a proto3
    # `optional title` field (leading underscore naming) -- confirm in .proto.
    _descriptor.OneofDescriptor(
      name='_title', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor._title',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
  ],
  # Offsets into DESCRIPTOR's serialized file proto; must stay in sync.
  serialized_start=4248,
  serialized_end=4920,
)
# Descriptor for the nested message FigureDescriptor.SeriesDescriptor
# (per-series plot styling: visibility flags, colors, tooltip patterns,
# point shape/size, and the repeated data_sources).
# NOTE: protoc-generated code -- do not edit by hand.
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR = _descriptor.Descriptor(
  name='SeriesDescriptor',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='plot_style', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.plot_style', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='name', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='lines_visible', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.lines_visible', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='shapes_visible', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.shapes_visible', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='gradient_visible', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.gradient_visible', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='line_color', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.line_color', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    # Note: wire number 7 is skipped (index 6 maps to number=8); the .proto
    # presumably reserved or removed field 7.
    _descriptor.FieldDescriptor(
      name='point_label_format', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.point_label_format', index=6,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='x_tool_tip_pattern', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.x_tool_tip_pattern', index=7,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='y_tool_tip_pattern', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.y_tool_tip_pattern', index=8,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='shape_label', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.shape_label', index=9,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='shape_size', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.shape_size', index=10,
      number=12, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='shape_color', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.shape_color', index=11,
      number=13, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='shape', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.shape', index=12,
      number=14, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='data_sources', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor.data_sources', index=13,
      number=15, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    # Leading-underscore oneofs matching field names -- presumably the
    # synthetic oneofs protoc generates for proto3 `optional` fields.
    _descriptor.OneofDescriptor(
      name='_lines_visible', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor._lines_visible',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
    _descriptor.OneofDescriptor(
      name='_shapes_visible', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor._shapes_visible',
      index=1, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
    _descriptor.OneofDescriptor(
      name='_point_label_format', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor._point_label_format',
      index=2, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
    _descriptor.OneofDescriptor(
      name='_x_tool_tip_pattern', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor._x_tool_tip_pattern',
      index=3, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
    _descriptor.OneofDescriptor(
      name='_y_tool_tip_pattern', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor._y_tool_tip_pattern',
      index=4, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
    _descriptor.OneofDescriptor(
      name='_shape_size', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor._shape_size',
      index=5, containing_type=None,
      create_key=_descriptor._internal_create_key,
    fields=[]),
  ],
  # Offsets into DESCRIPTOR's serialized file proto; must stay in sync.
  serialized_start=4923,
  serialized_end=5561,
)
# Descriptor for the nested message FigureDescriptor.MultiSeriesDescriptor.
# Unlike SeriesDescriptor, most styling fields here are message-typed
# (type=11) wrappers rather than scalars, allowing per-key overrides with a
# default (the *MapWithDefault messages defined below).
# NOTE: protoc-generated code -- do not edit by hand.
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR = _descriptor.Descriptor(
  name='MultiSeriesDescriptor',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='plot_style', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.plot_style', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='name', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='line_color', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.line_color', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='point_color', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.point_color', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='lines_visible', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.lines_visible', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='points_visible', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.points_visible', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='gradient_visible', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.gradient_visible', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='point_label_format', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.point_label_format', index=7,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='x_tool_tip_pattern', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.x_tool_tip_pattern', index=8,
      number=9, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='y_tool_tip_pattern', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.y_tool_tip_pattern', index=9,
      number=10, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='point_label', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.point_label', index=10,
      number=11, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='point_size', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.point_size', index=11,
      number=12, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='point_shape', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.point_shape', index=12,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='data_sources', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor.data_sources', index=13,
      number=14, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into DESCRIPTOR's serialized file proto; must stay in sync.
  serialized_start=5564,
  serialized_end=6952,
)
# Descriptor for FigureDescriptor.StringMapWithDefault: a default string
# value plus parallel repeated keys/values lists (a poor-man's map with a
# fallback default).
# NOTE: protoc-generated code -- do not edit by hand.
_FIGUREDESCRIPTOR_STRINGMAPWITHDEFAULT = _descriptor.Descriptor(
  name='StringMapWithDefault',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.StringMapWithDefault',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='default_string', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.StringMapWithDefault.default_string', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='keys', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.StringMapWithDefault.keys', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='values', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.StringMapWithDefault.values', index=2,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into DESCRIPTOR's serialized file proto; must stay in sync.
  serialized_start=6954,
  serialized_end=7030,
)
# Descriptor for FigureDescriptor.DoubleMapWithDefault: same shape as
# StringMapWithDefault but the default and values are doubles (type=1).
# NOTE: protoc-generated code -- do not edit by hand.
_FIGUREDESCRIPTOR_DOUBLEMAPWITHDEFAULT = _descriptor.Descriptor(
  name='DoubleMapWithDefault',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.DoubleMapWithDefault',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='default_double', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.DoubleMapWithDefault.default_double', index=0,
      number=1, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='keys', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.DoubleMapWithDefault.keys', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='values', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.DoubleMapWithDefault.values', index=2,
      number=3, type=1, cpp_type=5, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into DESCRIPTOR's serialized file proto; must stay in sync.
  serialized_start=7032,
  serialized_end=7108,
)
# Descriptor for FigureDescriptor.BoolMapWithDefault: same shape as
# StringMapWithDefault but the default and values are bools (type=8).
# NOTE: protoc-generated code -- do not edit by hand.
_FIGUREDESCRIPTOR_BOOLMAPWITHDEFAULT = _descriptor.Descriptor(
  name='BoolMapWithDefault',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BoolMapWithDefault',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='default_bool', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BoolMapWithDefault.default_bool', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='keys', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BoolMapWithDefault.keys', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='values', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BoolMapWithDefault.values', index=2,
      number=3, type=8, cpp_type=7, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into DESCRIPTOR's serialized file proto; must stay in sync.
  serialized_start=7110,
  serialized_end=7182,
)
# Descriptor for nested message FigureDescriptor.AxisDescriptor: 21 fields of
# axis configuration (id/labels/fonts/colors as strings, ranges and tick
# geometry as doubles, visibility flags as bools, plus enum-typed
# format_type/type/position and a message-typed business_calendar_descriptor).
# The underscore-prefixed oneofs (_format_pattern, _gap_between_major_ticks)
# appear to be proto3 `optional` synthetic oneofs — TODO confirm against the
# .proto source.  Generated code: do not hand-edit; regenerate from the proto.
_FIGUREDESCRIPTOR_AXISDESCRIPTOR = _descriptor.Descriptor(
  name='AxisDescriptor',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='format_type', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.format_type', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='type', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.type', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='position', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.position', index=3,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='log', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.log', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='label', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.label', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='label_font', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.label_font', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='ticks_font', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.ticks_font', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='format_pattern', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.format_pattern', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='color', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.color', index=9,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='min_range', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.min_range', index=10,
      number=11, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='max_range', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.max_range', index=11,
      number=12, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='minor_ticks_visible', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.minor_ticks_visible', index=12,
      number=13, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='major_ticks_visible', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.major_ticks_visible', index=13,
      number=14, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='minor_tick_count', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.minor_tick_count', index=14,
      number=15, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='gap_between_major_ticks', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.gap_between_major_ticks', index=15,
      number=16, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='major_tick_locations', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.major_tick_locations', index=16,
      number=17, type=1, cpp_type=5, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='tick_label_angle', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.tick_label_angle', index=17,
      number=18, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='invert', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.invert', index=18,
      number=19, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='is_time_axis', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.is_time_axis', index=19,
      number=20, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='business_calendar_descriptor', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor.business_calendar_descriptor', index=20,
      number=21, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISFORMATTYPE,
    _FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISTYPE,
    _FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISPOSITION,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='_format_pattern', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor._format_pattern',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
      fields=[]),
    _descriptor.OneofDescriptor(
      name='_gap_between_major_ticks', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor._gap_between_major_ticks',
      index=1, containing_type=None,
      create_key=_descriptor._internal_create_key,
      fields=[]),
  ],
  serialized_start=7185,
  serialized_end=8247,
)
# Descriptor for BusinessCalendarDescriptor.BusinessPeriod: two string fields,
# 'open' (field 1) and 'close' (field 2).  Generated code — regenerate from
# the .proto instead of editing.
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_BUSINESSPERIOD = _descriptor.Descriptor(
  name='BusinessPeriod',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.BusinessPeriod',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='open', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.BusinessPeriod.open', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='close', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.BusinessPeriod.close', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8674,
  serialized_end=8719,
)
# Descriptor for BusinessCalendarDescriptor.Holiday: a message-typed 'date'
# (field 1) and repeated message-typed 'business_periods' (field 2); the
# actual message_type links are wired up after all descriptors are built.
# Generated code — regenerate from the .proto instead of editing.
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_HOLIDAY = _descriptor.Descriptor(
  name='Holiday',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.Holiday',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='date', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.Holiday.date', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='business_periods', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.Holiday.business_periods', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8722,
  serialized_end=8970,
)
# Descriptor for BusinessCalendarDescriptor.LocalDate: three int32 fields
# year/month/day (fields 1-3).  Generated code — regenerate from the .proto
# instead of editing.
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_LOCALDATE = _descriptor.Descriptor(
  name='LocalDate',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.LocalDate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='year', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.LocalDate.year', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='month', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.LocalDate.month', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='day', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.LocalDate.day', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8972,
  serialized_end=9025,
)
# Descriptor for FigureDescriptor.BusinessCalendarDescriptor: name/time_zone
# strings, repeated enum-typed business_days (field 3), and repeated
# message-typed business_periods/holidays (fields 4-5).  Registers the three
# nested message descriptors defined above plus the DAYOFWEEK enum.
# Generated code — regenerate from the .proto instead of editing.
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR = _descriptor.Descriptor(
  name='BusinessCalendarDescriptor',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='time_zone', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.time_zone', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='business_days', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.business_days', index=2,
      number=3, type=14, cpp_type=8, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='business_periods', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.business_periods', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='holidays', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.holidays', index=4,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_BUSINESSPERIOD, _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_HOLIDAY, _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_LOCALDATE, ],
  enum_types=[
    _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_DAYOFWEEK,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8250,
  serialized_end=9130,
)
# Descriptor for FigureDescriptor.MultiSeriesSourceDescriptor: axis_id and
# column_name strings, an enum-typed 'type' (field 2), and int32 table_map_id
# (field 3).  Generated code — regenerate from the .proto instead of editing.
_FIGUREDESCRIPTOR_MULTISERIESSOURCEDESCRIPTOR = _descriptor.Descriptor(
  name='MultiSeriesSourceDescriptor',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesSourceDescriptor',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis_id', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesSourceDescriptor.axis_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='type', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesSourceDescriptor.type', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='table_map_id', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesSourceDescriptor.table_map_id', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='column_name', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesSourceDescriptor.column_name', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9133,
  serialized_end=9307,
)
# Descriptor for FigureDescriptor.SourceDescriptor: axis/column identity
# strings, enum-typed 'type' (field 2), int32 table_id/table_map_id (fields
# 3-4), and a message-typed 'one_click' (field 7; link wired up later).
# Generated code — regenerate from the .proto instead of editing.
_FIGUREDESCRIPTOR_SOURCEDESCRIPTOR = _descriptor.Descriptor(
  name='SourceDescriptor',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis_id', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor.axis_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='type', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor.type', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='table_id', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor.table_id', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='table_map_id', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor.table_map_id', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='column_name', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor.column_name', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='column_type', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor.column_type', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='one_click', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor.one_click', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9310,
  serialized_end=9610,
)
# Descriptor for FigureDescriptor.OneClickDescriptor: repeated string
# columns/column_types (fields 1-2) and a bool
# require_all_filters_to_display (field 3).  Generated code — regenerate from
# the .proto instead of editing.
_FIGUREDESCRIPTOR_ONECLICKDESCRIPTOR = _descriptor.Descriptor(
  name='OneClickDescriptor',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.OneClickDescriptor',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='columns', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.OneClickDescriptor.columns', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='column_types', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.OneClickDescriptor.column_types', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='require_all_filters_to_display', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.OneClickDescriptor.require_all_filters_to_display', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9612,
  serialized_end=9711,
)
# Top-level descriptor for FigureDescriptor itself: title strings (fields
# 1-3), int64 update_interval (field 7), int32 cols/rows (fields 8-9),
# repeated message-typed charts (field 10) and repeated string errors
# (field 13).  Field numbers are non-contiguous (4-6, 11-12 absent here —
# presumably reserved/removed in the .proto; confirm against the source).
# Registers all eleven nested message descriptors and the
# SERIESPLOTSTYLE/SOURCETYPE enums; '_title' looks like a proto3 `optional`
# synthetic oneof.  Generated code — regenerate from the .proto.
_FIGUREDESCRIPTOR = _descriptor.Descriptor(
  name='FigureDescriptor',
  full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='title', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.title', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='title_font', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.title_font', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='title_color', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.title_color', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='update_interval', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.update_interval', index=3,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='cols', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.cols', index=4,
      number=8, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='rows', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.rows', index=5,
      number=9, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='charts', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.charts', index=6,
      number=10, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='errors', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor.errors', index=7,
      number=13, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[_FIGUREDESCRIPTOR_CHARTDESCRIPTOR, _FIGUREDESCRIPTOR_SERIESDESCRIPTOR, _FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR, _FIGUREDESCRIPTOR_STRINGMAPWITHDEFAULT, _FIGUREDESCRIPTOR_DOUBLEMAPWITHDEFAULT, _FIGUREDESCRIPTOR_BOOLMAPWITHDEFAULT, _FIGUREDESCRIPTOR_AXISDESCRIPTOR, _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR, _FIGUREDESCRIPTOR_MULTISERIESSOURCEDESCRIPTOR, _FIGUREDESCRIPTOR_SOURCEDESCRIPTOR, _FIGUREDESCRIPTOR_ONECLICKDESCRIPTOR, ],
  enum_types=[
    _FIGUREDESCRIPTOR_SERIESPLOTSTYLE,
    _FIGUREDESCRIPTOR_SOURCETYPE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='_title', full_name='io.deephaven.proto.backplane.script.grpc.FigureDescriptor._title',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
      fields=[]),
  ],
  serialized_start=4005,
  serialized_end=10064,
)
# ---------------------------------------------------------------------------
# Post-construction descriptor wiring (emitted by protoc; do not hand-edit).
# The Descriptor objects above are created with unresolved field types; the
# assignments below patch in each message-typed / enum-typed field's Python
# descriptor reference, set nested-type containment, and attach optional
# (proto3 `optional`) fields to their synthetic `_<name>` oneofs.
# ---------------------------------------------------------------------------
_STARTCONSOLEREQUEST.fields_by_name['result_id'].message_type = deephaven_dot_proto_dot_ticket__pb2._TICKET
_STARTCONSOLERESPONSE.fields_by_name['result_id'].message_type = deephaven_dot_proto_dot_ticket__pb2._TICKET
_EXECUTECOMMANDREQUEST.fields_by_name['console_id'].message_type = deephaven_dot_proto_dot_ticket__pb2._TICKET
_EXECUTECOMMANDRESPONSE.fields_by_name['changes'].message_type = deephaven_dot_proto_dot_application__pb2._FIELDSCHANGEUPDATE
_BINDTABLETOVARIABLEREQUEST.fields_by_name['console_id'].message_type = deephaven_dot_proto_dot_ticket__pb2._TICKET
_BINDTABLETOVARIABLEREQUEST.fields_by_name['table_id'].message_type = deephaven_dot_proto_dot_ticket__pb2._TICKET
_CANCELCOMMANDREQUEST.fields_by_name['console_id'].message_type = deephaven_dot_proto_dot_ticket__pb2._TICKET
_CANCELCOMMANDREQUEST.fields_by_name['command_id'].message_type = deephaven_dot_proto_dot_ticket__pb2._TICKET
# AutoCompleteRequest: the four request variants live in the 'request' oneof.
_AUTOCOMPLETEREQUEST.fields_by_name['open_document'].message_type = _OPENDOCUMENTREQUEST
_AUTOCOMPLETEREQUEST.fields_by_name['change_document'].message_type = _CHANGEDOCUMENTREQUEST
_AUTOCOMPLETEREQUEST.fields_by_name['get_completion_items'].message_type = _GETCOMPLETIONITEMSREQUEST
_AUTOCOMPLETEREQUEST.fields_by_name['close_document'].message_type = _CLOSEDOCUMENTREQUEST
_AUTOCOMPLETEREQUEST.oneofs_by_name['request'].fields.append(
_AUTOCOMPLETEREQUEST.fields_by_name['open_document'])
_AUTOCOMPLETEREQUEST.fields_by_name['open_document'].containing_oneof = _AUTOCOMPLETEREQUEST.oneofs_by_name['request']
_AUTOCOMPLETEREQUEST.oneofs_by_name['request'].fields.append(
_AUTOCOMPLETEREQUEST.fields_by_name['change_document'])
_AUTOCOMPLETEREQUEST.fields_by_name['change_document'].containing_oneof = _AUTOCOMPLETEREQUEST.oneofs_by_name['request']
_AUTOCOMPLETEREQUEST.oneofs_by_name['request'].fields.append(
_AUTOCOMPLETEREQUEST.fields_by_name['get_completion_items'])
_AUTOCOMPLETEREQUEST.fields_by_name['get_completion_items'].containing_oneof = _AUTOCOMPLETEREQUEST.oneofs_by_name['request']
_AUTOCOMPLETEREQUEST.oneofs_by_name['request'].fields.append(
_AUTOCOMPLETEREQUEST.fields_by_name['close_document'])
_AUTOCOMPLETEREQUEST.fields_by_name['close_document'].containing_oneof = _AUTOCOMPLETEREQUEST.oneofs_by_name['request']
# AutoCompleteResponse: single variant in the 'response' oneof.
_AUTOCOMPLETERESPONSE.fields_by_name['completion_items'].message_type = _GETCOMPLETIONITEMSRESPONSE
_AUTOCOMPLETERESPONSE.oneofs_by_name['response'].fields.append(
_AUTOCOMPLETERESPONSE.fields_by_name['completion_items'])
_AUTOCOMPLETERESPONSE.fields_by_name['completion_items'].containing_oneof = _AUTOCOMPLETERESPONSE.oneofs_by_name['response']
# LSP-style document / completion messages.
_OPENDOCUMENTREQUEST.fields_by_name['console_id'].message_type = deephaven_dot_proto_dot_ticket__pb2._TICKET
_OPENDOCUMENTREQUEST.fields_by_name['text_document'].message_type = _TEXTDOCUMENTITEM
_CLOSEDOCUMENTREQUEST.fields_by_name['console_id'].message_type = deephaven_dot_proto_dot_ticket__pb2._TICKET
_CLOSEDOCUMENTREQUEST.fields_by_name['text_document'].message_type = _VERSIONEDTEXTDOCUMENTIDENTIFIER
_CHANGEDOCUMENTREQUEST_TEXTDOCUMENTCONTENTCHANGEEVENT.fields_by_name['range'].message_type = _DOCUMENTRANGE
_CHANGEDOCUMENTREQUEST_TEXTDOCUMENTCONTENTCHANGEEVENT.containing_type = _CHANGEDOCUMENTREQUEST
_CHANGEDOCUMENTREQUEST.fields_by_name['console_id'].message_type = deephaven_dot_proto_dot_ticket__pb2._TICKET
_CHANGEDOCUMENTREQUEST.fields_by_name['text_document'].message_type = _VERSIONEDTEXTDOCUMENTIDENTIFIER
_CHANGEDOCUMENTREQUEST.fields_by_name['content_changes'].message_type = _CHANGEDOCUMENTREQUEST_TEXTDOCUMENTCONTENTCHANGEEVENT
_DOCUMENTRANGE.fields_by_name['start'].message_type = _POSITION
_DOCUMENTRANGE.fields_by_name['end'].message_type = _POSITION
_GETCOMPLETIONITEMSREQUEST.fields_by_name['console_id'].message_type = deephaven_dot_proto_dot_ticket__pb2._TICKET
_GETCOMPLETIONITEMSREQUEST.fields_by_name['context'].message_type = _COMPLETIONCONTEXT
_GETCOMPLETIONITEMSREQUEST.fields_by_name['text_document'].message_type = _VERSIONEDTEXTDOCUMENTIDENTIFIER
_GETCOMPLETIONITEMSREQUEST.fields_by_name['position'].message_type = _POSITION
_GETCOMPLETIONITEMSRESPONSE.fields_by_name['items'].message_type = _COMPLETIONITEM
_COMPLETIONITEM.fields_by_name['text_edit'].message_type = _TEXTEDIT
_COMPLETIONITEM.fields_by_name['additional_text_edits'].message_type = _TEXTEDIT
_TEXTEDIT.fields_by_name['range'].message_type = _DOCUMENTRANGE
# FigureDescriptor and its nested chart/series/axis/calendar descriptors.
_FIGUREDESCRIPTOR_CHARTDESCRIPTOR.fields_by_name['series'].message_type = _FIGUREDESCRIPTOR_SERIESDESCRIPTOR
_FIGUREDESCRIPTOR_CHARTDESCRIPTOR.fields_by_name['multi_series'].message_type = _FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR
_FIGUREDESCRIPTOR_CHARTDESCRIPTOR.fields_by_name['axes'].message_type = _FIGUREDESCRIPTOR_AXISDESCRIPTOR
_FIGUREDESCRIPTOR_CHARTDESCRIPTOR.fields_by_name['chart_type'].enum_type = _FIGUREDESCRIPTOR_CHARTDESCRIPTOR_CHARTTYPE
_FIGUREDESCRIPTOR_CHARTDESCRIPTOR.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR_CHARTDESCRIPTOR_CHARTTYPE.containing_type = _FIGUREDESCRIPTOR_CHARTDESCRIPTOR
_FIGUREDESCRIPTOR_CHARTDESCRIPTOR.oneofs_by_name['_title'].fields.append(
_FIGUREDESCRIPTOR_CHARTDESCRIPTOR.fields_by_name['title'])
_FIGUREDESCRIPTOR_CHARTDESCRIPTOR.fields_by_name['title'].containing_oneof = _FIGUREDESCRIPTOR_CHARTDESCRIPTOR.oneofs_by_name['_title']
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['plot_style'].enum_type = _FIGUREDESCRIPTOR_SERIESPLOTSTYLE
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['data_sources'].message_type = _FIGUREDESCRIPTOR_SOURCEDESCRIPTOR
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.oneofs_by_name['_lines_visible'].fields.append(
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['lines_visible'])
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['lines_visible'].containing_oneof = _FIGUREDESCRIPTOR_SERIESDESCRIPTOR.oneofs_by_name['_lines_visible']
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.oneofs_by_name['_shapes_visible'].fields.append(
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['shapes_visible'])
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['shapes_visible'].containing_oneof = _FIGUREDESCRIPTOR_SERIESDESCRIPTOR.oneofs_by_name['_shapes_visible']
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.oneofs_by_name['_point_label_format'].fields.append(
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['point_label_format'])
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['point_label_format'].containing_oneof = _FIGUREDESCRIPTOR_SERIESDESCRIPTOR.oneofs_by_name['_point_label_format']
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.oneofs_by_name['_x_tool_tip_pattern'].fields.append(
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['x_tool_tip_pattern'])
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['x_tool_tip_pattern'].containing_oneof = _FIGUREDESCRIPTOR_SERIESDESCRIPTOR.oneofs_by_name['_x_tool_tip_pattern']
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.oneofs_by_name['_y_tool_tip_pattern'].fields.append(
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['y_tool_tip_pattern'])
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['y_tool_tip_pattern'].containing_oneof = _FIGUREDESCRIPTOR_SERIESDESCRIPTOR.oneofs_by_name['_y_tool_tip_pattern']
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.oneofs_by_name['_shape_size'].fields.append(
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['shape_size'])
_FIGUREDESCRIPTOR_SERIESDESCRIPTOR.fields_by_name['shape_size'].containing_oneof = _FIGUREDESCRIPTOR_SERIESDESCRIPTOR.oneofs_by_name['_shape_size']
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['plot_style'].enum_type = _FIGUREDESCRIPTOR_SERIESPLOTSTYLE
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['line_color'].message_type = _FIGUREDESCRIPTOR_STRINGMAPWITHDEFAULT
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['point_color'].message_type = _FIGUREDESCRIPTOR_STRINGMAPWITHDEFAULT
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['lines_visible'].message_type = _FIGUREDESCRIPTOR_BOOLMAPWITHDEFAULT
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['points_visible'].message_type = _FIGUREDESCRIPTOR_BOOLMAPWITHDEFAULT
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['gradient_visible'].message_type = _FIGUREDESCRIPTOR_BOOLMAPWITHDEFAULT
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['point_label_format'].message_type = _FIGUREDESCRIPTOR_STRINGMAPWITHDEFAULT
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['x_tool_tip_pattern'].message_type = _FIGUREDESCRIPTOR_STRINGMAPWITHDEFAULT
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['y_tool_tip_pattern'].message_type = _FIGUREDESCRIPTOR_STRINGMAPWITHDEFAULT
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['point_label'].message_type = _FIGUREDESCRIPTOR_STRINGMAPWITHDEFAULT
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['point_size'].message_type = _FIGUREDESCRIPTOR_DOUBLEMAPWITHDEFAULT
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['point_shape'].message_type = _FIGUREDESCRIPTOR_STRINGMAPWITHDEFAULT
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.fields_by_name['data_sources'].message_type = _FIGUREDESCRIPTOR_MULTISERIESSOURCEDESCRIPTOR
_FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR_STRINGMAPWITHDEFAULT.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR_DOUBLEMAPWITHDEFAULT.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR_BOOLMAPWITHDEFAULT.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR_AXISDESCRIPTOR.fields_by_name['format_type'].enum_type = _FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISFORMATTYPE
_FIGUREDESCRIPTOR_AXISDESCRIPTOR.fields_by_name['type'].enum_type = _FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISTYPE
_FIGUREDESCRIPTOR_AXISDESCRIPTOR.fields_by_name['position'].enum_type = _FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISPOSITION
_FIGUREDESCRIPTOR_AXISDESCRIPTOR.fields_by_name['business_calendar_descriptor'].message_type = _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR
_FIGUREDESCRIPTOR_AXISDESCRIPTOR.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISFORMATTYPE.containing_type = _FIGUREDESCRIPTOR_AXISDESCRIPTOR
_FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISTYPE.containing_type = _FIGUREDESCRIPTOR_AXISDESCRIPTOR
_FIGUREDESCRIPTOR_AXISDESCRIPTOR_AXISPOSITION.containing_type = _FIGUREDESCRIPTOR_AXISDESCRIPTOR
_FIGUREDESCRIPTOR_AXISDESCRIPTOR.oneofs_by_name['_format_pattern'].fields.append(
_FIGUREDESCRIPTOR_AXISDESCRIPTOR.fields_by_name['format_pattern'])
_FIGUREDESCRIPTOR_AXISDESCRIPTOR.fields_by_name['format_pattern'].containing_oneof = _FIGUREDESCRIPTOR_AXISDESCRIPTOR.oneofs_by_name['_format_pattern']
_FIGUREDESCRIPTOR_AXISDESCRIPTOR.oneofs_by_name['_gap_between_major_ticks'].fields.append(
_FIGUREDESCRIPTOR_AXISDESCRIPTOR.fields_by_name['gap_between_major_ticks'])
_FIGUREDESCRIPTOR_AXISDESCRIPTOR.fields_by_name['gap_between_major_ticks'].containing_oneof = _FIGUREDESCRIPTOR_AXISDESCRIPTOR.oneofs_by_name['_gap_between_major_ticks']
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_BUSINESSPERIOD.containing_type = _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_HOLIDAY.fields_by_name['date'].message_type = _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_LOCALDATE
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_HOLIDAY.fields_by_name['business_periods'].message_type = _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_BUSINESSPERIOD
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_HOLIDAY.containing_type = _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_LOCALDATE.containing_type = _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR.fields_by_name['business_days'].enum_type = _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_DAYOFWEEK
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR.fields_by_name['business_periods'].message_type = _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_BUSINESSPERIOD
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR.fields_by_name['holidays'].message_type = _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_HOLIDAY
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_DAYOFWEEK.containing_type = _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR
_FIGUREDESCRIPTOR_MULTISERIESSOURCEDESCRIPTOR.fields_by_name['type'].enum_type = _FIGUREDESCRIPTOR_SOURCETYPE
_FIGUREDESCRIPTOR_MULTISERIESSOURCEDESCRIPTOR.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR_SOURCEDESCRIPTOR.fields_by_name['type'].enum_type = _FIGUREDESCRIPTOR_SOURCETYPE
_FIGUREDESCRIPTOR_SOURCEDESCRIPTOR.fields_by_name['one_click'].message_type = _FIGUREDESCRIPTOR_ONECLICKDESCRIPTOR
_FIGUREDESCRIPTOR_SOURCEDESCRIPTOR.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR_ONECLICKDESCRIPTOR.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR.fields_by_name['charts'].message_type = _FIGUREDESCRIPTOR_CHARTDESCRIPTOR
_FIGUREDESCRIPTOR_SERIESPLOTSTYLE.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR_SOURCETYPE.containing_type = _FIGUREDESCRIPTOR
_FIGUREDESCRIPTOR.oneofs_by_name['_title'].fields.append(
_FIGUREDESCRIPTOR.fields_by_name['title'])
_FIGUREDESCRIPTOR.fields_by_name['title'].containing_oneof = _FIGUREDESCRIPTOR.oneofs_by_name['_title']
# Register every top-level message descriptor on the file descriptor so it can
# be looked up by simple name, then add the file to the default symbol
# database (generated by protoc; do not hand-edit).
DESCRIPTOR.message_types_by_name['GetConsoleTypesRequest'] = _GETCONSOLETYPESREQUEST
DESCRIPTOR.message_types_by_name['GetConsoleTypesResponse'] = _GETCONSOLETYPESRESPONSE
DESCRIPTOR.message_types_by_name['StartConsoleRequest'] = _STARTCONSOLEREQUEST
DESCRIPTOR.message_types_by_name['StartConsoleResponse'] = _STARTCONSOLERESPONSE
DESCRIPTOR.message_types_by_name['LogSubscriptionRequest'] = _LOGSUBSCRIPTIONREQUEST
DESCRIPTOR.message_types_by_name['LogSubscriptionData'] = _LOGSUBSCRIPTIONDATA
DESCRIPTOR.message_types_by_name['ExecuteCommandRequest'] = _EXECUTECOMMANDREQUEST
DESCRIPTOR.message_types_by_name['ExecuteCommandResponse'] = _EXECUTECOMMANDRESPONSE
DESCRIPTOR.message_types_by_name['BindTableToVariableRequest'] = _BINDTABLETOVARIABLEREQUEST
DESCRIPTOR.message_types_by_name['BindTableToVariableResponse'] = _BINDTABLETOVARIABLERESPONSE
DESCRIPTOR.message_types_by_name['CancelCommandRequest'] = _CANCELCOMMANDREQUEST
DESCRIPTOR.message_types_by_name['CancelCommandResponse'] = _CANCELCOMMANDRESPONSE
DESCRIPTOR.message_types_by_name['AutoCompleteRequest'] = _AUTOCOMPLETEREQUEST
DESCRIPTOR.message_types_by_name['AutoCompleteResponse'] = _AUTOCOMPLETERESPONSE
DESCRIPTOR.message_types_by_name['BrowserNextResponse'] = _BROWSERNEXTRESPONSE
DESCRIPTOR.message_types_by_name['OpenDocumentRequest'] = _OPENDOCUMENTREQUEST
DESCRIPTOR.message_types_by_name['TextDocumentItem'] = _TEXTDOCUMENTITEM
DESCRIPTOR.message_types_by_name['CloseDocumentRequest'] = _CLOSEDOCUMENTREQUEST
DESCRIPTOR.message_types_by_name['ChangeDocumentRequest'] = _CHANGEDOCUMENTREQUEST
DESCRIPTOR.message_types_by_name['DocumentRange'] = _DOCUMENTRANGE
DESCRIPTOR.message_types_by_name['VersionedTextDocumentIdentifier'] = _VERSIONEDTEXTDOCUMENTIDENTIFIER
DESCRIPTOR.message_types_by_name['Position'] = _POSITION
DESCRIPTOR.message_types_by_name['GetCompletionItemsRequest'] = _GETCOMPLETIONITEMSREQUEST
DESCRIPTOR.message_types_by_name['CompletionContext'] = _COMPLETIONCONTEXT
DESCRIPTOR.message_types_by_name['GetCompletionItemsResponse'] = _GETCOMPLETIONITEMSRESPONSE
DESCRIPTOR.message_types_by_name['CompletionItem'] = _COMPLETIONITEM
DESCRIPTOR.message_types_by_name['TextEdit'] = _TEXTEDIT
DESCRIPTOR.message_types_by_name['FigureDescriptor'] = _FIGUREDESCRIPTOR
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# ---------------------------------------------------------------------------
# Concrete message classes (generated by protoc; do not hand-edit).
# Each class is synthesized at import time from its Descriptor via the
# GeneratedProtocolMessageType metaclass, then registered in the default
# symbol database so it is discoverable by full name.
# ---------------------------------------------------------------------------
GetConsoleTypesRequest = _reflection.GeneratedProtocolMessageType('GetConsoleTypesRequest', (_message.Message,), {
'DESCRIPTOR' : _GETCONSOLETYPESREQUEST,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.GetConsoleTypesRequest)
})
_sym_db.RegisterMessage(GetConsoleTypesRequest)
GetConsoleTypesResponse = _reflection.GeneratedProtocolMessageType('GetConsoleTypesResponse', (_message.Message,), {
'DESCRIPTOR' : _GETCONSOLETYPESRESPONSE,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.GetConsoleTypesResponse)
})
_sym_db.RegisterMessage(GetConsoleTypesResponse)
StartConsoleRequest = _reflection.GeneratedProtocolMessageType('StartConsoleRequest', (_message.Message,), {
'DESCRIPTOR' : _STARTCONSOLEREQUEST,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.StartConsoleRequest)
})
_sym_db.RegisterMessage(StartConsoleRequest)
StartConsoleResponse = _reflection.GeneratedProtocolMessageType('StartConsoleResponse', (_message.Message,), {
'DESCRIPTOR' : _STARTCONSOLERESPONSE,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.StartConsoleResponse)
})
_sym_db.RegisterMessage(StartConsoleResponse)
LogSubscriptionRequest = _reflection.GeneratedProtocolMessageType('LogSubscriptionRequest', (_message.Message,), {
'DESCRIPTOR' : _LOGSUBSCRIPTIONREQUEST,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.LogSubscriptionRequest)
})
_sym_db.RegisterMessage(LogSubscriptionRequest)
LogSubscriptionData = _reflection.GeneratedProtocolMessageType('LogSubscriptionData', (_message.Message,), {
'DESCRIPTOR' : _LOGSUBSCRIPTIONDATA,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.LogSubscriptionData)
})
_sym_db.RegisterMessage(LogSubscriptionData)
ExecuteCommandRequest = _reflection.GeneratedProtocolMessageType('ExecuteCommandRequest', (_message.Message,), {
'DESCRIPTOR' : _EXECUTECOMMANDREQUEST,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.ExecuteCommandRequest)
})
_sym_db.RegisterMessage(ExecuteCommandRequest)
ExecuteCommandResponse = _reflection.GeneratedProtocolMessageType('ExecuteCommandResponse', (_message.Message,), {
'DESCRIPTOR' : _EXECUTECOMMANDRESPONSE,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.ExecuteCommandResponse)
})
_sym_db.RegisterMessage(ExecuteCommandResponse)
BindTableToVariableRequest = _reflection.GeneratedProtocolMessageType('BindTableToVariableRequest', (_message.Message,), {
'DESCRIPTOR' : _BINDTABLETOVARIABLEREQUEST,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.BindTableToVariableRequest)
})
_sym_db.RegisterMessage(BindTableToVariableRequest)
BindTableToVariableResponse = _reflection.GeneratedProtocolMessageType('BindTableToVariableResponse', (_message.Message,), {
'DESCRIPTOR' : _BINDTABLETOVARIABLERESPONSE,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.BindTableToVariableResponse)
})
_sym_db.RegisterMessage(BindTableToVariableResponse)
CancelCommandRequest = _reflection.GeneratedProtocolMessageType('CancelCommandRequest', (_message.Message,), {
'DESCRIPTOR' : _CANCELCOMMANDREQUEST,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.CancelCommandRequest)
})
_sym_db.RegisterMessage(CancelCommandRequest)
CancelCommandResponse = _reflection.GeneratedProtocolMessageType('CancelCommandResponse', (_message.Message,), {
'DESCRIPTOR' : _CANCELCOMMANDRESPONSE,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.CancelCommandResponse)
})
_sym_db.RegisterMessage(CancelCommandResponse)
AutoCompleteRequest = _reflection.GeneratedProtocolMessageType('AutoCompleteRequest', (_message.Message,), {
'DESCRIPTOR' : _AUTOCOMPLETEREQUEST,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.AutoCompleteRequest)
})
_sym_db.RegisterMessage(AutoCompleteRequest)
AutoCompleteResponse = _reflection.GeneratedProtocolMessageType('AutoCompleteResponse', (_message.Message,), {
'DESCRIPTOR' : _AUTOCOMPLETERESPONSE,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.AutoCompleteResponse)
})
_sym_db.RegisterMessage(AutoCompleteResponse)
BrowserNextResponse = _reflection.GeneratedProtocolMessageType('BrowserNextResponse', (_message.Message,), {
'DESCRIPTOR' : _BROWSERNEXTRESPONSE,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.BrowserNextResponse)
})
_sym_db.RegisterMessage(BrowserNextResponse)
OpenDocumentRequest = _reflection.GeneratedProtocolMessageType('OpenDocumentRequest', (_message.Message,), {
'DESCRIPTOR' : _OPENDOCUMENTREQUEST,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.OpenDocumentRequest)
})
_sym_db.RegisterMessage(OpenDocumentRequest)
TextDocumentItem = _reflection.GeneratedProtocolMessageType('TextDocumentItem', (_message.Message,), {
'DESCRIPTOR' : _TEXTDOCUMENTITEM,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.TextDocumentItem)
})
_sym_db.RegisterMessage(TextDocumentItem)
CloseDocumentRequest = _reflection.GeneratedProtocolMessageType('CloseDocumentRequest', (_message.Message,), {
'DESCRIPTOR' : _CLOSEDOCUMENTREQUEST,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.CloseDocumentRequest)
})
_sym_db.RegisterMessage(CloseDocumentRequest)
# ChangeDocumentRequest carries one nested message class
# (TextDocumentContentChangeEvent), built inline in the class dict; both the
# outer and nested classes are registered (generated by protoc; do not edit).
ChangeDocumentRequest = _reflection.GeneratedProtocolMessageType('ChangeDocumentRequest', (_message.Message,), {
'TextDocumentContentChangeEvent' : _reflection.GeneratedProtocolMessageType('TextDocumentContentChangeEvent', (_message.Message,), {
'DESCRIPTOR' : _CHANGEDOCUMENTREQUEST_TEXTDOCUMENTCONTENTCHANGEEVENT,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.ChangeDocumentRequest.TextDocumentContentChangeEvent)
})
,
'DESCRIPTOR' : _CHANGEDOCUMENTREQUEST,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.ChangeDocumentRequest)
})
_sym_db.RegisterMessage(ChangeDocumentRequest)
_sym_db.RegisterMessage(ChangeDocumentRequest.TextDocumentContentChangeEvent)
# Remaining flat (non-nested) LSP-style message classes, each synthesized from
# its Descriptor and registered (generated by protoc; do not hand-edit).
DocumentRange = _reflection.GeneratedProtocolMessageType('DocumentRange', (_message.Message,), {
'DESCRIPTOR' : _DOCUMENTRANGE,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.DocumentRange)
})
_sym_db.RegisterMessage(DocumentRange)
VersionedTextDocumentIdentifier = _reflection.GeneratedProtocolMessageType('VersionedTextDocumentIdentifier', (_message.Message,), {
'DESCRIPTOR' : _VERSIONEDTEXTDOCUMENTIDENTIFIER,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.VersionedTextDocumentIdentifier)
})
_sym_db.RegisterMessage(VersionedTextDocumentIdentifier)
Position = _reflection.GeneratedProtocolMessageType('Position', (_message.Message,), {
'DESCRIPTOR' : _POSITION,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.Position)
})
_sym_db.RegisterMessage(Position)
GetCompletionItemsRequest = _reflection.GeneratedProtocolMessageType('GetCompletionItemsRequest', (_message.Message,), {
'DESCRIPTOR' : _GETCOMPLETIONITEMSREQUEST,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.GetCompletionItemsRequest)
})
_sym_db.RegisterMessage(GetCompletionItemsRequest)
CompletionContext = _reflection.GeneratedProtocolMessageType('CompletionContext', (_message.Message,), {
'DESCRIPTOR' : _COMPLETIONCONTEXT,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.CompletionContext)
})
_sym_db.RegisterMessage(CompletionContext)
GetCompletionItemsResponse = _reflection.GeneratedProtocolMessageType('GetCompletionItemsResponse', (_message.Message,), {
'DESCRIPTOR' : _GETCOMPLETIONITEMSRESPONSE,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.GetCompletionItemsResponse)
})
_sym_db.RegisterMessage(GetCompletionItemsResponse)
CompletionItem = _reflection.GeneratedProtocolMessageType('CompletionItem', (_message.Message,), {
'DESCRIPTOR' : _COMPLETIONITEM,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.CompletionItem)
})
_sym_db.RegisterMessage(CompletionItem)
TextEdit = _reflection.GeneratedProtocolMessageType('TextEdit', (_message.Message,), {
'DESCRIPTOR' : _TEXTEDIT,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.TextEdit)
})
_sym_db.RegisterMessage(TextEdit)
# FigureDescriptor bundles all of its nested descriptor classes (chart,
# series, axis, business-calendar, source descriptors, and the map-with-default
# helpers) inline in the class dict; each nested class — including the
# doubly-nested BusinessCalendarDescriptor children — is then registered
# individually (generated by protoc; do not hand-edit).
FigureDescriptor = _reflection.GeneratedProtocolMessageType('FigureDescriptor', (_message.Message,), {
'ChartDescriptor' : _reflection.GeneratedProtocolMessageType('ChartDescriptor', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_CHARTDESCRIPTOR,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.ChartDescriptor)
})
,
'SeriesDescriptor' : _reflection.GeneratedProtocolMessageType('SeriesDescriptor', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_SERIESDESCRIPTOR,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SeriesDescriptor)
})
,
'MultiSeriesDescriptor' : _reflection.GeneratedProtocolMessageType('MultiSeriesDescriptor', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_MULTISERIESDESCRIPTOR,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesDescriptor)
})
,
'StringMapWithDefault' : _reflection.GeneratedProtocolMessageType('StringMapWithDefault', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_STRINGMAPWITHDEFAULT,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.StringMapWithDefault)
})
,
'DoubleMapWithDefault' : _reflection.GeneratedProtocolMessageType('DoubleMapWithDefault', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_DOUBLEMAPWITHDEFAULT,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.DoubleMapWithDefault)
})
,
'BoolMapWithDefault' : _reflection.GeneratedProtocolMessageType('BoolMapWithDefault', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_BOOLMAPWITHDEFAULT,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BoolMapWithDefault)
})
,
'AxisDescriptor' : _reflection.GeneratedProtocolMessageType('AxisDescriptor', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_AXISDESCRIPTOR,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.AxisDescriptor)
})
,
'BusinessCalendarDescriptor' : _reflection.GeneratedProtocolMessageType('BusinessCalendarDescriptor', (_message.Message,), {
'BusinessPeriod' : _reflection.GeneratedProtocolMessageType('BusinessPeriod', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_BUSINESSPERIOD,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.BusinessPeriod)
})
,
'Holiday' : _reflection.GeneratedProtocolMessageType('Holiday', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_HOLIDAY,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.Holiday)
})
,
'LocalDate' : _reflection.GeneratedProtocolMessageType('LocalDate', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR_LOCALDATE,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor.LocalDate)
})
,
'DESCRIPTOR' : _FIGUREDESCRIPTOR_BUSINESSCALENDARDESCRIPTOR,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.BusinessCalendarDescriptor)
})
,
'MultiSeriesSourceDescriptor' : _reflection.GeneratedProtocolMessageType('MultiSeriesSourceDescriptor', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_MULTISERIESSOURCEDESCRIPTOR,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.MultiSeriesSourceDescriptor)
})
,
'SourceDescriptor' : _reflection.GeneratedProtocolMessageType('SourceDescriptor', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_SOURCEDESCRIPTOR,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.SourceDescriptor)
})
,
'OneClickDescriptor' : _reflection.GeneratedProtocolMessageType('OneClickDescriptor', (_message.Message,), {
'DESCRIPTOR' : _FIGUREDESCRIPTOR_ONECLICKDESCRIPTOR,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor.OneClickDescriptor)
})
,
'DESCRIPTOR' : _FIGUREDESCRIPTOR,
'__module__' : 'pydeephaven.proto.console_pb2'
# @@protoc_insertion_point(class_scope:io.deephaven.proto.backplane.script.grpc.FigureDescriptor)
})
_sym_db.RegisterMessage(FigureDescriptor)
_sym_db.RegisterMessage(FigureDescriptor.ChartDescriptor)
_sym_db.RegisterMessage(FigureDescriptor.SeriesDescriptor)
_sym_db.RegisterMessage(FigureDescriptor.MultiSeriesDescriptor)
_sym_db.RegisterMessage(FigureDescriptor.StringMapWithDefault)
_sym_db.RegisterMessage(FigureDescriptor.DoubleMapWithDefault)
_sym_db.RegisterMessage(FigureDescriptor.BoolMapWithDefault)
_sym_db.RegisterMessage(FigureDescriptor.AxisDescriptor)
_sym_db.RegisterMessage(FigureDescriptor.BusinessCalendarDescriptor)
_sym_db.RegisterMessage(FigureDescriptor.BusinessCalendarDescriptor.BusinessPeriod)
_sym_db.RegisterMessage(FigureDescriptor.BusinessCalendarDescriptor.Holiday)
_sym_db.RegisterMessage(FigureDescriptor.BusinessCalendarDescriptor.LocalDate)
_sym_db.RegisterMessage(FigureDescriptor.MultiSeriesSourceDescriptor)
_sym_db.RegisterMessage(FigureDescriptor.SourceDescriptor)
_sym_db.RegisterMessage(FigureDescriptor.OneClickDescriptor)
# Clear serialized file-level options, then declare the ConsoleService
# descriptor: nine RPCs covering console lifecycle (GetConsoleTypes,
# StartConsole), log streaming, command execution/cancellation, variable
# binding, and the autocomplete stream plus its browser-compatible
# open/next variants (generated by protoc; do not hand-edit).
DESCRIPTOR._options = None
_CONSOLESERVICE = _descriptor.ServiceDescriptor(
name='ConsoleService',
full_name='io.deephaven.proto.backplane.script.grpc.ConsoleService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=10067,
serialized_end=11474,
methods=[
_descriptor.MethodDescriptor(
name='GetConsoleTypes',
full_name='io.deephaven.proto.backplane.script.grpc.ConsoleService.GetConsoleTypes',
index=0,
containing_service=None,
input_type=_GETCONSOLETYPESREQUEST,
output_type=_GETCONSOLETYPESRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='StartConsole',
full_name='io.deephaven.proto.backplane.script.grpc.ConsoleService.StartConsole',
index=1,
containing_service=None,
input_type=_STARTCONSOLEREQUEST,
output_type=_STARTCONSOLERESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SubscribeToLogs',
full_name='io.deephaven.proto.backplane.script.grpc.ConsoleService.SubscribeToLogs',
index=2,
containing_service=None,
input_type=_LOGSUBSCRIPTIONREQUEST,
output_type=_LOGSUBSCRIPTIONDATA,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ExecuteCommand',
full_name='io.deephaven.proto.backplane.script.grpc.ConsoleService.ExecuteCommand',
index=3,
containing_service=None,
input_type=_EXECUTECOMMANDREQUEST,
output_type=_EXECUTECOMMANDRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CancelCommand',
full_name='io.deephaven.proto.backplane.script.grpc.ConsoleService.CancelCommand',
index=4,
containing_service=None,
input_type=_CANCELCOMMANDREQUEST,
output_type=_CANCELCOMMANDRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='BindTableToVariable',
full_name='io.deephaven.proto.backplane.script.grpc.ConsoleService.BindTableToVariable',
index=5,
containing_service=None,
input_type=_BINDTABLETOVARIABLEREQUEST,
output_type=_BINDTABLETOVARIABLERESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='AutoCompleteStream',
full_name='io.deephaven.proto.backplane.script.grpc.ConsoleService.AutoCompleteStream',
index=6,
containing_service=None,
input_type=_AUTOCOMPLETEREQUEST,
output_type=_AUTOCOMPLETERESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='OpenAutoCompleteStream',
full_name='io.deephaven.proto.backplane.script.grpc.ConsoleService.OpenAutoCompleteStream',
index=7,
containing_service=None,
input_type=_AUTOCOMPLETEREQUEST,
output_type=_AUTOCOMPLETERESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='NextAutoCompleteStream',
full_name='io.deephaven.proto.backplane.script.grpc.ConsoleService.NextAutoCompleteStream',
index=8,
containing_service=None,
input_type=_AUTOCOMPLETEREQUEST,
output_type=_BROWSERNEXTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
# Make the service discoverable in the default symbol database and on the
# file descriptor by simple name.
_sym_db.RegisterServiceDescriptor(_CONSOLESERVICE)
DESCRIPTOR.services_by_name['ConsoleService'] = _CONSOLESERVICE
# @@protoc_insertion_point(module_scope)
| 52.318223
| 17,288
| 0.781095
| 20,865
| 174,272
| 6.205655
| 0.037815
| 0.039357
| 0.071756
| 0.071825
| 0.806999
| 0.774036
| 0.755825
| 0.730771
| 0.715619
| 0.663279
| 0
| 0.032301
| 0.104653
| 174,272
| 3,330
| 17,289
| 52.333934
| 0.797525
| 0.027704
| 0
| 0.694948
| 1
| 0.000318
| 0.269501
| 0.231464
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.002224
| 0
| 0.002224
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e631090fc061055346a98d6caaa9cfb39ae3daab
| 3,802
|
py
|
Python
|
y/interfaces/multicall2.py
|
cartercarlson/ypricemagic
|
f17fec155db7fb44ee624cd6e75193f17c6238cf
|
[
"MIT"
] | 1
|
2022-03-28T16:07:07.000Z
|
2022-03-28T16:07:07.000Z
|
y/interfaces/multicall2.py
|
cartercarlson/ypricemagic
|
f17fec155db7fb44ee624cd6e75193f17c6238cf
|
[
"MIT"
] | null | null | null |
y/interfaces/multicall2.py
|
cartercarlson/ypricemagic
|
f17fec155db7fb44ee624cd6e75193f17c6238cf
|
[
"MIT"
] | null | null | null |
# Contract ABI for the Multicall2 aggregator. Entries visible below:
# aggregate / blockAndAggregate / tryAggregate / tryBlockAndAggregate batch
# many (target, callData) pairs into one call, plus view helpers
# (getBlockHash, getBlockNumber, getCurrentBlock*, getEthBalance,
# getLastBlockHash). Kept as a single verbatim JSON-style literal so it can
# be diffed directly against the deployed contract's published ABI.
MULTICALL2_ABI = [{"inputs":[{"components":[{"internalType":"address","name":"target","type":"address"},{"internalType":"bytes","name":"callData","type":"bytes"}],"internalType":"struct Multicall2.Call[]","name":"calls","type":"tuple[]"}],"name":"aggregate","outputs":[{"internalType":"uint256","name":"blockNumber","type":"uint256"},{"internalType":"bytes[]","name":"returnData","type":"bytes[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"components":[{"internalType":"address","name":"target","type":"address"},{"internalType":"bytes","name":"callData","type":"bytes"}],"internalType":"struct Multicall2.Call[]","name":"calls","type":"tuple[]"}],"name":"blockAndAggregate","outputs":[{"internalType":"uint256","name":"blockNumber","type":"uint256"},{"internalType":"bytes32","name":"blockHash","type":"bytes32"},{"components":[{"internalType":"bool","name":"success","type":"bool"},{"internalType":"bytes","name":"returnData","type":"bytes"}],"internalType":"struct Multicall2.Result[]","name":"returnData","type":"tuple[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"blockNumber","type":"uint256"}],"name":"getBlockHash","outputs":[{"internalType":"bytes32","name":"blockHash","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"getBlockNumber","outputs":[{"internalType":"uint256","name":"blockNumber","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"getCurrentBlockCoinbase","outputs":[{"internalType":"address","name":"coinbase","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"getCurrentBlockDifficulty","outputs":[{"internalType":"uint256","name":"difficulty","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"getCurrentBlockGasLimit","outputs":[{"internalType":"uint256","name":"gaslimit","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"getCurrentBlockTimestamp","outputs":[{"internalType":"uint256","name":"timestamp","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"addr","type":"address"}],"name":"getEthBalance","outputs":[{"internalType":"uint256","name":"balance","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"getLastBlockHash","outputs":[{"internalType":"bytes32","name":"blockHash","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bool","name":"requireSuccess","type":"bool"},{"components":[{"internalType":"address","name":"target","type":"address"},{"internalType":"bytes","name":"callData","type":"bytes"}],"internalType":"struct Multicall2.Call[]","name":"calls","type":"tuple[]"}],"name":"tryAggregate","outputs":[{"components":[{"internalType":"bool","name":"success","type":"bool"},{"internalType":"bytes","name":"returnData","type":"bytes"}],"internalType":"struct Multicall2.Result[]","name":"returnData","type":"tuple[]"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bool","name":"requireSuccess","type":"bool"},{"components":[{"internalType":"address","name":"target","type":"address"},{"internalType":"bytes","name":"callData","type":"bytes"}],"internalType":"struct Multicall2.Call[]","name":"calls","type":"tuple[]"}],"name":"tryBlockAndAggregate","outputs":[{"internalType":"uint256","name":"blockNumber","type":"uint256"},{"internalType":"bytes32","name":"blockHash","type":"bytes32"},{"components":[{"internalType":"bool","name":"success","type":"bool"},{"internalType":"bytes","name":"returnData","type":"bytes"}],"internalType":"struct Multicall2.Result[]","name":"returnData","type":"tuple[]"}],"stateMutability":"nonpayable","type":"function"}]
| 3,802
| 3,802
| 0.676223
| 353
| 3,802
| 7.280453
| 0.133144
| 0.056031
| 0.077043
| 0.093385
| 0.819066
| 0.804669
| 0.758755
| 0.724125
| 0.618288
| 0.618288
| 0
| 0.020564
| 0.002367
| 3,802
| 1
| 3,802
| 3,802
| 0.657
| 0
| 0
| 0
| 0
| 0
| 0.68446
| 0.02498
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e64174d86d0e165d2ecfc94880443351e64d6cc9
| 4,056
|
py
|
Python
|
tests/changes/artifacts/test_collection_artifact.py
|
vault-the/changes
|
37e23c3141b75e4785cf398d015e3dbca41bdd56
|
[
"Apache-2.0"
] | 443
|
2015-01-03T16:28:39.000Z
|
2021-04-26T16:39:46.000Z
|
tests/changes/artifacts/test_collection_artifact.py
|
vault-the/changes
|
37e23c3141b75e4785cf398d015e3dbca41bdd56
|
[
"Apache-2.0"
] | 12
|
2015-07-30T19:07:16.000Z
|
2016-11-07T23:11:21.000Z
|
tests/changes/artifacts/test_collection_artifact.py
|
vault-the/changes
|
37e23c3141b75e4785cf398d015e3dbca41bdd56
|
[
"Apache-2.0"
] | 47
|
2015-01-09T10:04:00.000Z
|
2020-11-18T17:58:19.000Z
|
from cStringIO import StringIO
import mock
from changes.artifacts.base import ArtifactParseError
from changes.artifacts.collection_artifact import CollectionArtifactHandler
from changes.config import db
from changes.constants import Result
from changes.models.failurereason import FailureReason
from changes.models.jobplan import JobPlan
from changes.testutils import TestCase
class CollectionArtifactHandlerTest(TestCase):
    """Tests for CollectionArtifactHandler.process.

    Each test patches JobPlan.get_build_step_for_job so the handler resolves
    to a mock build step, then feeds a tests.json artifact through the
    handler and checks whether jobs were expanded and/or a FailureReason was
    recorded.
    """

    def _make_fixtures(self):
        """Create a project→build→job→jobphase→jobstep chain plus a
        tests.json artifact and a handler configured to match it."""
        project = self.create_project()
        build = self.create_build(project)
        job = self.create_job(build)
        jobphase = self.create_jobphase(job)
        jobstep = self.create_jobstep(jobphase)
        artifact = self.create_artifact(jobstep, 'tests.json')
        handler = CollectionArtifactHandler(jobstep)
        handler.FILENAMES = ('/tests.json',)
        return jobstep, artifact, handler

    @mock.patch.object(JobPlan, 'get_build_step_for_job')
    def test_valid_json(self, get_build_step_for_job):
        """Valid JSON expands jobs and records no failure."""
        buildstep = mock.Mock()
        get_build_step_for_job.return_value = (None, buildstep)
        jobstep, artifact, handler = self._make_fixtures()

        handler.process(StringIO("{}"), artifact)

        buildstep.expand_jobs.assert_called_once_with(jobstep, {})
        # make sure changes were committed
        db.session.rollback()
        assert not FailureReason.query.filter(FailureReason.step_id == jobstep.id).first()

    @mock.patch.object(JobPlan, 'get_build_step_for_job')
    def test_invalid_json(self, get_build_step_for_job):
        """Unparseable JSON must not expand jobs and must record a failure."""
        buildstep = mock.Mock()
        get_build_step_for_job.return_value = (None, buildstep)
        jobstep, artifact, handler = self._make_fixtures()

        handler.process(StringIO(""), artifact)

        # Bug fix: the original asserted `buildstep.call_count == 0`, which
        # counts calls to the mock object itself (always zero here) rather
        # than calls to the method that matters — expand_jobs.
        assert buildstep.expand_jobs.call_count == 0
        # make sure changes were committed
        db.session.rollback()
        assert FailureReason.query.filter(FailureReason.step_id == jobstep.id).first()

    @mock.patch.object(JobPlan, 'get_build_step_for_job')
    def test_parse_error(self, get_build_step_for_job):
        """An ArtifactParseError from expand_jobs records a failure reason."""
        buildstep = mock.Mock()
        get_build_step_for_job.return_value = (None, buildstep)
        jobstep, artifact, handler = self._make_fixtures()

        buildstep.expand_jobs.side_effect = ArtifactParseError('bad file')
        handler.process(StringIO("{}"), artifact)

        buildstep.expand_jobs.assert_called_once_with(jobstep, {})
        # make sure changes were committed
        db.session.rollback()
        assert FailureReason.query.filter(FailureReason.step_id == jobstep.id).first()

    @mock.patch.object(JobPlan, 'get_build_step_for_job')
    def test_expand_jobs_error(self, get_build_step_for_job):
        """An unexpected exception marks the step as an infra failure
        without recording a FailureReason row."""
        buildstep = mock.Mock()
        get_build_step_for_job.return_value = (None, buildstep)
        jobstep, artifact, handler = self._make_fixtures()

        buildstep.expand_jobs.side_effect = Exception('error')
        handler.process(StringIO("{}"), artifact)

        buildstep.expand_jobs.assert_called_once_with(jobstep, {})
        # make sure changes were committed
        db.session.rollback()
        assert jobstep.result == Result.infra_failed
        assert not FailureReason.query.filter(FailureReason.step_id == jobstep.id).first()
| 43.612903
| 90
| 0.70217
| 466
| 4,056
| 5.879828
| 0.165236
| 0.087591
| 0.052555
| 0.065693
| 0.819708
| 0.819708
| 0.819708
| 0.819708
| 0.819708
| 0.819708
| 0
| 0.000308
| 0.198718
| 4,056
| 92
| 91
| 44.086957
| 0.842769
| 0.032298
| 0
| 0.753247
| 0
| 0
| 0.048724
| 0.022449
| 0
| 0
| 0
| 0
| 0.116883
| 1
| 0.051948
| false
| 0
| 0.116883
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e66898beb5f372de5e29153a9615aebcd35886d2
| 19,859
|
py
|
Python
|
models/csqa_dataset.py
|
kachiO/KagNet
|
f2b34eb644cad8f2d2fc03072f9449d0ff6454d1
|
[
"MIT"
] | 244
|
2019-09-06T07:53:57.000Z
|
2022-03-28T19:32:15.000Z
|
models/csqa_dataset.py
|
kachiO/KagNet
|
f2b34eb644cad8f2d2fc03072f9449d0ff6454d1
|
[
"MIT"
] | null | null | null |
models/csqa_dataset.py
|
kachiO/KagNet
|
f2b34eb644cad8f2d2fc03072f9449d0ff6454d1
|
[
"MIT"
] | 61
|
2019-09-14T07:06:57.000Z
|
2022-03-16T07:02:52.000Z
|
import torch
import torch.utils.data as data
import numpy as np
import json
from tqdm import tqdm
import timeit
import pickle
import os
import dgl
import networkx as nx
import random
def load_embeddings(path):
    """Load pooled GloVe concept embeddings stored as a NumPy array.

    Parameters
    ----------
    path : str or file-like
        Location of the saved ``.npy`` array, forwarded to ``np.load``.

    Returns
    -------
    numpy.ndarray
        The embedding matrix exactly as persisted on disk.
    """
    print("Loading glove concept embeddings with pooling:", path)
    embeddings = np.load(path)
    print("done!")
    return embeddings
class data_with_paths(data.Dataset):
    """CSQA dataset combining pre-encoded statement vectors with ConceptNet paths.

    Loads three artifacts and groups everything per question, ``num_choice``
    answer candidates each:
      * statements: one JSON object per line in ``statement_json_file``;
      * sentence vectors: a .npy array in ``pretrained_sent_vecs``, indexed in
        the same statement order;
      * path-finding results: a pickle in ``pf_json_file`` with one entry per
        statement (asserted below).
    """

    def __init__(self, statement_json_file, pf_json_file, pretrained_sent_vecs, num_choice=5, max_path_len=5, start=0, end=None, cut_off=3):
        self.qids = []
        self.statements = []
        self.correct_labels = []
        statement_json_data = []
        print("loading statements from %s" % statement_json_file)
        with open(statement_json_file, "r") as fp:
            for line in fp.readlines():
                statement_data = json.loads(line.strip())
                statement_json_data.append(statement_data)
        print("Done!")
        print("loading sent_vecs from %s" % pretrained_sent_vecs)
        self.input_sent_vecs = np.load(pretrained_sent_vecs)
        print("Done!")
        self.qa_text = []
        # Running index into input_sent_vecs: statements are stored flat, in
        # (question, choice) order.
        statement_id = 0
        # load all statements
        for question_id in range(len(statement_json_data)):
            statements = []
            qa_text_cur = []
            self.qids.append([statement_json_data[question_id]["id"]])
            for k, s in enumerate(statement_json_data[question_id]["statements"]):
                assert len(statement_json_data[question_id]["statements"]) == num_choice  # 5
                qa_text_cur.append((s["statement"], s['label']))
                if s["label"] is True:  # true of false
                    self.correct_labels.append(k)  # the truth id [0,1,2,3,4]
                statements.append(self.input_sent_vecs[statement_id])
                statement_id += 1
            self.statements.append(np.array(statements))
            self.qa_text.append(qa_text_cur)
        # load all qa and paths
        self.qa_pair_data = []
        self.cpt_path_data = []
        self.rel_path_data = []
        start_time = timeit.default_timer()
        print("loading paths from %s" % pf_json_file)
        with open(pf_json_file, 'rb') as handle:
            pf_json_data = pickle.load(handle)
        print('\t Done! Time: ', "{0:.2f} sec".format(float(timeit.default_timer() - start_time)))
        # One path-finding entry per statement (questions * choices).
        assert len(statement_json_data) * num_choice == len(pf_json_data)
        for s in tqdm(pf_json_data, desc="processing paths"):
            paths = []
            rels = []
            qa_pairs = list()
            for qas in s:
                # (q,a) can be identified by the first and last node in every path
                # qc = qas["qc"]
                # ac = qas["ac"]
                pf_res = qas["pf_res"]
                if pf_res is not None:
                    for item in pf_res:
                        p = item["path"]
                        # +1 shifts concept ids so 0 can serve as padding below.
                        q = p[0] + 1
                        a = p[-1] + 1
                        new_qa_pair = False
                        if (q,a) not in qa_pairs:
                            qa_pairs.append((q,a))
                            new_qa_pair = True
                        # Keep at least one path per (q, a) pair even when it
                        # exceeds cut_off; drop extra long paths otherwise.
                        if len(p) > cut_off and not new_qa_pair:
                            continue  # cut off by length of concepts
                        # padding dummy concepts and relations
                        p = [n + 1 for n in p]
                        p.extend([0] * (max_path_len - len(p)))  # padding
                        r = item["rel"]
                        for i_ in range(len(r)):
                            for j_ in range(len(r[i_])):
                                if r[i_][j_] - 17 in r[i_]:
                                    r[i_][j_] -= 17  # to delete realtedto* and antonym*
                        r = [n[0] + 1 for n in r]  # only pick the top relation when multiple ones are okay
                        r.extend([0] * (max_path_len - len(r)))  # padding
                        paths.append(p)
                        rels.append(r)
            self.qa_pair_data.append(list(qa_pairs))
            self.cpt_path_data.append(paths)
            self.rel_path_data.append(rels)
        # Re-group flat per-statement lists into per-question tuples of
        # num_choice entries each.
        self.cpt_path_data = list(zip(*(iter(self.cpt_path_data),) * num_choice))
        self.rel_path_data = list(zip(*(iter(self.rel_path_data),) * num_choice))
        self.qa_pair_data = list(zip(*(iter(self.qa_pair_data),) * num_choice))
        # slicing dataset
        self.statements = self.statements[start:end]
        self.correct_labels = self.correct_labels[start:end]
        self.qids = self.qids[start:end]
        self.cpt_path_data = self.cpt_path_data[start:end]
        self.rel_path_data = self.rel_path_data[start:end]
        self.qa_pair_data = self.qa_pair_data[start:end]
        assert len(self.statements) == len(self.correct_labels) == len(self.qids) == len(self.cpt_path_data) == len(self.rel_path_data) == len(self.qa_pair_data)
        self.n_samples = len(self.statements)

    def __len__(self):
        # Number of questions after slicing.
        return self.n_samples

    def __getitem__(self, index):
        # Returns per-question tensors plus path/pair/text metadata for all
        # num_choice candidates.
        return torch.Tensor([self.statements[index]]), torch.Tensor([self.correct_labels[index]]), \
            self.cpt_path_data[index], self.rel_path_data[index], self.qa_pair_data[index], self.qa_text[index]
class data_with_graphs(data.Dataset):
    """CSQA dataset combining pre-encoded statement vectors with DGL schema graphs.

    Statement/vector loading mirrors ``data_with_paths``; additionally reads
    one networkx node-link JSON graph per statement from ``graph_ngx_file``
    and converts each to a DGLGraph (cached as a ``.dgl.pk`` pickle next to
    the source file when ``reload`` is True).
    NOTE(review): this class duplicates most of ``data_with_paths.__init__``
    and ``data_with_graphs_and_paths.__init__`` — keep the three in sync.
    """

    def __init__(self, statement_json_file, graph_ngx_file, pretrained_sent_vecs, num_choice=5, start=0, end=None, reload=True):
        self.qids = []
        self.statements = []
        self.correct_labels = []
        statement_json_data = []
        print("loading statements from %s" % statement_json_file)
        with open(statement_json_file, "r") as fp:
            for line in fp.readlines():
                statement_data = json.loads(line.strip())
                statement_json_data.append(statement_data)
        print("Done!")
        print("loading sent_vecs from %s" % pretrained_sent_vecs)
        self.input_sent_vecs = np.load(pretrained_sent_vecs)
        print("Done!")
        self.qa_text = []
        # Flat index into input_sent_vecs, advanced once per (question, choice).
        statement_id = 0
        # load all statements
        for question_id in range(len(statement_json_data)):
            statements = []
            qa_text_cur = []
            self.qids.append([statement_json_data[question_id]["id"]])
            for k, s in enumerate(statement_json_data[question_id]["statements"]):
                assert len(statement_json_data[question_id]["statements"]) == num_choice  # 5
                qa_text_cur.append((s["statement"], s['label']))
                if s["label"] is True:  # true of false
                    self.correct_labels.append(k)  # the truth id [0,1,2,3,4]
                statements.append(self.input_sent_vecs[statement_id])
                statement_id += 1
            self.statements.append(np.array(statements))
            self.qa_text.append(qa_text_cur)
        self.nxgs = []
        self.dgs = []
        start_time = timeit.default_timer()
        print("loading paths from %s" % graph_ngx_file)
        with open(graph_ngx_file, 'r') as fr:
            for line in fr.readlines():
                line = line.strip()
                self.nxgs.append(line)
        print('\t Done! Time: ', "{0:.2f} sec".format(float(timeit.default_timer() - start_time)))
        save_file = graph_ngx_file + ".dgl.pk"
        if reload and os.path.exists(save_file):
            # Fast path: unpickle previously converted DGL graphs. GC is
            # disabled around the load to speed up unpickling many objects.
            import gc
            print("loading pickle for the dgl", save_file)
            start_time = timeit.default_timer()
            with open(save_file, 'rb') as handle:
                gc.disable()
                self.dgs = pickle.load(handle)
                gc.enable()
            print("finished loading in %.3f secs" % (float(timeit.default_timer() - start_time)))
        else:
            # Slow path: parse each node-link JSON graph and convert to DGL,
            # then cache the result for next time.
            for index, nxg_str in tqdm(enumerate(self.nxgs), total=len(self.nxgs)):
                nxg = nx.node_link_graph(json.loads(nxg_str))
                dg = dgl.DGLGraph(multigraph=True)
                # dg.from_networkx(nxg, edge_attrs=["rel"])
                dg.from_networkx(nxg)
                cids = [nxg.nodes[n_id]['cid']+1 for n_id in range(len(dg))]  # -1 --> 0 and 0 stands for a palceholder concept
                # rel_types = [nxg.edges[u, v, r]["rel"] + 1 for u, v, r in nxg.edges]  # 0 is used for
                # print(line)
                # node_types = [mapping_type[nxg.nodes[n_id]['type']] for n_id in range(len(dg))]
                # edge_weights = [nxg.edges[u, v].get("weight", 0.0) for u, v in nxg.edges]  # -1 is used for the unk edges
                # dg.edata.update({'weights': torch.FloatTensor(edge_weights)})
                # dg.edata.update({'rel_types': torch.LongTensor(rel_types)})
                dg.ndata.update({'cncpt_ids': torch.LongTensor(cids)})
                self.dgs.append(dg)
            save_file = graph_ngx_file + ".dgl.pk"
            print("saving pickle for the dgl", save_file)
            with open(save_file, 'wb') as handle:
                pickle.dump(self.dgs, handle, protocol=pickle.HIGHEST_PROTOCOL)
        # self.qa_pair_data = list(zip(*(iter(self.qa_pair_data),) * num_choice))
        # Re-group flat per-statement lists into per-question tuples.
        self.nxgs = list(zip(*(iter(self.nxgs),) * num_choice))
        self.dgs = list(zip(*(iter(self.dgs),) * num_choice))
        # slicing dataset
        self.statements = self.statements[start:end]
        self.correct_labels = self.correct_labels[start:end]
        self.qids = self.qids[start:end]
        self.nxgs = self.nxgs[start:end]
        self.dgs = self.dgs[start:end]
        assert len(self.statements) == len(self.correct_labels) == len(self.qids)
        self.n_samples = len(self.statements)

    def __len__(self):
        # Number of questions after slicing.
        return self.n_samples

    def __getitem__(self, index):
        # Per-question statement tensor, label tensor, and the tuple of
        # num_choice DGL graphs.
        return torch.Tensor([self.statements[index]]), torch.Tensor([self.correct_labels[index]]), self.dgs[index]
class data_with_graphs_and_paths(data.Dataset):
    """CSQA dataset yielding statement vectors, DGL schema graphs, AND paths.

    Union of ``data_with_paths`` and ``data_with_graphs``: loads statements
    and sentence vectors, node-link graphs (converted/cached as DGL), and the
    pickled path-finding results. Unlike ``data_with_paths``, paths here are
    padded to ``cut_off`` (not ``max_path_len``) and long paths are always
    dropped.
    NOTE(review): this class duplicates most of the other two dataset
    classes' __init__ bodies — keep the three in sync.
    """

    def __init__(self, statement_json_file, graph_ngx_file, pf_json_file, pretrained_sent_vecs, num_choice=5, start=0, end=None, reload=True, cut_off=3):
        self.qids = []
        self.statements = []
        self.correct_labels = []
        statement_json_data = []
        print("loading statements from %s" % statement_json_file)
        with open(statement_json_file, "r") as fp:
            for line in fp.readlines():
                statement_data = json.loads(line.strip())
                statement_json_data.append(statement_data)
        print("Done!")
        print("loading sent_vecs from %s" % pretrained_sent_vecs)
        self.input_sent_vecs = np.load(pretrained_sent_vecs)
        print("Done!")
        self.qa_text = []
        # Flat index into input_sent_vecs, advanced once per (question, choice).
        statement_id = 0
        # load all statements
        for question_id in range(len(statement_json_data)):
            statements = []
            qa_text_cur = []
            self.qids.append([statement_json_data[question_id]["id"]])
            for k, s in enumerate(statement_json_data[question_id]["statements"]):
                assert len(statement_json_data[question_id]["statements"]) == num_choice  # 5
                qa_text_cur.append((s["statement"], s['label']))
                if s["label"] is True:  # true of false
                    self.correct_labels.append(k)  # the truth id [0,1,2,3,4]
                statements.append(self.input_sent_vecs[statement_id])
                statement_id += 1
            self.statements.append(np.array(statements))
            self.qa_text.append(qa_text_cur)
        self.nxgs = []
        self.dgs = []
        start_time = timeit.default_timer()
        print("loading paths from %s" % graph_ngx_file)
        with open(graph_ngx_file, 'r') as fr:
            for line in fr.readlines():
                line = line.strip()
                self.nxgs.append(line)
        print('\t Done! Time: ', "{0:.2f} sec".format(float(timeit.default_timer() - start_time)))
        save_file = graph_ngx_file + ".dgl.pk"
        if reload and os.path.exists(save_file):
            # Fast path: unpickle previously converted DGL graphs (GC off to
            # speed up unpickling many small objects).
            import gc
            print("loading pickle for the dgl", save_file)
            start_time = timeit.default_timer()
            with open(save_file, 'rb') as handle:
                gc.disable()
                self.dgs = pickle.load(handle)
                gc.enable()
            print("finished loading in %.3f secs" % (float(timeit.default_timer() - start_time)))
        else:
            # Slow path: parse node-link JSON graphs, convert to DGL, cache.
            for index, nxg_str in tqdm(enumerate(self.nxgs), total=len(self.nxgs)):
                nxg = nx.node_link_graph(json.loads(nxg_str))
                dg = dgl.DGLGraph(multigraph=True)
                # dg.from_networkx(nxg, edge_attrs=["rel"])
                dg.from_networkx(nxg)
                cids = [nxg.nodes[n_id]['cid']+1 for n_id in range(len(dg))]  # -1 --> 0 and 0 stands for a palceholder concept
                # rel_types = [nxg.edges[u, v, r]["rel"] + 1 for u, v, r in nxg.edges]  # 0 is used for
                # print(line)
                # node_types = [mapping_type[nxg.nodes[n_id]['type']] for n_id in range(len(dg))]
                # edge_weights = [nxg.edges[u, v].get("weight", 0.0) for u, v in nxg.edges]  # -1 is used for the unk edges
                # dg.edata.update({'weights': torch.FloatTensor(edge_weights)})
                # dg.edata.update({'rel_types': torch.LongTensor(rel_types)})
                dg.ndata.update({'cncpt_ids': torch.LongTensor(cids)})
                self.dgs.append(dg)
            save_file = graph_ngx_file + ".dgl.pk"
            print("saving pickle for the dgl", save_file)
            with open(save_file, 'wb') as handle:
                pickle.dump(self.dgs, handle, protocol=pickle.HIGHEST_PROTOCOL)
        # self.qa_pair_data = list(zip(*(iter(self.qa_pair_data),) * num_choice))
        # Re-group flat per-statement lists into per-question tuples.
        self.nxgs = list(zip(*(iter(self.nxgs),) * num_choice))
        self.dgs = list(zip(*(iter(self.dgs),) * num_choice))
        ### loading graphs done
        # load all qa and paths
        self.qa_pair_data = []
        self.cpt_path_data = []
        self.rel_path_data = []
        start_time = timeit.default_timer()
        print("loading paths from %s" % pf_json_file)
        with open(pf_json_file, 'rb') as handle:
            pf_json_data = pickle.load(handle)
        print('\t Done! Time: ', "{0:.2f} sec".format(float(timeit.default_timer() - start_time)))
        # One path-finding entry per statement (questions * choices).
        assert len(statement_json_data) * num_choice == len(pf_json_data)
        for s in tqdm(pf_json_data, desc="processing paths"):
            paths = []
            rels = []
            qa_pairs = list()
            for qas in s:
                # (q,a) can be identified by the first and last node in every path
                # qc = qas["qc"]
                # ac = qas["ac"]
                pf_res = qas["pf_res"]
                if pf_res is not None:
                    for item in pf_res:
                        p = item["path"]
                        # +1 shifts concept ids so 0 can serve as padding below.
                        q = p[0] + 1
                        a = p[-1] + 1
                        # Unlike data_with_paths, long paths are always dropped.
                        if len(p) > cut_off:
                            continue  # cut off by length of concepts
                        # padding dummy concepts and relations
                        p = [n + 1 for n in p]
                        p.extend([0] * (cut_off - len(p)))  # padding
                        r = item["rel"]
                        for i_ in range(len(r)):
                            for j_ in range(len(r[i_])):
                                if r[i_][j_] - 17 in r[i_]:
                                    r[i_][j_] -= 17  # to delete realtedto* and antonym*
                        r = [n[0] + 1 for n in r]  # only pick the top relation when multiple ones are okay
                        r.extend([0] * (cut_off - len(r)))  # padding
                        assert len(p) == cut_off
                        paths.append(p)
                        rels.append(r)
                        if (q, a) not in qa_pairs:
                            qa_pairs.append((q, a))
            self.qa_pair_data.append(list(qa_pairs))
            self.cpt_path_data.append(paths)
            self.rel_path_data.append(rels)
        self.cpt_path_data = list(zip(*(iter(self.cpt_path_data),) * num_choice))
        self.rel_path_data = list(zip(*(iter(self.rel_path_data),) * num_choice))
        self.qa_pair_data = list(zip(*(iter(self.qa_pair_data),) * num_choice))
        # slicing dataset
        self.statements = self.statements[start:end]
        self.correct_labels = self.correct_labels[start:end]
        self.qids = self.qids[start:end]
        self.nxgs = self.nxgs[start:end]
        self.dgs = self.dgs[start:end]
        assert len(self.statements) == len(self.correct_labels) == len(self.qids)
        self.n_samples = len(self.statements)

    def slice(self, start=0, end=None):
        """Shuffle the dataset in unison, then keep only [start:end].

        NOTE(review): only statements/correct_labels/qids/nxgs/dgs are
        shuffled and sliced here; cpt_path_data, rel_path_data, qa_pair_data
        and qa_text are left untouched, so after calling slice() the indices
        returned by __getitem__ for those fields no longer correspond to the
        shuffled examples — confirm whether that is intended.
        """
        # slicing dataset
        all_lists = list(zip(self.statements, self.correct_labels, self.qids, self.nxgs, self.dgs))
        random.shuffle(all_lists)
        self.statements, self.correct_labels, self.qids, self.nxgs, self.dgs = zip(*all_lists)
        self.statements = self.statements[start:end]
        self.correct_labels = self.correct_labels[start:end]
        self.qids = self.qids[start:end]
        self.nxgs = self.nxgs[start:end]
        self.dgs = self.dgs[start:end]
        assert len(self.statements) == len(self.correct_labels) == len(self.qids)
        self.n_samples = len(self.statements)

    def __len__(self):
        # Number of questions after slicing.
        return self.n_samples

    def __getitem__(self, index):
        # Per-question statement tensor, label tensor, DGL graphs, and the
        # path/pair/text metadata for all num_choice candidates.
        return torch.Tensor([self.statements[index]]), torch.Tensor([self.correct_labels[index]]), self.dgs[index], \
            self.cpt_path_data[index], self.rel_path_data[index], self.qa_pair_data[index], self.qa_text[index]
def collate_csqa_paths(samples):
    """Collate a batch of path-based samples into model-ready columns.

    Each sample is a 6-tuple
    (statement_tensor, label, cpt_paths, rel_paths, qa_pairs, qa_text);
    the qa_text column is consumed but not returned.

    Returns
    -------
    tuple
        (stacked statement tensors, labels as an (N, 1) float tensor,
        cpt_path_data, rel_path_data, qa_pair_data).
    """
    columns = [list(col) for col in zip(*samples)]
    stmts, labels, cpt_paths, rel_paths, qa_pairs, _qa_text = columns
    stacked = torch.stack(stmts)
    label_col = torch.Tensor([[lbl] for lbl in labels])
    return stacked, label_col, cpt_paths, rel_paths, qa_pairs
def collate_csqa_graphs(samples):
    """Collate a batch of graph-based samples into model-ready columns.

    Each sample is a 3-tuple (statement_tensor, label, graph_group), where
    graph_group is an iterable of DGL graphs (one per answer choice). All
    groups are flattened and batched into a single DGL graph.

    Returns
    -------
    tuple
        (stacked statement tensors, labels as an (N, 1) float tensor,
        batched DGL graph).
    """
    stmts, labels, graph_groups = map(list, zip(*samples))
    # Flatten the per-sample graph groups into one list for dgl.batch.
    flat_graphs = [g for group in graph_groups for g in group]
    batched = dgl.batch(flat_graphs)
    stacked = torch.stack(stmts)
    label_col = torch.Tensor([[lbl] for lbl in labels])
    return stacked, label_col, batched
def collate_csqa_graphs_and_paths(samples):
    """Collate samples carrying graphs AND paths into model-ready columns.

    Each sample is a 7-tuple (statement_tensor, label, graph_group,
    cpt_paths, rel_paths, qa_pairs, qa_text); qa_text is consumed but not
    returned. Besides batching the flattened graphs, this builds one dict
    per graph mapping each node's 'cncpt_ids' value to its node index in the
    batched graph (via a running offset).

    Returns
    -------
    tuple
        (stacked statement tensors, labels as an (N, 1) float tensor,
        batched DGL graph, cpt_path_data, rel_path_data, qa_pair_data,
        concept-id -> batched-node-index dicts).
    """
    stmts, labels, graph_groups, cpt_paths, rel_paths, qa_pairs, _qa_text = map(list, zip(*samples))
    flat_graphs = [g for group in graph_groups for g in group]
    mapping_dicts = []
    offset = 0  # running node offset into the batched graph
    for g in flat_graphs:
        local_map = {int(cid): offset + pos for pos, cid in enumerate(g.ndata['cncpt_ids'])}
        offset += len(g.nodes())
        mapping_dicts.append(local_map)
    batched = dgl.batch(flat_graphs)
    stacked = torch.stack(stmts)
    label_col = torch.Tensor([[lbl] for lbl in labels])
    return stacked, label_col, batched, cpt_paths, rel_paths, qa_pairs, mapping_dicts
| 40.363821
| 161
| 0.574047
| 2,635
| 19,859
| 4.102087
| 0.088805
| 0.025164
| 0.036174
| 0.022019
| 0.9283
| 0.911648
| 0.905264
| 0.902674
| 0.902674
| 0.89555
| 0
| 0.006592
| 0.3049
| 19,859
| 491
| 162
| 40.446029
| 0.776442
| 0.12342
| 0
| 0.822289
| 0
| 0
| 0.04852
| 0
| 0
| 0
| 0
| 0
| 0.03012
| 1
| 0.042169
| false
| 0
| 0.039157
| 0.018072
| 0.120482
| 0.084337
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
053c437a379aab6b1215b9c8966bbf5c5d98df06
| 38
|
py
|
Python
|
src/lib/ntpath.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/ntpath.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/ntpath.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Skulpt stub: the real ntpath module is not available in this runtime, so
# importing it reports a standard "unsupported module" failure.
import _sk_fail

_sk_fail._("ntpath")
| 19
| 37
| 0.763158
| 6
| 38
| 4
| 0.666667
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 38
| 1
| 38
| 38
| 0.685714
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
055f601ec28e5f836d53b13f1a5ee3eb33c041a6
| 3,826
|
py
|
Python
|
py3/classes/sysflow/grammar/sfqlListener.py
|
sthagen/sysflow-telemetry-sf-apis
|
d95c98b34e8896dc1774fbe283ef7db6b8a61bce
|
[
"Apache-2.0"
] | 12
|
2019-11-07T15:03:41.000Z
|
2021-01-19T18:50:19.000Z
|
py3/classes/sysflow/grammar/sfqlListener.py
|
sthagen/sysflow-telemetry-sf-apis
|
d95c98b34e8896dc1774fbe283ef7db6b8a61bce
|
[
"Apache-2.0"
] | 3
|
2020-01-22T17:49:59.000Z
|
2022-02-07T22:24:34.000Z
|
py3/classes/sysflow/grammar/sfqlListener.py
|
sthagen/sysflow-telemetry-sf-apis
|
d95c98b34e8896dc1774fbe283ef7db6b8a61bce
|
[
"Apache-2.0"
] | 7
|
2020-01-11T14:27:28.000Z
|
2021-06-16T14:04:58.000Z
|
# Generated from sfql.g4 by ANTLR 4.9.2
from antlr4 import *
if __name__ is not None and "." in __name__:
from .sfqlParser import sfqlParser
else:
from sfqlParser import sfqlParser
# This class defines a complete listener for a parse tree produced by sfqlParser.
class sfqlListener(ParseTreeListener):
# Enter a parse tree produced by sfqlParser#definitions.
def enterDefinitions(self, ctx:sfqlParser.DefinitionsContext):
pass
# Exit a parse tree produced by sfqlParser#definitions.
def exitDefinitions(self, ctx:sfqlParser.DefinitionsContext):
pass
# Enter a parse tree produced by sfqlParser#f_query.
def enterF_query(self, ctx:sfqlParser.F_queryContext):
pass
# Exit a parse tree produced by sfqlParser#f_query.
def exitF_query(self, ctx:sfqlParser.F_queryContext):
pass
# Enter a parse tree produced by sfqlParser#f_macro.
def enterF_macro(self, ctx:sfqlParser.F_macroContext):
pass
# Exit a parse tree produced by sfqlParser#f_macro.
def exitF_macro(self, ctx:sfqlParser.F_macroContext):
pass
# Enter a parse tree produced by sfqlParser#f_list.
def enterF_list(self, ctx:sfqlParser.F_listContext):
pass
# Exit a parse tree produced by sfqlParser#f_list.
def exitF_list(self, ctx:sfqlParser.F_listContext):
pass
# Enter a parse tree produced by sfqlParser#expression.
def enterExpression(self, ctx:sfqlParser.ExpressionContext):
pass
# Exit a parse tree produced by sfqlParser#expression.
def exitExpression(self, ctx:sfqlParser.ExpressionContext):
pass
# Enter a parse tree produced by sfqlParser#or_expression.
def enterOr_expression(self, ctx:sfqlParser.Or_expressionContext):
pass
# Exit a parse tree produced by sfqlParser#or_expression.
def exitOr_expression(self, ctx:sfqlParser.Or_expressionContext):
pass
# Enter a parse tree produced by sfqlParser#and_expression.
def enterAnd_expression(self, ctx:sfqlParser.And_expressionContext):
pass
# Exit a parse tree produced by sfqlParser#and_expression.
def exitAnd_expression(self, ctx:sfqlParser.And_expressionContext):
pass
# Enter a parse tree produced by sfqlParser#term.
def enterTerm(self, ctx:sfqlParser.TermContext):
pass
# Exit a parse tree produced by sfqlParser#term.
def exitTerm(self, ctx:sfqlParser.TermContext):
pass
# Enter a parse tree produced by sfqlParser#items.
def enterItems(self, ctx:sfqlParser.ItemsContext):
pass
# Exit a parse tree produced by sfqlParser#items.
def exitItems(self, ctx:sfqlParser.ItemsContext):
pass
# Enter a parse tree produced by sfqlParser#var.
    def enterVar(self, ctx:sfqlParser.VarContext):
        """Enter callback for the `var` rule; default no-op."""
        pass
# Exit a parse tree produced by sfqlParser#var.
    def exitVar(self, ctx:sfqlParser.VarContext):
        """Exit callback for the `var` rule; default no-op."""
        pass
# Enter a parse tree produced by sfqlParser#atom.
    def enterAtom(self, ctx:sfqlParser.AtomContext):
        """Enter callback for the `atom` rule; default no-op."""
        pass
# Exit a parse tree produced by sfqlParser#atom.
    def exitAtom(self, ctx:sfqlParser.AtomContext):
        """Exit callback for the `atom` rule; default no-op."""
        pass
# Enter a parse tree produced by sfqlParser#binary_operator.
    def enterBinary_operator(self, ctx:sfqlParser.Binary_operatorContext):
        """Enter callback for the `binary_operator` rule; default no-op."""
        pass
# Exit a parse tree produced by sfqlParser#binary_operator.
    def exitBinary_operator(self, ctx:sfqlParser.Binary_operatorContext):
        """Exit callback for the `binary_operator` rule; default no-op."""
        pass
# Enter a parse tree produced by sfqlParser#unary_operator.
    def enterUnary_operator(self, ctx:sfqlParser.Unary_operatorContext):
        """Enter callback for the `unary_operator` rule; default no-op."""
        pass
# Exit a parse tree produced by sfqlParser#unary_operator.
    def exitUnary_operator(self, ctx:sfqlParser.Unary_operatorContext):
        """Exit callback for the `unary_operator` rule; default no-op."""
        pass
del sfqlParser  # name only needed while the annotations above were evaluated; remove it from the module namespace
| 29.658915
| 81
| 0.719289
| 479
| 3,826
| 5.640919
| 0.177453
| 0.059956
| 0.099926
| 0.179867
| 0.835307
| 0.713916
| 0.702813
| 0.523316
| 0.336788
| 0
| 0
| 0.001666
| 0.215369
| 3,826
| 129
| 82
| 29.658915
| 0.898401
| 0.378463
| 0
| 0.440678
| 1
| 0
| 0.00043
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.440678
| false
| 0.440678
| 0.050847
| 0
| 0.508475
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
552e34a2c2861e6de3c0891410aaea72e7dc1476
| 152
|
py
|
Python
|
src/templates/template-vision-segmentation/models.py
|
rwiteshbera/code-generator
|
2cbc24b3908bc0b84088c4c272b8f5cb848410fd
|
[
"BSD-3-Clause"
] | 28
|
2021-04-20T13:33:12.000Z
|
2022-01-01T11:53:52.000Z
|
src/templates/template-vision-segmentation/models.py
|
rwiteshbera/code-generator
|
2cbc24b3908bc0b84088c4c272b8f5cb848410fd
|
[
"BSD-3-Clause"
] | 112
|
2021-04-22T02:07:43.000Z
|
2022-03-28T01:28:05.000Z
|
src/templates/template-vision-segmentation/models.py
|
rwiteshbera/code-generator
|
2cbc24b3908bc0b84088c4c272b8f5cb848410fd
|
[
"BSD-3-Clause"
] | 21
|
2021-04-21T07:48:29.000Z
|
2022-02-04T19:40:54.000Z
|
from torchvision.models.segmentation import deeplabv3_resnet50
def setup_model(config):
    """Build a DeepLabV3 segmentation model with a ResNet-50 backbone.

    config: object exposing a `num_classes` attribute used to size the
    segmentation head.
    """
    num_classes = config.num_classes
    return deeplabv3_resnet50(num_classes=num_classes)
| 25.333333
| 62
| 0.848684
| 19
| 152
| 6.526316
| 0.736842
| 0.274194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0.092105
| 152
| 5
| 63
| 30.4
| 0.855072
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
55a3a441607dc69ce06a1f224900284a4733f8be
| 6,189
|
py
|
Python
|
indicators/harmonics.py
|
zqngetsu96/PyForex
|
09783c7c9bc4bf0cfefea1ebca8c0328a58b176c
|
[
"MIT"
] | null | null | null |
indicators/harmonics.py
|
zqngetsu96/PyForex
|
09783c7c9bc4bf0cfefea1ebca8c0328a58b176c
|
[
"MIT"
] | null | null | null |
indicators/harmonics.py
|
zqngetsu96/PyForex
|
09783c7c9bc4bf0cfefea1ebca8c0328a58b176c
|
[
"MIT"
] | 1
|
2021-10-08T10:41:52.000Z
|
2021-10-08T10:41:52.000Z
|
import pandas as pd
import numpy as np
from scipy.signal import argrelextrema
def peak_detect(price):
    """Locate the most recent swing points of a price series.

    price: 1-D NumPy array of prices.

    Returns (current_idx, current_pat, start, end): the indexes of up to the
    last ten local extrema (the final bar always included), the prices at
    those indexes, and the smallest/largest of those indexes.
    """
    # Strict local maxima/minima, each compared against 3 neighbours per side.
    highs = list(argrelextrema(price, np.greater, order=3)[0])
    lows = list(argrelextrema(price, np.less, order=3)[0])
    # Always append the most recent bar so a still-forming leg is considered.
    extrema = sorted(highs + lows + [len(price) - 1])
    current_idx = extrema[-10:]
    current_pat = price[current_idx]
    return current_idx, current_pat, min(current_idx), max(current_idx)
def is_Gartley(moves, err_allowed):
    """Test whether four consecutive price moves form a Gartley pattern.

    moves: sequence of four signed moves [XA, AB, BC, CD].
    err_allowed: tolerance widened symmetrically around each ratio bound.

    Returns 1 for a bullish Gartley, -1 for a bearish one, NaN otherwise.
    """
    XA = moves[0]
    AB = moves[1]
    BC = moves[2]
    CD = moves[3]
    # Gartley retracement/extension windows, as ratios of the prior leg.
    AB_range = np.array([0.618 - err_allowed, 0.618 + err_allowed]) * abs(XA)
    BC_range = np.array([0.382 - err_allowed, 0.886 + err_allowed]) * abs(AB)
    CD_range = np.array([1.27 - err_allowed, 1.618 + err_allowed]) * abs(BC)
    # Leg directions must strictly alternate: down-up-down after an up XA
    # (bullish) or the mirror image (bearish).
    bullish = XA > 0 and AB < 0 and BC > 0 and CD < 0
    bearish = XA < 0 and AB > 0 and BC < 0 and CD > 0
    if (bullish or bearish) \
            and AB_range[0] < abs(AB) < AB_range[1] \
            and BC_range[0] < abs(BC) < BC_range[1] \
            and CD_range[0] < abs(CD) < CD_range[1]:
        return 1 if bullish else -1
    # np.nan marks "no pattern" (np.NaN alias was removed in NumPy 2.0).
    return np.nan
def is_Butterfly(moves, err_allowed):
    """Test whether four consecutive price moves form a Butterfly pattern.

    moves: sequence of four signed moves [XA, AB, BC, CD].
    err_allowed: tolerance widened symmetrically around each ratio bound.

    Returns 1 for a bullish Butterfly, -1 for a bearish one, NaN otherwise.
    """
    XA = moves[0]
    AB = moves[1]
    BC = moves[2]
    CD = moves[3]
    # Butterfly retracement/extension windows, as ratios of the prior leg.
    AB_range = np.array([0.786 - err_allowed, 0.786 + err_allowed]) * abs(XA)
    BC_range = np.array([0.382 - err_allowed, 0.886 + err_allowed]) * abs(AB)
    CD_range = np.array([1.618 - err_allowed, 2.618 + err_allowed]) * abs(BC)
    # Leg directions must strictly alternate (bullish) or mirror (bearish).
    bullish = XA > 0 and AB < 0 and BC > 0 and CD < 0
    bearish = XA < 0 and AB > 0 and BC < 0 and CD > 0
    if (bullish or bearish) \
            and AB_range[0] < abs(AB) < AB_range[1] \
            and BC_range[0] < abs(BC) < BC_range[1] \
            and CD_range[0] < abs(CD) < CD_range[1]:
        return 1 if bullish else -1
    # np.nan marks "no pattern" (np.NaN alias was removed in NumPy 2.0).
    return np.nan
def is_Bat(moves, err_allowed):
    """Test whether four consecutive price moves form a Bat pattern.

    moves: sequence of four signed moves [XA, AB, BC, CD].
    err_allowed: tolerance widened symmetrically around each ratio bound.

    Returns 1 for a bullish Bat, -1 for a bearish one, NaN otherwise.
    """
    XA = moves[0]
    AB = moves[1]
    BC = moves[2]
    CD = moves[3]
    # Bat retracement/extension windows, as ratios of the prior leg.
    AB_range = np.array([0.382 - err_allowed, 0.5 + err_allowed]) * abs(XA)
    BC_range = np.array([0.382 - err_allowed, 0.886 + err_allowed]) * abs(AB)
    CD_range = np.array([1.618 - err_allowed, 2.618 + err_allowed]) * abs(BC)
    # Leg directions must strictly alternate (bullish) or mirror (bearish).
    bullish = XA > 0 and AB < 0 and BC > 0 and CD < 0
    bearish = XA < 0 and AB > 0 and BC < 0 and CD > 0
    if (bullish or bearish) \
            and AB_range[0] < abs(AB) < AB_range[1] \
            and BC_range[0] < abs(BC) < BC_range[1] \
            and CD_range[0] < abs(CD) < CD_range[1]:
        return 1 if bullish else -1
    # np.nan marks "no pattern" (np.NaN alias was removed in NumPy 2.0).
    return np.nan
def is_Crab(moves, err_allowed):
    """Test whether four consecutive price moves form a Crab pattern.

    moves: sequence of four signed moves [XA, AB, BC, CD].
    err_allowed: tolerance widened symmetrically around each ratio bound.

    Returns 1 for a bullish Crab, -1 for a bearish one, NaN otherwise.
    """
    XA = moves[0]
    AB = moves[1]
    BC = moves[2]
    CD = moves[3]
    # Crab retracement/extension windows, as ratios of the prior leg.
    AB_range = np.array([0.382 - err_allowed, 0.618 + err_allowed]) * abs(XA)
    BC_range = np.array([0.382 - err_allowed, 0.886 + err_allowed]) * abs(AB)
    CD_range = np.array([2.24 - err_allowed, 3.618 + err_allowed]) * abs(BC)
    # Leg directions must strictly alternate (bullish) or mirror (bearish).
    bullish = XA > 0 and AB < 0 and BC > 0 and CD < 0
    bearish = XA < 0 and AB > 0 and BC < 0 and CD > 0
    if (bullish or bearish) \
            and AB_range[0] < abs(AB) < AB_range[1] \
            and BC_range[0] < abs(BC) < BC_range[1] \
            and CD_range[0] < abs(CD) < CD_range[1]:
        return 1 if bullish else -1
    # np.nan marks "no pattern" (np.NaN alias was removed in NumPy 2.0).
    return np.nan
| 40.986755
| 127
| 0.561965
| 1,041
| 6,189
| 3.198847
| 0.073967
| 0.156156
| 0.086486
| 0.062462
| 0.880781
| 0.864565
| 0.864565
| 0.864565
| 0.864565
| 0.864565
| 0
| 0.073599
| 0.282113
| 6,189
| 151
| 128
| 40.986755
| 0.675895
| 0.297463
| 0
| 0.762887
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051546
| false
| 0
| 0.030928
| 0
| 0.298969
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.